-
-
-name:xetex like specs
-name@virtual font spec
-name*context specification
-
---ldx]]--
-
--- currently fonts are scaled while constructing the font, so we
--- have to do the scaling of commands in the vf at that point, using e.g.
--- "local scale = g.parameters.factor or 1"; after all, we need to
--- work with copies anyway and scaling needs to be done at some point;
--- however, when virtual tricks are used as a feature (which makes more
--- sense) we scale the commands in fonts.constructors.scale (and set the
--- factor there)
-
-local loadfont = definers.loadfont
-
-function definers.loadfont(specification,size,id) -- overloads the one in font-def
- local variants = definers.methods.variants
- local virtualfeatures = specification.features.virtual
- if virtualfeatures and virtualfeatures.preset then
- local variant = variants[virtualfeatures.preset]
- if variant then
- return variant(specification,size,id)
- end
- else
- local tfmdata = loadfont(specification,size,id)
- -- constructors.checkvirtualid(tfmdata,id)
- return tfmdata
- end
-end
-
-local function predefined(specification)
- local variants = definers.methods.variants
- local detail = specification.detail
- if detail ~= "" and variants[detail] then
- specification.features.virtual = { preset = detail }
- end
- return specification
-end
-
-definers.registersplit("@", predefined,"virtual")
-
-local normalize_features = otffeatures.normalize -- should be general
-
-local function definecontext(name,t) -- can be shared
- local number = setups[name] and setups[name].number or 0 -- hm, numbers[name]
- if number == 0 then
- number = #numbers + 1
- numbers[number] = name
- end
- t.number = number
- setups[name] = t
- return number, t
-end
-
-local function presetcontext(name,parent,features) -- will go to con and shared
- if features == "" and find(parent,"=") then
- features = parent
- parent = ""
- end
- if not features or features == "" then
- features = { }
- elseif type(features) == "string" then
- features = normalize_features(settings_to_hash(features))
- else
- features = normalize_features(features)
- end
- -- todo: synonyms, and not otf bound
- if parent ~= "" then
- for p in gmatch(parent,"[^, ]+") do
- local s = setups[p]
- if s then
- for k,v in next, s do
- if features[k] == nil then
- features[k] = v
- end
- end
- else
- -- just ignore an undefined one .. i.e. we can refer to not yet defined
- end
- end
- end
- -- these are auto set so in order to prevent redundant definitions
- -- we need to preset them (we hash the features and adding a default
- -- setting during initialization may result in a different hash)
- --
- -- for k,v in next, triggers do
- -- if features[v] == nil then -- not false !
- -- local vv = default_features[v]
- -- if vv then features[v] = vv end
- -- end
- -- end
- --
- for feature,value in next, features do
- if value == nil then -- not false !
- local default = default_features[feature]
- if default ~= nil then
- features[feature] = default
- end
- end
- end
- -- sparse 'm so that we get a better hash and less test (experimental
- -- optimization)
- local t = { } -- can we avoid t ?
- for k,v in next, features do
--- if v then t[k] = v end
- t[k] = v
- end
- -- needed for dynamic features
- -- maybe number should always be renewed as we can redefine features
- local number = setups[name] and setups[name].number or 0 -- hm, numbers[name]
- if number == 0 then
- number = #numbers + 1
- numbers[number] = name
- end
- t.number = number
- setups[name] = t
- return number, t
-end
-
-local function contextnumber(name) -- will be replaced
- local t = setups[name]
- if not t then
- return 0
- elseif t.auto then
- local lng = tonumber(tex.language)
- local tag = name .. ":" .. lng
- local s = setups[tag]
- if s then
- return s.number or 0
- else
- local script, language = languages.association(lng)
- if t.script ~= script or t.language ~= language then
- local s = fastcopy(t)
- local n = #numbers + 1
- setups[tag] = s
- numbers[n] = tag
- s.number = n
- s.script = script
- s.language = language
- return n
- else
- setups[tag] = t
- return t.number or 0
- end
- end
- else
- return t.number or 0
- end
-end
-
-local function mergecontext(currentnumber,extraname,option) -- number string number (used in scrp-ini)
- local extra = setups[extraname]
- if extra then
- local current = setups[numbers[currentnumber]]
- local mergedfeatures, mergedname = { }, nil
- if option < 0 then
- if current then
- for k, v in next, current do
- if not extra[k] then
- mergedfeatures[k] = v
- end
- end
- end
- mergedname = currentnumber .. "-" .. extraname
- else
- if current then
- for k, v in next, current do
- mergedfeatures[k] = v
- end
- end
- for k, v in next, extra do
- mergedfeatures[k] = v
- end
- mergedname = currentnumber .. "+" .. extraname
- end
- local number = #numbers + 1
- mergedfeatures.number = number
- numbers[number] = mergedname
- merged[number] = option
- setups[mergedname] = mergedfeatures
- return number -- contextnumber(mergedname)
- else
- return currentnumber
- end
-end
-
-local extrasets = { }
-
-setmetatableindex(extrasets,function(t,k)
- local v = mergehashes(setups,k)
- t[k] = v
- return v
-end)
-
-local function mergecontextfeatures(currentname,extraname,how,mergedname) -- string string
- local extra = setups[extraname] or extrasets[extraname]
- if extra then
- local current = setups[currentname]
- local mergedfeatures = { }
- if how == "+" then
- if current then
- for k, v in next, current do
- mergedfeatures[k] = v
- end
- end
- for k, v in next, extra do
- mergedfeatures[k] = v
- end
- elseif how == "-" then
- if current then
- for k, v in next, current do
- mergedfeatures[k] = v
- end
- end
- for k, v in next, extra do
- -- only boolean features
- if v == true then
- mergedfeatures[k] = false
- end
- end
- else -- =
- for k, v in next, extra do
- mergedfeatures[k] = v
- end
- end
- local number = #numbers + 1
- mergedfeatures.number = number
- numbers[number] = mergedname
- merged[number] = how == "=" and 1 or 2 -- 1=replace, 2=combine
- setups[mergedname] = mergedfeatures
- return number
- else
- return numbers[currentname] or 0
- end
-end
-
-local function registercontext(fontnumber,extraname,option)
- local extra = setups[extraname]
- if extra then
- local mergedfeatures, mergedname = { }, nil
- if option < 0 then
- mergedname = fontnumber .. "-" .. extraname
- else
- mergedname = fontnumber .. "+" .. extraname
- end
- for k, v in next, extra do
- mergedfeatures[k] = v
- end
- local number = #numbers + 1
- mergedfeatures.number = number
- numbers[number] = mergedname
- merged[number] = option
- setups[mergedname] = mergedfeatures
- return number -- contextnumber(mergedname)
- else
- return 0
- end
-end
-
-local function registercontextfeature(mergedname,extraname,how)
- local extra = setups[extraname]
- if extra then
- local mergedfeatures = { }
- for k, v in next, extra do
- mergedfeatures[k] = v
- end
- local number = #numbers + 1
- mergedfeatures.number = number
- numbers[number] = mergedname
- merged[number] = how == "=" and 1 or 2 -- 1=replace, 2=combine
- setups[mergedname] = mergedfeatures
- return number -- contextnumber(mergedname)
- else
- return 0
- end
-end
-
-specifiers.presetcontext = presetcontext
-specifiers.contextnumber = contextnumber
-specifiers.mergecontext = mergecontext
-specifiers.registercontext = registercontext
-specifiers.definecontext = definecontext
-
--- we extend the hasher:
-
-constructors.hashmethods.virtual = function(list)
- local s = { }
- local n = 0
- for k, v in next, list do
- n = n + 1
- s[n] = k -- no checking on k
- end
- if n > 0 then
- sort(s)
- for i=1,n do
- local k = s[i]
- s[i] = k .. '=' .. tostring(list[k])
- end
- return concat(s,"+")
- end
-end
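-
--- A hedged illustration (added; the feature list is made up): the hasher
--- produces a sorted, "+"-separated key=value string, e.g.
---
--- local h = constructors.hashmethods.virtual { style = "sans", preset = "demo" }
--- -- h == "preset=demo+style=sans"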
-
--- end of redefine
-
--- local withcache = { } -- concat might be less efficient than nested tables
---
--- local function withset(name,what)
--- local zero = texattribute[0]
--- local hash = zero .. "+" .. name .. "*" .. what
--- local done = withcache[hash]
--- if not done then
--- done = mergecontext(zero,name,what)
--- withcache[hash] = done
--- end
--- texattribute[0] = done
--- end
---
--- local function withfnt(name,what,font)
--- local font = font or currentfont()
--- local hash = font .. "*" .. name .. "*" .. what
--- local done = withcache[hash]
--- if not done then
--- done = registercontext(font,name,what)
--- withcache[hash] = done
--- end
--- texattribute[0] = done
--- end
-
-function specifiers.showcontext(name)
- return setups[name] or setups[numbers[name]] or setups[numbers[tonumber(name)]] or { }
-end
-
--- we need a copy as we will add (fontclass) goodies to the features and
--- that is bad for a shared table
-
--- local function splitcontext(features) -- presetcontext creates dummy here
--- return fastcopy(setups[features] or (presetcontext(features,"","") and setups[features]))
--- end
-
-local function splitcontext(features) -- presetcontext creates dummy here
- local sf = setups[features]
- if not sf then
- local n -- number
- if find(features,",") then
- -- let's assume a combination which is not yet defined but just specified (as in math)
- n, sf = presetcontext(features,features,"")
- else
- -- we've run into an unknown feature and/or a direct spec so we create a dummy
- n, sf = presetcontext(features,"","")
- end
- end
- return fastcopy(sf)
-end
-
--- local splitter = lpeg.splitat("=")
---
--- local function splitcontext(features)
--- local setup = setups[features]
--- if setup then
--- return setup
--- elseif find(features,",") then
--- -- This is not that efficient but handy anyway for quick and dirty tests
--- -- beware, due to the way of caching setups you can get the wrong results
--- -- when components change. A safeguard is to nil the cache.
--- local merge = nil
--- for feature in gmatch(features,"[^, ]+") do
--- if find(feature,"=") then
--- local k, v = lpegmatch(splitter,feature)
--- if k and v then
--- if not merge then
--- merge = { k = v }
--- else
--- merge[k] = v
--- end
--- end
--- else
--- local s = setups[feature]
--- if not s then
--- -- skip
--- elseif not merge then
--- merge = s
--- else
--- for k, v in next, s do
--- merge[k] = v
--- end
--- end
--- end
--- end
--- setup = merge and presetcontext(features,"",merge) and setups[features]
--- -- actually we have to nil setups[features] in order to permit redefinitions
--- setups[features] = nil
--- end
--- return setup or (presetcontext(features,"","") and setups[features]) -- creates dummy
--- end
-
-specifiers.splitcontext = splitcontext
-
-function specifiers.contexttostring(name,kind,separator,yes,no,strict,omit) -- not used
- return hash_to_string(mergedtable(handlers[kind].features.defaults or {},setups[name] or {}),separator,yes,no,strict,omit)
-end
-
-local function starred(features) -- no longer fallbacks here
- local detail = features.detail
- if detail and detail ~= "" then
- features.features.normal = splitcontext(detail)
- else
- features.features.normal = { }
- end
- return features
-end
-
-definers.registersplit('*',starred,"featureset")
-
--- sort of xetex mode, but without [] and / as we have file: and name: etc
-
-local space = P(" ")
-local separator = S(";,")
-local equal = P("=")
-local spaces = space^0
-local sometext = C((1-equal-space-separator)^1)
-local truevalue = P("+") * spaces * sometext * Cc(true) -- "yes"
-local falsevalue = P("-") * spaces * sometext * Cc(false) -- "no"
-local keyvalue = sometext * spaces * equal * spaces * sometext
-local somevalue = sometext * spaces * Cc(true) -- "yes"
-local pattern = Cf(Ct("") * (space + separator + Cg(keyvalue + falsevalue + truevalue + somevalue))^0, rawset)
-
-local function colonized(specification)
- specification.features.normal = normalize_features(lpegmatch(pattern,specification.detail))
- return specification
-end
-
-definers.registersplit(":",colonized,"direct")
-
--- define (two steps)
-
-local space = P(" ")
-local spaces = space^0
-local leftparent = (P"(")
-local rightparent = (P")")
-local value = C((leftparent * (1-rightparent)^0 * rightparent + (1-space))^1)
-local dimension = C((space/"" + P(1))^1)
-local rest = C(P(1)^0)
-local scale_none = Cc(0)
-local scale_at = P("at") * Cc(1) * spaces * dimension -- value
-local scale_sa = P("sa") * Cc(2) * spaces * dimension -- value
-local scale_mo = P("mo") * Cc(3) * spaces * dimension -- value
-local scale_scaled = P("scaled") * Cc(4) * spaces * dimension -- value
-
-local sizepattern = spaces * (scale_at + scale_sa + scale_mo + scale_scaled + scale_none)
-local splitpattern = spaces * value * spaces * rest
-
-function helpers.splitfontpattern(str)
- local name, size = lpegmatch(splitpattern,str)
- local kind, size = lpegmatch(sizepattern,size)
- return name, kind, size
-end
-
-function helpers.fontpatternhassize(str)
- local name, size = lpegmatch(splitpattern,str)
- local kind, size = lpegmatch(sizepattern,size)
- return size or false
-end
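-
--- A hedged example (added; the font name is made up):
---
--- local name, kind, size = helpers.splitfontpattern("MyFace at 12pt")
--- -- name == "MyFace", kind == 1 (at), size == "12pt"
--- -- helpers.fontpatternhassize("MyFace at 12pt") returns "12pt", and false
--- -- when no size clause is present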
-
-local specification -- still needed as local ?
-
-local getspecification = definers.getspecification
-
--- we can make helper macros which save parsing (but normally not
--- that many calls, e.g. in mk a couple of hundred and in metafun 3500)
-
-local setdefaultfontname = context.fntsetdefname
-local setsomefontname = context.fntsetsomename
-local setemptyfontsize = context.fntsetnopsize
-local setsomefontsize = context.fntsetsomesize
-local letvaluerelax = context.letvaluerelax
-
-function commands.definefont_one(str)
- statistics.starttiming(fonts)
- if trace_defining then
- report_defining("memory usage before: %s",statistics.memused())
- report_defining("start stage one: %s",str)
- end
- local fullname, size = lpegmatch(splitpattern,str)
- local lookup, name, sub, method, detail = getspecification(fullname)
- if not name then
- report_defining("strange definition %a",str)
- setdefaultfontname()
- elseif name == "unknown" then
- setdefaultfontname()
- else
- setsomefontname(name)
- end
- -- we can also use a count for the size
- if size and size ~= "" then
- local mode, size = lpegmatch(sizepattern,size)
- if size and mode then
- texcount.scaledfontmode = mode
- setsomefontsize(size)
- else
- texcount.scaledfontmode = 0
- setemptyfontsize()
- end
- elseif true then
- -- so we don't need to check in tex
- texcount.scaledfontmode = 2
- setemptyfontsize()
- else
- texcount.scaledfontmode = 0
- setemptyfontsize()
- end
- specification = definers.makespecification(str,lookup,name,sub,method,detail,size)
- if trace_defining then
- report_defining("stop stage one")
- end
-end
-
-local n = 0
-
--- we can also move rscale to here (more consistent)
--- the argument list will become a table
-
-local function nice_cs(cs)
- return (gsub(cs,".->", ""))
-end
-
-function commands.definefont_two(global,cs,str,size,inheritancemode,classfeatures,fontfeatures,classfallbacks,fontfallbacks,
- mathsize,textsize,relativeid,classgoodies,goodies,classdesignsize,fontdesignsize)
- if trace_defining then
- report_defining("start stage two: %s (size %s)",str,size)
- end
- -- name is now resolved and size is scaled cf sa/mo
- local lookup, name, sub, method, detail = getspecification(str or "")
- -- new (todo: inheritancemode)
- local designsize = fontdesignsize ~= "" and fontdesignsize or classdesignsize or ""
- local designname = designsizefilename(name,designsize,size)
- if designname and designname ~= "" then
- if trace_defining or trace_designsize then
- report_defining("remapping name %a, specification %a, size %a, designsize %a",name,designsize,size,designname)
- end
- -- we don't catch detail here
- local o_lookup, o_name, o_sub, o_method, o_detail = getspecification(designname)
- if o_lookup and o_lookup ~= "" then lookup = o_lookup end
- if o_method and o_method ~= "" then method = o_method end
- if o_detail and o_detail ~= "" then detail = o_detail end
- name = o_name
- sub = o_sub
- end
- -- so far
- -- some settings can have been overloaded
- if lookup and lookup ~= "" then
- specification.lookup = lookup
- end
- if relativeid and relativeid ~= "" then -- experimental hook
- local id = tonumber(relativeid) or 0
- specification.relativeid = id > 0 and id
- end
- specification.name = name
- specification.size = size
- specification.sub = (sub and sub ~= "" and sub) or specification.sub
- specification.mathsize = mathsize
- specification.textsize = textsize
- specification.goodies = goodies
- specification.cs = cs
- specification.global = global
- if detail and detail ~= "" then
- specification.method = method or "*"
- specification.detail = detail
- elseif specification.detail and specification.detail ~= "" then
- -- already set
- elseif inheritancemode == 0 then
- -- nothing
- elseif inheritancemode == 1 then
- -- fontonly
- if fontfeatures and fontfeatures ~= "" then
- specification.method = "*"
- specification.detail = fontfeatures
- end
- if fontfallbacks and fontfallbacks ~= "" then
- specification.fallbacks = fontfallbacks
- end
- elseif inheritancemode == 2 then
- -- classonly
- if classfeatures and classfeatures ~= "" then
- specification.method = "*"
- specification.detail = classfeatures
- end
- if classfallbacks and classfallbacks ~= "" then
- specification.fallbacks = classfallbacks
- end
- elseif inheritancemode == 3 then
- -- fontfirst
- if fontfeatures and fontfeatures ~= "" then
- specification.method = "*"
- specification.detail = fontfeatures
- elseif classfeatures and classfeatures ~= "" then
- specification.method = "*"
- specification.detail = classfeatures
- end
- if fontfallbacks and fontfallbacks ~= "" then
- specification.fallbacks = fontfallbacks
- elseif classfallbacks and classfallbacks ~= "" then
- specification.fallbacks = classfallbacks
- end
- elseif inheritancemode == 4 then
- -- classfirst
- if classfeatures and classfeatures ~= "" then
- specification.method = "*"
- specification.detail = classfeatures
- elseif fontfeatures and fontfeatures ~= "" then
- specification.method = "*"
- specification.detail = fontfeatures
- end
- if classfallbacks and classfallbacks ~= "" then
- specification.fallbacks = classfallbacks
- elseif fontfallbacks and fontfallbacks ~= "" then
- specification.fallbacks = fontfallbacks
- end
- end
- local tfmdata = definers.read(specification,size) -- id not yet known (size in spec?)
- --
- local lastfontid = 0
- if not tfmdata then
- report_defining("unable to define %a as %a",name,nice_cs(cs))
- lastfontid = -1
- letvaluerelax(cs) -- otherwise the current definition takes the previous one
- elseif type(tfmdata) == "number" then
- if trace_defining then
- report_defining("reusing %s, id %a, target %a, features %a / %a, fallbacks %a / %a, goodies %a / %a, designsize %a / %a",
- name,tfmdata,nice_cs(cs),classfeatures,fontfeatures,classfallbacks,fontfallbacks,classgoodies,goodies,classdesignsize,fontdesignsize)
- end
- csnames[tfmdata] = specification.cs
- tex.definefont(global,cs,tfmdata)
- -- resolved (when designsize is used):
- setsomefontsize((fontdata[tfmdata].parameters.size or 0) .. "sp")
- lastfontid = tfmdata
- else
- -- setting the extra characters will move elsewhere
- local characters = tfmdata.characters
- local parameters = tfmdata.parameters
- -- we use char0 as signal; cf the spec pdf can handle this (no char in slot)
- characters[0] = nil
- -- characters[0x00A0] = { width = parameters.space }
- -- characters[0x2007] = { width = characters[0x0030] and characters[0x0030].width or parameters.space } -- figure
- -- characters[0x2008] = { width = characters[0x002E] and characters[0x002E].width or parameters.space } -- period
- --
- local id = font.define(tfmdata)
- csnames[id] = specification.cs
- tfmdata.properties.id = id
- definers.register(tfmdata,id) -- to be sure, normally already done
- tex.definefont(global,cs,id)
- constructors.cleanuptable(tfmdata)
- constructors.finalize(tfmdata)
- if trace_defining then
- report_defining("defining %a, id %a, target %a, features %a / %a, fallbacks %a / %a",
- name,id,nice_cs(cs),classfeatures,fontfeatures,classfallbacks,fontfallbacks)
- end
- -- resolved (when designsize is used):
- setsomefontsize((tfmdata.parameters.size or 655360) .. "sp")
- lastfontid = id
- end
- if trace_defining then
- report_defining("memory usage after: %s",statistics.memused())
- report_defining("stop stage two")
- end
- --
- texsetcount("global","lastfontid",lastfontid)
- if not mathsize then
- -- forget about it
- elseif mathsize == 0 then
- lastmathids[1] = lastfontid
- else
- lastmathids[mathsize] = lastfontid
- end
- --
- statistics.stoptiming(fonts)
-end
-
-function definers.define(specification)
- --
- local name = specification.name
- if not name or name == "" then
- return -1
- else
- statistics.starttiming(fonts)
- --
- -- following calls expect a few properties to be set:
- --
- local lookup, name, sub, method, detail = getspecification(name or "")
- --
- specification.name = (name ~= "" and name) or specification.name
- --
- specification.lookup = specification.lookup or (lookup ~= "" and lookup) or "file"
- specification.size = specification.size or 655360
- specification.sub = specification.sub or (sub ~= "" and sub) or ""
- specification.method = specification.method or (method ~= "" and method) or "*"
- specification.detail = specification.detail or (detail ~= "" and detail) or ""
- --
- if type(specification.size) == "string" then
- specification.size = tex.sp(specification.size) or 655360
- end
- --
- specification.specification = "" -- not used
- specification.resolved = ""
- specification.forced = ""
- specification.features = { } -- via detail, maybe some day
- --
- -- we don't care about mathsize textsize goodies fallbacks
- --
- local cs = specification.cs
- if cs == "" then
- cs = nil
- specification.cs = nil
- specification.global = false
- elseif specification.global == nil then
- specification.global = false
- end
- --
- local tfmdata = definers.read(specification,specification.size)
- if not tfmdata then
- return -1, nil
- elseif type(tfmdata) == "number" then
- if cs then
- tex.definefont(specification.global,cs,tfmdata)
- csnames[tfmdata] = cs
- end
- return tfmdata, fontdata[tfmdata]
- else
- local id = font.define(tfmdata)
- tfmdata.properties.id = id
- definers.register(tfmdata,id)
- if cs then
- tex.definefont(specification.global,cs,id)
- csnames[id] = cs
- end
- constructors.cleanuptable(tfmdata)
- constructors.finalize(tfmdata)
- return id, tfmdata
- end
- statistics.stoptiming(fonts)
- end
-end
-
--- local id, cs = fonts.definers.internal { }
--- local id, cs = fonts.definers.internal { number = 2 }
--- local id, cs = fonts.definers.internal { name = "dejavusans" }
-
-local n = 0
-
-function definers.internal(specification,cs)
- specification = specification or { }
- local name = specification.name
- local size = specification.size and number.todimen(specification.size) or texdimen.bodyfontsize
- local number = tonumber(specification.number)
- local id = nil
- if number then
- id = number
- elseif name and name ~= "" then
- local cs = cs or specification.cs
- if not cs then
- n = n + 1 -- beware ... there can be many and they are often used once
- -- cs = formatters["internal font %s"](n)
- cs = "internal font " .. n
- else
- specification.cs = cs
- end
- id = definers.define {
- name = name,
- size = size,
- cs = cs,
- }
- end
- if not id then
- id = currentfont()
- end
- return id, csnames[id]
-end
-
-local enable_auto_r_scale = false
-
-experiments.register("fonts.autorscale", function(v)
- enable_auto_r_scale = v
-end)
-
--- Not ok; we had best use a database for this. The problem is that we
--- have delayed definitions and so we never know which style is taken
--- as the starting point.
-
-local calculatescale = constructors.calculatescale
-
-function constructors.calculatescale(tfmdata,scaledpoints,relativeid)
- local scaledpoints, delta = calculatescale(tfmdata,scaledpoints)
- -- if enable_auto_r_scale and relativeid then -- for the moment this is rather context specific
- -- local relativedata = fontdata[relativeid]
- -- local rfmdata = relativedata and relativedata.unscaled and relativedata.unscaled
- -- local id_x_height = rfmdata and rfmdata.parameters and rfmdata.parameters.x_height
- -- local tf_x_height = tfmdata and tfmdata.parameters and tfmdata.parameters.x_height
- -- if id_x_height and tf_x_height then
- -- local rscale = id_x_height/tf_x_height
- -- delta = rscale * delta
- -- scaledpoints = rscale * scaledpoints
- -- end
- -- end
- return scaledpoints, delta
-end
-
--- We overload the (generic) resolver:
-
-local resolvers = definers.resolvers
-local hashfeatures = constructors.hashfeatures
-
-function definers.resolve(specification) -- overload function in font-con.lua
- if not specification.resolved or specification.resolved == "" then -- resolved itself not per se in mapping hash
- local r = resolvers[specification.lookup]
- if r then
- r(specification)
- end
- end
- if specification.forced == "" then
- specification.forced = nil
- else
- specification.forced = specification.forced
- end
- -- goodies are a context specific thing and not always defined
- -- as feature, so we need to make sure we add them here before
- -- hashing because otherwise we get funny goodies applied
- local goodies = specification.goodies
- if goodies and goodies ~= "" then
- -- this adapts the features table so it had best be a copy
- local normal = specification.features.normal
- if not normal then
- specification.features.normal = { goodies = goodies }
- elseif not normal.goodies then
- local g = normal.goodies
- if g and g ~= "" then
- normal.goodies = formatters["%s,%s"](g,goodies)
- else
- normal.goodies = goodies
- end
- end
- end
- -- so far for goodie hacks
- specification.hash = lower(specification.name .. ' @ ' .. hashfeatures(specification))
- if specification.sub and specification.sub ~= "" then
- specification.hash = specification.sub .. ' @ ' .. specification.hash
- end
- return specification
-end
-
-
--- soon to be obsolete:
-
-local mappings = fonts.mappings
-
-local loaded = { -- prevent loading (happens in cont-sys files)
- ["original-base.map" ] = true,
- ["original-ams-base.map" ] = true,
- ["original-ams-euler.map"] = true,
- ["original-public-lm.map"] = true,
-}
-
-function mappings.loadfile(name)
- name = file.addsuffix(name,"map")
- if not loaded[name] then
- if trace_mapfiles then
- report_mapfiles("loading map file %a",name)
- end
- pdf.mapfile(name)
- loaded[name] = true
- end
-end
-
-local loaded = { -- prevent double loading
-}
-
-function mappings.loadline(how,line)
- if line then
- how = how .. " " .. line
- elseif how == "" then
- how = "= " .. line
- end
- if not loaded[how] then
- if trace_mapfiles then
- report_mapfiles("processing map line %a",line)
- end
- pdf.mapline(how)
- loaded[how] = true
- end
-end
-
-function mappings.reset()
- pdf.mapfile("")
-end
-
-mappings.reset() -- resets the default file
-
--- we need a 'do after the banner' hook
-
--- => commands
-
-local function nametoslot(name)
- local t = type(name)
- if t == "string" then
- return resources[true].unicodes[name]
- elseif t == "number" then
- return name
- end
-end
-
-helpers.nametoslot = nametoslot
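-
--- A hedged usage sketch (added; the glyph name is illustrative):
---
--- local slot = helpers.nametoslot("eacute") -- unicode from the current font's resources
--- local same = helpers.nametoslot(0x00E9)   -- numbers are passed through as-is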
-
--- this will change ...
-
-function loggers.reportdefinedfonts()
- if trace_usage then
- local t, tn = { }, 0
- for id, data in sortedhash(fontdata) do
- local properties = data.properties or { }
- local parameters = data.parameters or { }
- tn = tn + 1
- t[tn] = {
- format("%03i",id or 0),
- format("%09i",parameters.size or 0),
- properties.type or "real",
- properties.format or "unknown",
- properties.name or "",
- properties.psname or "",
- properties.fullname or "",
- }
- report_status("%s: % t",properties.name,sortedkeys(data))
- end
- formatcolumns(t," ")
- report_status()
- report_status("defined fonts:")
- report_status()
- for k=1,tn do
- report_status(t[k])
- end
- end
-end
-
-luatex.registerstopactions(loggers.reportdefinedfonts)
-
-function loggers.reportusedfeatures()
- -- numbers, setups, merged
- if trace_usage then
- local t, n = { }, #numbers
- for i=1,n do
- local name = numbers[i]
- local setup = setups[name]
- local n = setup.number
- setup.number = nil -- we have no reason to show this
- t[i] = { i, name, sequenced(setup,false,true) } -- simple mode
- setup.number = n -- restore it (normally not needed as we're done anyway)
- end
- formatcolumns(t," ")
- report_status()
- report_status("defined featuresets:")
- report_status()
- for k=1,n do
- report_status(t[k])
- end
- end
-end
-
-luatex.registerstopactions(loggers.reportusedfeatures)
-
-statistics.register("fonts load time", function()
- return statistics.elapsedseconds(fonts)
-end)
-
--- experimental mechanism for Mojca:
---
--- fonts.definetypeface {
--- name = "mainbodyfont-light",
--- preset = "antykwapoltawskiego-light",
--- }
---
--- fonts.definetypeface {
--- name = "mojcasfavourite",
--- preset = "antykwapoltawskiego",
--- normalweight = "light",
--- boldweight = "bold",
--- width = "condensed",
--- }
-
-local Shapes = {
- serif = "Serif",
- sans = "Sans",
- mono = "Mono",
-}
-
-function fonts.definetypeface(name,t)
- if type(name) == "table" then
- -- {name=abc,k=v,...}
- t = name
- elseif t then
- if type(t) == "string" then
- -- "abc", "k=v,..."
- t = settings_to_hash(t)
- else
- -- "abc", {k=v,...}
- end
- t.name = t.name or name
- else
- -- "name=abc,k=v,..."
- t = settings_to_hash(name)
- end
- local p = t.preset and fonts.typefaces[t.preset] or { }
- local name = t.name or "unknowntypeface"
- local shortcut = t.shortcut or p.shortcut or "rm"
- local size = t.size or p.size or "default"
- local shape = t.shape or p.shape or "serif"
- local fontname = t.fontname or p.fontname or "unknown"
- local normalweight = t.normalweight or t.weight or p.normalweight or p.weight or "normal"
- local boldweight = t.boldweight or t.weight or p.boldweight or p.weight or "normal"
- local normalwidth = t.normalwidth or t.width or p.normalwidth or p.width or "normal"
- local boldwidth = t.boldwidth or t.width or p.boldwidth or p.width or "normal"
- local Shape = Shapes[shape] or "Serif"
- context.startfontclass { name }
- context.definefontsynonym( { format("%s", Shape) }, { format("spec:%s-%s-regular-%s", fontname, normalweight, normalwidth) } )
- context.definefontsynonym( { format("%sBold", Shape) }, { format("spec:%s-%s-regular-%s", fontname, boldweight, boldwidth ) } )
- context.definefontsynonym( { format("%sBoldItalic", Shape) }, { format("spec:%s-%s-italic-%s", fontname, boldweight, boldwidth ) } )
- context.definefontsynonym( { format("%sItalic", Shape) }, { format("spec:%s-%s-italic-%s", fontname, normalweight, normalwidth) } )
- context.stopfontclass()
- local settings = sequenced({ features= t.features },",")
- context.dofastdefinetypeface(name, shortcut, shape, size, settings)
-end
-
-function fonts.current() -- todo: also handle name
- return fontdata[currentfont()] or fontdata[0]
-end
-
-function fonts.currentid()
- return currentfont() or 0
-end
-
--- interfaces
-
-function commands.fontchar(n)
- n = nametoslot(n)
- if n then
- context.char(n)
- end
-end
-
-function commands.doifelsecurrentfonthasfeature(name) -- can be made faster with a supportedfeatures hash
- local f = fontdata[currentfont()]
- f = f and f.shared
- f = f and f.rawdata
- f = f and f.resources
- f = f and f.features
- commands.doifelse(f and (f.gpos[name] or f.gsub[name]))
-end
-
-local p, f = 1, formatters["%0.1fpt"] -- normally this value is changed only once
-
-local stripper = lpeg.patterns.stripzeros
-
-function commands.nbfs(amount,precision)
- if precision ~= p then
- p = precision
- f = formatters["%0." .. p .. "fpt"]
- end
- context(lpegmatch(stripper,f(amount/65536)))
-end
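-
--- A hedged example (added): with the default precision of one digit
---
--- commands.nbfs(655360,1) -- typesets "10pt" ("10.0pt" with the zeros stripped)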
-
-function commands.featureattribute(tag)
- context(contextnumber(tag))
-end
-
-function commands.setfontfeature(tag)
- texattribute[0] = contextnumber(tag)
-end
-
-function commands.resetfontfeature()
- texattribute[0] = 0
-end
-
--- function commands.addfs(tag) withset(tag, 1) end
--- function commands.subfs(tag) withset(tag,-1) end
--- function commands.addff(tag) withfnt(tag, 2) end -- on top of font features
--- function commands.subff(tag) withfnt(tag,-2) end -- on top of font features
-
-function commands.cleanfontname (name) context(names.cleanname(name)) end
-
-function commands.fontlookupinitialize (name) names.lookup(name) end
-function commands.fontlookupnoffound () context(names.noflookups()) end
-function commands.fontlookupgetkeyofindex(key,index) context(names.getlookupkey(key,index)) end
-function commands.fontlookupgetkey (key) context(names.getlookupkey(key)) end
-
--- this might move to a runtime module:
-
-function commands.showchardata(n)
- local tfmdata = fontdata[currentfont()]
- if tfmdata then
- if type(n) == "string" then
- n = utfbyte(n)
- end
- local chr = tfmdata.characters[n]
- if chr then
- report_status("%s @ %s => %U => %c => %s",tfmdata.properties.fullname,tfmdata.parameters.size,n,n,serialize(chr,false))
- end
- end
-end
-
-function commands.showfontparameters(tfmdata)
- -- this will become more clever
- local tfmdata = tfmdata or fontdata[currentfont()]
- if tfmdata then
- local parameters = tfmdata.parameters
- local mathparameters = tfmdata.mathparameters
- local properties = tfmdata.properties
- local hasparameters = parameters and next(parameters)
- local hasmathparameters = mathparameters and next(mathparameters)
- if hasparameters then
- report_status("%s @ %s => text parameters => %s",properties.fullname,parameters.size,serialize(parameters,false))
- end
- if hasmathparameters then
- report_status("%s @ %s => math parameters => %s",properties.fullname,parameters.size,serialize(mathparameters,false))
- end
- if not hasparameters and not hasmathparameters then
- report_status("%s @ %s => no text parameters and/or math parameters",properties.fullname,parameters.size)
- end
- end
-end
-
--- for the moment here, this will become a chain of extras that is
--- hooked into the ctx registration (or scaler or ...)
-
-local dimenfactors = number.dimenfactors
-
-function helpers.dimenfactor(unit,tfmdata) -- could be a method of a font instance
- if unit == "ex" then
- return (tfmdata and tfmdata.parameters.x_height) or 655360
- elseif unit == "em" then
- return (tfmdata and tfmdata.parameters.em_width) or 655360
- else
- local du = dimenfactors[unit]
- return du and 1/du or tonumber(unit) or 1
- end
-end
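-
--- A hedged example (added):
---
--- helpers.dimenfactor("ex",tfmdata) -- the font's x_height (655360 without a font)
--- helpers.dimenfactor("em",tfmdata) -- the font's em_width  (655360 without a font)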
-
-local function digitwidth(font) -- max(quad/2,wd(0..9))
- local tfmdata = fontdata[font]
- local parameters = tfmdata.parameters
- local width = parameters.digitwidth
- if not width then
- width = round(parameters.quad/2) -- maybe tex.scale
- local characters = tfmdata.characters
- for i=48,57 do
- local wd = round(characters[i].width)
- if wd > width then
- width = wd
- end
- end
- parameters.digitwidth = width
- end
- return width
-end
-
-helpers.getdigitwidth = digitwidth
-helpers.setdigitwidth = digitwidth
-
---
-
-function helpers.getparameters(tfmdata)
- local p = { }
- local m = p
- local parameters = tfmdata.parameters
- while true do
- for k, v in next, parameters do
- m[k] = v
- end
- parameters = getmetatable(parameters)
- parameters = parameters and parameters.__index
- if type(parameters) == "table" then
- m = { }
- p.metatable = m
- else
- break
- end
- end
- return p
-end
-
-if environment.initex then
-
- local function names(t)
- local nt = #t
- if nt > 0 then
- local n = { }
- for i=1,nt do
- n[i] = t[i].name
- end
- return concat(n," ")
- else
- return "-"
- end
- end
-
- statistics.register("font processing", function()
- local l = { }
- for what, handler in table.sortedpairs(handlers) do
- local features = handler.features
- if features then
- l[#l+1] = format("[%s (base initializers: %s) (base processors: %s) (base manipulators: %s) (node initializers: %s) (node processors: %s) (node manipulators: %s)]",
- what,
- names(features.initializers.base),
- names(features.processors .base),
- names(features.manipulators.base),
- names(features.initializers.node),
- names(features.processors .node),
- names(features.manipulators.node)
- )
- end
- end
- return concat(l, " | ")
- end)
-
-end
-
--- redefinition
-
-local quads = hashes.quads
-local xheights = hashes.xheights
-
-setmetatableindex(number.dimenfactors, function(t,k)
- if k == "ex" then
- return xheights[currentfont()]
- elseif k == "em" then
- return quads[currentfont()]
- elseif k == "%" then
- return dimen.hsize/100
- else
- -- error("wrong dimension: " .. (s or "?")) -- better a message
- return false
- end
-end)
-
---[[ldx--
-
-Before a font is passed to TeX we scale it. Here we also need
-to scale virtual characters.
---ldx]]--
-
-function constructors.checkvirtualids(tfmdata)
- -- begin of experiment: we can use { "slot", 0, number } in virtual fonts
- local fonts = tfmdata.fonts
- local selfid = font.nextid()
- if fonts and #fonts > 0 then
- for i=1,#fonts do
- if fonts[i][2] == 0 then
- fonts[i][2] = selfid
- end
- end
- else
- -- tfmdata.fonts = { "id", selfid } -- conflicts with other next id's (vf math), too late anyway
- end
- -- end of experiment
-end
-
--- function constructors.getvirtualid(tfmdata)
--- -- since we don't know the id yet, we use 0 as signal
--- local tf = tfmdata.fonts
--- if not tf then
--- local properties = tfmdata.properties
--- if properties then
--- properties.virtualized = true
--- else
--- tfmdata.properties = { virtualized = true }
--- end
--- tf = { }
--- tfmdata.fonts = tf
--- end
--- local ntf = #tf + 1
--- tf[ntf] = { id = 0 }
--- return ntf
--- end
---
--- function constructors.checkvirtualid(tfmdata, id) -- will go
--- local properties = tfmdata.properties
--- if tfmdata and tfmdata.type == "virtual" or (properties and properties.virtualized) then
--- local vfonts = tfmdata.fonts
--- if not vffonts or #vfonts == 0 then
--- if properties then
--- properties.virtualized = false
--- end
--- tfmdata.fonts = nil
--- else
--- for f=1,#vfonts do
--- local fnt = vfonts[f]
--- if fnt.id and fnt.id == 0 then
--- fnt.id = id
--- end
--- end
--- end
--- end
--- end
-
-function commands.setfontofid(id)
- context.getvalue(csnames[id])
-end
-
--- more interfacing:
-
-commands.definefontfeature = presetcontext
-
-local cache = { }
-
-local hows = {
- ["+"] = "add",
- ["-"] = "subtract",
- ["="] = "replace",
-}
-
-function commands.feature(how,parent,name,font)
- if not how then
- if trace_features and texattribute[0] ~= 0 then
- report_cummulative("font %!font:name!, reset",fontdata[font or true])
- end
- texattribute[0] = 0
- elseif how == true then
- local hash = "feature > " .. parent
- local done = cache[hash]
- if trace_features and done then
- report_cummulative("font %!font:name!, revive %a : %!font:features!",fontdata[font or true],parent,setups[numbers[done]])
- end
- texattribute[0] = done or 0
- else
- local full = parent .. how .. name
- local hash = "feature > " .. full
- local done = cache[hash]
- if not done then
- local n = setups[full]
- if n then
- -- already defined
- else
- n = mergecontextfeatures(parent,name,how,full)
- end
- done = registercontextfeature(hash,full,how)
- cache[hash] = done
- if trace_features then
- report_cummulative("font %!font:name!, %s %a : %!font:features!",fontdata[font or true],hows[how],full,setups[numbers[done]])
- end
- end
- texattribute[0] = done
- end
-end
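-
--- Hedged usage sketches (added; the featureset names are illustrative):
---
--- commands.feature("+","default","smallcaps") -- add smallcaps on top of default
--- commands.feature(true,"default+smallcaps")  -- revive a cached combination
--- commands.feature()                          -- reset attribute zero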
-
-function commands.featurelist(...)
- context(fonts.specifiers.contexttostring(...))
-end
-
-function commands.registerlanguagefeatures()
- local specifications = languages.data.specifications
- for i=1,#specifications do
- local specification = specifications[i]
- local language = specification.opentype
- if language then
- local script = specification.opentypescript or specification.script
- if script then
- local context = specification.context
- if type(context) == "table" then
- for i=1,#context do
- definecontext(context[i], { language = language, script = script})
- end
- elseif type(context) == "string" then
- definecontext(context, { language = language, script = script})
- end
- end
- end
- end
-end
-
--- a fontkern plug:
-
-local copy_node = node.copy
-local kern = nodes.pool.register(nodes.pool.kern())
-
-node.set_attribute(kern,attributes.private('fontkern'),1) -- we can have several, attributes are shared
-
-nodes.injections.installnewkern(function(k)
- local c = copy_node(kern)
- c.kern = k
- return c
-end)
-
-directives.register("nodes.injections.fontkern", function(v) kern.subtype = v and 0 or 1 end)
-
--- here
-
-local trace_analyzing = false trackers.register("otf.analyzing", function(v) trace_analyzing = v end)
-
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
-
-local analyzers = fonts.analyzers
-local methods = analyzers.methods
-
-local unsetvalue = attributes.unsetvalue
-
-local traverse_by_id = node.traverse_id
-
-local a_color = attributes.private('color')
-local a_colormodel = attributes.private('colormodel')
-local a_state = attributes.private('state')
-local m_color = attributes.list[a_color] or { }
-
-local glyph_code = nodes.nodecodes.glyph
-
-local states = analyzers.states
-
-local names = {
- [states.init] = "font:1",
- [states.medi] = "font:2",
- [states.fina] = "font:3",
- [states.isol] = "font:4",
- [states.mark] = "font:5",
- [states.rest] = "font:6",
- [states.rphf] = "font:1",
- [states.half] = "font:2",
- [states.pref] = "font:3",
- [states.blwf] = "font:4",
- [states.pstf] = "font:5",
-}
-
-local function markstates(head)
- if head then
- local model = head[a_colormodel] or 1
- for glyph in traverse_by_id(glyph_code,head) do
- local a = glyph[a_state]
- if a then
- local name = names[a]
- if name then
- local color = m_color[name]
- if color then
- glyph[a_colormodel] = model
- glyph[a_color] = color
- end
- end
- end
- end
- end
-end
-
-local function analyzeprocessor(head,font,attr)
- local tfmdata = fontdata[font]
- local script, language = otf.scriptandlanguage(tfmdata,attr)
- local action = methods[script]
- if not action then
- return head, false
- end
- if type(action) == "function" then
- local head, done = action(head,font,attr)
- if done and trace_analyzing then
- markstates(head)
- end
- return head, done
- end
- action = action[language]
- if action then
- local head, done = action(head,font,attr)
- if done and trace_analyzing then
- markstates(head)
- end
- return head, done
- else
- return head, false
- end
-end
-
-registerotffeature { -- adapts
- name = "analyze",
- processors = {
- node = analyzeprocessor,
- }
-}
-
-function methods.nocolor(head,font,attr)
- for n in traverse_by_id(glyph_code,head) do
- if not font or n.font == font then
- n[a_color] = unsetvalue
- end
- end
- return head, true
-end
+if not modules then modules = { } end modules ['font-ctx'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- At some point I will clean up the code here so that at the tex end
+-- the table interface is used.
+--
+-- Todo: make a proper 'next id' mechanism (register etc) or wait till 'true'
+-- in virtual fonts indices is implemented.
+
+local context, commands = context, commands
+
+local texcount, texsetcount = tex.count, tex.setcount
+local format, gmatch, match, find, lower, gsub, byte = string.format, string.gmatch, string.match, string.find, string.lower, string.gsub, string.byte
+local concat, serialize, sort, fastcopy, mergedtable = table.concat, table.serialize, table.sort, table.fastcopy, table.merged
+local sortedhash, sortedkeys, sequenced = table.sortedhash, table.sortedkeys, table.sequenced
+local settings_to_hash, hash_to_string = utilities.parsers.settings_to_hash, utilities.parsers.hash_to_string
+local formatcolumns = utilities.formatters.formatcolumns
+local mergehashes = utilities.parsers.mergehashes
+local formatters = string.formatters
+
+local tostring, next, type, rawget, tonumber = tostring, next, type, rawget, tonumber
+local utfchar, utfbyte = utf.char, utf.byte
+local round = math.round
+
+local P, S, C, Cc, Cf, Cg, Ct, lpegmatch = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc, lpeg.Cf, lpeg.Cg, lpeg.Ct, lpeg.match
+
+local trace_features = false trackers.register("fonts.features", function(v) trace_features = v end)
+local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
+local trace_designsize = false trackers.register("fonts.designsize", function(v) trace_designsize = v end)
+local trace_usage = false trackers.register("fonts.usage", function(v) trace_usage = v end)
+local trace_mapfiles = false trackers.register("fonts.mapfiles", function(v) trace_mapfiles = v end)
+local trace_automode = false trackers.register("fonts.automode", function(v) trace_automode = v end)
+
+local report_features = logs.reporter("fonts","features")
+local report_cummulative = logs.reporter("fonts","cummulative")
+local report_defining = logs.reporter("fonts","defining")
+local report_status = logs.reporter("fonts","status")
+local report_mapfiles = logs.reporter("fonts","mapfiles")
+
+local setmetatableindex = table.setmetatableindex
+
+local fonts = fonts
+local handlers = fonts.handlers
+local otf = handlers.otf -- brrr
+local names = fonts.names
+local definers = fonts.definers
+local specifiers = fonts.specifiers
+local constructors = fonts.constructors
+local loggers = fonts.loggers
+local fontgoodies = fonts.goodies
+local helpers = fonts.helpers
+local hashes = fonts.hashes
+local currentfont = font.current
+local texattribute = tex.attribute
+local texdimen = tex.dimen
+
+local fontdata = hashes.identifiers
+local characters = hashes.chardata
+local descriptions = hashes.descriptions
+local properties = hashes.properties
+local resources = hashes.resources
+local csnames = hashes.csnames
+local marks = hashes.markdata
+local lastmathids = hashes.lastmathids
+
+local designsizefilename = fontgoodies.designsizes.filename
+
+local otffeatures = otf.features
+local otftables = otf.tables
+
+local registerotffeature = otffeatures.register
+local baseprocessors = otffeatures.processors.base
+local baseinitializers = otffeatures.initializers.base
+
+local sequencers = utilities.sequencers
+local appendgroup = sequencers.appendgroup
+local appendaction = sequencers.appendaction
+
+specifiers.contextsetups = specifiers.contextsetups or { }
+specifiers.contextnumbers = specifiers.contextnumbers or { }
+specifiers.contextmerged = specifiers.contextmerged or { }
+specifiers.synonyms = specifiers.synonyms or { }
+
+local setups = specifiers.contextsetups
+local numbers = specifiers.contextnumbers
+local merged = specifiers.contextmerged
+local synonyms = specifiers.synonyms
+
+storage.register("fonts/setups" , setups , "fonts.specifiers.contextsetups" )
+storage.register("fonts/numbers", numbers, "fonts.specifiers.contextnumbers")
+storage.register("fonts/merged", merged, "fonts.specifiers.contextmerged")
+storage.register("fonts/synonyms", synonyms, "fonts.specifiers.synonyms")
+
+-- inspect(setups)
+
+if environment.initex then
+ setmetatableindex(setups,function(t,k)
+ return type(k) == "number" and rawget(t,numbers[k]) or nil
+ end)
+else
+ setmetatableindex(setups,function(t,k)
+ local v = type(k) == "number" and rawget(t,numbers[k])
+ if v then
+ t[k] = v
+ return v
+ end
+ end)
+end
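+
+-- A hedged illustration (added; the slot number is made up): once a setup is
+-- registered it can be reached by name or by its number:
+--
+-- setups["smallcaps"] -- direct access
+-- setups[5]           -- resolves via numbers[5] == "smallcaps" (cached outside ini mode)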
+
+-- this will move elsewhere ...
+
+utilities.strings.formatters.add(formatters,"font:name", [["'"..file.basename(%s.properties.name).."'"]])
+utilities.strings.formatters.add(formatters,"font:features",[["'"..table.sequenced(%s," ",true).."'"]])
+
+-- ... like font-sfm or so
+
+constructors.resolvevirtualtoo = true -- context specific (due to resolver)
+
+local limited = false
+
+directives.register("system.inputmode", function(v)
+ if not limited then
+ local i_limiter = io.i_limiter(v)
+ if i_limiter then
+ fontloader.open = i_limiter.protect(fontloader.open)
+ fontloader.info = i_limiter.protect(fontloader.info)
+ limited = true
+ end
+ end
+end)
+
+function definers.resetnullfont()
+ -- resetting is needed because tikz misuses nullfont
+ local parameters = fonts.nulldata.parameters
+ --
+ parameters.slant = 0 -- 1
+ parameters.space = 0 -- 2
+ parameters.space_stretch = 0 -- 3
+ parameters.space_shrink = 0 -- 4
+ parameters.x_height = 0 -- 5
+ parameters.quad = 0 -- 6
+ parameters.extra_space = 0 -- 7
+ --
+ constructors.enhanceparameters(parameters) -- official copies for us
+ --
+ definers.resetnullfont = function() end
+end
+
+commands.resetnullfont = definers.resetnullfont
+
+-- this cannot be a feature initializer as there is no auto namespace
+-- so we never enter the loop then; we can store the defaults in the tma
+-- file (features.gpos.mkmk = 1 etc)
+
+local needsnodemode = {
+ gpos_mark2mark = true,
+ gpos_mark2base = true,
+ gpos_mark2ligature = true,
+}
+
+otftables.scripts.auto = "automatic fallback to latn when no dflt present"
+
+-- setmetatableindex(otffeatures.descriptions,otftables.features)
+
+local privatefeatures = {
+ tlig = true,
+ trep = true,
+ anum = true,
+}
+
+local function checkedscript(tfmdata,resources,features)
+ local latn = false
+ local script = false
+ for g, list in next, resources.features do
+ for f, scripts in next, list do
+ if privatefeatures[f] then
+ -- skip
+ elseif scripts.dflt then
+ script = "dflt"
+ break
+ elseif scripts.latn then
+ latn = true
+ end
+ end
+ end
+ if not script then
+ script = latn and "latn" or "dflt"
+ end
+ if trace_automode then
+ report_defining("auto script mode, using script %a in font %!font:name!",script,tfmdata)
+ end
+ features.script = script
+ return script
+end
+
+local function checkedmode(tfmdata,resources,features)
+ local sequences = resources.sequences
+ if sequences and #sequences > 0 then
+ local script = features.script or "dflt"
+ local language = features.language or "dflt"
+ for feature, value in next, features do
+ if value then
+ local found = false
+ for i=1,#sequences do
+ local sequence = sequences[i]
+ local sequencefeatures = sequence.features
+ if sequencefeatures then
+ local scripts = sequencefeatures[feature]
+ if scripts then
+ local languages = scripts[script]
+ if languages and languages[language] then
+ if found then
+ -- more than one lookup
+ if trace_automode then
+ report_defining("forcing mode %a, font %!font:name!, feature %a, script %a, language %a, %s",
+ "node",tfmdata,feature,script,language,"multiple lookups")
+ end
+ features.mode = "node"
+ return "node"
+ elseif needsnodemode[sequence.type] then
+ if trace_automode then
+ report_defining("forcing mode %a, font %!font:name!, feature %a, script %a, language %a, %s",
+ "node",tfmdata,feature,script,language,"no base support")
+ end
+ features.mode = "node"
+ return "node"
+ else
+ -- at least one lookup
+ found = true
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ features.mode = "base" -- new, or is this wrong?
+ return "base"
+end
+
+definers.checkedscript = checkedscript
+definers.checkedmode = checkedmode
+
+local function modechecker(tfmdata,features,mode) -- we cannot adapt features as they are shared!
+ if trace_features then
+ report_features("fontname %!font:name!, features %!font:features!",tfmdata,features)
+ end
+ local rawdata = tfmdata.shared.rawdata
+ local resources = rawdata and rawdata.resources
+ local script = features.script
+ if resources then
+ if script == "auto" then
+ script = checkedscript(tfmdata,resources,features)
+ end
+ if mode == "auto" then
+ mode = checkedmode(tfmdata,resources,features)
+ end
+ else
+ report_features("missing resources for font %!font:name!",tfmdata)
+ end
+ return mode
+end
+
+registerotffeature {
+ -- we only set the checker and leave other settings of the mode
+ -- feature as they are
+ name = "mode",
+ modechecker = modechecker,
+}
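+
+-- Hedged sketch (added): with a feature set like { mode = "auto", script = "auto" }
+-- the checker above first resolves the script (preferring "dflt", else "latn")
+-- and then forces "node" mode when a requested feature needs it (for instance
+-- mark positioning) or is implemented by more than one lookup; otherwise it
+-- settles for "base".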
+
+-- -- default = true anyway
+--
+-- local normalinitializer = constructors.getfeatureaction("otf","initializers","node","analyze")
+--
+-- local function analyzeinitializer(tfmdata,value,features) -- attr
+-- if value == "auto" and features then
+-- value = features.init or features.medi or features.fina or features.isol or false
+-- end
+-- return normalinitializer(tfmdata,value,features)
+-- end
+--
+-- registerotffeature {
+-- name = "analyze",
+-- initializers = {
+-- node = analyzeinitializer,
+-- },
+-- }
+
+local beforecopyingcharacters = sequencers.new {
+ name = "beforecopyingcharacters",
+ arguments = "target,original",
+}
+
+appendgroup(beforecopyingcharacters,"before") -- user
+appendgroup(beforecopyingcharacters,"system") -- private
+appendgroup(beforecopyingcharacters,"after" ) -- user
+
+function constructors.beforecopyingcharacters(original,target)
+ local runner = beforecopyingcharacters.runner
+ if runner then
+ runner(original,target)
+ end
+end
+
+local aftercopyingcharacters = sequencers.new {
+ name = "aftercopyingcharacters",
+ arguments = "target,original",
+}
+
+appendgroup(aftercopyingcharacters,"before") -- user
+appendgroup(aftercopyingcharacters,"system") -- private
+appendgroup(aftercopyingcharacters,"after" ) -- user
+
+function constructors.aftercopyingcharacters(original,target)
+ local runner = aftercopyingcharacters.runner
+ if runner then
+ runner(original,target)
+ end
+end
+
+--[[ldx--
+
+So far we haven't really dealt with features (or whatever we want
+to pass along with the font definition). We distinguish the following
+situations:
+
+
+name:xetex like specs
+name@virtual font spec
+name*context specification
+
+--ldx]]--
+
+-- currently fonts are scaled while constructing the font, so we
+-- have to do the scaling of commands in the vf at that point, using e.g.
+-- "local scale = g.parameters.factor or 1"; after all, we need to
+-- work with copies anyway and scaling needs to be done at some point;
+-- however, when virtual tricks are used as a feature (which makes more
+-- sense) we scale the commands in fonts.constructors.scale (and set the
+-- factor there)
+
+local loadfont = definers.loadfont
+
+function definers.loadfont(specification,size,id) -- overloads the one in font-def
+ local variants = definers.methods.variants
+ local virtualfeatures = specification.features.virtual
+ if virtualfeatures and virtualfeatures.preset then
+ local variant = variants[virtualfeatures.preset]
+ if variant then
+ return variant(specification,size,id)
+ end
+ else
+ local tfmdata = loadfont(specification,size,id)
+ -- constructors.checkvirtualid(tfmdata,id)
+ return tfmdata
+ end
+end
+
+local function predefined(specification)
+ local variants = definers.methods.variants
+ local detail = specification.detail
+ if detail ~= "" and variants[detail] then
+ specification.features.virtual = { preset = detail }
+ end
+ return specification
+end
+
+definers.registersplit("@", predefined,"virtual")
+
+local normalize_features = otffeatures.normalize -- should be general
+
+local function definecontext(name,t) -- can be shared
+ local number = setups[name] and setups[name].number or 0 -- hm, numbers[name]
+ if number == 0 then
+ number = #numbers + 1
+ numbers[number] = name
+ end
+ t.number = number
+ setups[name] = t
+ return number, t
+end
+
+local function presetcontext(name,parent,features) -- will go to con and shared
+ if features == "" and find(parent,"=") then
+ features = parent
+ parent = ""
+ end
+ if not features or features == "" then
+ features = { }
+ elseif type(features) == "string" then
+ features = normalize_features(settings_to_hash(features))
+ else
+ features = normalize_features(features)
+ end
+ -- todo: synonyms, and not otf bound
+ if parent ~= "" then
+ for p in gmatch(parent,"[^, ]+") do
+ local s = setups[p]
+ if s then
+ for k,v in next, s do
+ if features[k] == nil then
+ features[k] = v
+ end
+ end
+ else
+ -- just ignore an undefined one .. i.e. we can refer to not yet defined
+ end
+ end
+ end
+ -- these are auto set so in order to prevent redundant definitions
+ -- we need to preset them (we hash the features and adding a default
+ -- setting during initialization may result in a different hash)
+ --
+ -- for k,v in next, triggers do
+ -- if features[v] == nil then -- not false !
+ -- local vv = default_features[v]
+ -- if vv then features[v] = vv end
+ -- end
+ -- end
+ --
+ for feature,value in next, features do
+ if value == nil then -- not false !
+ local default = default_features[feature]
+ if default ~= nil then
+ features[feature] = default
+ end
+ end
+ end
+ -- sparse 'm so that we get a better hash and less test (experimental
+ -- optimization)
+ local t = { } -- can we avoid t ?
+ for k,v in next, features do
+-- if v then t[k] = v end
+ t[k] = v
+ end
+ -- needed for dynamic features
+ -- maybe number should always be renewed as we can redefine features
+ local number = setups[name] and setups[name].number or 0 -- hm, numbers[name]
+ if number == 0 then
+ number = #numbers + 1
+ numbers[number] = name
+ end
+ t.number = number
+ setups[name] = t
+ return number, t
+end
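+
+-- Hedged usage sketch (added; names and features are illustrative):
+--
+-- local n, t = presetcontext("smallcaps","","smcp=yes")
+-- -- registers setups["smallcaps"] and assigns it the next slot in numbers
+-- local m, s = presetcontext("oldstyle","smallcaps","onum=yes")
+-- -- inherits the parent's settings for keys not set explicitly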
+
+local function contextnumber(name) -- will be replaced
+ local t = setups[name]
+ if not t then
+ return 0
+ elseif t.auto then
+ local lng = tonumber(tex.language)
+ local tag = name .. ":" .. lng
+ local s = setups[tag]
+ if s then
+ return s.number or 0
+ else
+ local script, language = languages.association(lng)
+ if t.script ~= script or t.language ~= language then
+ local s = fastcopy(t)
+ local n = #numbers + 1
+ setups[tag] = s
+ numbers[n] = tag
+ s.number = n
+ s.script = script
+ s.language = language
+ return n
+ else
+ setups[tag] = t
+ return t.number or 0
+ end
+ end
+ else
+ return t.number or 0
+ end
+end
+
+local function mergecontext(currentnumber,extraname,option) -- number string number (used in scrp-ini
+ local extra = setups[extraname]
+ if extra then
+ local current = setups[numbers[currentnumber]]
+ local mergedfeatures, mergedname = { }, nil
+ if option < 0 then
+ if current then
+ for k, v in next, current do
+ if not extra[k] then
+ mergedfeatures[k] = v
+ end
+ end
+ end
+ mergedname = currentnumber .. "-" .. extraname
+ else
+ if current then
+ for k, v in next, current do
+ mergedfeatures[k] = v
+ end
+ end
+ for k, v in next, extra do
+ mergedfeatures[k] = v
+ end
+ mergedname = currentnumber .. "+" .. extraname
+ end
+ local number = #numbers + 1
+ mergedfeatures.number = number
+ numbers[number] = mergedname
+ merged[number] = option
+ setups[mergedname] = mergedfeatures
+ return number -- contextnumber(mergedname)
+ else
+ return currentnumber
+ end
+end
+
+local extrasets = { }
+
+setmetatableindex(extrasets,function(t,k)
+ local v = mergehashes(setups,k)
+ t[k] = v
+ return v
+end)
+
+local function mergecontextfeatures(currentname,extraname,how,mergedname) -- string string
+ local extra = setups[extraname] or extrasets[extraname]
+ if extra then
+ local current = setups[currentname]
+ local mergedfeatures = { }
+ if how == "+" then
+ if current then
+ for k, v in next, current do
+ mergedfeatures[k] = v
+ end
+ end
+ for k, v in next, extra do
+ mergedfeatures[k] = v
+ end
+ elseif how == "-" then
+ if current then
+ for k, v in next, current do
+ mergedfeatures[k] = v
+ end
+ end
+ for k, v in next, extra do
+ -- only boolean features
+ if v == true then
+ mergedfeatures[k] = false
+ end
+ end
+ else -- =
+ for k, v in next, extra do
+ mergedfeatures[k] = v
+ end
+ end
+ local number = #numbers + 1
+ mergedfeatures.number = number
+ numbers[number] = mergedname
+ -- merged[number] = option -- "option" is not in scope here; the merge kind is recorded in registercontextfeature
+ setups[mergedname] = mergedfeatures
+ return number
+ else
+ return numbers[currentname] or 0
+ end
+end
+
+local function registercontext(fontnumber,extraname,option)
+ local extra = setups[extraname]
+ if extra then
+ local mergedfeatures, mergedname = { }, nil
+ if option < 0 then
+ mergedname = fontnumber .. "-" .. extraname
+ else
+ mergedname = fontnumber .. "+" .. extraname
+ end
+ for k, v in next, extra do
+ mergedfeatures[k] = v
+ end
+ local number = #numbers + 1
+ mergedfeatures.number = number
+ numbers[number] = mergedname
+ merged[number] = option
+ setups[mergedname] = mergedfeatures
+ return number -- contextnumber(mergedname)
+ else
+ return 0
+ end
+end
+
+local function registercontextfeature(mergedname,extraname,how)
+ local extra = setups[extraname]
+ if extra then
+ local mergedfeatures = { }
+ for k, v in next, extra do
+ mergedfeatures[k] = v
+ end
+ local number = #numbers + 1
+ mergedfeatures.number = number
+ numbers[number] = mergedname
+ merged[number] = how == "=" and 1 or 2 -- 1=replace, 2=combine
+ setups[mergedname] = mergedfeatures
+ return number -- contextnumber(mergedname)
+ else
+ return 0
+ end
+end
+
+specifiers.presetcontext = presetcontext
+specifiers.contextnumber = contextnumber
+specifiers.mergecontext = mergecontext
+specifiers.registercontext = registercontext
+specifiers.definecontext = definecontext
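+
+-- An illustrative sketch of how these specifiers hang together (the feature
+-- set name "mydemo" and its settings are made up for this example):
+--
+-- local n = presetcontext("mydemo","","smcp=yes,onum=yes") -- stores setups["mydemo"], returns its number
+-- local a = contextnumber("mydemo")                        -- attribute value used for dynamic features
+-- local t = specifiers.showcontext("mydemo")               -- inspect the stored (normalized) feature table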
+
+-- we extend the hasher:
+
+constructors.hashmethods.virtual = function(list)
+ local s = { }
+ local n = 0
+ for k, v in next, list do
+ n = n + 1
+ s[n] = k -- no checking on k
+ end
+ if n > 0 then
+ sort(s)
+ for i=1,n do
+ local k = s[i]
+ s[i] = k .. '=' .. tostring(list[k])
+ end
+ return concat(s,"+")
+ end
+end
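+
+-- For example (sketch, keys invented for illustration): a virtual feature list
+-- like { preset = "demo", analyze = true } hashes to "analyze=true+preset=demo",
+-- i.e. the sorted key=value pairs joined by "+"; an empty list yields nil.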
+
+-- end of redefine
+
+-- local withcache = { } -- concat might be less efficient than nested tables
+--
+-- local function withset(name,what)
+-- local zero = texattribute[0]
+-- local hash = zero .. "+" .. name .. "*" .. what
+-- local done = withcache[hash]
+-- if not done then
+-- done = mergecontext(zero,name,what)
+-- withcache[hash] = done
+-- end
+-- texattribute[0] = done
+-- end
+--
+-- local function withfnt(name,what,font)
+-- local font = font or currentfont()
+-- local hash = font .. "*" .. name .. "*" .. what
+-- local done = withcache[hash]
+-- if not done then
+-- done = registercontext(font,name,what)
+-- withcache[hash] = done
+-- end
+-- texattribute[0] = done
+-- end
+
+function specifiers.showcontext(name)
+ return setups[name] or setups[numbers[name]] or setups[numbers[tonumber(name)]] or { }
+end
+
+-- we need a copy as we will add (fontclass) goodies to the features and
+-- that is bad for a shared table
+
+-- local function splitcontext(features) -- presetcontext creates dummy here
+-- return fastcopy(setups[features] or (presetcontext(features,"","") and setups[features]))
+-- end
+
+local function splitcontext(features) -- presetcontext creates dummy here
+ local sf = setups[features]
+ if not sf then
+ local n -- number
+ if find(features,",") then
+ -- let's assume a combination which is not yet defined but just specified (as in math)
+ n, sf = presetcontext(features,features,"")
+ else
+ -- we've run into an unknown feature and or a direct spec so we create a dummy
+ n, sf = presetcontext(features,"","")
+ end
+ end
+ return fastcopy(sf)
+end
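+
+-- Example (sketch): splitcontext("default") returns a copy of the "default"
+-- featureset (assuming one has been defined with presetcontext), while a direct
+-- specification like splitcontext("smcp=yes,onum=yes") takes the comma branch
+-- and first registers that raw specification as a featureset of its own.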
+
+-- local splitter = lpeg.splitat("=")
+--
+-- local function splitcontext(features)
+-- local setup = setups[features]
+-- if setup then
+-- return setup
+-- elseif find(features,",") then
+-- -- This is not that efficient but handy anyway for quick and dirty tests
+-- -- beware, due to the way of caching setups you can get the wrong results
+-- -- when components change. A safeguard is to nil the cache.
+-- local merge = nil
+-- for feature in gmatch(features,"[^, ]+") do
+-- if find(feature,"=") then
+-- local k, v = lpegmatch(splitter,feature)
+-- if k and v then
+-- if not merge then
+-- merge = { k = v }
+-- else
+-- merge[k] = v
+-- end
+-- end
+-- else
+-- local s = setups[feature]
+-- if not s then
+-- -- skip
+-- elseif not merge then
+-- merge = s
+-- else
+-- for k, v in next, s do
+-- merge[k] = v
+-- end
+-- end
+-- end
+-- end
+-- setup = merge and presetcontext(features,"",merge) and setups[features]
+-- -- actually we have to nil setups[features] in order to permit redefinitions
+-- setups[features] = nil
+-- end
+-- return setup or (presetcontext(features,"","") and setups[features]) -- creates dummy
+-- end
+
+specifiers.splitcontext = splitcontext
+
+function specifiers.contexttostring(name,kind,separator,yes,no,strict,omit) -- not used
+ return hash_to_string(mergedtable(handlers[kind].features.defaults or {},setups[name] or {}),separator,yes,no,strict,omit)
+end
+
+local function starred(features) -- no longer fallbacks here
+ local detail = features.detail
+ if detail and detail ~= "" then
+ features.features.normal = splitcontext(detail)
+ else
+ features.features.normal = { }
+ end
+ return features
+end
+
+definers.registersplit('*',starred,"featureset")
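+
+-- So a definition that ends up here with detail "default", for instance
+-- something like \definedfont[file:dejavusans*default] (an illustrative name,
+-- not part of this patch), gets the "default" featureset resolved by
+-- splitcontext into features.normal.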
+
+-- sort of xetex mode, but without [] and / as we have file: and name: etc
+
+local space = P(" ")
+local separator = S(";,")
+local equal = P("=")
+local spaces = space^0
+local sometext = C((1-equal-space-separator)^1)
+local truevalue = P("+") * spaces * sometext * Cc(true) -- "yes"
+local falsevalue = P("-") * spaces * sometext * Cc(false) -- "no"
+local keyvalue = sometext * spaces * equal * spaces * sometext
+local somevalue = sometext * spaces * Cc(true) -- "yes"
+local pattern = Cf(Ct("") * (space + separator + Cg(keyvalue + falsevalue + truevalue + somevalue))^0, rawset)
+
+local function colonized(specification)
+ specification.features.normal = normalize_features(lpegmatch(pattern,specification.detail))
+ return specification
+end
+
+definers.registersplit(":",colonized,"direct")
+
+-- define (two steps)
+
+local space = P(" ")
+local spaces = space^0
+local leftparent = (P"(")
+local rightparent = (P")")
+local value = C((leftparent * (1-rightparent)^0 * rightparent + (1-space))^1)
+local dimension = C((space/"" + P(1))^1)
+local rest = C(P(1)^0)
+local scale_none = Cc(0)
+local scale_at = P("at") * Cc(1) * spaces * dimension -- value
+local scale_sa = P("sa") * Cc(2) * spaces * dimension -- value
+local scale_mo = P("mo") * Cc(3) * spaces * dimension -- value
+local scale_scaled = P("scaled") * Cc(4) * spaces * dimension -- value
+
+local sizepattern = spaces * (scale_at + scale_sa + scale_mo + scale_scaled + scale_none)
+local splitpattern = spaces * value * spaces * rest
+
+function helpers.splitfontpattern(str)
+ local name, size = lpegmatch(splitpattern,str)
+ local kind, size = lpegmatch(sizepattern,size)
+ return name, kind, size
+end
+
+function helpers.fontpatternhassize(str)
+ local name, size = lpegmatch(splitpattern,str)
+ local kind, size = lpegmatch(sizepattern,size)
+ return size or false
+end
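+
+-- For example (sketch): helpers.splitfontpattern("dejavusans at 12pt") gives
+-- "dejavusans", 1, "12pt", while a bare "dejavusans" gives kind 0 and no size,
+-- so helpers.fontpatternhassize then returns false for it.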
+
+local specification -- still needed as local ?
+
+local getspecification = definers.getspecification
+
+-- we can make helper macros which save parsing (but normally there are not
+-- that many calls, e.g. in mk a couple of hundred and in metafun 3500)
+
+local setdefaultfontname = context.fntsetdefname
+local setsomefontname = context.fntsetsomename
+local setemptyfontsize = context.fntsetnopsize
+local setsomefontsize = context.fntsetsomesize
+local letvaluerelax = context.letvaluerelax
+
+function commands.definefont_one(str)
+ statistics.starttiming(fonts)
+ if trace_defining then
+ report_defining("memory usage before: %s",statistics.memused())
+ report_defining("start stage one: %s",str)
+ end
+ local fullname, size = lpegmatch(splitpattern,str)
+ local lookup, name, sub, method, detail = getspecification(fullname)
+ if not name then
+ report_defining("strange definition %a",str)
+ setdefaultfontname()
+ elseif name == "unknown" then
+ setdefaultfontname()
+ else
+ setsomefontname(name)
+ end
+ -- we can also use a count for the size
+ if size and size ~= "" then
+ local mode, size = lpegmatch(sizepattern,size)
+ if size and mode then
+ texcount.scaledfontmode = mode
+ setsomefontsize(size)
+ else
+ texcount.scaledfontmode = 0
+ setemptyfontsize()
+ end
+ else
+ -- no size specified, so we don't need to check in tex
+ texcount.scaledfontmode = 2
+ setemptyfontsize()
+ end
+ specification = definers.makespecification(str,lookup,name,sub,method,detail,size)
+ if trace_defining then
+ report_defining("stop stage one")
+ end
+end
+
+local n = 0
+
+-- we can also move rscale to here (more consistent)
+-- the argument list will become a table
+
+local function nice_cs(cs)
+ return (gsub(cs,".->", ""))
+end
+
+function commands.definefont_two(global,cs,str,size,inheritancemode,classfeatures,fontfeatures,classfallbacks,fontfallbacks,
+ mathsize,textsize,relativeid,classgoodies,goodies,classdesignsize,fontdesignsize)
+ if trace_defining then
+ report_defining("start stage two: %s (size %s)",str,size)
+ end
+ -- name is now resolved and size is scaled cf sa/mo
+ local lookup, name, sub, method, detail = getspecification(str or "")
+ -- new (todo: inheritancemode)
+ local designsize = fontdesignsize ~= "" and fontdesignsize or classdesignsize or ""
+ local designname = designsizefilename(name,designsize,size)
+ if designname and designname ~= "" then
+ if trace_defining or trace_designsize then
+ report_defining("remapping name %a, specification %a, size %a, designsize %a",name,designsize,size,designname)
+ end
+ -- we don't catch detail here
+ local o_lookup, o_name, o_sub, o_method, o_detail = getspecification(designname)
+ if o_lookup and o_lookup ~= "" then lookup = o_lookup end
+ if o_method and o_method ~= "" then method = o_method end
+ if o_detail and o_detail ~= "" then detail = o_detail end
+ name = o_name
+ sub = o_sub
+ end
+ -- so far
+ -- some settings can have been overloaded
+ if lookup and lookup ~= "" then
+ specification.lookup = lookup
+ end
+ if relativeid and relativeid ~= "" then -- experimental hook
+ local id = tonumber(relativeid) or 0
+ specification.relativeid = id > 0 and id
+ end
+ specification.name = name
+ specification.size = size
+ specification.sub = (sub and sub ~= "" and sub) or specification.sub
+ specification.mathsize = mathsize
+ specification.textsize = textsize
+ specification.goodies = goodies
+ specification.cs = cs
+ specification.global = global
+ if detail and detail ~= "" then
+ specification.method = method or "*"
+ specification.detail = detail
+ elseif specification.detail and specification.detail ~= "" then
+ -- already set
+ elseif inheritancemode == 0 then
+ -- nothing
+ elseif inheritancemode == 1 then
+ -- fontonly
+ if fontfeatures and fontfeatures ~= "" then
+ specification.method = "*"
+ specification.detail = fontfeatures
+ end
+ if fontfallbacks and fontfallbacks ~= "" then
+ specification.fallbacks = fontfallbacks
+ end
+ elseif inheritancemode == 2 then
+ -- classonly
+ if classfeatures and classfeatures ~= "" then
+ specification.method = "*"
+ specification.detail = classfeatures
+ end
+ if classfallbacks and classfallbacks ~= "" then
+ specification.fallbacks = classfallbacks
+ end
+ elseif inheritancemode == 3 then
+ -- fontfirst
+ if fontfeatures and fontfeatures ~= "" then
+ specification.method = "*"
+ specification.detail = fontfeatures
+ elseif classfeatures and classfeatures ~= "" then
+ specification.method = "*"
+ specification.detail = classfeatures
+ end
+ if fontfallbacks and fontfallbacks ~= "" then
+ specification.fallbacks = fontfallbacks
+ elseif classfallbacks and classfallbacks ~= "" then
+ specification.fallbacks = classfallbacks
+ end
+ elseif inheritancemode == 4 then
+ -- classfirst
+ if classfeatures and classfeatures ~= "" then
+ specification.method = "*"
+ specification.detail = classfeatures
+ elseif fontfeatures and fontfeatures ~= "" then
+ specification.method = "*"
+ specification.detail = fontfeatures
+ end
+ if classfallbacks and classfallbacks ~= "" then
+ specification.fallbacks = classfallbacks
+ elseif fontfallbacks and fontfallbacks ~= "" then
+ specification.fallbacks = fontfallbacks
+ end
+ end
+ local tfmdata = definers.read(specification,size) -- id not yet known (size in spec?)
+ --
+ local lastfontid = 0
+ if not tfmdata then
+ report_defining("unable to define %a as %a",name,nice_cs(cs))
+ lastfontid = -1
+ letvaluerelax(cs) -- otherwise the current definition takes the previous one
+ elseif type(tfmdata) == "number" then
+ if trace_defining then
+ report_defining("reusing %s, id %a, target %a, features %a / %a, fallbacks %a / %a, goodies %a / %a, designsize %a / %a",
+ name,tfmdata,nice_cs(cs),classfeatures,fontfeatures,classfallbacks,fontfallbacks,classgoodies,goodies,classdesignsize,fontdesignsize)
+ end
+ csnames[tfmdata] = specification.cs
+ tex.definefont(global,cs,tfmdata)
+ -- resolved (when designsize is used):
+ setsomefontsize((fontdata[tfmdata].parameters.size or 0) .. "sp")
+ lastfontid = tfmdata
+ else
+ -- setting the extra characters will move elsewhere
+ local characters = tfmdata.characters
+ local parameters = tfmdata.parameters
+ -- we use char0 as signal; cf the spec pdf can handle this (no char in slot)
+ characters[0] = nil
+ -- characters[0x00A0] = { width = parameters.space }
+ -- characters[0x2007] = { width = characters[0x0030] and characters[0x0030].width or parameters.space } -- figure
+ -- characters[0x2008] = { width = characters[0x002E] and characters[0x002E].width or parameters.space } -- period
+ --
+ local id = font.define(tfmdata)
+ csnames[id] = specification.cs
+ tfmdata.properties.id = id
+ definers.register(tfmdata,id) -- to be sure, normally already done
+ tex.definefont(global,cs,id)
+ constructors.cleanuptable(tfmdata)
+ constructors.finalize(tfmdata)
+ if trace_defining then
+ report_defining("defining %a, id %a, target %a, features %a / %a, fallbacks %a / %a",
+ name,id,nice_cs(cs),classfeatures,fontfeatures,classfallbacks,fontfallbacks)
+ end
+ -- resolved (when designsize is used):
+ setsomefontsize((tfmdata.parameters.size or 655360) .. "sp")
+ lastfontid = id
+ end
+ if trace_defining then
+ report_defining("memory usage after: %s",statistics.memused())
+ report_defining("stop stage two")
+ end
+ --
+ texsetcount("global","lastfontid",lastfontid)
+ if not mathsize then
+ -- forget about it
+ elseif mathsize == 0 then
+ lastmathids[1] = lastfontid
+ else
+ lastmathids[mathsize] = lastfontid
+ end
+ --
+ statistics.stoptiming(fonts)
+end
+
+function definers.define(specification)
+ --
+ local name = specification.name
+ if not name or name == "" then
+ return -1
+ else
+ statistics.starttiming(fonts)
+ --
+ -- following calls expect a few properties to be set:
+ --
+ local lookup, name, sub, method, detail = getspecification(name or "")
+ --
+ specification.name = (name ~= "" and name) or specification.name
+ --
+ specification.lookup = specification.lookup or (lookup ~= "" and lookup) or "file"
+ specification.size = specification.size or 655360
+ specification.sub = specification.sub or (sub ~= "" and sub) or ""
+ specification.method = specification.method or (method ~= "" and method) or "*"
+ specification.detail = specification.detail or (detail ~= "" and detail) or ""
+ --
+ if type(specification.size) == "string" then
+ specification.size = tex.sp(specification.size) or 655360
+ end
+ --
+ specification.specification = "" -- not used
+ specification.resolved = ""
+ specification.forced = ""
+ specification.features = { } -- via detail, maybe some day
+ --
+ -- we don't care about mathsize textsize goodies fallbacks
+ --
+ local cs = specification.cs
+ if cs == "" then
+ cs = nil
+ specification.cs = nil
+ specification.global = false
+ elseif specification.global == nil then
+ specification.global = false
+ end
+ --
+ local tfmdata = definers.read(specification,specification.size)
+ if not tfmdata then
+ statistics.stoptiming(fonts)
+ return -1, nil
+ elseif type(tfmdata) == "number" then
+ if cs then
+ tex.definefont(specification.global,cs,tfmdata)
+ csnames[tfmdata] = cs
+ end
+ statistics.stoptiming(fonts)
+ return tfmdata, fontdata[tfmdata]
+ else
+ local id = font.define(tfmdata)
+ tfmdata.properties.id = id
+ definers.register(tfmdata,id)
+ if cs then
+ tex.definefont(specification.global,cs,id)
+ csnames[id] = cs
+ end
+ constructors.cleanuptable(tfmdata)
+ constructors.finalize(tfmdata)
+ statistics.stoptiming(fonts)
+ return id, tfmdata
+ end
+ end
+end
+
+-- local id, cs = fonts.definers.internal { }
+-- local id, cs = fonts.definers.internal { number = 2 }
+-- local id, cs = fonts.definers.internal { name = "dejavusans" }
+
+local n = 0
+
+function definers.internal(specification,cs)
+ specification = specification or { }
+ local name = specification.name
+ local size = specification.size and number.todimen(specification.size) or texdimen.bodyfontsize
+ local number = tonumber(specification.number)
+ local id = nil
+ if number then
+ id = number
+ elseif name and name ~= "" then
+ local cs = cs or specification.cs
+ if not cs then
+ n = n + 1 -- beware ... there can be many and they are often used once
+ -- cs = formatters["internal font %s"](n)
+ cs = "internal font " .. n
+ else
+ specification.cs = cs
+ end
+ id = definers.define {
+ name = name,
+ size = size,
+ cs = cs,
+ }
+ end
+ if not id then
+ id = currentfont()
+ end
+ return id, csnames[id]
+end
+
+local enable_auto_r_scale = false
+
+experiments.register("fonts.autorscale", function(v)
+ enable_auto_r_scale = v
+end)
+
+-- Not ok, we can best use a database for this. The problem is that we
+-- have delayed definitions and so we never know what style is taken
+-- as start.
+
+local calculatescale = constructors.calculatescale
+
+function constructors.calculatescale(tfmdata,scaledpoints,relativeid)
+ local scaledpoints, delta = calculatescale(tfmdata,scaledpoints)
+ -- if enable_auto_r_scale and relativeid then -- for the moment this is rather context specific
+ -- local relativedata = fontdata[relativeid]
+ -- local rfmdata = relativedata and relativedata.unscaled and relativedata.unscaled
+ -- local id_x_height = rfmdata and rfmdata.parameters and rfmdata.parameters.x_height
+ -- local tf_x_height = tfmdata and tfmdata.parameters and tfmdata.parameters.x_height
+ -- if id_x_height and tf_x_height then
+ -- local rscale = id_x_height/tf_x_height
+ -- delta = rscale * delta
+ -- scaledpoints = rscale * scaledpoints
+ -- end
+ -- end
+ return scaledpoints, delta
+end
+
+-- We overload the (generic) resolver:
+
+local resolvers = definers.resolvers
+local hashfeatures = constructors.hashfeatures
+
+function definers.resolve(specification) -- overload function in font-con.lua
+ if not specification.resolved or specification.resolved == "" then -- resolved itself not per se in mapping hash
+ local r = resolvers[specification.lookup]
+ if r then
+ r(specification)
+ end
+ end
+ if specification.forced == "" then
+ specification.forced = nil
+ else
+ specification.forced = specification.forced
+ end
+ -- goodies are a context specific thing and not always defined
+ -- as feature, so we need to make sure we add them here before
+ -- hashing because otherwise we get funny goodies applied
+ local goodies = specification.goodies
+ if goodies and goodies ~= "" then
+ -- this adapts the features table so it has best be a copy
+ local normal = specification.features.normal
+ if not normal then
+ specification.features.normal = { goodies = goodies }
+ elseif not normal.goodies then
+ local g = normal.goodies
+ if g and g ~= "" then
+ normal.goodies = formatters["%s,%s"](g,goodies)
+ else
+ normal.goodies = goodies
+ end
+ end
+ end
+ -- so far for goodie hacks
+ specification.hash = lower(specification.name .. ' @ ' .. hashfeatures(specification))
+ if specification.sub and specification.sub ~= "" then
+ specification.hash = specification.sub .. ' @ ' .. specification.hash
+ end
+ return specification
+end
+
+
+-- soon to be obsolete:
+
+local mappings = fonts.mappings
+
+local loaded = { -- prevent loading (happens in cont-sys files)
+ ["original-base.map" ] = true,
+ ["original-ams-base.map" ] = true,
+ ["original-ams-euler.map"] = true,
+ ["original-public-lm.map"] = true,
+}
+
+function mappings.loadfile(name)
+ name = file.addsuffix(name,"map")
+ if not loaded[name] then
+ if trace_mapfiles then
+ report_mapfiles("loading map file %a",name)
+ end
+ pdf.mapfile(name)
+ loaded[name] = true
+ end
+end
+
+local loaded = { -- prevent double loading
+}
+
+function mappings.loadline(how,line)
+ if line then
+ how = how .. " " .. line
+ elseif how == "" then
+ how = "= " .. line
+ end
+ if not loaded[how] then
+ if trace_mapfiles then
+ report_mapfiles("processing map line %a",line)
+ end
+ pdf.mapline(how)
+ loaded[how] = true
+ end
+end
+
+function mappings.reset()
+ pdf.mapfile("")
+end
+
+mappings.reset() -- resets the default file
+
+-- we need an 'do after the banner hook'
+
+-- => commands
+
+local function nametoslot(name)
+ local t = type(name)
+ if t == "string" then
+ return resources[true].unicodes[name]
+ elseif t == "number" then
+ return name -- already a number (slot)
+ end
+end
+
+helpers.nametoslot = nametoslot
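+
+-- Example (sketch): nametoslot("copyright") returns the slot of that glyph name
+-- in the current font (via the unicodes table of its resources), while a number
+-- is simply passed through; commands.fontchar below builds on this.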
+
+-- this will change ...
+
+function loggers.reportdefinedfonts()
+ if trace_usage then
+ local t, tn = { }, 0
+ for id, data in sortedhash(fontdata) do
+ local properties = data.properties or { }
+ local parameters = data.parameters or { }
+ tn = tn + 1
+ t[tn] = {
+ format("%03i",id or 0),
+ format("%09i",parameters.size or 0),
+ properties.type or "real",
+ properties.format or "unknown",
+ properties.name or "",
+ properties.psname or "",
+ properties.fullname or "",
+ }
+ report_status("%s: % t",properties.name,sortedkeys(data))
+ end
+ formatcolumns(t," ")
+ report_status()
+ report_status("defined fonts:")
+ report_status()
+ for k=1,tn do
+ report_status(t[k])
+ end
+ end
+end
+
+luatex.registerstopactions(loggers.reportdefinedfonts)
+
+function loggers.reportusedfeatures()
+ -- numbers, setups, merged
+ if trace_usage then
+ local t, n = { }, #numbers
+ for i=1,n do
+ local name = numbers[i]
+ local setup = setups[name]
+ local n = setup.number
+ setup.number = nil -- we have no reason to show this
+ t[i] = { i, name, sequenced(setup,false,true) } -- simple mode
+ setup.number = n -- restore it (normally not needed as we're done anyway)
+ end
+ formatcolumns(t," ")
+ report_status()
+ report_status("defined featuresets:")
+ report_status()
+ for k=1,n do
+ report_status(t[k])
+ end
+ end
+end
+
+luatex.registerstopactions(loggers.reportusedfeatures)
+
+statistics.register("fonts load time", function()
+ return statistics.elapsedseconds(fonts)
+end)
+
+-- experimental mechanism for Mojca:
+--
+-- fonts.definetypeface {
+-- name = "mainbodyfont-light",
+-- preset = "antykwapoltawskiego-light",
+-- }
+--
+-- fonts.definetypeface {
+-- name = "mojcasfavourite",
+-- preset = "antykwapoltawskiego",
+-- normalweight = "light",
+-- boldweight = "bold",
+-- width = "condensed",
+-- }
+
+local Shapes = {
+ serif = "Serif",
+ sans = "Sans",
+ mono = "Mono",
+}
+
+function fonts.definetypeface(name,t)
+ if type(name) == "table" then
+ -- {name=abc,k=v,...}
+ t = name
+ elseif t then
+ if type(t) == "string" then
+ -- "abc", "k=v,..."
+ t = settings_to_hash(t) -- the second argument carries the settings here
+ else
+ -- "abc", {k=v,...}
+ end
+ t.name = t.name or name
+ else
+ -- "name=abc,k=v,..."
+ t = settings_to_hash(name)
+ end
+ local p = t.preset and fonts.typefaces[t.preset] or { }
+ local name = t.name or "unknowntypeface"
+ local shortcut = t.shortcut or p.shortcut or "rm"
+ local size = t.size or p.size or "default"
+ local shape = t.shape or p.shape or "serif"
+ local fontname = t.fontname or p.fontname or "unknown"
+ local normalweight = t.normalweight or t.weight or p.normalweight or p.weight or "normal"
+ local boldweight = t.boldweight or t.weight or p.boldweight or p.weight or "normal"
+ local normalwidth = t.normalwidth or t.width or p.normalwidth or p.width or "normal"
+ local boldwidth = t.boldwidth or t.width or p.boldwidth or p.width or "normal"
+ local Shape = Shapes[shape] or "Serif"
+ context.startfontclass { name }
+ context.definefontsynonym( { format("%s", Shape) }, { format("spec:%s-%s-regular-%s", fontname, normalweight, normalwidth) } )
+ context.definefontsynonym( { format("%sBold", Shape) }, { format("spec:%s-%s-regular-%s", fontname, boldweight, boldwidth ) } )
+ context.definefontsynonym( { format("%sBoldItalic", Shape) }, { format("spec:%s-%s-italic-%s", fontname, boldweight, boldwidth ) } )
+ context.definefontsynonym( { format("%sItalic", Shape) }, { format("spec:%s-%s-italic-%s", fontname, normalweight, normalwidth) } )
+ context.stopfontclass()
+ local settings = sequenced({ features= t.features },",")
+ context.dofastdefinetypeface(name, shortcut, shape, size, settings)
+end
+
+function fonts.current() -- todo: also handle name
+ return fontdata[currentfont()] or fontdata[0]
+end
+
+function fonts.currentid()
+ return currentfont() or 0
+end
+
+-- interfaces
+
+function commands.fontchar(n)
+ n = nametoslot(n)
+ if n then
+ context.char(n)
+ end
+end
+
+function commands.doifelsecurrentfonthasfeature(name) -- can be made faster with a supportedfeatures hash
+ local f = fontdata[currentfont()]
+ f = f and f.shared
+ f = f and f.rawdata
+ f = f and f.resources
+ f = f and f.features
+ commands.doifelse(f and (f.gpos[name] or f.gsub[name]))
+end
+
+local p, f = 1, formatters["%0.1fpt"] -- normally this value is changed only once
+
+local stripper = lpeg.patterns.stripzeros
+
+function commands.nbfs(amount,precision)
+ if precision ~= p then
+ p = precision
+ f = formatters["%0." .. p .. "fpt"]
+ end
+ context(lpegmatch(stripper,f(amount/65536)))
+end
+
+function commands.featureattribute(tag)
+ context(contextnumber(tag))
+end
+
+function commands.setfontfeature(tag)
+ texattribute[0] = contextnumber(tag)
+end
+
+function commands.resetfontfeature()
+ texattribute[0] = 0
+end
+
+-- function commands.addfs(tag) withset(tag, 1) end
+-- function commands.subfs(tag) withset(tag,-1) end
+-- function commands.addff(tag) withfnt(tag, 2) end -- on top of font features
+-- function commands.subff(tag) withfnt(tag,-2) end -- on top of font features
+
+function commands.cleanfontname (name) context(names.cleanname(name)) end
+
+function commands.fontlookupinitialize (name) names.lookup(name) end
+function commands.fontlookupnoffound () context(names.noflookups()) end
+function commands.fontlookupgetkeyofindex(key,index) context(names.getlookupkey(key,index)) end
+function commands.fontlookupgetkey (key) context(names.getlookupkey(key)) end
+
+-- this might move to a runtime module:
+
+function commands.showchardata(n)
+ local tfmdata = fontdata[currentfont()]
+ if tfmdata then
+ if type(n) == "string" then
+ n = utfbyte(n)
+ end
+ local chr = tfmdata.characters[n]
+ if chr then
+ report_status("%s @ %s => %U => %c => %s",tfmdata.properties.fullname,tfmdata.parameters.size,n,n,serialize(chr,false))
+ end
+ end
+end
+
+function commands.showfontparameters(tfmdata)
+ -- this will become more clever
+ local tfmdata = tfmdata or fontdata[currentfont()]
+ if tfmdata then
+ local parameters = tfmdata.parameters
+ local mathparameters = tfmdata.mathparameters
+ local properties = tfmdata.properties
+ local hasparameters = parameters and next(parameters)
+ local hasmathparameters = mathparameters and next(mathparameters)
+ if hasparameters then
+ report_status("%s @ %s => text parameters => %s",properties.fullname,parameters.size,serialize(parameters,false))
+ end
+ if hasmathparameters then
+ report_status("%s @ %s => math parameters => %s",properties.fullname,parameters.size,serialize(mathparameters,false))
+ end
+ if not hasparameters and not hasmathparameters then
+ report_status("%s @ %s => no text parameters and/or math parameters",properties.fullname,parameters.size)
+ end
+ end
+end
+
+-- for the moment here, this will become a chain of extras that is
+-- hooked into the ctx registration (or scaler or ...)
+
+local dimenfactors = number.dimenfactors
+
+function helpers.dimenfactor(unit,tfmdata) -- could be a method of a font instance
+ if unit == "ex" then
+ return (tfmdata and tfmdata.parameters.x_height) or 655360
+ elseif unit == "em" then
+ return (tfmdata and tfmdata.parameters.em_width) or 655360
+ else
+ local du = dimenfactors[unit]
+ return du and 1/du or tonumber(unit) or 1
+ end
+end
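+
+-- For example (sketch): helpers.dimenfactor("ex",fontdata[currentfont()]) gives
+-- the x-height of that font in scaled points, and helpers.dimenfactor("pt")
+-- falls back on number.dimenfactors, i.e. (assuming the usual factors table)
+-- the number of scaled points per point.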
+
+local function digitwidth(font) -- max(quad/2,wd(0..9))
+ local tfmdata = fontdata[font]
+ local parameters = tfmdata.parameters
+ local width = parameters.digitwidth
+ if not width then
+ width = round(parameters.quad/2) -- maybe tex.scale
+ local characters = tfmdata.characters
+ for i=48,57 do
+ local wd = round(characters[i].width)
+ if wd > width then
+ width = wd
+ end
+ end
+ parameters.digitwidth = width
+ end
+ return width
+end
+
+helpers.getdigitwidth = digitwidth
+helpers.setdigitwidth = digitwidth
+
+--
+
+function helpers.getparameters(tfmdata)
+ local p = { }
+ local m = p
+ local parameters = tfmdata.parameters
+ while true do
+ for k, v in next, parameters do
+ m[k] = v
+ end
+ parameters = getmetatable(parameters)
+ parameters = parameters and parameters.__index
+ if type(parameters) == "table" then
+ m = { }
+ p.metatable = m
+ else
+ break
+ end
+ end
+ return p
+end
+
+if environment.initex then
+
+ local function names(t)
+ local nt = #t
+ if nt > 0 then
+ local n = { }
+ for i=1,nt do
+ n[i] = t[i].name
+ end
+ return concat(n," ")
+ else
+ return "-"
+ end
+ end
+
+ statistics.register("font processing", function()
+ local l = { }
+ for what, handler in table.sortedpairs(handlers) do
+ local features = handler.features
+ if features then
+ l[#l+1] = format("[%s (base initializers: %s) (base processors: %s) (base manipulators: %s) (node initializers: %s) (node processors: %s) (node manipulators: %s)]",
+ what,
+ names(features.initializers.base),
+ names(features.processors .base),
+ names(features.manipulators.base),
+ names(features.initializers.node),
+ names(features.processors .node),
+ names(features.manipulators.node)
+ )
+ end
+ end
+ return concat(l, " | ")
+ end)
+
+end
+
+-- redefinition
+
+local quads = hashes.quads
+local xheights = hashes.xheights
+
+setmetatableindex(number.dimenfactors, function(t,k)
+ if k == "ex" then
+ return xheights[currentfont()]
+ elseif k == "em" then
+ return quads[currentfont()]
+ elseif k == "%" then
+ return texdimen.hsize/100
+ else
+ -- error("wrong dimension: " .. (s or "?")) -- better a message
+ return false
+ end
+end)
+
+--[[ldx--
+Before a font is passed to TeX we scale it. Here we also need
+to scale virtual characters.
+--ldx]]--
+
+function constructors.checkvirtualids(tfmdata)
+ -- begin of experiment: we can use { "slot", 0, number } in virtual fonts
+ local fonts = tfmdata.fonts
+ local selfid = font.nextid()
+ if fonts and #fonts > 0 then
+ for i=1,#fonts do
+ if fonts[i][2] == 0 then
+ fonts[i][2] = selfid
+ end
+ end
+ else
+ -- tfmdata.fonts = { "id", selfid } -- conflicts with other next id's (vf math), too late anyway
+ end
+ -- end of experiment
+end
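+
+-- A hypothetical character entry to illustrate the experiment mentioned above
+-- (names and numbers are made up):
+--
+-- characters[0x2012] = {
+--     width    = somewidth,
+--     commands = { { "slot", 0, 0x2013 } }, -- font index 0 means "this very font"
+-- }
+--
+-- where checkvirtualids patches the matching zero entry in tfmdata.fonts so that
+-- it points to the id the font is about to get.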
+
+-- function constructors.getvirtualid(tfmdata)
+-- -- since we don't know the id yet, we use 0 as signal
+-- local tf = tfmdata.fonts
+-- if not tf then
+-- local properties = tfmdata.properties
+-- if properties then
+-- properties.virtualized = true
+-- else
+-- tfmdata.properties = { virtualized = true }
+-- end
+-- tf = { }
+-- tfmdata.fonts = tf
+-- end
+-- local ntf = #tf + 1
+-- tf[ntf] = { id = 0 }
+-- return ntf
+-- end
+--
+-- function constructors.checkvirtualid(tfmdata, id) -- will go
+-- local properties = tfmdata.properties
+-- if tfmdata and tfmdata.type == "virtual" or (properties and properties.virtualized) then
+-- local vfonts = tfmdata.fonts
+-- if not vffonts or #vfonts == 0 then
+-- if properties then
+-- properties.virtualized = false
+-- end
+-- tfmdata.fonts = nil
+-- else
+-- for f=1,#vfonts do
+-- local fnt = vfonts[f]
+-- if fnt.id and fnt.id == 0 then
+-- fnt.id = id
+-- end
+-- end
+-- end
+-- end
+-- end
+
+function commands.setfontofid(id)
+ context.getvalue(csnames[id])
+end
+
+-- more interfacing:
+
+commands.definefontfeature = presetcontext
+
+local cache = { }
+
+local hows = {
+ ["+"] = "add",
+ ["-"] = "subtract",
+ ["="] = "replace",
+}
+
+function commands.feature(how,parent,name,font)
+ if not how then
+ if trace_features and texattribute[0] ~= 0 then
+ report_cummulative("font %!font:name!, reset",fontdata[font or true])
+ end
+ texattribute[0] = 0
+ elseif how == true then
+ local hash = "feature > " .. parent
+ local done = cache[hash]
+ if trace_features and done then
+ report_cummulative("font %!font:name!, revive %a : %!font:features!",fontdata[font or true],parent,setups[numbers[done]])
+ end
+ texattribute[0] = done or 0
+ else
+ local full = parent .. how .. name
+ local hash = "feature > " .. full
+ local done = cache[hash]
+ if not done then
+ local n = setups[full]
+ if n then
+ -- already defined
+ else
+ n = mergecontextfeatures(parent,name,how,full)
+ end
+ done = registercontextfeature(hash,full,how)
+ cache[hash] = done
+ if trace_features then
+ report_cummulative("font %!font:name!, %s %a : %!font:features!",fontdata[font or true],hows[how],full,setups[numbers[done]])
+ end
+ end
+ texattribute[0] = done
+ end
+end
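+
+-- Sketch of the intended call pattern (the feature set names are illustrative
+-- and assumed to have been defined already):
+--
+-- commands.feature("+","default","smallcaps") -- adds the "smallcaps" set on top of "default" and sets the attribute
+-- commands.feature("-","default","ligatures") -- switches the boolean features of "ligatures" off
+-- commands.feature(false)                     -- resets the dynamic feature attribute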
+
+function commands.featurelist(...)
+ context(fonts.specifiers.contexttostring(...))
+end
+
+function commands.registerlanguagefeatures()
+ local specifications = languages.data.specifications
+ for i=1,#specifications do
+ local specification = specifications[i]
+ local language = specification.opentype
+ if language then
+ local script = specification.opentypescript or specification.script
+ if script then
+ local context = specification.context
+ if type(context) == "table" then
+ for i=1,#context do
+ definecontext(context[i], { language = language, script = script})
+ end
+ elseif type(context) == "string" then
+ definecontext(context, { language = language, script = script})
+ end
+ end
+ end
+ end
+end
+
+-- a fontkern plug:
+
+local copy_node = node.copy
+local kern = nodes.pool.register(nodes.pool.kern())
+
+node.set_attribute(kern,attributes.private('fontkern'),1) -- we can have several, attributes are shared
+
+nodes.injections.installnewkern(function(k)
+ local c = copy_node(kern)
+ c.kern = k
+ return c
+end)
+
+directives.register("nodes.injections.fontkern", function(v) kern.subtype = v and 0 or 1 end)
+
+-- here
+
+local trace_analyzing = false trackers.register("otf.analyzing", function(v) trace_analyzing = v end)
+
+local otffeatures = fonts.constructors.newfeatures("otf")
+local registerotffeature = otffeatures.register
+
+local analyzers = fonts.analyzers
+local methods = analyzers.methods
+
+local unsetvalue = attributes.unsetvalue
+
+local traverse_by_id = node.traverse_id
+
+local a_color = attributes.private('color')
+local a_colormodel = attributes.private('colormodel')
+local a_state = attributes.private('state')
+local m_color = attributes.list[a_color] or { }
+
+local glyph_code = nodes.nodecodes.glyph
+
+local states = analyzers.states
+
+local names = {
+ [states.init] = "font:1",
+ [states.medi] = "font:2",
+ [states.fina] = "font:3",
+ [states.isol] = "font:4",
+ [states.mark] = "font:5",
+ [states.rest] = "font:6",
+ [states.rphf] = "font:1",
+ [states.half] = "font:2",
+ [states.pref] = "font:3",
+ [states.blwf] = "font:4",
+ [states.pstf] = "font:5",
+}
+
+local function markstates(head)
+ if head then
+ local model = head[a_colormodel] or 1
+ for glyph in traverse_by_id(glyph_code,head) do
+ local a = glyph[a_state]
+ if a then
+ local name = names[a]
+ if name then
+ local color = m_color[name]
+ if color then
+ glyph[a_colormodel] = model
+ glyph[a_color] = color
+ end
+ end
+ end
+ end
+ end
+end
+
+local function analyzeprocessor(head,font,attr)
+ local tfmdata = fontdata[font]
+ local script, language = otf.scriptandlanguage(tfmdata,attr)
+ local action = methods[script]
+ if not action then
+ return head, false
+ end
+ if type(action) == "function" then
+ local head, done = action(head,font,attr)
+ if done and trace_analyzing then
+ markstates(head)
+ end
+ return head, done
+ end
+ action = action[language]
+ if action then
+ local head, done = action(head,font,attr)
+ if done and trace_analyzing then
+ markstates(head)
+ end
+ return head, done
+ else
+ return head, false
+ end
+end
+
+registerotffeature { -- adapts
+ name = "analyze",
+ processors = {
+ node = analyzeprocessor,
+ }
+}
+
+function methods.nocolor(head,font,attr)
+ for n in traverse_by_id(glyph_code,head) do
+ if not font or n.font == font then
+ n[a_color] = unsetvalue
+ end
+ end
+ return head, true
+end
diff --git a/tex/context/base/font-def.lua b/tex/context/base/font-def.lua
index bee02e8dc..7e01c5620 100644
--- a/tex/context/base/font-def.lua
+++ b/tex/context/base/font-def.lua
@@ -1,449 +1,449 @@
-if not modules then modules = { } end modules ['font-def'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- We can overload some of the definers.functions so we don't local them.
-
-local format, gmatch, match, find, lower, gsub = string.format, string.gmatch, string.match, string.find, string.lower, string.gsub
-local tostring, next = tostring, next
-local lpegmatch = lpeg.match
-
-local allocate = utilities.storage.allocate
-
-local trace_defining = false trackers .register("fonts.defining", function(v) trace_defining = v end)
-local directive_embedall = false directives.register("fonts.embedall", function(v) directive_embedall = v end)
-
-trackers.register("fonts.loading", "fonts.defining", "otf.loading", "afm.loading", "tfm.loading")
-trackers.register("fonts.all", "fonts.*", "otf.*", "afm.*", "tfm.*")
-
-local report_defining = logs.reporter("fonts","defining")
-
---[[ldx--
-Here we deal with defining fonts. We do so by intercepting the
-default loader that only handles tfm.
---ldx]]--
-
-local fonts = fonts
-local fontdata = fonts.hashes.identifiers
-local readers = fonts.readers
-local definers = fonts.definers
-local specifiers = fonts.specifiers
-local constructors = fonts.constructors
-local fontgoodies = fonts.goodies
-
-readers.sequence = allocate { 'otf', 'ttf', 'afm', 'tfm', 'lua' } -- dfont ttc
-
-local variants = allocate()
-specifiers.variants = variants
-
-definers.methods = definers.methods or { }
-
-local internalized = allocate() -- internal tex numbers (private)
-local lastdefined = nil -- we don't want this one to end up in s-tra-02
-
-local loadedfonts = constructors.loadedfonts
-local designsizes = constructors.designsizes
-
--- not in generic (some day I'll make two defs, one for context, one for generic)
-
-local resolvefile = fontgoodies and fontgoodies.filenames and fontgoodies.filenames.resolve or function(s) return s end
-
---[[ldx--
-We hardly gain anything when we cache the final (pre scaled)
-tfm table. But it can be handy for debugging, so we no
-longer carry this code along. Also, we now have quite some reference
-to other tables so we would end up with lots of catches.
---ldx]]--
-
---[[ldx--
-We can prefix a font specification by name: or
-file:. The first case will result in a lookup in the
-synonym table.
-
-The following function splits the font specification into components
-and prepares a table that will move along as we proceed.
---ldx]]--
-
--- beware, we discard additional specs
---
--- method:name method:name(sub) method:name(sub)*spec method:name*spec
--- name name(sub) name(sub)*spec name*spec
--- name@spec*oeps
-
-local splitter, splitspecifiers = nil, "" -- not so nice
-
-local P, C, S, Cc = lpeg.P, lpeg.C, lpeg.S, lpeg.Cc
-
-local left = P("(")
-local right = P(")")
-local colon = P(":")
-local space = P(" ")
-
-definers.defaultlookup = "file"
-
-local prefixpattern = P(false)
-
-local function addspecifier(symbol)
- splitspecifiers = splitspecifiers .. symbol
- local method = S(splitspecifiers)
- local lookup = C(prefixpattern) * colon
- local sub = left * C(P(1-left-right-method)^1) * right
- local specification = C(method) * C(P(1)^1)
- local name = C((1-sub-specification)^1)
- splitter = P((lookup + Cc("")) * name * (sub + Cc("")) * (specification + Cc("")))
-end
-
-local function addlookup(str,default)
- prefixpattern = prefixpattern + P(str)
-end
-
-definers.addlookup = addlookup
-
-addlookup("file")
-addlookup("name")
-addlookup("spec")
-
-local function getspecification(str)
- return lpegmatch(splitter,str)
-end
-
-definers.getspecification = getspecification
-
-function definers.registersplit(symbol,action,verbosename)
- addspecifier(symbol)
- variants[symbol] = action
- if verbosename then
- variants[verbosename] = action
- end
-end
-
-local function makespecification(specification,lookup,name,sub,method,detail,size)
- size = size or 655360
- if not lookup or lookup == "" then
- lookup = definers.defaultlookup
- end
- if trace_defining then
- report_defining("specification %a, lookup %a, name %a, sub %a, method %a, detail %a",
- specification, lookup, name, sub, method, detail)
- end
- local t = {
- lookup = lookup, -- forced type
- specification = specification, -- full specification
- size = size, -- size in scaled points or -1000*n
- name = name, -- font or filename
- sub = sub, -- subfont (eg in ttc)
- method = method, -- specification method
- detail = detail, -- specification
- resolved = "", -- resolved font name
- forced = "", -- forced loader
- features = { }, -- preprocessed features
- }
- return t
-end
-
-
-definers.makespecification = makespecification
-
-function definers.analyze(specification, size)
- -- can be optimized with locals
- local lookup, name, sub, method, detail = getspecification(specification or "")
- return makespecification(specification, lookup, name, sub, method, detail, size)
-end
-
---[[ldx--
-We can resolve the filename using the next function:
---ldx]]--
-
-definers.resolvers = definers.resolvers or { }
-local resolvers = definers.resolvers
-
--- todo: reporter
-
-function resolvers.file(specification)
- local name = resolvefile(specification.name) -- catch for renames
- local suffix = file.suffix(name)
- if fonts.formats[suffix] then
- specification.forced = suffix
- specification.name = file.removesuffix(name)
- else
- specification.name = name -- can be resolved
- end
-end
-
-function resolvers.name(specification)
- local resolve = fonts.names.resolve
- if resolve then
- local resolved, sub = resolve(specification.name,specification.sub,specification) -- we pass specification for overloaded versions
- if resolved then
- specification.resolved = resolved
- specification.sub = sub
- local suffix = file.suffix(resolved)
- if fonts.formats[suffix] then
- specification.forced = suffix
- specification.name = file.removesuffix(resolved)
- else
- specification.name = resolved
- end
- end
- else
- resolvers.file(specification)
- end
-end
-
-function resolvers.spec(specification)
- local resolvespec = fonts.names.resolvespec
- if resolvespec then
- local resolved, sub = resolvespec(specification.name,specification.sub,specification) -- we pass specification for overloaded versions
- if resolved then
- specification.resolved = resolved
- specification.sub = sub
- specification.forced = file.suffix(resolved)
- specification.name = file.removesuffix(resolved)
- end
- else
- resolvers.name(specification)
- end
-end
-
-function definers.resolve(specification)
- if not specification.resolved or specification.resolved == "" then -- resolved itself not per se in mapping hash
- local r = resolvers[specification.lookup]
- if r then
- r(specification)
- end
- end
- if specification.forced == "" then
- specification.forced = nil
- else
- specification.forced = specification.forced
- end
- specification.hash = lower(specification.name .. ' @ ' .. constructors.hashfeatures(specification))
- if specification.sub and specification.sub ~= "" then
- specification.hash = specification.sub .. ' @ ' .. specification.hash
- end
- return specification
-end
-
---[[ldx--
-The main read function either uses a forced reader (as determined by
-a lookup) or tries to resolve the name using the list of readers.
-
-We need to cache when possible. We do cache raw tfm data (from tfm, afm or otf). After that we can cache based
-on specification (name) and size, that is, TeX only needs a number
-for an already loaded font. However, it may make sense to cache fonts
-before they're scaled as well (store tfm's with applied methods
-and features). However, there may be a relation between the size and
-features (esp in virtual fonts) so let's not do that now.
-
-Watch out, here we do load a font, but we don't prepare the
-specification yet.
---ldx]]--
-
--- very experimental:
-
-function definers.applypostprocessors(tfmdata)
- local postprocessors = tfmdata.postprocessors
- if postprocessors then
- local properties = tfmdata.properties
- for i=1,#postprocessors do
- local extrahash = postprocessors[i](tfmdata) -- after scaling etc
- if type(extrahash) == "string" and extrahash ~= "" then
- -- e.g. a reencoding needs this
- extrahash = gsub(lower(extrahash),"[^a-z]","-")
- properties.fullname = format("%s-%s",properties.fullname,extrahash)
- end
- end
- end
- return tfmdata
-end
-
--- function definers.applypostprocessors(tfmdata)
--- return tfmdata
--- end
-
-local function checkembedding(tfmdata)
- local properties = tfmdata.properties
- local embedding
- if directive_embedall then
- embedding = "full"
- elseif properties and properties.filename and constructors.dontembed[properties.filename] then
- embedding = "no"
- else
- embedding = "subset"
- end
- if properties then
- properties.embedding = embedding
- else
- tfmdata.properties = { embedding = embedding }
- end
- tfmdata.embedding = embedding
-end
-
-function definers.loadfont(specification)
- local hash = constructors.hashinstance(specification)
- local tfmdata = loadedfonts[hash] -- hashes by size !
- if not tfmdata then
- local forced = specification.forced or ""
- if forced ~= "" then
- local reader = readers[lower(forced)]
- tfmdata = reader and reader(specification)
- if not tfmdata then
- report_defining("forced type %a of %a not found",forced,specification.name)
- end
- else
- local sequence = readers.sequence -- can be overloaded so only a shortcut here
- for s=1,#sequence do
- local reader = sequence[s]
- if readers[reader] then -- we skip not loaded readers
- if trace_defining then
- report_defining("trying (reader sequence driven) type %a for %a with file %a",reader,specification.name,specification.filename)
- end
- tfmdata = readers[reader](specification)
- if tfmdata then
- break
- else
- specification.filename = nil
- end
- end
- end
- end
- if tfmdata then
- tfmdata = definers.applypostprocessors(tfmdata)
- checkembedding(tfmdata) -- todo: general postprocessor
- loadedfonts[hash] = tfmdata
- designsizes[specification.hash] = tfmdata.parameters.designsize
- end
- end
- if not tfmdata then
- report_defining("font with asked name %a is not found using lookup %a",specification.name,specification.lookup)
- end
- return tfmdata
-end
-
-function constructors.checkvirtualids()
- -- dummy in plain version
-end
-
-function constructors.readanddefine(name,size) -- no id -- maybe a dummy first
- local specification = definers.analyze(name,size)
- local method = specification.method
- if method and variants[method] then
- specification = variants[method](specification)
- end
- specification = definers.resolve(specification)
- local hash = constructors.hashinstance(specification)
- local id = definers.registered(hash)
- if not id then
- local tfmdata = definers.loadfont(specification)
- if tfmdata then
- tfmdata.properties.hash = hash
- constructors.checkvirtualids(tfmdata) -- experiment, will become obsolete when slots can selfreference
- id = font.define(tfmdata)
- definers.register(tfmdata,id)
- else
- id = 0 -- signal
- end
- end
- return fontdata[id], id
-end
-
---[[ldx--
-So far the specifiers. Now comes the real definer. Here we cache
-based on id's. Here we also intercept the virtual font handler. Since
-it evolved stepwise I may rewrite this bit (combine code).
-
-In the previously defined reader (the one resulting in a tfm
-table) we cached the (scaled) instances. Here we cache them again, but
-this time based on id. We could combine this in one cache but this does
-not gain much. By the way, passing id's back to TeX in the callback was
-introduced later in the development.
---ldx]]--
-
-function definers.current() -- or maybe current
- return lastdefined
-end
-
-function definers.registered(hash)
- local id = internalized[hash]
- return id, id and fontdata[id]
-end
-
-function definers.register(tfmdata,id)
- if tfmdata and id then
- local hash = tfmdata.properties.hash
- if not hash then
- report_defining("registering font, id %a, name %a, invalid hash",id,tfmdata.properties.filename or "?")
- elseif not internalized[hash] then
- internalized[hash] = id
- if trace_defining then
- report_defining("registering font, id %s, hash %a",id,hash)
- end
- fontdata[id] = tfmdata
- end
- end
-end
-
-function definers.read(specification,size,id) -- id can be optional, name can already be table
- statistics.starttiming(fonts)
- if type(specification) == "string" then
- specification = definers.analyze(specification,size)
- end
- local method = specification.method
- if method and variants[method] then
- specification = variants[method](specification)
- end
- specification = definers.resolve(specification)
- local hash = constructors.hashinstance(specification)
- local tfmdata = definers.registered(hash) -- id
- if tfmdata then
- if trace_defining then
- report_defining("already hashed: %s",hash)
- end
- else
- tfmdata = definers.loadfont(specification) -- can be overloaded
- if tfmdata then
- if trace_defining then
- report_defining("loaded and hashed: %s",hash)
- end
- tfmdata.properties.hash = hash
- if id then
- definers.register(tfmdata,id)
- end
- else
- if trace_defining then
- report_defining("not loaded and hashed: %s",hash)
- end
- end
- end
- lastdefined = tfmdata or id -- todo ! ! ! ! !
- if not tfmdata then -- or id?
- report_defining( "unknown font %a, loading aborted",specification.name)
- elseif trace_defining and type(tfmdata) == "table" then
- local properties = tfmdata.properties or { }
- local parameters = tfmdata.parameters or { }
- report_defining("using %s font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a",
- properties.format, id, properties.name, parameters.size, properties.encodingbytes,
- properties.encodingname, properties.fullname, file.basename(properties.filename))
- end
- statistics.stoptiming(fonts)
- return tfmdata
-end
-
-function font.getfont(id)
- return fontdata[id] -- otherwise issues
-end
-
---[[ldx--
-We overload the tfm reader.
---ldx]]--
-
-callbacks.register('define_font', definers.read, "definition of fonts (tfmdata preparation)")
+if not modules then modules = { } end modules ['font-def'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- We can overload some of the definers.functions so we don't local them.
+
+local format, gmatch, match, find, lower, gsub = string.format, string.gmatch, string.match, string.find, string.lower, string.gsub
+local tostring, next = tostring, next
+local lpegmatch = lpeg.match
+
+local allocate = utilities.storage.allocate
+
+local trace_defining = false trackers .register("fonts.defining", function(v) trace_defining = v end)
+local directive_embedall = false directives.register("fonts.embedall", function(v) directive_embedall = v end)
+
+trackers.register("fonts.loading", "fonts.defining", "otf.loading", "afm.loading", "tfm.loading")
+trackers.register("fonts.all", "fonts.*", "otf.*", "afm.*", "tfm.*")
+
+local report_defining = logs.reporter("fonts","defining")
+
+--[[ldx--
+Here we deal with defining fonts. We do so by intercepting the
+default loader that only handles tfm.
+--ldx]]--
+
+local fonts = fonts
+local fontdata = fonts.hashes.identifiers
+local readers = fonts.readers
+local definers = fonts.definers
+local specifiers = fonts.specifiers
+local constructors = fonts.constructors
+local fontgoodies = fonts.goodies
+
+readers.sequence = allocate { 'otf', 'ttf', 'afm', 'tfm', 'lua' } -- dfont ttc
+
+local variants = allocate()
+specifiers.variants = variants
+
+definers.methods = definers.methods or { }
+
+local internalized = allocate() -- internal tex numbers (private)
+local lastdefined = nil -- we don't want this one to end up in s-tra-02
+
+local loadedfonts = constructors.loadedfonts
+local designsizes = constructors.designsizes
+
+-- not in generic (some day I'll make two defs, one for context, one for generic)
+
+local resolvefile = fontgoodies and fontgoodies.filenames and fontgoodies.filenames.resolve or function(s) return s end
+
+--[[ldx--
+We hardly gain anything when we cache the final (pre scaled)
+tfm table. But it can be handy for debugging, so we no
+longer carry this code along. Also, we now have quite some reference
+to other tables so we would end up with lots of catches.
+--ldx]]--
+
+--[[ldx--
+We can prefix a font specification by name: or
+file:. The first case will result in a lookup in the
+synonym table.
+
+The following function splits the font specification into components
+and prepares a table that will move along as we proceed.
+--ldx]]--
+
+-- beware, we discard additional specs
+--
+-- method:name method:name(sub) method:name(sub)*spec method:name*spec
+-- name name(sub) name(sub)*spec name*spec
+-- name@spec*oeps
+
+local splitter, splitspecifiers = nil, "" -- not so nice
+
+local P, C, S, Cc = lpeg.P, lpeg.C, lpeg.S, lpeg.Cc
+
+local left = P("(")
+local right = P(")")
+local colon = P(":")
+local space = P(" ")
+
+definers.defaultlookup = "file"
+
+local prefixpattern = P(false)
+
+local function addspecifier(symbol)
+ splitspecifiers = splitspecifiers .. symbol
+ local method = S(splitspecifiers)
+ local lookup = C(prefixpattern) * colon
+ local sub = left * C(P(1-left-right-method)^1) * right
+ local specification = C(method) * C(P(1)^1)
+ local name = C((1-sub-specification)^1)
+ splitter = P((lookup + Cc("")) * name * (sub + Cc("")) * (specification + Cc("")))
+end
+
+local function addlookup(str,default)
+ prefixpattern = prefixpattern + P(str)
+end
+
+definers.addlookup = addlookup
+
+addlookup("file")
+addlookup("name")
+addlookup("spec")
+
+local function getspecification(str)
+ return lpegmatch(splitter,str)
+end
+
+definers.getspecification = getspecification
+
+function definers.registersplit(symbol,action,verbosename)
+ addspecifier(symbol)
+ variants[symbol] = action
+ if verbosename then
+ variants[verbosename] = action
+ end
+end
+
+local function makespecification(specification,lookup,name,sub,method,detail,size)
+ size = size or 655360
+ if not lookup or lookup == "" then
+ lookup = definers.defaultlookup
+ end
+ if trace_defining then
+ report_defining("specification %a, lookup %a, name %a, sub %a, method %a, detail %a",
+ specification, lookup, name, sub, method, detail)
+ end
+ local t = {
+ lookup = lookup, -- forced type
+ specification = specification, -- full specification
+ size = size, -- size in scaled points or -1000*n
+ name = name, -- font or filename
+ sub = sub, -- subfont (eg in ttc)
+ method = method, -- specification method
+ detail = detail, -- specification
+ resolved = "", -- resolved font name
+ forced = "", -- forced loader
+ features = { }, -- preprocessed features
+ }
+ return t
+end
+
+
+definers.makespecification = makespecification
+
+function definers.analyze(specification, size)
+ -- can be optimized with locals
+ local lookup, name, sub, method, detail = getspecification(specification or "")
+ return makespecification(specification, lookup, name, sub, method, detail, size)
+end
+
+--[[ldx--
+
We can resolve the filename using the next function:
+--ldx]]--
+
+definers.resolvers = definers.resolvers or { }
+local resolvers = definers.resolvers
+
+-- todo: reporter
+
+function resolvers.file(specification)
+ local name = resolvefile(specification.name) -- catch for renames
+ local suffix = file.suffix(name)
+ if fonts.formats[suffix] then
+ specification.forced = suffix
+ specification.name = file.removesuffix(name)
+ else
+ specification.name = name -- can be resolved
+ end
+end
+
+function resolvers.name(specification)
+ local resolve = fonts.names.resolve
+ if resolve then
+ local resolved, sub = resolve(specification.name,specification.sub,specification) -- we pass specification for overloaded versions
+ if resolved then
+ specification.resolved = resolved
+ specification.sub = sub
+ local suffix = file.suffix(resolved)
+ if fonts.formats[suffix] then
+ specification.forced = suffix
+ specification.name = file.removesuffix(resolved)
+ else
+ specification.name = resolved
+ end
+ end
+ else
+ resolvers.file(specification)
+ end
+end
+
+function resolvers.spec(specification)
+ local resolvespec = fonts.names.resolvespec
+ if resolvespec then
+ local resolved, sub = resolvespec(specification.name,specification.sub,specification) -- we pass specification for overloaded versions
+ if resolved then
+ specification.resolved = resolved
+ specification.sub = sub
+ specification.forced = file.suffix(resolved)
+ specification.name = file.removesuffix(resolved)
+ end
+ else
+ resolvers.name(specification)
+ end
+end
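+
+-- A custom lookup follows the same pattern: register the prefix and provide
+-- a resolver with the same name. A purely illustrative sketch (the prefix
+-- and helper are hypothetical):
+--
+-- addlookup("mylookup")
+--
+-- function resolvers.mylookup(specification)
+--     specification.name = mymapper(specification.name) -- hypothetical helper
+-- end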
+
+function definers.resolve(specification)
+ if not specification.resolved or specification.resolved == "" then -- resolved itself not per se in mapping hash
+ local r = resolvers[specification.lookup]
+ if r then
+ r(specification)
+ end
+ end
+ if specification.forced == "" then
+ specification.forced = nil
+ else
+ specification.forced = specification.forced
+ end
+ specification.hash = lower(specification.name .. ' @ ' .. constructors.hashfeatures(specification))
+ if specification.sub and specification.sub ~= "" then
+ specification.hash = specification.sub .. ' @ ' .. specification.hash
+ end
+ return specification
+end
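+
+-- The outcome, sketched with made-up values: a specification like
+-- "file:lmroman10-regular*default" ends up with a hash roughly of the form
+--
+--   "lmroman10-regular @ <hashed feature set>"
+--
+-- optionally prefixed by the subfont, and that hash is what the caches
+-- below (via constructors.hashinstance) key on.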
+
+--[[ldx--
+
+The main read function either uses a forced reader (as determined by
+a lookup) or tries to resolve the name using the list of readers.
+
+We need to cache when possible. We do cache raw tfm data (from tfm,
+afm or otf files). After that we can cache based
+on specification (name) and size, that is, TeX only needs a number
+for an already loaded font. However, it may make sense to cache fonts
+before they're scaled as well (store tfm tables with applied methods
+and features). However, there may be a relation between the size and
+features (esp in virtual fonts) so let's not do that now.
+
+Watch out, here we do load a font, but we don't prepare the
+specification yet.
+--ldx]]--
+
+-- very experimental:
+
+function definers.applypostprocessors(tfmdata)
+ local postprocessors = tfmdata.postprocessors
+ if postprocessors then
+ local properties = tfmdata.properties
+ for i=1,#postprocessors do
+ local extrahash = postprocessors[i](tfmdata) -- after scaling etc
+ if type(extrahash) == "string" and extrahash ~= "" then
+ -- e.g. a reencoding needs this
+ extrahash = gsub(lower(extrahash),"[^a-z]","-")
+ properties.fullname = format("%s-%s",properties.fullname,extrahash)
+ end
+ end
+ end
+ return tfmdata
+end
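+
+-- An illustrative (hypothetical) postprocessor: such functions sit in
+-- tfmdata.postprocessors and may return a string that gets appended to the
+-- fullname so that differently processed instances don't clash:
+--
+-- local postprocessors = tfmdata.postprocessors or { }
+-- tfmdata.postprocessors = postprocessors
+-- postprocessors[#postprocessors+1] = function(tfmdata)
+--     -- tweak some characters here ...
+--     return "tweaked" -- becomes part of properties.fullname
+-- end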
+
+-- function definers.applypostprocessors(tfmdata)
+-- return tfmdata
+-- end
+
+local function checkembedding(tfmdata)
+ local properties = tfmdata.properties
+ local embedding
+ if directive_embedall then
+ embedding = "full"
+ elseif properties and properties.filename and constructors.dontembed[properties.filename] then
+ embedding = "no"
+ else
+ embedding = "subset"
+ end
+ if properties then
+ properties.embedding = embedding
+ else
+ tfmdata.properties = { embedding = embedding }
+ end
+ tfmdata.embedding = embedding
+end
+
+function definers.loadfont(specification)
+ local hash = constructors.hashinstance(specification)
+ local tfmdata = loadedfonts[hash] -- hashes by size !
+ if not tfmdata then
+ local forced = specification.forced or ""
+ if forced ~= "" then
+ local reader = readers[lower(forced)]
+ tfmdata = reader and reader(specification)
+ if not tfmdata then
+ report_defining("forced type %a of %a not found",forced,specification.name)
+ end
+ else
+ local sequence = readers.sequence -- can be overloaded so only a shortcut here
+ for s=1,#sequence do
+ local reader = sequence[s]
+ if readers[reader] then -- we skip not loaded readers
+ if trace_defining then
+ report_defining("trying (reader sequence driven) type %a for %a with file %a",reader,specification.name,specification.filename)
+ end
+ tfmdata = readers[reader](specification)
+ if tfmdata then
+ break
+ else
+ specification.filename = nil
+ end
+ end
+ end
+ end
+ if tfmdata then
+ tfmdata = definers.applypostprocessors(tfmdata)
+ checkembedding(tfmdata) -- todo: general postprocessor
+ loadedfonts[hash] = tfmdata
+ designsizes[specification.hash] = tfmdata.parameters.designsize
+ end
+ end
+ if not tfmdata then
+ report_defining("font with asked name %a is not found using lookup %a",specification.name,specification.lookup)
+ end
+ return tfmdata
+end
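+
+-- The sequence can be adapted when needed; a sketch (assuming one never
+-- wants the tfm reader to kick in):
+--
+-- readers.sequence = allocate { 'otf', 'ttf', 'afm', 'lua' }
+--
+-- A reader itself is just a function that takes a specification and returns
+-- a tfmdata table (or nil when it fails), so extra formats can be hooked
+-- into the readers table under their suffix.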
+
+function constructors.checkvirtualids()
+ -- dummy in plain version
+end
+
+function constructors.readanddefine(name,size) -- no id -- maybe a dummy first
+ local specification = definers.analyze(name,size)
+ local method = specification.method
+ if method and variants[method] then
+ specification = variants[method](specification)
+ end
+ specification = definers.resolve(specification)
+ local hash = constructors.hashinstance(specification)
+ local id = definers.registered(hash)
+ if not id then
+ local tfmdata = definers.loadfont(specification)
+ if tfmdata then
+ tfmdata.properties.hash = hash
+ constructors.checkvirtualids(tfmdata) -- experiment, will become obsolete when slots can selfreference
+ id = font.define(tfmdata)
+ definers.register(tfmdata,id)
+ else
+ id = 0 -- signal
+ end
+ end
+ return fontdata[id], id
+end
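+
+-- A usage sketch (the font name is only an example): this is the entry
+-- point for code that wants both the tfmdata and the id without going
+-- through the tex end:
+--
+-- local tfmdata, id = constructors.readanddefine("file:lmroman10-regular",10*65536)
+-- if id > 0 then
+--     -- tfmdata is also reachable as fonts.hashes.identifiers[id]
+-- end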
+
+--[[ldx--
+
+So far the specifiers. Now comes the real definer. Here we cache
+based on id's. Here we also intercept the virtual font handler. Since
+it evolved stepwise I may rewrite this bit (combine code).
+
+In the previously defined reader (the one resulting in a tfm
+table) we cached the (scaled) instances. Here we cache them again, but
+this time based on id. We could combine this in one cache but this does
+not gain much. By the way, passing id's back to TeX in the callback was
+introduced later in the development.
+--ldx]]--
+
+function definers.current() -- or maybe current
+ return lastdefined
+end
+
+function definers.registered(hash)
+ local id = internalized[hash]
+ return id, id and fontdata[id]
+end
+
+function definers.register(tfmdata,id)
+ if tfmdata and id then
+ local hash = tfmdata.properties.hash
+ if not hash then
+ report_defining("registering font, id %a, name %a, invalid hash",id,tfmdata.properties.filename or "?")
+ elseif not internalized[hash] then
+ internalized[hash] = id
+ if trace_defining then
+ report_defining("registering font, id %s, hash %a",id,hash)
+ end
+ fontdata[id] = tfmdata
+ end
+ end
+end
+
+function definers.read(specification,size,id) -- id can be optional, name can already be table
+ statistics.starttiming(fonts)
+ if type(specification) == "string" then
+ specification = definers.analyze(specification,size)
+ end
+ local method = specification.method
+ if method and variants[method] then
+ specification = variants[method](specification)
+ end
+ specification = definers.resolve(specification)
+ local hash = constructors.hashinstance(specification)
+ local tfmdata = definers.registered(hash) -- id
+ if tfmdata then
+ if trace_defining then
+ report_defining("already hashed: %s",hash)
+ end
+ else
+ tfmdata = definers.loadfont(specification) -- can be overloaded
+ if tfmdata then
+ if trace_defining then
+ report_defining("loaded and hashed: %s",hash)
+ end
+ tfmdata.properties.hash = hash
+ if id then
+ definers.register(tfmdata,id)
+ end
+ else
+ if trace_defining then
+ report_defining("not loaded and hashed: %s",hash)
+ end
+ end
+ end
+ lastdefined = tfmdata or id -- todo ! ! ! ! !
+ if not tfmdata then -- or id?
+ report_defining( "unknown font %a, loading aborted",specification.name)
+ elseif trace_defining and type(tfmdata) == "table" then
+ local properties = tfmdata.properties or { }
+ local parameters = tfmdata.parameters or { }
+ report_defining("using %s font with id %a, name %a, size %a, bytes %a, encoding %a, fullname %a, filename %a",
+ properties.format, id, properties.name, parameters.size, properties.encodingbytes,
+ properties.encodingname, properties.fullname, file.basename(properties.filename))
+ end
+ statistics.stoptiming(fonts)
+ return tfmdata
+end
+
+function font.getfont(id)
+ return fontdata[id] -- otherwise issues
+end
+
+--[[ldx--
+
+We overload the tfm reader (the define_font callback).
+--ldx]]--
+
+callbacks.register('define_font', definers.read, "definition of fonts (tfmdata preparation)")
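+
+-- So a definition at the tex end, for instance
+--
+--   \font\test=file:lmroman10-regular*default at 10pt
+--
+-- ends up in definers.read("file:lmroman10-regular*default",655360,<id>)
+-- via the define_font callback registered above, with the id being the
+-- number that tex reserved for the font.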
diff --git a/tex/context/base/font-enc.lua b/tex/context/base/font-enc.lua
index 5305f0736..9795e0948 100644
--- a/tex/context/base/font-enc.lua
+++ b/tex/context/base/font-enc.lua
@@ -1,147 +1,147 @@
-if not modules then modules = { } end modules ['font-enc'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- this module is obsolete
-
-local match, gmatch, gsub = string.match, string.gmatch, string.gsub
-
-local setmetatableindex = table.setmetatableindex
-
---[[ldx--
-
-Because encodings are going to disappear, we don't bother defining
-them in tables. But we may do so some day, for consistency.
-
-Beware! The generic encoding files don't always apply to the ones that
-ship with fonts. This has to do with the fact that names follow (slightly)
-different standards. However, the fonts this applies to (for instance
-Latin Modern or Gyre) come in OpenType variants too, so these
-will be used.
---ldx]]--
-
-local enccodes = characters.enccodes or { }
-
-function encodings.load(filename)
- local name = file.removesuffix(filename)
- local data = containers.read(encodings.cache,name)
- if data then
- return data
- end
- if name == "unicode" then
- data = encodings.make_unicode_vector() -- special case, no tex file for this
- end
- if data then
- return data
- end
- local vector, tag, hash, unicodes = { }, "", { }, { }
- local foundname = resolvers.findfile(filename,'enc')
- if foundname and foundname ~= "" then
- local ok, encoding, size = resolvers.loadbinfile(foundname)
- if ok and encoding then
- encoding = gsub(encoding,"%%(.-)\n","")
- local tag, vec = match(encoding,"/(%w+)%s*%[(.*)%]%s*def")
- local i = 0
- for ch in gmatch(vec,"/([%a%d%.]+)") do
- if ch ~= ".notdef" then
- vector[i] = ch
- if not hash[ch] then
- hash[ch] = i
- else
- -- duplicate, play safe for tex ligs and take first
- end
- if enccodes[ch] then
- unicodes[enccodes[ch]] = i
- end
- end
- i = i + 1
- end
- end
- end
- local data = {
- name = name,
- tag = tag,
- vector = vector,
- hash = hash,
- unicodes = unicodes
- }
- return containers.write(encodings.cache, name, data)
-end
-
---[[ldx--
-
-There is no unicode encoding but for practical purposes we define
-one.
---ldx]]--
-
--- maybe make this a function:
-
-function encodings.make_unicode_vector()
- local vector, hash = { }, { }
- for code, v in next, characters.data do
- local name = v.adobename
- if name then
- vector[code] = name
- hash[name] = code
- else
- vector[code] = '.notdef'
- end
- end
- for name, code in next, characters.synonyms do
- vector[code], hash[name] = name, code
- end
- return containers.write(encodings.cache, 'unicode', { name='unicode', tag='unicode', vector=vector, hash=hash })
-end
-
-if not encodings.agl then
-
- -- We delay loading this rather big vector that is only needed when a
- -- font is loaded for caching. Once we're further along the route we can also
- -- delay it in the generic version (which doesn't use this file).
-
- encodings.agl = { }
-
- setmetatableindex(encodings.agl, function(t,k)
- report_encoding("loading (extended) adobe glyph list")
- dofile(resolvers.findfile("font-agl.lua"))
- return rawget(encodings.agl,k)
- end)
-
-end
+if not modules then modules = { } end modules ['font-enc'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- this module is obsolete
+
+local match, gmatch, gsub = string.match, string.gmatch, string.gsub
+
+local setmetatableindex = table.setmetatableindex
+
+--[[ldx--
+
+Because encodings are going to disappear, we don't bother defining
+them in tables. But we may do so some day, for consistency.
+
+Beware! The generic encoding files don't always apply to the ones that
+ship with fonts. This has to do with the fact that names follow (slightly)
+different standards. However, the fonts this applies to (for instance
+Latin Modern or Gyre) come in OpenType variants too, so these
+will be used.
+--ldx]]--
+
+local enccodes = characters.enccodes or { }
+
+function encodings.load(filename)
+ local name = file.removesuffix(filename)
+ local data = containers.read(encodings.cache,name)
+ if data then
+ return data
+ end
+ if name == "unicode" then
+ data = encodings.make_unicode_vector() -- special case, no tex file for this
+ end
+ if data then
+ return data
+ end
+ local vector, tag, hash, unicodes = { }, "", { }, { }
+ local foundname = resolvers.findfile(filename,'enc')
+ if foundname and foundname ~= "" then
+ local ok, encoding, size = resolvers.loadbinfile(foundname)
+ if ok and encoding then
+ encoding = gsub(encoding,"%%(.-)\n","")
+ local vec -- don't shadow the outer tag, it ends up in the cached data
+ tag, vec = match(encoding,"/(%w+)%s*%[(.*)%]%s*def")
+ local i = 0
+ for ch in gmatch(vec,"/([%a%d%.]+)") do
+ if ch ~= ".notdef" then
+ vector[i] = ch
+ if not hash[ch] then
+ hash[ch] = i
+ else
+ -- duplicate, play safe for tex ligs and take first
+ end
+ if enccodes[ch] then
+ unicodes[enccodes[ch]] = i
+ end
+ end
+ i = i + 1
+ end
+ end
+ end
+ local data = {
+ name = name,
+ tag = tag,
+ vector = vector,
+ hash = hash,
+ unicodes = unicodes
+ }
+ return containers.write(encodings.cache, name, data)
+end
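+
+-- For the record, the input this parser expects is a regular (postscript
+-- style) encoding file; a trimmed down sketch:
+--
+-- % comment lines start with a percent sign
+-- /SomeEncoding [
+-- /.notdef /quotesingle /grave ... /ydieresis
+-- ] def
+--
+-- which ends up as data.vector (slot to glyph name) and data.hash (glyph
+-- name to slot), with data.unicodes filled for names known in enccodes.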
+
+--[[ldx--
+
+There is no unicode encoding but for practical purposes we define
+one.
+--ldx]]--
+
+-- maybe make this a function:
+
+function encodings.make_unicode_vector()
+ local vector, hash = { }, { }
+ for code, v in next, characters.data do
+ local name = v.adobename
+ if name then
+ vector[code] = name
+ hash[name] = code
+ else
+ vector[code] = '.notdef'
+ end
+ end
+ for name, code in next, characters.synonyms do
+ vector[code], hash[name] = name, code
+ end
+ return containers.write(encodings.cache, 'unicode', { name='unicode', tag='unicode', vector=vector, hash=hash })
+end
+
+if not encodings.agl then
+
+ -- We delay loading this rather big vector that is only needed when a
+ -- font is loaded for caching. Once we're further along the route we can also
+ -- delay it in the generic version (which doesn't use this file).
+
+ encodings.agl = { }
+
+ setmetatableindex(encodings.agl, function(t,k)
+ report_encoding("loading (extended) adobe glyph list")
+ dofile(resolvers.findfile("font-agl.lua"))
+ return rawget(encodings.agl,k)
+ end)
+
+end
diff --git a/tex/context/base/font-enh.lua b/tex/context/base/font-enh.lua
index 2bf0741f5..cb152083d 100644
--- a/tex/context/base/font-enh.lua
+++ b/tex/context/base/font-enh.lua
@@ -1,200 +1,200 @@
-if not modules then modules = { } end modules ['font-enh'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local next = next
-
-local trace_unicoding = false
-
-trackers.register("fonts.defining", function(v) trace_unicoding = v end)
-trackers.register("fonts.unicoding", function(v) trace_unicoding = v end)
-
-local report_unicoding = logs.reporter("fonts","unicoding")
-
-local fonts = fonts
-local constructors = fonts.constructors
-
-local tfmfeatures = constructors.newfeatures("tfm")
-local registertfmfeature = tfmfeatures.register
-
-local afmfeatures = fonts.constructors.newfeatures("afm")
-local registerafmfeature = afmfeatures.register
-
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
-
--- -- these will become goodies (when needed at all)
---
--- local fontencodings = fonts.encodings
--- fontencodings.remappings = fontencodings.remappings or { }
---
--- local function reencode(tfmdata,encoding)
--- if encoding and fontencodings.known[encoding] then
--- local data = fontencodings.load(encoding)
--- if data then
--- tfmdata.properties.encoding = encoding
--- local characters = tfmdata.characters
--- local original = { }
--- local vector = data.vector
--- for unicode, character in next, characters do
--- character.name = vector[unicode]
--- character.index = unicode, character
--- original[unicode] = character
--- end
--- for newcode, oldcode in next, data.unicodes do
--- if newcode ~= oldcode then
--- if trace_unicoding then
--- report_unicoding("reencoding %U to %U",oldcode,newcode)
--- end
--- characters[newcode] = original[oldcode]
--- end
--- end
--- end
--- end
--- end
---
--- registertfmfeature {
--- name = "reencode",
--- description = "reencode",
--- manipulators = {
--- base = reencode,
--- node = reencode,
--- }
--- }
---
--- local function remap(tfmdata,remapping)
--- local vector = remapping and fontencodings.remappings[remapping]
--- if vector then
--- local characters, original = tfmdata.characters, { }
--- for k, v in next, characters do
--- original[k], characters[k] = v, nil
--- end
--- for k,v in next, vector do
--- if k ~= v then
--- if trace_unicoding then
--- report_unicoding("remapping %U to %U",k,v)
--- end
--- local c = original[k]
--- characters[v] = c
--- c.index = k
--- end
--- end
--- local properties = tfmdata.properties
--- if not properties then
--- properties = { }
--- tfmdata.properties = properties
--- else
--- properties.encodingbytes = 2
--- properties.format = properties.format or 'type1'
--- end
--- end
--- end
---
--- registertfmfeature {
--- name = "remap",
--- description = "remap",
--- manipulators = {
--- base = remap,
--- node = remap,
--- }
--- }
-
--- \definefontfeature[dingbats][goodies=dingbats,unicoding=yes]
-
--- we only add and don't replace
--- we could also add kerns but we assume symbols
--- todo: complain if not basemode
-
--- remapping = {
--- tounicode = true,
--- unicodes = {
--- a1 = 0x2701,
-
-local tosixteen = fonts.mappings.tounicode16
-
-local function initializeunicoding(tfmdata)
- local goodies = tfmdata.goodies
- local newcoding = nil
- local tounicode = false
- for i=1,#goodies do
- local remapping = goodies[i].remapping
- if remapping and remapping.unicodes then
- newcoding = remapping.unicodes -- names to unicodes
- tounicode = remapping.tounicode
- end
- end
- if newcoding then
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local oldcoding = tfmdata.resources.unicodes
- local tounicodes = tfmdata.resources.tounicode -- index to unicode
- local originals = { }
- for name, newcode in next, newcoding do
- local oldcode = oldcoding[name]
- if characters[newcode] and not originals[newcode] then
- originals[newcode] = {
- character = characters [newcode],
- description = descriptions[newcode],
- }
- end
- if oldcode then
- local original = originals[oldcode]
- if original then
- characters [newcode] = original.character
- descriptions[newcode] = original.description
- else
- characters [newcode] = characters [oldcode]
- descriptions[newcode] = descriptions[oldcode]
- end
- else
- oldcoding[name] = newcode
- end
- if tounicode then
- local description = descriptions[newcode]
- if description then
- local index = description.index
- if not tounicodes[index] then
- tounicodes[index] = tosixteen(newcode) -- shared (we could have a metatable)
- end
- end
- end
- if trace_unicoding then
- if oldcode then
- report_unicoding("aliasing glyph %a from %U to %U",name,oldcode,newcode)
- else
- report_unicoding("aliasing glyph %a to %U",name,newcode)
- end
- end
- end
- end
-end
-
-registerafmfeature {
- name = "unicoding",
- description = "adapt unicode table",
- initializers = {
- base = initializeunicoding,
- node = initializeunicoding,
- },
- -- manipulators = {
- -- base = finalizeunicoding,
- -- node = finalizeunicoding,
- -- }
-}
-
-registerotffeature {
- name = "unicoding",
- description = "adapt unicode table",
- initializers = {
- base = initializeunicoding,
- node = initializeunicoding,
- },
- -- manipulators = {
- -- base = finalizeunicoding,
- -- node = finalizeunicoding,
- -- }
-}
+if not modules then modules = { } end modules ['font-enh'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local next = next
+
+local trace_unicoding = false
+
+trackers.register("fonts.defining", function(v) trace_unicoding = v end)
+trackers.register("fonts.unicoding", function(v) trace_unicoding = v end)
+
+local report_unicoding = logs.reporter("fonts","unicoding")
+
+local fonts = fonts
+local constructors = fonts.constructors
+
+local tfmfeatures = constructors.newfeatures("tfm")
+local registertfmfeature = tfmfeatures.register
+
+local afmfeatures = fonts.constructors.newfeatures("afm")
+local registerafmfeature = afmfeatures.register
+
+local otffeatures = fonts.constructors.newfeatures("otf")
+local registerotffeature = otffeatures.register
+
+-- -- these will become goodies (when needed at all)
+--
+-- local fontencodings = fonts.encodings
+-- fontencodings.remappings = fontencodings.remappings or { }
+--
+-- local function reencode(tfmdata,encoding)
+-- if encoding and fontencodings.known[encoding] then
+-- local data = fontencodings.load(encoding)
+-- if data then
+-- tfmdata.properties.encoding = encoding
+-- local characters = tfmdata.characters
+-- local original = { }
+-- local vector = data.vector
+-- for unicode, character in next, characters do
+-- character.name = vector[unicode]
+-- character.index = unicode, character
+-- original[unicode] = character
+-- end
+-- for newcode, oldcode in next, data.unicodes do
+-- if newcode ~= oldcode then
+-- if trace_unicoding then
+-- report_unicoding("reencoding %U to %U",oldcode,newcode)
+-- end
+-- characters[newcode] = original[oldcode]
+-- end
+-- end
+-- end
+-- end
+-- end
+--
+-- registertfmfeature {
+-- name = "reencode",
+-- description = "reencode",
+-- manipulators = {
+-- base = reencode,
+-- node = reencode,
+-- }
+-- }
+--
+-- local function remap(tfmdata,remapping)
+-- local vector = remapping and fontencodings.remappings[remapping]
+-- if vector then
+-- local characters, original = tfmdata.characters, { }
+-- for k, v in next, characters do
+-- original[k], characters[k] = v, nil
+-- end
+-- for k,v in next, vector do
+-- if k ~= v then
+-- if trace_unicoding then
+-- report_unicoding("remapping %U to %U",k,v)
+-- end
+-- local c = original[k]
+-- characters[v] = c
+-- c.index = k
+-- end
+-- end
+-- local properties = tfmdata.properties
+-- if not properties then
+-- properties = { }
+-- tfmdata.properties = properties
+-- else
+-- properties.encodingbytes = 2
+-- properties.format = properties.format or 'type1'
+-- end
+-- end
+-- end
+--
+-- registertfmfeature {
+-- name = "remap",
+-- description = "remap",
+-- manipulators = {
+-- base = remap,
+-- node = remap,
+-- }
+-- }
+
+-- \definefontfeature[dingbats][goodies=dingbats,unicoding=yes]
+
+-- we only add and don't replace
+-- we could also add kerns but we assume symbols
+-- todo: complain if not basemode
+
+-- remapping = {
+-- tounicode = true,
+-- unicodes = {
+-- a1 = 0x2701,
+
+local tosixteen = fonts.mappings.tounicode16
+
+local function initializeunicoding(tfmdata)
+ local goodies = tfmdata.goodies
+ local newcoding = nil
+ local tounicode = false
+ for i=1,#goodies do
+ local remapping = goodies[i].remapping
+ if remapping and remapping.unicodes then
+ newcoding = remapping.unicodes -- names to unicodes
+ tounicode = remapping.tounicode
+ end
+ end
+ if newcoding then
+ local characters = tfmdata.characters
+ local descriptions = tfmdata.descriptions
+ local oldcoding = tfmdata.resources.unicodes
+ local tounicodes = tfmdata.resources.tounicode -- index to unicode
+ local originals = { }
+ for name, newcode in next, newcoding do
+ local oldcode = oldcoding[name]
+ if characters[newcode] and not originals[newcode] then
+ originals[newcode] = {
+ character = characters [newcode],
+ description = descriptions[newcode],
+ }
+ end
+ if oldcode then
+ local original = originals[oldcode]
+ if original then
+ characters [newcode] = original.character
+ descriptions[newcode] = original.description
+ else
+ characters [newcode] = characters [oldcode]
+ descriptions[newcode] = descriptions[oldcode]
+ end
+ else
+ oldcoding[name] = newcode
+ end
+ if tounicode then
+ local description = descriptions[newcode]
+ if description then
+ local index = description.index
+ if not tounicodes[index] then
+ tounicodes[index] = tosixteen(newcode) -- shared (we could have a metatable)
+ end
+ end
+ end
+ if trace_unicoding then
+ if oldcode then
+ report_unicoding("aliasing glyph %a from %U to %U",name,oldcode,newcode)
+ else
+ report_unicoding("aliasing glyph %a to %U",name,newcode)
+ end
+ end
+ end
+ end
+end
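+
+-- A sketch of the goodies data this initializer reacts to (values made up;
+-- in practice such a table lives in a goodies file that is pulled in with
+-- the goodies=... feature, as in the dingbats example above):
+--
+-- remapping = {
+--     tounicode = true,
+--     unicodes  = {
+--         a1 = 0x2701, -- glyph name "a1" ends up at U+2701
+--         a2 = 0x2702,
+--     },
+-- }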
+
+registerafmfeature {
+ name = "unicoding",
+ description = "adapt unicode table",
+ initializers = {
+ base = initializeunicoding,
+ node = initializeunicoding,
+ },
+ -- manipulators = {
+ -- base = finalizeunicoding,
+ -- node = finalizeunicoding,
+ -- }
+}
+
+registerotffeature {
+ name = "unicoding",
+ description = "adapt unicode table",
+ initializers = {
+ base = initializeunicoding,
+ node = initializeunicoding,
+ },
+ -- manipulators = {
+ -- base = finalizeunicoding,
+ -- node = finalizeunicoding,
+ -- }
+}
diff --git a/tex/context/base/font-ext.lua b/tex/context/base/font-ext.lua
index 89d5927d4..d2bc21837 100644
--- a/tex/context/base/font-ext.lua
+++ b/tex/context/base/font-ext.lua
@@ -1,946 +1,946 @@
-if not modules then modules = { } end modules ['font-ext'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv and hand-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local next, type, byte = next, type, string.byte
-local gmatch, concat, format = string.gmatch, table.concat, string.format
-local utfchar = utf.char
-
-local commands, context = commands, context
-local fonts, utilities = fonts, utilities
-
-local trace_protrusion = false trackers.register("fonts.protrusion", function(v) trace_protrusion = v end)
-local trace_expansion = false trackers.register("fonts.expansion", function(v) trace_expansion = v end)
-
-local report_expansions = logs.reporter("fonts","expansions")
-local report_protrusions = logs.reporter("fonts","protrusions")
-
--- todo: byte(..) => 0xHHHH
-
---[[ldx--
-
-When we implement functions that deal with features, most of them
-will depend on the font format. Here we define the few that are kind
-of neutral.
---ldx]]--
-
-local handlers = fonts.handlers
-local hashes = fonts.hashes
-local otf = handlers.otf
-
-local registerotffeature = handlers.otf.features.register
-local registerafmfeature = handlers.afm.features.register
-
-local fontdata = hashes.identifiers
-
-local allocate = utilities.storage.allocate
-local settings_to_array = utilities.parsers.settings_to_array
-local getparameters = utilities.parsers.getparameters
-
-local setmetatableindex = table.setmetatableindex
-
--- -- -- -- -- --
--- shared
--- -- -- -- -- --
-
-local function get_class_and_vector(tfmdata,value,where) -- "expansions"
- local g_where = tfmdata.goodies and tfmdata.goodies[where]
- local f_where = fonts[where]
- local g_classes = g_where and g_where.classes
- local f_classes = f_where and f_where.classes
- local class = (g_classes and g_classes[value]) or (f_classes and f_classes[value])
- if class then
- local class_vector = class.vector
- local g_vectors = g_where and g_where.vectors
- local f_vectors = f_where and f_where.vectors
- local vector = (g_vectors and g_vectors[class_vector]) or (f_vectors and f_vectors[class_vector])
- return class, vector
- end
-end
-
--- -- -- -- -- --
--- expansion (hz)
--- -- -- -- -- --
-
-local expansions = fonts.expansions or allocate()
-
-fonts.expansions = expansions
-
-local classes = expansions.classes or allocate()
-local vectors = expansions.vectors or allocate()
-
-expansions.classes = classes
-expansions.vectors = vectors
-
--- beware, pdftex itself uses percentages * 10
-
-classes.preset = { stretch = 2, shrink = 2, step = .5, factor = 1 }
-
-function commands.setupfontexpansion(class,settings)
- getparameters(classes,class,'preset',settings)
-end
-
-classes['quality'] = {
- stretch = 2, shrink = 2, step = .5, vector = 'default', factor = 1
-}
-
-vectors['default'] = {
- [byte('A')] = 0.5, [byte('B')] = 0.7, [byte('C')] = 0.7, [byte('D')] = 0.5, [byte('E')] = 0.7,
- [byte('F')] = 0.7, [byte('G')] = 0.5, [byte('H')] = 0.7, [byte('K')] = 0.7, [byte('M')] = 0.7,
- [byte('N')] = 0.7, [byte('O')] = 0.5, [byte('P')] = 0.7, [byte('Q')] = 0.5, [byte('R')] = 0.7,
- [byte('S')] = 0.7, [byte('U')] = 0.7, [byte('W')] = 0.7, [byte('Z')] = 0.7,
- [byte('a')] = 0.7, [byte('b')] = 0.7, [byte('c')] = 0.7, [byte('d')] = 0.7, [byte('e')] = 0.7,
- [byte('g')] = 0.7, [byte('h')] = 0.7, [byte('k')] = 0.7, [byte('m')] = 0.7, [byte('n')] = 0.7,
- [byte('o')] = 0.7, [byte('p')] = 0.7, [byte('q')] = 0.7, [byte('s')] = 0.7, [byte('u')] = 0.7,
- [byte('w')] = 0.7, [byte('z')] = 0.7,
- [byte('2')] = 0.7, [byte('3')] = 0.7, [byte('6')] = 0.7, [byte('8')] = 0.7, [byte('9')] = 0.7,
-}
-
-vectors['quality'] = vectors['default'] -- metatable ?
-
-local function initializeexpansion(tfmdata,value)
- if value then
- local class, vector = get_class_and_vector(tfmdata,value,"expansions")
- if class then
- if vector then
- local stretch = class.stretch or 0
- local shrink = class.shrink or 0
- local step = class.step or 0
- local factor = class.factor or 1
- if trace_expansion then
- report_expansions("setting class %a, vector %a, factor %a, stretch %a, shrink %a, step %a",
- value,class.vector,factor,stretch,shrink,step)
- end
- tfmdata.parameters.expansion = {
- stretch = 10 * stretch,
- shrink = 10 * shrink,
- step = 10 * step,
- factor = factor,
- auto = true,
- }
- local data = characters and characters.data
- for i, chr in next, tfmdata.characters do
- local v = vector[i]
- if data and not v then -- we could move the data test outside (needed for plain)
- local d = data[i]
- if d then
- local s = d.shcode
- if not s then
- -- sorry
- elseif type(s) == "table" then
- v = ((vector[s[1]] or 0) + (vector[s[#s]] or 0)) / 2
- else
- v = vector[s] or 0
- end
- end
- end
- if v and v ~= 0 then
- chr.expansion_factor = v*factor
- else -- can be option
- chr.expansion_factor = factor
- end
- end
- elseif trace_expansion then
- report_expansions("unknown vector %a in class %a",class.vector,value)
- end
- elseif trace_expansion then
- report_expansions("unknown class %a",value)
- end
- end
-end
-
-registerotffeature {
- name = "expansion",
- description = "apply hz optimization",
- initializers = {
- base = initializeexpansion,
- node = initializeexpansion,
- }
-}
-
-registerafmfeature {
- name = "expansion",
- description = "apply hz optimization",
- initializers = {
- base = initializeexpansion,
- node = initializeexpansion,
- }
-}
-
-fonts.goodies.register("expansions", function(...) return fonts.goodies.report("expansions", trace_expansion, ...) end)
-
-local report_opbd = logs.reporter("fonts","otf opbd")
-
--- -- -- -- -- --
--- protrusion
--- -- -- -- -- --
-
-fonts.protrusions = allocate()
-local protrusions = fonts.protrusions
-
-protrusions.classes = allocate()
-protrusions.vectors = allocate()
-
-local classes = protrusions.classes
-local vectors = protrusions.vectors
-
--- the values need to be revised
-
-classes.preset = { factor = 1, left = 1, right = 1 }
-
-function commands.setupfontprotrusion(class,settings)
- getparameters(classes,class,'preset',settings)
-end
-
-classes['pure'] = {
- vector = 'pure', factor = 1
-}
-classes['punctuation'] = {
- vector = 'punctuation', factor = 1
-}
-classes['alpha'] = {
- vector = 'alpha', factor = 1
-}
-classes['quality'] = {
- vector = 'quality', factor = 1
-}
-
-vectors['pure'] = {
-
- [0x002C] = { 0, 1 }, -- comma
- [0x002E] = { 0, 1 }, -- period
- [0x003A] = { 0, 1 }, -- colon
- [0x003B] = { 0, 1 }, -- semicolon
- [0x002D] = { 0, 1 }, -- hyphen
- [0x00AD] = { 0, 1 }, -- also hyphen
- [0x2013] = { 0, 0.50 }, -- endash
- [0x2014] = { 0, 0.33 }, -- emdash
- [0x3001] = { 0, 1 }, -- ideographic comma ã€
- [0x3002] = { 0, 1 }, -- ideographic full stop 。
- [0x060C] = { 0, 1 }, -- arabic comma ،
- [0x061B] = { 0, 1 }, -- arabic semicolon Ø›
- [0x06D4] = { 0, 1 }, -- arabic full stop Û”
-
-}
-
-vectors['punctuation'] = {
-
- [0x003F] = { 0, 0.20 }, -- ?
- [0x00BF] = { 0, 0.20 }, -- ¿
- [0x0021] = { 0, 0.20 }, -- !
- [0x00A1] = { 0, 0.20 }, -- ¡
- [0x0028] = { 0.05, 0 }, -- (
- [0x0029] = { 0, 0.05 }, -- )
- [0x005B] = { 0.05, 0 }, -- [
- [0x005D] = { 0, 0.05 }, -- ]
- [0x002C] = { 0, 0.70 }, -- comma
- [0x002E] = { 0, 0.70 }, -- period
- [0x003A] = { 0, 0.50 }, -- colon
- [0x003B] = { 0, 0.50 }, -- semicolon
- [0x002D] = { 0, 0.70 }, -- hyphen
- [0x00AD] = { 0, 0.70 }, -- also hyphen
- [0x2013] = { 0, 0.30 }, -- endash
- [0x2014] = { 0, 0.20 }, -- emdash
- [0x060C] = { 0, 0.70 }, -- arabic comma
- [0x061B] = { 0, 0.50 }, -- arabic semicolon
- [0x06D4] = { 0, 0.70 }, -- arabic full stop
- [0x061F] = { 0, 0.20 }, -- ØŸ
-
- -- todo: left and right quotes: .5 double, .7 single
-
- [0x2039] = { 0.70, 0.70 }, -- left single guillemet ‹
- [0x203A] = { 0.70, 0.70 }, -- right single guillemet ›
- [0x00AB] = { 0.50, 0.50 }, -- left guillemet «
- [0x00BB] = { 0.50, 0.50 }, -- right guillemet »
-
- [0x2018] = { 0.70, 0.70 }, -- left single quotation mark ‘
- [0x2019] = { 0, 0.70 }, -- right single quotation mark ’
- [0x201A] = { 0.70, 0 }, -- single low-9 quotation mark ,
- [0x201B] = { 0.70, 0 }, -- single high-reversed-9 quotation mark ‛
- [0x201C] = { 0.50, 0.50 }, -- left double quotation mark “
- [0x201D] = { 0, 0.50 }, -- right double quotation mark â€
- [0x201E] = { 0.50, 0 }, -- double low-9 quotation mark „
- [0x201F] = { 0.50, 0 }, -- double high-reversed-9 quotation mark ‟
-
-}
-
-vectors['alpha'] = {
-
- [byte("A")] = { .05, .05 },
- [byte("F")] = { 0, .05 },
- [byte("J")] = { .05, 0 },
- [byte("K")] = { 0, .05 },
- [byte("L")] = { 0, .05 },
- [byte("T")] = { .05, .05 },
- [byte("V")] = { .05, .05 },
- [byte("W")] = { .05, .05 },
- [byte("X")] = { .05, .05 },
- [byte("Y")] = { .05, .05 },
-
- [byte("k")] = { 0, .05 },
- [byte("r")] = { 0, .05 },
- [byte("t")] = { 0, .05 },
- [byte("v")] = { .05, .05 },
- [byte("w")] = { .05, .05 },
- [byte("x")] = { .05, .05 },
- [byte("y")] = { .05, .05 },
-
-}
-
-vectors['quality'] = table.merged(
- vectors['punctuation'],
- vectors['alpha']
-)
-
--- As this is experimental code, users should not depend on it. The
--- implications are still discussed on the ConTeXt Dev List and we're
--- not sure yet what exactly the spec is (the next code is tested with
--- a gyre font patched by / fea file made by Khaled Hosny). The double
--- trick should not be needed if proper hanging punctuation is used in
--- which case values < 1 can be used.
---
--- preferred (in context, using vectors):
---
--- \definefontfeature[whatever][default][mode=node,protrusion=quality]
---
--- using lfbd and rtbd, with possibility to enable only one side:
---
--- \definefontfeature[whocares][default][mode=node,protrusion=yes, opbd=yes,script=latn]
--- \definefontfeature[whocares][default][mode=node,protrusion=right,opbd=yes,script=latn]
---
--- idem, using multiplier
---
--- \definefontfeature[whocares][default][mode=node,protrusion=2,opbd=yes,script=latn]
--- \definefontfeature[whocares][default][mode=node,protrusion=double,opbd=yes,script=latn]
---
--- idem, using named feature file (less frozen):
---
--- \definefontfeature[whocares][default][mode=node,protrusion=2,opbd=yes,script=latn,featurefile=texgyrepagella-regularxx.fea]
-
-classes['double'] = { -- for testing opbd
- factor = 2, left = 1, right = 1,
-}
-
-local function map_opbd_onto_protrusion(tfmdata,value,opbd)
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local properties = tfmdata.properties
- local rawdata = tfmdata.shared.rawdata
- local lookuphash = rawdata.lookuphash
- local script = properties.script
- local language = properties.language
- local done, factor, left, right = false, 1, 1, 1
- local class = classes[value]
- if class then
- factor = class.factor or 1
- left = class.left or 1
- right = class.right or 1
- else
- factor = tonumber(value) or 1
- end
- if opbd ~= "right" then
- local validlookups, lookuplist = otf.collectlookups(rawdata,"lfbd",script,language)
- if validlookups then
- for i=1,#lookuplist do
- local lookup = lookuplist[i]
- local data = lookuphash[lookup]
- if data then
- if trace_protrusion then
- report_protrusions("setting left using lfbd lookup %a",lookup)
- end
- for k, v in next, data do
- -- local p = - v[3] / descriptions[k].width-- or 1 ~= 0 too but the same
- local p = - (v[1] / 1000) * factor * left
- characters[k].left_protruding = p
- if trace_protrusion then
- report_protrusions("lfbd -> %s -> %C -> %0.03f (% t)",lookup,k,p,v)
- end
- end
- done = true
- end
- end
- end
- end
- if opbd ~= "left" then
- local validlookups, lookuplist = otf.collectlookups(rawdata,"rtbd",script,language)
- if validlookups then
- for i=1,#lookuplist do
- local lookup = lookuplist[i]
- local data = lookuphash[lookup]
- if data then
- if trace_protrusion then
- report_protrusions("setting right using rtbd lookup %a",lookup)
- end
- for k, v in next, data do
- -- local p = v[3] / descriptions[k].width -- or 3
- local p = (v[1] / 1000) * factor * right
- characters[k].right_protruding = p
- if trace_protrusion then
- report_protrusions("rtbd -> %s -> %C -> %0.03f (% t)",lookup,k,p,v)
- end
- end
- end
- done = true
- end
- end
- end
- local parameters = tfmdata.parameters
- local protrusion = tfmdata.protrusion
- if not protrusion then
- parameters.protrusion = {
- auto = true
- }
- else
- protrusion.auto = true
- end
-end
-
--- The opbd test is just there because it was discussed on the
--- context development list. However, the mentioned fxlbi.otf font
--- only has some kerns for digits. So, consider this feature not
--- supported till we have a proper test font.
-
-local function initializeprotrusion(tfmdata,value)
- if value then
- local opbd = tfmdata.shared.features.opbd
- if opbd then
- -- possible values: left right both yes no (experimental)
- map_opbd_onto_protrusion(tfmdata,value,opbd)
- else
- local class, vector = get_class_and_vector(tfmdata,value,"protrusions")
- if class then
- if vector then
- local factor = class.factor or 1
- local left = class.left or 1
- local right = class.right or 1
- if trace_protrusion then
- report_protrusions("setting class %a, vector %a, factor %a, left %a, right %a",
- value,class.vector,factor,left,right)
- end
- local data = characters.data
- local emwidth = tfmdata.parameters.quad
- tfmdata.parameters.protrusion = {
- factor = factor,
- left = left,
- right = right,
- auto = true,
- }
- for i, chr in next, tfmdata.characters do
- local v, pl, pr = vector[i], nil, nil
- if v then
- pl, pr = v[1], v[2]
- else
- local d = data[i]
- if d then
- local s = d.shcode
- if not s then
- -- sorry
- elseif type(s) == "table" then
- local vl, vr = vector[s[1]], vector[s[#s]]
- if vl then pl = vl[1] end
- if vr then pr = vr[2] end
- else
- v = vector[s]
- if v then
- pl, pr = v[1], v[2]
- end
- end
- end
- end
- if pl and pl ~= 0 then
- chr.left_protruding = left *pl*factor
- end
- if pr and pr ~= 0 then
- chr.right_protruding = right*pr*factor
- end
- end
- elseif trace_protrusion then
- report_protrusions("unknown vector %a in class %a",class.vector,value)
- end
- elseif trace_protrusion then
- report_protrusions("unknown class %a",value)
- end
- end
- end
-end
-
-registerotffeature {
- name = "protrusion",
- description = "shift characters into the left and or right margin",
- initializers = {
- base = initializeprotrusion,
- node = initializeprotrusion,
- }
-}
-
-registerafmfeature {
- name = "protrusion",
- description = "shift characters into the left and or right margin",
- initializers = {
- base = initializeprotrusion,
- node = initializeprotrusion,
- }
-}
-
-fonts.goodies.register("protrusions", function(...) return fonts.goodies.report("protrusions", trace_protrusion, ...) end)
-
--- -- --
-
-local function initializenostackmath(tfmdata,value)
- tfmdata.properties.nostackmath = value and true
-end
-
-registerotffeature {
- name = "nostackmath",
- description = "disable math stacking mechanism",
- initializers = {
- base = initializenostackmath,
- node = initializenostackmath,
- }
-}
-
-local function initializeitlc(tfmdata,value) -- hm, always value
- if value then
- -- the magic 40 and its formula come from Dohyun Kim but we might need another guess
- local parameters = tfmdata.parameters
- local italicangle = parameters.italicangle
- if italicangle and italicangle ~= 0 then
- local properties = tfmdata.properties
- local factor = tonumber(value) or 1
- properties.hasitalics = true
- properties.autoitalicamount = factor * (parameters.uwidth or 40)/2
- end
- end
-end
-
-registerotffeature {
- name = "itlc",
- description = "italic correction",
- initializers = {
- base = initializeitlc,
- node = initializeitlc,
- }
-}
-
-registerafmfeature {
- name = "itlc",
- description = "italic correction",
- initializers = {
- base = initializeitlc,
- node = initializeitlc,
- }
-}
-
-local function initializetextitalics(tfmdata,value) -- yes no delay
- local delay = value == "delay"
- tfmdata.properties.textitalics = delay and true or value
- tfmdata.properties.delaytextitalics = delay
-end
-
-registerotffeature {
- name = "textitalics",
- description = "use alternative text italic correction",
- initializers = {
- base = initializetextitalics,
- node = initializetextitalics,
- }
-}
-
-registerafmfeature {
- name = "textitalics",
- description = "use alternative text italic correction",
- initializers = {
- base = initializetextitalics,
- node = initializetextitalics,
- }
-}
-
--- slanting
-
-local function initializeslant(tfmdata,value)
- value = tonumber(value)
- if not value then
- value = 0
- elseif value > 1 then
- value = 1
- elseif value < -1 then
- value = -1
- end
- tfmdata.parameters.slantfactor = value
-end
-
-registerotffeature {
- name = "slant",
- description = "slant glyphs",
- initializers = {
- base = initializeslant,
- node = initializeslant,
- }
-}
-
-registerafmfeature {
- name = "slant",
- description = "slant glyphs",
- initializers = {
- base = initializeslant,
- node = initializeslant,
- }
-}
-
-local function initializeextend(tfmdata,value)
- value = tonumber(value)
- if not value then
- value = 0
- elseif value > 10 then
- value = 10
- elseif value < -10 then
- value = -10
- end
- tfmdata.parameters.extendfactor = value
-end
-
-registerotffeature {
- name = "extend",
- description = "scale glyphs horizontally",
- initializers = {
- base = initializeextend,
- node = initializeextend,
- }
-}
-
-registerafmfeature {
- name = "extend",
- description = "scale glyphs horizontally",
- initializers = {
- base = initializeextend,
- node = initializeextend,
- }
-}
-
--- For Wolfgang Schuster:
---
--- \definefontfeature[thisway][default][script=hang,language=zhs,dimensions={2,2,2}]
--- \definedfont[file:kozminpr6nregular*thisway]
---
--- For the moment we don't mess with the descriptions.
-
-local function manipulatedimensions(tfmdata,key,value)
- if type(value) == "string" and value ~= "" then
- local characters = tfmdata.characters
- local parameters = tfmdata.parameters
- local emwidth = parameters.quad
- local exheight = parameters.xheight
- local spec = settings_to_array(value)
- local width = (spec[1] or 0) * emwidth
- local height = (spec[2] or 0) * exheight
- local depth = (spec[3] or 0) * exheight
- if width > 0 then
- local resources = tfmdata.resources
- local additions = { }
- local private = resources.private
- for unicode, old_c in next, characters do
- local oldwidth = old_c.width
- if oldwidth ~= width then
- -- Defining the tables in one step is more efficient
- -- than adding fields later.
- private = private + 1
- local new_c
- local commands = {
- { "right", (width - oldwidth) / 2 },
- { "slot", 1, private },
- }
- if height > 0 then
- if depth > 0 then
- new_c = {
- width = width,
- height = height,
- depth = depth,
- commands = commands,
- }
- else
- new_c = {
- width = width,
- height = height,
- commands = commands,
- }
- end
- else
- if depth > 0 then
- new_c = {
- width = width,
- depth = depth,
- commands = commands,
- }
- else
- new_c = {
- width = width,
- commands = commands,
- }
- end
- end
- setmetatableindex(new_c,old_c)
- characters[unicode] = new_c
- additions[private] = old_c
- end
- end
- for k, v in next, additions do
- characters[k] = v
- end
- resources.private = private
- elseif height > 0 and depth > 0 then
- for unicode, old_c in next, characters do
- old_c.height = height
- old_c.depth = depth
- end
- elseif height > 0 then
- for unicode, old_c in next, characters do
- old_c.height = height
- end
- elseif depth > 0 then
- for unicode, old_c in next, characters do
- old_c.depth = depth
- end
- end
- end
-end
-
-registerotffeature {
- name = "dimensions",
- description = "force dimensions",
- manipulators = {
- base = manipulatedimensions,
- node = manipulatedimensions,
- }
-}
-
--- for zhichu chen (see mailing list archive): we might add a few more variants
--- in due time
---
--- \definefontfeature[boxed][default][boundingbox=yes] % paleblue
---
--- maybe:
---
--- \definecolor[DummyColor][s=.75,t=.5,a=1] {\DummyColor test} \nopdfcompression
---
--- local gray = { "special", "pdf: /Tr1 gs .75 g" }
--- local black = { "special", "pdf: /Tr0 gs 0 g" }
-
-local push = { "push" }
-local pop = { "pop" }
-local gray = { "special", "pdf: .75 g" }
-local black = { "special", "pdf: 0 g" }
-
-local downcache = { } -- handy for huge cjk fonts
-local rulecache = { } -- handy for huge cjk fonts
-
-setmetatableindex(downcache,function(t,d)
- local v = { "down", d }
- t[d] = v
- return v
-end)
-
-setmetatableindex(rulecache,function(t,h)
- local v = { }
- t[h] = v
- setmetatableindex(v,function(t,w)
- local v = { "rule", h, w }
- t[w] = v
- return v
- end)
- return v
-end)
-
-local function showboundingbox(tfmdata,key,value)
- if value then
- local vfspecials = backends.pdf.tables.vfspecials
- local gray = vfspecials and (vfspecials.rulecolors[value] or vfspecials.rulecolors.palegray) or gray
- local characters = tfmdata.characters
- local resources = tfmdata.resources
- local additions = { }
- local private = resources.private
- for unicode, old_c in next, characters do
- private = private + 1
- local width = old_c.width or 0
- local height = old_c.height or 0
- local depth = old_c.depth or 0
- local new_c
- if depth == 0 then
- new_c = {
- width = width,
- height = height,
- commands = {
- push,
- gray,
- rulecache[height][width],
- black,
- pop,
- { "slot", 1, private },
- }
- }
- else
- new_c = {
- width = width,
- height = height,
- depth = depth,
- commands = {
- push,
- downcache[depth],
- gray,
- rulecache[height+depth][width],
- black,
- pop,
- { "slot", 1, private },
- }
- }
- end
- setmetatableindex(new_c,old_c)
- characters[unicode] = new_c
- additions[private] = old_c
- end
- for k, v in next, additions do
- characters[k] = v
- end
- resources.private = private
- end
-end
-
-registerotffeature {
- name = "boundingbox",
- description = "show boundingbox",
- manipulators = {
- base = showboundingbox,
- node = showboundingbox,
- }
-}
-
--- -- historic stuff, move from font-ota (handled differently, typo-rep)
---
--- local delete_node = nodes.delete
--- local fontdata = fonts.hashes.identifiers
---
--- local nodecodes = nodes.nodecodes
--- local glyph_code = nodecodes.glyph
---
--- local strippables = allocate()
--- fonts.strippables = strippables
---
--- strippables.joiners = table.tohash {
--- 0x200C, -- zwnj
--- 0x200D, -- zwj
--- }
---
--- strippables.all = table.tohash {
--- 0x000AD, 0x017B4, 0x017B5, 0x0200B, 0x0200C, 0x0200D, 0x0200E, 0x0200F, 0x0202A, 0x0202B,
--- 0x0202C, 0x0202D, 0x0202E, 0x02060, 0x02061, 0x02062, 0x02063, 0x0206A, 0x0206B, 0x0206C,
--- 0x0206D, 0x0206E, 0x0206F, 0x0FEFF, 0x1D173, 0x1D174, 0x1D175, 0x1D176, 0x1D177, 0x1D178,
--- 0x1D179, 0x1D17A, 0xE0001, 0xE0020, 0xE0021, 0xE0022, 0xE0023, 0xE0024, 0xE0025, 0xE0026,
--- 0xE0027, 0xE0028, 0xE0029, 0xE002A, 0xE002B, 0xE002C, 0xE002D, 0xE002E, 0xE002F, 0xE0030,
--- 0xE0031, 0xE0032, 0xE0033, 0xE0034, 0xE0035, 0xE0036, 0xE0037, 0xE0038, 0xE0039, 0xE003A,
--- 0xE003B, 0xE003C, 0xE003D, 0xE003E, 0xE003F, 0xE0040, 0xE0041, 0xE0042, 0xE0043, 0xE0044,
--- 0xE0045, 0xE0046, 0xE0047, 0xE0048, 0xE0049, 0xE004A, 0xE004B, 0xE004C, 0xE004D, 0xE004E,
--- 0xE004F, 0xE0050, 0xE0051, 0xE0052, 0xE0053, 0xE0054, 0xE0055, 0xE0056, 0xE0057, 0xE0058,
--- 0xE0059, 0xE005A, 0xE005B, 0xE005C, 0xE005D, 0xE005E, 0xE005F, 0xE0060, 0xE0061, 0xE0062,
--- 0xE0063, 0xE0064, 0xE0065, 0xE0066, 0xE0067, 0xE0068, 0xE0069, 0xE006A, 0xE006B, 0xE006C,
--- 0xE006D, 0xE006E, 0xE006F, 0xE0070, 0xE0071, 0xE0072, 0xE0073, 0xE0074, 0xE0075, 0xE0076,
--- 0xE0077, 0xE0078, 0xE0079, 0xE007A, 0xE007B, 0xE007C, 0xE007D, 0xE007E, 0xE007F,
--- }
---
--- strippables[true] = strippables.joiners
---
--- local function processformatters(head,font)
--- local subset = fontdata[font].shared.features.formatters
--- local vector = subset and strippables[subset]
--- if vector then
--- local current, done = head, false
--- while current do
--- if current.id == glyph_code and current.subtype<256 and current.font == font then
--- local char = current.char
--- if vector[char] then
--- head, current = delete_node(head,current)
--- done = true
--- else
--- current = current.next
--- end
--- else
--- current = current.next
--- end
--- end
--- return head, done
--- else
--- return head, false
--- end
--- end
---
--- registerotffeature {
--- name = "formatters",
--- description = "hide formatting characters",
--- methods = {
--- base = processformatters,
--- node = processformatters,
--- }
--- }
-
--- a handy helper (might change or be moved to another namespace)
-
-local new_special = nodes.pool.special
-local new_glyph = nodes.pool.glyph
-local hpack_node = node.hpack
-
-function fonts.helpers.addprivate(tfmdata,name,characterdata)
- local properties = tfmdata.properties
- local privates = properties.privates
- local lastprivate = properties.lastprivate
- if lastprivate then
- lastprivate = lastprivate + 1
- else
- lastprivate = 0xE000
- end
- if not privates then
- privates = { }
- properties.privates = privates
- end
- if name then
- privates[name] = lastprivate
- end
- properties.lastprivate = lastprivate
- tfmdata.characters[lastprivate] = characterdata
- if properties.finalized then
- properties.lateprivates = true
- end
- return lastprivate
-end
-
-function fonts.helpers.getprivatenode(tfmdata,name)
- local properties = tfmdata.properties
- local privates = properties and properties.privates
- if privates then
- local p = privates[name]
- if p then
- local char = tfmdata.characters[p]
- local commands = char.commands
- if commands then
- local fake = hpack_node(new_special(commands[1][2]))
- fake.width = char.width
- fake.height = char.height
- fake.depth = char.depth
- return fake
- else
- -- todo: set current attributes
- return new_glyph(properties.id,p)
- end
- end
- end
-end
-
-function fonts.helpers.hasprivate(tfmdata,name)
- local properties = tfmdata.properties
- local privates = properties and properties.privates
- return privates and privates[name] or false
-end
-
-function commands.getprivatechar(name)
- context(fonts.helpers.getprivatenode(fontdata[font.current()],name))
-end
+if not modules then modules = { } end modules ['font-ext'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv and hand-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local next, type, byte = next, type, string.byte
+local gmatch, concat, format = string.gmatch, table.concat, string.format
+local utfchar = utf.char
+
+local commands, context = commands, context
+local fonts, utilities = fonts, utilities
+
+local trace_protrusion = false trackers.register("fonts.protrusion", function(v) trace_protrusion = v end)
+local trace_expansion = false trackers.register("fonts.expansion", function(v) trace_expansion = v end)
+
+local report_expansions = logs.reporter("fonts","expansions")
+local report_protrusions = logs.reporter("fonts","protrusions")
+
+-- todo: byte(..) => 0xHHHH
+
+--[[ldx--
+
+When we implement functions that deal with features, most of them
+will depend on the font format. Here we define the few that are kind
+of neutral.
+--ldx]]--
+
+local handlers = fonts.handlers
+local hashes = fonts.hashes
+local otf = handlers.otf
+
+local registerotffeature = handlers.otf.features.register
+local registerafmfeature = handlers.afm.features.register
+
+local fontdata = hashes.identifiers
+
+local allocate = utilities.storage.allocate
+local settings_to_array = utilities.parsers.settings_to_array
+local getparameters = utilities.parsers.getparameters
+
+local setmetatableindex = table.setmetatableindex
+
+-- -- -- -- -- --
+-- shared
+-- -- -- -- -- --
+
+local function get_class_and_vector(tfmdata,value,where) -- "expansions"
+ local g_where = tfmdata.goodies and tfmdata.goodies[where]
+ local f_where = fonts[where]
+ local g_classes = g_where and g_where.classes
+ local f_classes = f_where and f_where.classes
+ local class = (g_classes and g_classes[value]) or (f_classes and f_classes[value])
+ if class then
+ local class_vector = class.vector
+ local g_vectors = g_where and g_where.vectors
+ local f_vectors = f_where and f_where.vectors
+ local vector = (g_vectors and g_vectors[class_vector]) or (f_vectors and f_vectors[class_vector])
+ return class, vector
+ end
+end
+
+-- -- -- -- -- --
+-- expansion (hz)
+-- -- -- -- -- --
+
+local expansions = fonts.expansions or allocate()
+
+fonts.expansions = expansions
+
+local classes = expansions.classes or allocate()
+local vectors = expansions.vectors or allocate()
+
+expansions.classes = classes
+expansions.vectors = vectors
+
+-- beware, pdftex itself uses percentages * 10
+
+classes.preset = { stretch = 2, shrink = 2, step = .5, factor = 1 }
+
+function commands.setupfontexpansion(class,settings)
+ getparameters(classes,class,'preset',settings)
+end
+
+classes['quality'] = {
+ stretch = 2, shrink = 2, step = .5, vector = 'default', factor = 1
+}
+
+vectors['default'] = {
+ [byte('A')] = 0.5, [byte('B')] = 0.7, [byte('C')] = 0.7, [byte('D')] = 0.5, [byte('E')] = 0.7,
+ [byte('F')] = 0.7, [byte('G')] = 0.5, [byte('H')] = 0.7, [byte('K')] = 0.7, [byte('M')] = 0.7,
+ [byte('N')] = 0.7, [byte('O')] = 0.5, [byte('P')] = 0.7, [byte('Q')] = 0.5, [byte('R')] = 0.7,
+ [byte('S')] = 0.7, [byte('U')] = 0.7, [byte('W')] = 0.7, [byte('Z')] = 0.7,
+ [byte('a')] = 0.7, [byte('b')] = 0.7, [byte('c')] = 0.7, [byte('d')] = 0.7, [byte('e')] = 0.7,
+ [byte('g')] = 0.7, [byte('h')] = 0.7, [byte('k')] = 0.7, [byte('m')] = 0.7, [byte('n')] = 0.7,
+ [byte('o')] = 0.7, [byte('p')] = 0.7, [byte('q')] = 0.7, [byte('s')] = 0.7, [byte('u')] = 0.7,
+ [byte('w')] = 0.7, [byte('z')] = 0.7,
+ [byte('2')] = 0.7, [byte('3')] = 0.7, [byte('6')] = 0.7, [byte('8')] = 0.7, [byte('9')] = 0.7,
+}
+
+vectors['quality'] = vectors['default'] -- metatable ?
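+
+-- A hedged sketch (not part of this changeset): an extra class plus vector
+-- can be added from Lua along the same lines; the names "wide" and
+-- "roundish" are made up for illustration.
+--
+-- fonts.expansions.classes["wide"] = {
+--     stretch = 3, shrink = 3, step = .5, vector = "roundish", factor = 1,
+-- }
+--
+-- fonts.expansions.vectors["roundish"] = {
+--     [string.byte("c")] = 0.8, [string.byte("e")] = 0.8, [string.byte("o")] = 0.8,
+-- }
+--
+-- after which a feature set can refer to it:
+--
+-- \definefontfeature[hz][default][mode=node,expansion=wide]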
+
+local function initializeexpansion(tfmdata,value)
+ if value then
+ local class, vector = get_class_and_vector(tfmdata,value,"expansions")
+ if class then
+ if vector then
+ local stretch = class.stretch or 0
+ local shrink = class.shrink or 0
+ local step = class.step or 0
+ local factor = class.factor or 1
+ if trace_expansion then
+ report_expansions("setting class %a, vector %a, factor %a, stretch %a, shrink %a, step %a",
+ value,class.vector,factor,stretch,shrink,step)
+ end
+ tfmdata.parameters.expansion = {
+ stretch = 10 * stretch,
+ shrink = 10 * shrink,
+ step = 10 * step,
+ factor = factor,
+ auto = true,
+ }
+ local data = characters and characters.data
+ for i, chr in next, tfmdata.characters do
+ local v = vector[i]
+ if data and not v then -- we could move the data test outside (needed for plain)
+ local d = data[i]
+ if d then
+ local s = d.shcode
+ if not s then
+ -- sorry
+ elseif type(s) == "table" then
+ v = ((vector[s[1]] or 0) + (vector[s[#s]] or 0)) / 2
+ else
+ v = vector[s] or 0
+ end
+ end
+ end
+ if v and v ~= 0 then
+ chr.expansion_factor = v*factor
+ else -- can be option
+ chr.expansion_factor = factor
+ end
+ end
+ elseif trace_expansion then
+ report_expansions("unknown vector %a in class %a",class.vector,value)
+ end
+ elseif trace_expansion then
+ report_expansions("unknown class %a",value)
+ end
+ end
+end
+
+registerotffeature {
+ name = "expansion",
+ description = "apply hz optimization",
+ initializers = {
+ base = initializeexpansion,
+ node = initializeexpansion,
+ }
+}
+
+registerafmfeature {
+ name = "expansion",
+ description = "apply hz optimization",
+ initializers = {
+ base = initializeexpansion,
+ node = initializeexpansion,
+ }
+}
+
+fonts.goodies.register("expansions", function(...) return fonts.goodies.report("expansions", trace_expansion, ...) end)
+
+local report_opbd = logs.reporter("fonts","otf opbd")
+
+-- -- -- -- -- --
+-- protrusion
+-- -- -- -- -- --
+
+fonts.protrusions = allocate()
+local protrusions = fonts.protrusions
+
+protrusions.classes = allocate()
+protrusions.vectors = allocate()
+
+local classes = protrusions.classes
+local vectors = protrusions.vectors
+
+-- the values need to be revised
+
+classes.preset = { factor = 1, left = 1, right = 1 }
+
+function commands.setupfontprotrusion(class,settings)
+ getparameters(classes,class,'preset',settings)
+end
+
+classes['pure'] = {
+ vector = 'pure', factor = 1
+}
+classes['punctuation'] = {
+ vector = 'punctuation', factor = 1
+}
+classes['alpha'] = {
+ vector = 'alpha', factor = 1
+}
+classes['quality'] = {
+ vector = 'quality', factor = 1
+}
+
+vectors['pure'] = {
+
+ [0x002C] = { 0, 1 }, -- comma
+ [0x002E] = { 0, 1 }, -- period
+ [0x003A] = { 0, 1 }, -- colon
+ [0x003B] = { 0, 1 }, -- semicolon
+ [0x002D] = { 0, 1 }, -- hyphen
+ [0x00AD] = { 0, 1 }, -- also hyphen
+ [0x2013] = { 0, 0.50 }, -- endash
+ [0x2014] = { 0, 0.33 }, -- emdash
+ [0x3001] = { 0, 1 }, -- ideographic comma 、
+ [0x3002] = { 0, 1 }, -- ideographic full stop 。
+ [0x060C] = { 0, 1 }, -- arabic comma ،
+ [0x061B] = { 0, 1 }, -- arabic semicolon ؛
+ [0x06D4] = { 0, 1 }, -- arabic full stop ۔
+
+}
+
+vectors['punctuation'] = {
+
+ [0x003F] = { 0, 0.20 }, -- ?
+ [0x00BF] = { 0, 0.20 }, -- ¿
+ [0x0021] = { 0, 0.20 }, -- !
+ [0x00A1] = { 0, 0.20 }, -- ¡
+ [0x0028] = { 0.05, 0 }, -- (
+ [0x0029] = { 0, 0.05 }, -- )
+ [0x005B] = { 0.05, 0 }, -- [
+ [0x005D] = { 0, 0.05 }, -- ]
+ [0x002C] = { 0, 0.70 }, -- comma
+ [0x002E] = { 0, 0.70 }, -- period
+ [0x003A] = { 0, 0.50 }, -- colon
+ [0x003B] = { 0, 0.50 }, -- semicolon
+ [0x002D] = { 0, 0.70 }, -- hyphen
+ [0x00AD] = { 0, 0.70 }, -- also hyphen
+ [0x2013] = { 0, 0.30 }, -- endash
+ [0x2014] = { 0, 0.20 }, -- emdash
+ [0x060C] = { 0, 0.70 }, -- arabic comma
+ [0x061B] = { 0, 0.50 }, -- arabic semicolon
+ [0x06D4] = { 0, 0.70 }, -- arabic full stop
+ [0x061F] = { 0, 0.20 }, -- ؟
+
+ -- todo: left and right quotes: .5 double, .7 single
+
+ [0x2039] = { 0.70, 0.70 }, -- left single guillemet ‹
+ [0x203A] = { 0.70, 0.70 }, -- right single guillemet ›
+ [0x00AB] = { 0.50, 0.50 }, -- left guillemet «
+ [0x00BB] = { 0.50, 0.50 }, -- right guillemet »
+
+ [0x2018] = { 0.70, 0.70 }, -- left single quotation mark ‘
+ [0x2019] = { 0, 0.70 }, -- right single quotation mark ’
+ [0x201A] = { 0.70, 0 }, -- single low-9 quotation mark ,
+ [0x201B] = { 0.70, 0 }, -- single high-reversed-9 quotation mark ‛
+ [0x201C] = { 0.50, 0.50 }, -- left double quotation mark “
+ [0x201D] = { 0, 0.50 }, -- right double quotation mark ”
+ [0x201E] = { 0.50, 0 }, -- double low-9 quotation mark „
+ [0x201F] = { 0.50, 0 }, -- double high-reversed-9 quotation mark ‟
+
+}
+
+vectors['alpha'] = {
+
+ [byte("A")] = { .05, .05 },
+ [byte("F")] = { 0, .05 },
+ [byte("J")] = { .05, 0 },
+ [byte("K")] = { 0, .05 },
+ [byte("L")] = { 0, .05 },
+ [byte("T")] = { .05, .05 },
+ [byte("V")] = { .05, .05 },
+ [byte("W")] = { .05, .05 },
+ [byte("X")] = { .05, .05 },
+ [byte("Y")] = { .05, .05 },
+
+ [byte("k")] = { 0, .05 },
+ [byte("r")] = { 0, .05 },
+ [byte("t")] = { 0, .05 },
+ [byte("v")] = { .05, .05 },
+ [byte("w")] = { .05, .05 },
+ [byte("x")] = { .05, .05 },
+ [byte("y")] = { .05, .05 },
+
+}
+
+vectors['quality'] = table.merged(
+ vectors['punctuation'],
+ vectors['alpha']
+)
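+
+-- A hedged sketch (names made up): an extra protrusion class with its own
+-- vector of { left, right } fractions can be added the same way:
+--
+-- fonts.protrusions.classes["dashes"] = { vector = "dashes", factor = 1 }
+-- fonts.protrusions.vectors["dashes"] = {
+--     [0x2013] = { 0, 0.40 }, -- endash
+--     [0x2014] = { 0, 0.30 }, -- emdash
+-- }
+--
+-- \definefontfeature[hanging][default][mode=node,protrusion=dashes]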
+
+-- As this is experimental code, users should not depend on it. The
+-- implications are still discussed on the ConTeXt Dev List and we're
+-- not sure yet what exactly the spec is (the next code is tested with
+-- a gyre font patched by / fea file made by Khaled Hosny). The double
+-- trick should not be needed if proper hanging punctuation is used, in
+-- which case values < 1 can be used.
+--
+-- preferred (in context, using vectors):
+--
+-- \definefontfeature[whatever][default][mode=node,protrusion=quality]
+--
+-- using lfbd and rtbd, with the possibility to enable only one side:
+--
+-- \definefontfeature[whocares][default][mode=node,protrusion=yes, opbd=yes,script=latn]
+-- \definefontfeature[whocares][default][mode=node,protrusion=right,opbd=yes,script=latn]
+--
+-- idem, using multiplier
+--
+-- \definefontfeature[whocares][default][mode=node,protrusion=2,opbd=yes,script=latn]
+-- \definefontfeature[whocares][default][mode=node,protrusion=double,opbd=yes,script=latn]
+--
+-- idem, using named feature file (less frozen):
+--
+-- \definefontfeature[whocares][default][mode=node,protrusion=2,opbd=yes,script=latn,featurefile=texgyrepagella-regularxx.fea]
+
+classes['double'] = { -- for testing opbd
+ factor = 2, left = 1, right = 1,
+}
+
+local function map_opbd_onto_protrusion(tfmdata,value,opbd)
+ local characters = tfmdata.characters
+ local descriptions = tfmdata.descriptions
+ local properties = tfmdata.properties
+ local rawdata = tfmdata.shared.rawdata
+ local lookuphash = rawdata.lookuphash
+ local script = properties.script
+ local language = properties.language
+ local done, factor, left, right = false, 1, 1, 1
+ local class = classes[value]
+ if class then
+ factor = class.factor or 1
+ left = class.left or 1
+ right = class.right or 1
+ else
+ factor = tonumber(value) or 1
+ end
+ if opbd ~= "right" then
+ local validlookups, lookuplist = otf.collectlookups(rawdata,"lfbd",script,language)
+ if validlookups then
+ for i=1,#lookuplist do
+ local lookup = lookuplist[i]
+ local data = lookuphash[lookup]
+ if data then
+ if trace_protrusion then
+ report_protrusions("setting left using lfbd lookup %a",lookup)
+ end
+ for k, v in next, data do
+ -- local p = - v[3] / descriptions[k].width-- or 1 ~= 0 too but the same
+ local p = - (v[1] / 1000) * factor * left
+ characters[k].left_protruding = p
+ if trace_protrusion then
+ report_protrusions("lfbd -> %s -> %C -> %0.03f (% t)",lookup,k,p,v)
+ end
+ end
+ done = true
+ end
+ end
+ end
+ end
+ if opbd ~= "left" then
+ local validlookups, lookuplist = otf.collectlookups(rawdata,"rtbd",script,language)
+ if validlookups then
+ for i=1,#lookuplist do
+ local lookup = lookuplist[i]
+ local data = lookuphash[lookup]
+ if data then
+ if trace_protrusion then
+ report_protrusions("setting right using rtbd lookup %a",lookup)
+ end
+ for k, v in next, data do
+ -- local p = v[3] / descriptions[k].width -- or 3
+ local p = (v[1] / 1000) * factor * right
+ characters[k].right_protruding = p
+ if trace_protrusion then
+ report_protrusions("rtbd -> %s -> %C -> %0.03f (% t)",lookup,k,p,v)
+ end
+ end
+ end
+ done = true
+ end
+ end
+ end
+ local parameters = tfmdata.parameters
+ local protrusion = tfmdata.protrusion
+ if not protrusion then
+ parameters.protrusion = {
+ auto = true
+ }
+ else
+ protrusion.auto = true
+ end
+end
+
+-- The opbd test is just there because it was discussed on the
+-- context development list. However, the mentioned fxlbi.otf font
+-- only has some kerns for digits. So, consider this feature not
+-- supported till we have a proper test font.
+
+local function initializeprotrusion(tfmdata,value)
+ if value then
+ local opbd = tfmdata.shared.features.opbd
+ if opbd then
+ -- possible values: left right both yes no (experimental)
+ map_opbd_onto_protrusion(tfmdata,value,opbd)
+ else
+ local class, vector = get_class_and_vector(tfmdata,value,"protrusions")
+ if class then
+ if vector then
+ local factor = class.factor or 1
+ local left = class.left or 1
+ local right = class.right or 1
+ if trace_protrusion then
+ report_protrusions("setting class %a, vector %a, factor %a, left %a, right %a",
+ value,class.vector,factor,left,right)
+ end
+ local data = characters.data
+ local emwidth = tfmdata.parameters.quad
+ tfmdata.parameters.protrusion = {
+ factor = factor,
+ left = left,
+ right = right,
+ auto = true,
+ }
+ for i, chr in next, tfmdata.characters do
+ local v, pl, pr = vector[i], nil, nil
+ if v then
+ pl, pr = v[1], v[2]
+ else
+ local d = data[i]
+ if d then
+ local s = d.shcode
+ if not s then
+ -- sorry
+ elseif type(s) == "table" then
+ local vl, vr = vector[s[1]], vector[s[#s]]
+ if vl then pl = vl[1] end
+ if vr then pr = vr[2] end
+ else
+ v = vector[s]
+ if v then
+ pl, pr = v[1], v[2]
+ end
+ end
+ end
+ end
+ if pl and pl ~= 0 then
+ chr.left_protruding = left *pl*factor
+ end
+ if pr and pr ~= 0 then
+ chr.right_protruding = right*pr*factor
+ end
+ end
+ elseif trace_protrusion then
+ report_protrusions("unknown vector %a in class %a",class.vector,value)
+ end
+ elseif trace_protrusion then
+ report_protrusions("unknown class %a",value)
+ end
+ end
+ end
+end
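+
+-- A worked example of the vector path above, using the numbers defined
+-- earlier: with class 'punctuation' (factor 1, left 1, right 1) the comma
+-- (0x002C) has the entry { 0, 0.70 }, so
+--
+--     chr.right_protruding = right * pr * factor = 1 * 0.70 * 1 = 0.70
+--
+-- while left_protruding stays unset because pl is 0.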
+
+registerotffeature {
+ name = "protrusion",
+ description = "shift characters into the left and or right margin",
+ initializers = {
+ base = initializeprotrusion,
+ node = initializeprotrusion,
+ }
+}
+
+registerafmfeature {
+ name = "protrusion",
+ description = "shift characters into the left and or right margin",
+ initializers = {
+ base = initializeprotrusion,
+ node = initializeprotrusion,
+ }
+}
+
+fonts.goodies.register("protrusions", function(...) return fonts.goodies.report("protrusions", trace_protrusion, ...) end)
+
+-- -- --
+
+local function initializenostackmath(tfmdata,value)
+ tfmdata.properties.nostackmath = value and true
+end
+
+registerotffeature {
+ name = "nostackmath",
+ description = "disable math stacking mechanism",
+ initializers = {
+ base = initializenostackmath,
+ node = initializenostackmath,
+ }
+}
+
+local function initializeitlc(tfmdata,value) -- hm, always value
+ if value then
+ -- the magic 40 and its formula come from Dohyun Kim, but we might need another guess
+ local parameters = tfmdata.parameters
+ local italicangle = parameters.italicangle
+ if italicangle and italicangle ~= 0 then
+ local properties = tfmdata.properties
+ local factor = tonumber(value) or 1
+ properties.hasitalics = true
+ properties.autoitalicamount = factor * (parameters.uwidth or 40)/2
+ end
+ end
+end
+
+registerotffeature {
+ name = "itlc",
+ description = "italic correction",
+ initializers = {
+ base = initializeitlc,
+ node = initializeitlc,
+ }
+}
+
+registerafmfeature {
+ name = "itlc",
+ description = "italic correction",
+ initializers = {
+ base = initializeitlc,
+ node = initializeitlc,
+ }
+}
+
+local function initializetextitalics(tfmdata,value) -- yes no delay
+ local delay = value == "delay"
+ tfmdata.properties.textitalics = delay and true or value
+ tfmdata.properties.delaytextitalics = delay
+end
+
+registerotffeature {
+ name = "textitalics",
+ description = "use alternative text italic correction",
+ initializers = {
+ base = initializetextitalics,
+ node = initializetextitalics,
+ }
+}
+
+registerafmfeature {
+ name = "textitalics",
+ description = "use alternative text italic correction",
+ initializers = {
+ base = initializetextitalics,
+ node = initializetextitalics,
+ }
+}
+
+-- slanting
+
+local function initializeslant(tfmdata,value)
+ value = tonumber(value)
+ if not value then
+ value = 0
+ elseif value > 1 then
+ value = 1
+ elseif value < -1 then
+ value = -1
+ end
+ tfmdata.parameters.slantfactor = value
+end
+
+registerotffeature {
+ name = "slant",
+ description = "slant glyphs",
+ initializers = {
+ base = initializeslant,
+ node = initializeslant,
+ }
+}
+
+registerafmfeature {
+ name = "slant",
+ description = "slant glyphs",
+ initializers = {
+ base = initializeslant,
+ node = initializeslant,
+ }
+}
+
+local function initializeextend(tfmdata,value)
+ value = tonumber(value)
+ if not value then
+ value = 0
+ elseif value > 10 then
+ value = 10
+ elseif value < -10 then
+ value = -10
+ end
+ tfmdata.parameters.extendfactor = value
+end
+
+registerotffeature {
+ name = "extend",
+ description = "scale glyphs horizontally",
+ initializers = {
+ base = initializeextend,
+ node = initializeextend,
+ }
+}
+
+registerafmfeature {
+ name = "extend",
+ description = "scale glyphs horizontally",
+ initializers = {
+ base = initializeextend,
+ node = initializeextend,
+ }
+}
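+
+-- A hedged usage sketch (the values are clamped above to [-1,1] for slant
+-- and [-10,10] for extend):
+--
+-- \definefontfeature[fakeitalic][default][slant=.2]
+-- \definefontfeature[fakecondensed][default][extend=.8]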
+
+-- For Wolfgang Schuster:
+--
+-- \definefontfeature[thisway][default][script=hang,language=zhs,dimensions={2,2,2}]
+-- \definedfont[file:kozminpr6nregular*thisway]
+--
+-- For the moment we don't mess with the descriptions.
+
+local function manipulatedimensions(tfmdata,key,value)
+ if type(value) == "string" and value ~= "" then
+ local characters = tfmdata.characters
+ local parameters = tfmdata.parameters
+ local emwidth = parameters.quad
+ local exheight = parameters.xheight
+ local spec = settings_to_array(value)
+ local width = (spec[1] or 0) * emwidth
+ local height = (spec[2] or 0) * exheight
+ local depth = (spec[3] or 0) * exheight
+ if width > 0 then
+ local resources = tfmdata.resources
+ local additions = { }
+ local private = resources.private
+ for unicode, old_c in next, characters do
+ local oldwidth = old_c.width
+ if oldwidth ~= width then
+ -- Defining the tables in one step is more efficient
+ -- than adding fields later.
+ private = private + 1
+ local new_c
+ local commands = {
+ { "right", (width - oldwidth) / 2 },
+ { "slot", 1, private },
+ }
+ if height > 0 then
+ if depth > 0 then
+ new_c = {
+ width = width,
+ height = height,
+ depth = depth,
+ commands = commands,
+ }
+ else
+ new_c = {
+ width = width,
+ height = height,
+ commands = commands,
+ }
+ end
+ else
+ if depth > 0 then
+ new_c = {
+ width = width,
+ depth = depth,
+ commands = commands,
+ }
+ else
+ new_c = {
+ width = width,
+ commands = commands,
+ }
+ end
+ end
+ setmetatableindex(new_c,old_c)
+ characters[unicode] = new_c
+ additions[private] = old_c
+ end
+ end
+ for k, v in next, additions do
+ characters[k] = v
+ end
+ resources.private = private
+ elseif height > 0 and depth > 0 then
+ for unicode, old_c in next, characters do
+ old_c.height = height
+ old_c.depth = depth
+ end
+ elseif height > 0 then
+ for unicode, old_c in next, characters do
+ old_c.height = height
+ end
+ elseif depth > 0 then
+ for unicode, old_c in next, characters do
+ old_c.depth = depth
+ end
+ end
+ end
+end
+
+registerotffeature {
+ name = "dimensions",
+ description = "force dimensions",
+ manipulators = {
+ base = manipulatedimensions,
+ node = manipulatedimensions,
+ }
+}
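+
+-- In the manipulator above the first value scales the em width and the
+-- other two scale the ex height, so dimensions={1,1,0} gives every glyph a
+-- width of one em and a height of one ex (the depth is left alone); the
+-- original glyph is kept in a private slot and re-injected, centered, via
+-- the virtual "slot" command.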
+
+-- for zhichu chen (see mailing list archive): we might add a few more variants
+-- in due time
+--
+-- \definefontfeature[boxed][default][boundingbox=yes] % paleblue
+--
+-- maybe:
+--
+-- \definecolor[DummyColor][s=.75,t=.5,a=1] {\DummyColor test} \nopdfcompression
+--
+-- local gray = { "special", "pdf: /Tr1 gs .75 g" }
+-- local black = { "special", "pdf: /Tr0 gs 0 g" }
+
+local push = { "push" }
+local pop = { "pop" }
+local gray = { "special", "pdf: .75 g" }
+local black = { "special", "pdf: 0 g" }
+
+local downcache = { } -- handy for huge cjk fonts
+local rulecache = { } -- handy for huge cjk fonts
+
+setmetatableindex(downcache,function(t,d)
+ local v = { "down", d }
+ t[d] = v
+ return v
+end)
+
+setmetatableindex(rulecache,function(t,h)
+ local v = { }
+ t[h] = v
+ setmetatableindex(v,function(t,w)
+ local v = { "rule", h, w }
+ t[w] = v
+ return v
+ end)
+ return v
+end)
+
+local function showboundingbox(tfmdata,key,value)
+ if value then
+ local vfspecials = backends.pdf.tables.vfspecials
+ local gray = vfspecials and (vfspecials.rulecolors[value] or vfspecials.rulecolors.palegray) or gray
+ local characters = tfmdata.characters
+ local resources = tfmdata.resources
+ local additions = { }
+ local private = resources.private
+ for unicode, old_c in next, characters do
+ private = private + 1
+ local width = old_c.width or 0
+ local height = old_c.height or 0
+ local depth = old_c.depth or 0
+ local new_c
+ if depth == 0 then
+ new_c = {
+ width = width,
+ height = height,
+ commands = {
+ push,
+ gray,
+ rulecache[height][width],
+ black,
+ pop,
+ { "slot", 1, private },
+ }
+ }
+ else
+ new_c = {
+ width = width,
+ height = height,
+ depth = depth,
+ commands = {
+ push,
+ downcache[depth],
+ gray,
+ rulecache[height+depth][width],
+ black,
+ pop,
+ { "slot", 1, private },
+ }
+ }
+ end
+ setmetatableindex(new_c,old_c)
+ characters[unicode] = new_c
+ additions[private] = old_c
+ end
+ for k, v in next, additions do
+ characters[k] = v
+ end
+ resources.private = private
+ end
+end
+
+registerotffeature {
+ name = "boundingbox",
+ description = "show boundingbox",
+ manipulators = {
+ base = showboundingbox,
+ node = showboundingbox,
+ }
+}
+
+-- -- historic stuff, move from font-ota (handled differently, typo-rep)
+--
+-- local delete_node = nodes.delete
+-- local fontdata = fonts.hashes.identifiers
+--
+-- local nodecodes = nodes.nodecodes
+-- local glyph_code = nodecodes.glyph
+--
+-- local strippables = allocate()
+-- fonts.strippables = strippables
+--
+-- strippables.joiners = table.tohash {
+-- 0x200C, -- zwnj
+-- 0x200D, -- zwj
+-- }
+--
+-- strippables.all = table.tohash {
+-- 0x000AD, 0x017B4, 0x017B5, 0x0200B, 0x0200C, 0x0200D, 0x0200E, 0x0200F, 0x0202A, 0x0202B,
+-- 0x0202C, 0x0202D, 0x0202E, 0x02060, 0x02061, 0x02062, 0x02063, 0x0206A, 0x0206B, 0x0206C,
+-- 0x0206D, 0x0206E, 0x0206F, 0x0FEFF, 0x1D173, 0x1D174, 0x1D175, 0x1D176, 0x1D177, 0x1D178,
+-- 0x1D179, 0x1D17A, 0xE0001, 0xE0020, 0xE0021, 0xE0022, 0xE0023, 0xE0024, 0xE0025, 0xE0026,
+-- 0xE0027, 0xE0028, 0xE0029, 0xE002A, 0xE002B, 0xE002C, 0xE002D, 0xE002E, 0xE002F, 0xE0030,
+-- 0xE0031, 0xE0032, 0xE0033, 0xE0034, 0xE0035, 0xE0036, 0xE0037, 0xE0038, 0xE0039, 0xE003A,
+-- 0xE003B, 0xE003C, 0xE003D, 0xE003E, 0xE003F, 0xE0040, 0xE0041, 0xE0042, 0xE0043, 0xE0044,
+-- 0xE0045, 0xE0046, 0xE0047, 0xE0048, 0xE0049, 0xE004A, 0xE004B, 0xE004C, 0xE004D, 0xE004E,
+-- 0xE004F, 0xE0050, 0xE0051, 0xE0052, 0xE0053, 0xE0054, 0xE0055, 0xE0056, 0xE0057, 0xE0058,
+-- 0xE0059, 0xE005A, 0xE005B, 0xE005C, 0xE005D, 0xE005E, 0xE005F, 0xE0060, 0xE0061, 0xE0062,
+-- 0xE0063, 0xE0064, 0xE0065, 0xE0066, 0xE0067, 0xE0068, 0xE0069, 0xE006A, 0xE006B, 0xE006C,
+-- 0xE006D, 0xE006E, 0xE006F, 0xE0070, 0xE0071, 0xE0072, 0xE0073, 0xE0074, 0xE0075, 0xE0076,
+-- 0xE0077, 0xE0078, 0xE0079, 0xE007A, 0xE007B, 0xE007C, 0xE007D, 0xE007E, 0xE007F,
+-- }
+--
+-- strippables[true] = strippables.joiners
+--
+-- local function processformatters(head,font)
+-- local subset = fontdata[font].shared.features.formatters
+-- local vector = subset and strippables[subset]
+-- if vector then
+-- local current, done = head, false
+-- while current do
+-- if current.id == glyph_code and current.subtype<256 and current.font == font then
+-- local char = current.char
+-- if vector[char] then
+-- head, current = delete_node(head,current)
+-- done = true
+-- else
+-- current = current.next
+-- end
+-- else
+-- current = current.next
+-- end
+-- end
+-- return head, done
+-- else
+-- return head, false
+-- end
+-- end
+--
+-- registerotffeature {
+-- name = "formatters",
+-- description = "hide formatting characters",
+-- methods = {
+-- base = processformatters,
+-- node = processformatters,
+-- }
+-- }
+
+-- a handy helper (might change or be moved to another namespace)
+
+local new_special = nodes.pool.special
+local new_glyph = nodes.pool.glyph
+local hpack_node = node.hpack
+
+function fonts.helpers.addprivate(tfmdata,name,characterdata)
+ local properties = tfmdata.properties
+ local privates = properties.privates
+ local lastprivate = properties.lastprivate
+ if lastprivate then
+ lastprivate = lastprivate + 1
+ else
+ lastprivate = 0xE000
+ end
+ if not privates then
+ privates = { }
+ properties.privates = privates
+ end
+ if name then
+ privates[name] = lastprivate
+ end
+ properties.lastprivate = lastprivate
+ tfmdata.characters[lastprivate] = characterdata
+ if properties.finalized then
+ properties.lateprivates = true
+ end
+ return lastprivate
+end
+
+function fonts.helpers.getprivatenode(tfmdata,name)
+ local properties = tfmdata.properties
+ local privates = properties and properties.privates
+ if privates then
+ local p = privates[name]
+ if p then
+ local char = tfmdata.characters[p]
+ local commands = char.commands
+ if commands then
+ local fake = hpack_node(new_special(commands[1][2]))
+ fake.width = char.width
+ fake.height = char.height
+ fake.depth = char.depth
+ return fake
+ else
+ -- todo: set current attributes
+ return new_glyph(properties.id,p)
+ end
+ end
+ end
+end
+
+function fonts.helpers.hasprivate(tfmdata,name)
+ local properties = tfmdata.properties
+ local privates = properties and properties.privates
+ return privates and privates[name] or false
+end
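+
+-- A hedged usage sketch of the three helpers above (the slot name
+-- "demoglyph", the dimensions and the special are made up):
+--
+-- local tfmdata = fonts.hashes.identifiers[font.current()]
+-- if not fonts.helpers.hasprivate(tfmdata,"demoglyph") then
+--     fonts.helpers.addprivate(tfmdata,"demoglyph", {
+--         width    = 10 * 65536, -- dimensions in scaled points
+--         height   =  7 * 65536,
+--         commands = { { "special", "pdf: 0 0 10 7 re f" } },
+--     })
+-- end
+-- context(fonts.helpers.getprivatenode(tfmdata,"demoglyph"))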
+
+function commands.getprivatechar(name)
+ context(fonts.helpers.getprivatenode(fontdata[font.current()],name))
+end
diff --git a/tex/context/base/font-fbk.lua b/tex/context/base/font-fbk.lua
index 48e2167e6..32e5d16de 100644
--- a/tex/context/base/font-fbk.lua
+++ b/tex/context/base/font-fbk.lua
@@ -1,304 +1,304 @@
-if not modules then modules = { } end modules ['font-fbk'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local cos, tan, rad, format = math.cos, math.tan, math.rad, string.format
-local utfbyte, utfchar = utf.byte, utf.char
-
---[[ldx--
-<p>This is very experimental code!</p>
---ldx]]--
-
-local trace_combining_visualize = false trackers.register("fonts.composing.visualize", function(v) trace_combining_visualize = v end)
-local trace_combining_define = false trackers.register("fonts.composing.define", function(v) trace_combining_define = v end)
-
-trackers.register("fonts.combining", "fonts.composing.define") -- for old times sake (and manuals)
-trackers.register("fonts.combining.all", "fonts.composing.*") -- for old times sake (and manuals)
-
-local report_combining = logs.reporter("fonts","combining")
-
-local force_combining = false -- just for demo purposes (see mk)
-
-local allocate = utilities.storage.allocate
-
-local fonts = fonts
-local handlers = fonts.handlers
-local constructors = fonts.constructors
-
-local registerotffeature = handlers.otf.features.register
-local registerafmfeature = handlers.afm.features.register
-
-local unicodecharacters = characters.data
-local unicodefallbacks = characters.fallbacks
-
-local vf = handlers.vf
-local commands = vf.combiner.commands
-local push = vf.predefined.push
-local pop = vf.predefined.pop
-
-local force_composed = false
-local cache = { } -- we could make these weak
-local fraction = 0.15 -- 30 units for lucida
-
-local function composecharacters(tfmdata)
- -- this assumes that slot 1 is self, there will be a proper self some day
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local parameters = tfmdata.parameters
- local properties = tfmdata.properties
- local Xdesc = descriptions[utfbyte("X")]
- local xdesc = descriptions[utfbyte("x")]
- if Xdesc and xdesc then
- local scale = parameters.factor or 1
- local deltaxheight = scale * (Xdesc.boundingbox[4] - xdesc.boundingbox[4])
- local extraxheight = fraction * deltaxheight -- maybe use compose value
- local italicfactor = parameters.italicfactor or 0
- local vfspecials = backends.tables.vfspecials --brr
- local red, green, blue, black
- if trace_combining_visualize then
- red = vfspecials.red
- green = vfspecials.green
- blue = vfspecials.blue
- black = vfspecials.black
- end
- local compose = fonts.goodies.getcompositions(tfmdata)
- if compose and trace_combining_visualize then
- report_combining("using compose information from goodies file")
- end
- local done = false
- for i, c in next, unicodecharacters do -- loop over all characters ... not that efficient but a specials hash takes memory
- if force_combining or not characters[i] then
- local s = c.specials
- if s and s[1] == 'char' then
- local chr = s[2]
- local charschr = characters[chr]
- if charschr then
- local cc = c.category
- if cc == 'll' or cc == 'lu' or cc == 'lt' then -- characters.is_letter[cc]
- local acc = s[3]
- local t = { }
- for k, v in next, charschr do
- if k ~= "commands" then
- t[k] = v
- end
- end
- local charsacc = characters[acc]
- --~ local ca = charsacc.category
- --~ if ca == "mn" then
- --~ -- mark nonspacing
- --~ elseif ca == "ms" then
- --~ -- mark spacing combining
- --~ elseif ca == "me" then
- --~ -- mark enclosing
- --~ else
- if not charsacc then -- fallback accents
- acc = unicodefallbacks[acc]
- charsacc = acc and characters[acc]
- end
- local chr_t = cache[chr]
- if not chr_t then
- chr_t = {"slot", 1, chr}
- cache[chr] = chr_t
- end
- if charsacc then
- if trace_combining_define then
- report_combining("composed %C, base %C, accent %C",i,chr,acc)
- end
- local acc_t = cache[acc]
- if not acc_t then
- acc_t = {"slot", 1, acc}
- cache[acc] = acc_t
- end
- local cb = descriptions[chr].boundingbox
- local ab = descriptions[acc].boundingbox
- -- todo: adapt height
- if cb and ab then
- local c_llx, c_lly, c_urx, c_ury = scale*cb[1], scale*cb[2], scale*cb[3], scale*cb[4]
- local a_llx, a_lly, a_urx, a_ury = scale*ab[1], scale*ab[2], scale*ab[3], scale*ab[4]
- local done = false
- if compose then
- local i_compose = compose[i]
- local i_anchored = i_compose and i_compose.anchored
- if i_anchored then
- local c_compose = compose[chr]
- local a_compose = compose[acc]
- local c_anchors = c_compose and c_compose.anchors
- local a_anchors = a_compose and a_compose.anchors
- if c_anchors and a_anchors then
- local c_anchor = c_anchors[i_anchored]
- local a_anchor = a_anchors[i_anchored]
- if c_anchor and a_anchor then
- local cx = c_anchor.x or 0
- local cy = c_anchor.y or 0
- local ax = a_anchor.x or 0
- local ay = a_anchor.y or 0
- local dx = cx - ax
- local dy = cy - ay
- if trace_combining_define then
- report_combining("building %C from %C and %C",i,chr,acc)
- report_combining(" boundingbox:")
- report_combining(" chr: %3i %3i %3i %3i",unpack(cb))
- report_combining(" acc: %3i %3i %3i %3i",unpack(ab))
- report_combining(" anchors:")
- report_combining(" chr: %3i %3i",cx,cy)
- report_combining(" acc: %3i %3i",ax,ay)
- report_combining(" delta:")
- report_combining(" %s: %3i %3i",i_anchored,dx,dy)
- end
- if trace_combining_visualize then
- t.commands = { push, {"right", scale*dx}, {"down",-scale*dy}, green, acc_t, black, pop, chr_t }
- -- t.commands = {
- -- push, {"right", scale*cx}, {"down", -scale*cy}, red, {"rule",10000,10000,10000}, pop,
- -- push, {"right", scale*ax}, {"down", -scale*ay}, blue, {"rule",10000,10000,10000}, pop,
- -- push, {"right", scale*dx}, {"down", -scale*dy}, green, acc_t, black, pop, chr_t
- -- }
- else
- t.commands = { push, {"right", scale*dx}, {"down",-scale*dy}, acc_t, pop, chr_t }
- end
- done = true
- end
- end
- end
- end
- if not done then
- -- can be sped up for scale == 1
- local dx = (c_urx - a_urx - a_llx + c_llx)/2
- local dd = (c_urx - c_llx)*italicfactor
- if a_ury < 0 then
- if trace_combining_visualize then
- t.commands = { push, {"right", dx-dd}, red, acc_t, black, pop, chr_t }
- else
- t.commands = { push, {"right", dx-dd}, acc_t, pop, chr_t }
- end
- elseif c_ury > a_lly then -- messy test
- local dy
- if compose then
- -- experimental: we could use sx but all that testing
- -- takes time and code
- dy = compose[i]
- if dy then
- dy = dy.dy
- end
- if not dy then
- dy = compose[acc]
- if dy then
- dy = dy and dy.dy
- end
- end
- if not dy then
- dy = compose.dy
- end
- if not dy then
- dy = - deltaxheight + extraxheight
- elseif dy > -1.5 and dy < 1.5 then
- -- we assume a fraction (percentage)
- dy = - dy * deltaxheight
- else
- -- we assume fontunits (values smaller than 2 make no sense)
- dy = - dy * scale
- end
- else
- dy = - deltaxheight + extraxheight
- end
- if trace_combining_visualize then
- t.commands = { push, {"right", dx+dd}, {"down", dy}, green, acc_t, black, pop, chr_t }
- else
- t.commands = { push, {"right", dx+dd}, {"down", dy}, acc_t, pop, chr_t }
- end
- else
- if trace_combining_visualize then
- t.commands = { push, {"right", dx+dd}, blue, acc_t, black, pop, chr_t }
- else
- t.commands = { push, {"right", dx+dd}, acc_t, pop, chr_t }
- end
- end
- end
- else
- t.commands = { chr_t } -- else index mess
- end
- else
- if trace_combining_define then
- report_combining("%C becomes simplfied %C",i,chr)
- end
- t.commands = { chr_t } -- else index mess
- end
- done = true
- characters[i] = t
- local d = { }
- for k, v in next, descriptions[chr] do
- d[k] = v
- end
- descriptions[i] = d
- end
- end
- end
- end
- end
- if done then
- properties.virtualized = true
- end
- end
-end
-
-registerotffeature {
- name = "compose",
- description = "additional composed characters",
- manipulators = {
- base = composecharacters,
- node = composecharacters,
- }
-}
-
-registerafmfeature {
- name = "compose",
- description = "additional composed characters",
- manipulators = {
- base = composecharacters,
- node = composecharacters,
- }
-}
-
-vf.helpers.composecharacters = composecharacters
-
--- This installs the builder into the regular virtual font builder,
--- which only makes sense as a demo.
-
-commands["compose.trace.enable"] = function()
- trace_combining_visualize = true
-end
-
-commands["compose.trace.disable"] = function()
- trace_combining_visualize = false
-end
-
-commands["compose.force.enable"] = function()
- force_combining = true
-end
-
-commands["compose.force.disable"] = function()
- force_combining = false
-end
-
-commands["compose.trace.set"] = function(g,v)
- if v[2] == nil then
- trace_combining_visualize = true
- else
- trace_combining_visualize = v[2]
- end
-end
-
-commands["compose.apply"] = function(g,v)
- composecharacters(g)
-end
-
--- vf builder
-
--- {'special', 'pdf: q ' .. s .. ' 0 0 '.. s .. ' 0 0 cm'},
--- {'special', 'pdf: q 1 0 0 1 ' .. -w .. ' ' .. -h .. ' cm'},
--- {'special', 'pdf: /Fm\XX\space Do'},
--- {'special', 'pdf: Q'},
--- {'special', 'pdf: Q'},
+if not modules then modules = { } end modules ['font-fbk'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local cos, tan, rad, format = math.cos, math.tan, math.rad, string.format
+local utfbyte, utfchar = utf.byte, utf.char
+
+--[[ldx--
+<p>This is very experimental code!</p>
+--ldx]]--
+
+local trace_combining_visualize = false trackers.register("fonts.composing.visualize", function(v) trace_combining_visualize = v end)
+local trace_combining_define = false trackers.register("fonts.composing.define", function(v) trace_combining_define = v end)
+
+trackers.register("fonts.combining", "fonts.composing.define") -- for old times sake (and manuals)
+trackers.register("fonts.combining.all", "fonts.composing.*") -- for old times sake (and manuals)
+
+local report_combining = logs.reporter("fonts","combining")
+
+local force_combining = false -- just for demo purposes (see mk)
+
+local allocate = utilities.storage.allocate
+
+local fonts = fonts
+local handlers = fonts.handlers
+local constructors = fonts.constructors
+
+local registerotffeature = handlers.otf.features.register
+local registerafmfeature = handlers.afm.features.register
+
+local unicodecharacters = characters.data
+local unicodefallbacks = characters.fallbacks
+
+local vf = handlers.vf
+local commands = vf.combiner.commands
+local push = vf.predefined.push
+local pop = vf.predefined.pop
+
+local force_composed = false
+local cache = { } -- we could make these weak
+local fraction = 0.15 -- 30 units for lucida
+
+local function composecharacters(tfmdata)
+ -- this assumes that slot 1 is self, there will be a proper self some day
+ local characters = tfmdata.characters
+ local descriptions = tfmdata.descriptions
+ local parameters = tfmdata.parameters
+ local properties = tfmdata.properties
+ local Xdesc = descriptions[utfbyte("X")]
+ local xdesc = descriptions[utfbyte("x")]
+ if Xdesc and xdesc then
+ local scale = parameters.factor or 1
+ local deltaxheight = scale * (Xdesc.boundingbox[4] - xdesc.boundingbox[4])
+ local extraxheight = fraction * deltaxheight -- maybe use compose value
+ local italicfactor = parameters.italicfactor or 0
+ local vfspecials = backends.tables.vfspecials --brr
+ local red, green, blue, black
+ if trace_combining_visualize then
+ red = vfspecials.red
+ green = vfspecials.green
+ blue = vfspecials.blue
+ black = vfspecials.black
+ end
+ local compose = fonts.goodies.getcompositions(tfmdata)
+ if compose and trace_combining_visualize then
+ report_combining("using compose information from goodies file")
+ end
+ local done = false
+ for i, c in next, unicodecharacters do -- loop over all characters ... not that efficient but a specials hash takes memory
+ if force_combining or not characters[i] then
+ local s = c.specials
+ if s and s[1] == 'char' then
+ local chr = s[2]
+ local charschr = characters[chr]
+ if charschr then
+ local cc = c.category
+ if cc == 'll' or cc == 'lu' or cc == 'lt' then -- characters.is_letter[cc]
+ local acc = s[3]
+ local t = { }
+ for k, v in next, charschr do
+ if k ~= "commands" then
+ t[k] = v
+ end
+ end
+ local charsacc = characters[acc]
+ --~ local ca = charsacc.category
+ --~ if ca == "mn" then
+ --~ -- mark nonspacing
+ --~ elseif ca == "ms" then
+ --~ -- mark spacing combining
+ --~ elseif ca == "me" then
+ --~ -- mark enclosing
+ --~ else
+ if not charsacc then -- fallback accents
+ acc = unicodefallbacks[acc]
+ charsacc = acc and characters[acc]
+ end
+ local chr_t = cache[chr]
+ if not chr_t then
+ chr_t = {"slot", 1, chr}
+ cache[chr] = chr_t
+ end
+ if charsacc then
+ if trace_combining_define then
+ report_combining("composed %C, base %C, accent %C",i,chr,acc)
+ end
+ local acc_t = cache[acc]
+ if not acc_t then
+ acc_t = {"slot", 1, acc}
+ cache[acc] = acc_t
+ end
+ local cb = descriptions[chr].boundingbox
+ local ab = descriptions[acc].boundingbox
+ -- todo: adapt height
+ if cb and ab then
+ local c_llx, c_lly, c_urx, c_ury = scale*cb[1], scale*cb[2], scale*cb[3], scale*cb[4]
+ local a_llx, a_lly, a_urx, a_ury = scale*ab[1], scale*ab[2], scale*ab[3], scale*ab[4]
+ local done = false
+ if compose then
+ local i_compose = compose[i]
+ local i_anchored = i_compose and i_compose.anchored
+ if i_anchored then
+ local c_compose = compose[chr]
+ local a_compose = compose[acc]
+ local c_anchors = c_compose and c_compose.anchors
+ local a_anchors = a_compose and a_compose.anchors
+ if c_anchors and a_anchors then
+ local c_anchor = c_anchors[i_anchored]
+ local a_anchor = a_anchors[i_anchored]
+ if c_anchor and a_anchor then
+ local cx = c_anchor.x or 0
+ local cy = c_anchor.y or 0
+ local ax = a_anchor.x or 0
+ local ay = a_anchor.y or 0
+ local dx = cx - ax
+ local dy = cy - ay
+ if trace_combining_define then
+ report_combining("building %C from %C and %C",i,chr,acc)
+ report_combining(" boundingbox:")
+ report_combining(" chr: %3i %3i %3i %3i",unpack(cb))
+ report_combining(" acc: %3i %3i %3i %3i",unpack(ab))
+ report_combining(" anchors:")
+ report_combining(" chr: %3i %3i",cx,cy)
+ report_combining(" acc: %3i %3i",ax,ay)
+ report_combining(" delta:")
+ report_combining(" %s: %3i %3i",i_anchored,dx,dy)
+ end
+ if trace_combining_visualize then
+ t.commands = { push, {"right", scale*dx}, {"down",-scale*dy}, green, acc_t, black, pop, chr_t }
+ -- t.commands = {
+ -- push, {"right", scale*cx}, {"down", -scale*cy}, red, {"rule",10000,10000,10000}, pop,
+ -- push, {"right", scale*ax}, {"down", -scale*ay}, blue, {"rule",10000,10000,10000}, pop,
+ -- push, {"right", scale*dx}, {"down", -scale*dy}, green, acc_t, black, pop, chr_t
+ -- }
+ else
+ t.commands = { push, {"right", scale*dx}, {"down",-scale*dy}, acc_t, pop, chr_t }
+ end
+ done = true
+ end
+ end
+ end
+ end
+ if not done then
+ -- can be sped up for scale == 1
+ local dx = (c_urx - a_urx - a_llx + c_llx)/2
+ local dd = (c_urx - c_llx)*italicfactor
+ if a_ury < 0 then
+ if trace_combining_visualize then
+ t.commands = { push, {"right", dx-dd}, red, acc_t, black, pop, chr_t }
+ else
+ t.commands = { push, {"right", dx-dd}, acc_t, pop, chr_t }
+ end
+ elseif c_ury > a_lly then -- messy test
+ local dy
+ if compose then
+ -- experimental: we could use sx but all that testing
+ -- takes time and code
+ dy = compose[i]
+ if dy then
+ dy = dy.dy
+ end
+ if not dy then
+ dy = compose[acc]
+ if dy then
+ dy = dy and dy.dy
+ end
+ end
+ if not dy then
+ dy = compose.dy
+ end
+ if not dy then
+ dy = - deltaxheight + extraxheight
+ elseif dy > -1.5 and dy < 1.5 then
+ -- we assume a fraction (percentage)
+ dy = - dy * deltaxheight
+ else
+ -- we assume fontunits (values smaller than 2 make no sense)
+ dy = - dy * scale
+ end
+ else
+ dy = - deltaxheight + extraxheight
+ end
+ if trace_combining_visualize then
+ t.commands = { push, {"right", dx+dd}, {"down", dy}, green, acc_t, black, pop, chr_t }
+ else
+ t.commands = { push, {"right", dx+dd}, {"down", dy}, acc_t, pop, chr_t }
+ end
+ else
+ if trace_combining_visualize then
+ t.commands = { push, {"right", dx+dd}, blue, acc_t, black, pop, chr_t }
+ else
+ t.commands = { push, {"right", dx+dd}, acc_t, pop, chr_t }
+ end
+ end
+ end
+ else
+ t.commands = { chr_t } -- else index mess
+ end
+ else
+ if trace_combining_define then
+ report_combining("%C becomes simplfied %C",i,chr)
+ end
+ t.commands = { chr_t } -- else index mess
+ end
+ done = true
+ characters[i] = t
+ local d = { }
+ for k, v in next, descriptions[chr] do
+ d[k] = v
+ end
+ descriptions[i] = d
+ end
+ end
+ end
+ end
+ end
+ if done then
+ properties.virtualized = true
+ end
+ end
+end
+
+registerotffeature {
+ name = "compose",
+ description = "additional composed characters",
+ manipulators = {
+ base = composecharacters,
+ node = composecharacters,
+ }
+}
+
+registerafmfeature {
+ name = "compose",
+ description = "additional composed characters",
+ manipulators = {
+ base = composecharacters,
+ node = composecharacters,
+ }
+}
+
+vf.helpers.composecharacters = composecharacters
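+
+-- A hedged sketch of the compositions data that getcompositions can return
+-- for a font; the keys ("anchored", "anchors", "x", "y", "dy") follow the
+-- lookups in composecharacters above, while the file name and numbers are
+-- made up. In a goodies file this lives in its "compositions" table.
+--
+-- compositions = {
+--     ["lmroman10-regular"] = {
+--         dy = 0.1, -- global fallback, a fraction of the X/x height delta
+--         [0x00E9] = { anchored = "top" },                         -- é: anchor based placement
+--         [0x0065] = { anchors = { top = { x = 215, y = 450 } } }, -- base e
+--         [0x0301] = { anchors = { top = { x = 120, y = 430 } } }, -- combining acute
+--     },
+-- }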
+
+-- This installs the builder into the regular virtual font builder,
+-- which only makes sense as a demo.
+
+commands["compose.trace.enable"] = function()
+ trace_combining_visualize = true
+end
+
+commands["compose.trace.disable"] = function()
+ trace_combining_visualize = false
+end
+
+commands["compose.force.enable"] = function()
+ force_combining = true
+end
+
+commands["compose.force.disable"] = function()
+ force_combining = false
+end
+
+commands["compose.trace.set"] = function(g,v)
+ if v[2] == nil then
+ trace_combining_visualize = true
+ else
+ trace_combining_visualize = v[2]
+ end
+end
+
+commands["compose.apply"] = function(g,v)
+ composecharacters(g)
+end
+
+-- vf builder
+
+-- {'special', 'pdf: q ' .. s .. ' 0 0 '.. s .. ' 0 0 cm'},
+-- {'special', 'pdf: q 1 0 0 1 ' .. -w .. ' ' .. -h .. ' cm'},
+-- {'special', 'pdf: /Fm\XX\space Do'},
+-- {'special', 'pdf: Q'},
+-- {'special', 'pdf: Q'},
diff --git a/tex/context/base/font-gds.lua b/tex/context/base/font-gds.lua
index 6332f40b0..4eb57bfa4 100644
--- a/tex/context/base/font-gds.lua
+++ b/tex/context/base/font-gds.lua
@@ -1,752 +1,752 @@
-if not modules then modules = { } end modules ['font-gds'] = {
- version = 1.000,
- comment = "companion to font-gds.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- depends on ctx
-
-local type, next, tonumber = type, next, tonumber
-local gmatch, format, lower, find, splitup = string.gmatch, string.format, string.lower, string.find, string.splitup
-local texsp = tex.sp
-
-local fonts, nodes, attributes, node = fonts, nodes, attributes, node
-
-local trace_goodies = false trackers.register("fonts.goodies", function(v) trace_goodies = v end)
-local report_goodies = logs.reporter("fonts","goodies")
-
-local allocate = utilities.storage.allocate
-
-local otf = fonts.handlers.otf
-local afm = fonts.handlers.afm
-local tfm = fonts.handlers.tfm
-
-local registerotffeature = otf.features.register
-local registerafmfeature = afm.features.register
-local registertfmfeature = tfm.features.register
-
-local fontgoodies = fonts.goodies or { }
-fonts.goodies = fontgoodies
-
-local typefaces = fonts.typefaces or allocate()
-fonts.typefaces = typefaces
-
-local data = fontgoodies.data or allocate()
-fontgoodies.data = data
-
-local list = fontgoodies.list or { }
-fontgoodies.list = list -- no allocate as we want to see what is there
-
-local addotffeature = otf.enhancers.addfeature
-
-local findfile = resolvers.findfile
-
-function fontgoodies.report(what,trace,goodies)
- if trace_goodies or trace then
- local whatever = goodies[what]
- if whatever then
- report_goodies("goodie %a found in %a",what,goodies.name)
- end
- end
-end
-
-local function loadgoodies(filename) -- maybe a merge is better
- local goodies = data[filename] -- we assume no suffix is given
- if goodies ~= nil then
- -- found or tagged unfound
- elseif type(filename) == "string" then
- local fullname = findfile(file.addsuffix(filename,"lfg")) or "" -- prefered suffix
- if fullname == "" then
- fullname = findfile(file.addsuffix(filename,"lua")) or "" -- fallback suffix
- end
- if fullname == "" then
- report_goodies("goodie file '%s.lfg' is not found",filename)
- data[filename] = false -- signal for not found
- else
- goodies = dofile(fullname) or false
- if not goodies then
- report_goodies("goodie file %a is invalid",fullname)
- return nil
- elseif trace_goodies then
- report_goodies("goodie file %a is loaded",fullname)
- end
- goodies.name = goodies.name or "no name"
- for name, fnc in next, list do
- fnc(goodies)
- end
- goodies.initialized = true
- data[filename] = goodies
- end
- end
- return goodies
-end
-
-function fontgoodies.register(name,fnc) -- will be a proper sequencer
- list[name] = fnc
-end
-
-fontgoodies.load = loadgoodies
-
--- register goodies file
-
-local function setgoodies(tfmdata,value)
- local goodies = tfmdata.goodies
- if not goodies then -- actually an error
- goodies = { }
- tfmdata.goodies = goodies
- end
- for filename in gmatch(value,"[^, ]+") do
- -- we need to check for duplicates
- local ok = loadgoodies(filename)
- if ok then
- if trace_goodies then
- report_goodies("assigning goodie %a",filename)
- end
- goodies[#goodies+1] = ok
- end
- end
-end
-
--- this will be split into good-* files and this file might become good-ini.lua
-
--- featuresets
-
-local function flattenedfeatures(t,tt)
- -- first set value dominates
- local tt = tt or { }
- for i=1,#t do
- local ti = t[i]
- if type(ti) == "table" then
- flattenedfeatures(ti,tt)
- elseif tt[ti] == nil then
- tt[ti] = true
- end
- end
- for k, v in next, t do
- if type(k) ~= "number" then -- not tonumber(k)
- if type(v) == "table" then
- flattenedfeatures(v,tt)
- elseif tt[k] == nil then
- tt[k] = v
- end
- end
- end
- return tt
-end
-
--- fonts.features.flattened = flattenedfeatures
-
-local function prepare_features(goodies,name,set)
- if set then
- local ff = flattenedfeatures(set)
- local fullname = goodies.name .. "::" .. name
- local n, s = fonts.specifiers.presetcontext(fullname,"",ff)
- goodies.featuresets[name] = s -- set
- if trace_goodies then
- report_goodies("feature set %a gets number %a and name %a",name,n,fullname)
- end
- return n
- end
-end
-
-fontgoodies.prepare_features = prepare_features
-
-local function initialize(goodies,tfmdata)
- local featuresets = goodies.featuresets
- local goodiesname = goodies.name
- if featuresets then
- if trace_goodies then
- report_goodies("checking featuresets in %a",goodies.name)
- end
- for name, set in next, featuresets do
- prepare_features(goodies,name,set)
- end
- end
-end
-
-fontgoodies.register("featureset",initialize)
-
-local function setfeatureset(tfmdata,set,features)
- local goodies = tfmdata.goodies -- shared ?
- if goodies then
- local properties = tfmdata.properties
- local what
- for i=1,#goodies do
- -- last one wins
- local g = goodies[i]
- what = g.featuresets and g.featuresets[set] or what
- end
- if what then
- for feature, value in next, what do
- if features[feature] == nil then
- features[feature] = value
- end
- end
- properties.mode = what.mode or properties.mode
- end
- end
-end
-
--- postprocessors (we could hash processor and share code)
-
-function fontgoodies.registerpostprocessor(tfmdata,f,prepend)
- local postprocessors = tfmdata.postprocessors
- if not postprocessors then
- tfmdata.postprocessors = { f }
- elseif prepend then
- table.insert(postprocessors,f,1)
- else
- table.insert(postprocessors,f)
- end
-end
-
-local function setpostprocessor(tfmdata,processor)
- local goodies = tfmdata.goodies
- if goodies and type(processor) == "string" then
- local found = { }
- local asked = utilities.parsers.settings_to_array(processor)
- for i=1,#goodies do
- local g = goodies[i]
- local p = g.postprocessors
- if p then
- for i=1,#asked do
- local a = asked[i]
- local f = p[a]
- if type(f) == "function" then
- found[a] = f
- end
- end
- end
- end
- local postprocessors = tfmdata.postprocessors or { }
- for i=1,#asked do
- local a = asked[i]
- local f = found[a]
- if f then
- postprocessors[#postprocessors+1] = f
- end
- end
- if #postprocessors > 0 then
- tfmdata.postprocessors = postprocessors
- end
- end
-end
-
--- colorschemes
-
-local colorschemes = fontgoodies.colorschemes or allocate { }
-fontgoodies.colorschemes = colorschemes
-colorschemes.data = colorschemes.data or { }
-
-local function setcolorscheme(tfmdata,scheme)
- if type(scheme) == "string" then
- local goodies = tfmdata.goodies
- -- todo : check for already defined in shared
- if goodies then
- local what
- for i=1,#goodies do
- -- last one counts
- local g = goodies[i]
- what = g.colorschemes and g.colorschemes[scheme] or what
- end
- if type(what) == "table" then
- -- this is font bound but we can share them if needed
- -- just as we could hash the conversions (per font)
- local hash = tfmdata.resources.unicodes
- local reverse = { }
- local characters = tfmdata.characters
- for i=1,#what do
- local w = what[i]
- for j=1,#w do
- local name = w[j]
- if name == "*" then
- -- inefficient but only used for tracing anyway
- for _, unicode in next, hash do
- reverse[unicode] = i
- end
- elseif type(name) == "number" then
- reverse[name] = i
- elseif find(name,":") then
- local start, stop = splitup(name,":")
- start = tonumber(start)
- stop = tonumber(stop)
- if start and stop then
- -- limited usage: we only deal with non-reassigned ones
- -- maybe some day I'll also support the ones with a
- -- tounicode in this range
- for unicode=start,stop do
- if characters[unicode] then
- reverse[unicode] = i
- end
- end
- end
- else
- local unicode = hash[name]
- if unicode then
- reverse[unicode] = i
- end
- end
- end
- end
- tfmdata.properties.colorscheme = reverse
- return
- end
- end
- end
- tfmdata.properties.colorscheme = false
-end
-
-local fontdata = fonts.hashes.identifiers
-local setnodecolor = nodes.tracers.colors.set
-local traverse_id = node.traverse_id
-local a_colorscheme = attributes.private('colorscheme')
-local glyph = node.id("glyph")
-
-function colorschemes.coloring(head)
- local lastfont, lastscheme
- local done = false
- for n in traverse_id(glyph,head) do
- local a = n[a_colorscheme]
- if a then
- local f = n.font
- if f ~= lastfont then
- lastscheme, lastfont = fontdata[f].properties.colorscheme, f
- end
- if lastscheme then
- local sc = lastscheme[n.char]
- if sc then
- done = true
- setnodecolor(n,"colorscheme:"..a..":"..sc) -- slow
- end
- end
- end
- end
- return head, done
-end
-
-function colorschemes.enable()
- nodes.tasks.appendaction("processors","fonts","fonts.goodies.colorschemes.coloring")
- function colorschemes.enable() end
-end
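-
--- A hedged sketch of the colorscheme data that setcolorscheme above picks
--- up from a goodies file (groups of glyph names, slot numbers, "start:stop"
--- ranges or "*" for all glyphs); the names and values are made up. Later
--- groups overwrite earlier ones for shared members, so "*" works best as a
--- base group.
---
--- colorschemes = {
---     default = {
---         [1] = { "*" },                     -- all glyphs, the base color
---         [2] = { "one", "two", "three" },   -- by glyph name
---         [3] = { 0x0030, "0x0041:0x005A" }, -- by slot number and by range
---     },
--- }
---
--- \definefontfeature[colored][default][colorscheme=default]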
-
-local function setextrafeatures(tfmdata)
- local goodies = tfmdata.goodies
- if goodies then
- for i=1,#goodies do
- local g = goodies[i]
- local f = g.features
- if f then
- for feature, specification in next, f do
- addotffeature(tfmdata.shared.rawdata,feature,specification)
- registerotffeature {
- name = feature,
- description = format("extra: %s",feature)
- }
- end
- end
- end
- end
-end
-
--- installation (collected to keep the overview) -- also for type 1
-
-registerotffeature {
- name = "goodies",
- description = "goodies on top of built in features",
- initializers = {
- position = 1,
- base = setgoodies,
- node = setgoodies,
- }
-}
-
-registerotffeature {
- name = "extrafeatures",
- description = "extra features",
- default = true,
- initializers = {
- position = 2,
- base = setextrafeatures,
- node = setextrafeatures,
- }
-}
-
-registerotffeature {
- name = "featureset",
- description = "goodie feature set",
- initializers = {
- position = 3,
- base = setfeatureset,
- node = setfeatureset,
- }
-}
-
-registerotffeature {
- name = "colorscheme",
- description = "goodie color scheme",
- initializers = {
- base = setcolorscheme,
- node = setcolorscheme,
- }
-}
-
-registerotffeature {
- name = "postprocessor",
- description = "goodie postprocessor",
- initializers = {
- base = setpostprocessor,
- node = setpostprocessor,
- }
-}
-
--- afm
-
-registerafmfeature {
- name = "goodies",
- description = "goodies on top of built in features",
- initializers = {
- position = 1,
- base = setgoodies,
- node = setgoodies,
- }
-}
-
--- tfm
-
-registertfmfeature {
- name = "goodies",
- description = "goodies on top of built in features",
- initializers = {
- position = 1,
- base = setgoodies,
- node = setgoodies,
- }
-}
-
--- experiment: we have to load the definitions immediately as they precede
--- the font definition, so they need to be initialized in the typescript
-
-local function initialize(goodies)
- local mathgoodies = goodies.mathematics
- if mathgoodies then
- local virtuals = mathgoodies.virtuals
- local mapfiles = mathgoodies.mapfiles
- local maplines = mathgoodies.maplines
- if virtuals then
- for name, specification in next, virtuals do
- -- beware, they are all constructed
- mathematics.makefont(name,specification,goodies)
- end
- end
- if mapfiles then
- for i=1,#mapfiles do
- fonts.mappings.loadfile(mapfiles[i]) -- todo: backend function
- end
- end
- if maplines then
- for i=1,#maplines do
- fonts.mappings.loadline(maplines[i]) -- todo: backend function
- end
- end
- end
-end
-
-fontgoodies.register("mathematics", initialize)
-
--- the following takes care of explicit file specifications
---
--- files = {
--- name = "antykwapoltawskiego",
--- list = {
--- ["AntPoltLtCond-Regular.otf"] = {
--- -- name = "antykwapoltawskiego",
--- style = "regular",
--- weight = "light",
--- width = "condensed",
--- },
--- },
--- }
-
--- math italics
-
--- it would be nice to have a \noitalics\font option
-
-local function initialize(tfmdata)
- local goodies = tfmdata.goodies
- if goodies then
- local shared = tfmdata.shared
- for i=1,#goodies do
- local mathgoodies = goodies[i].mathematics
- local mathitalics = mathgoodies and mathgoodies.italics
- if mathitalics then
- local properties = tfmdata.properties
- mathitalics = mathitalics[file.nameonly(properties.name)] or mathitalics
- if mathitalics then
- if trace_goodies then
- report_goodies("loading mathitalics for font %a",properties.name)
- end
- local corrections = mathitalics.corrections
- local defaultfactor = mathitalics.defaultfactor
- local disableengine = mathitalics.disableengine
- properties.hasitalics = true
- properties.mathitalic_defaultfactor = defaultfactor -- we inherit outer one anyway (name will change)
- if properties.mathitalics == nil then
- properties.mathitalics = disableengine
- end
- if corrections then
- -- As we want to set italic_correction (the context one) we need a
- -- postprocessor instead of messing with the (unscaled) descriptions.
- fontgoodies.registerpostprocessor(tfmdata, function(tfmdata) -- this is another tfmdata (a copy)
- -- better make a helper so that we have less code being defined
- local properties = tfmdata.properties
- local parameters = tfmdata.parameters
- local characters = tfmdata.characters
- properties.hasitalics = true
- properties.mathitalic_defaultfactor = defaultfactor
- properties.mathitalic_defaultvalue = defaultfactor * parameters.quad
- if properties.mathitalics == nil then
- properties.mathitalics = disableengine
- end
- if trace_goodies then
- report_goodies("assigning mathitalics for font %a",properties.name)
- end
- local mathitalics = properties.mathitalics
- local quad = parameters.quad
- local hfactor = parameters.hfactor
- for k, v in next, corrections do
- local c = characters[k]
- if v > -1 and v < 1 then
- v = v * quad
- else
- v = v * hfactor
- end
- c.italic_correction = v -- for context
- if mathitalics then
- c.italic = v -- for tex
- else
- c.italic = nil
- end
- end
- end)
- end
- return -- maybe not as these can accumulate
- end
- end
- end
- end
-end
-
-registerotffeature {
- name = "mathitalics",
- description = "additional math italic corrections",
- -- default = true,
- initializers = {
- base = initialize,
- node = initialize,
- }
-}
-
--- fontgoodies.register("mathitalics", initialize)
-
--- files
-
-local function initialize(goodies)
- local files = goodies.files
- if files then
- fonts.names.register(files)
- end
-end
-
-fontgoodies.register("files", initialize)
-
--- some day we will have a define command and then we can also do some
--- proper tracing
---
--- fonts.typefaces["antykwapoltawskiego-condensed"] = {
--- shortcut = "rm",
--- shape = "serif",
--- fontname = "antykwapoltawskiego",
--- normalweight = "light",
--- boldweight = "medium",
--- width = "condensed",
--- size = "default",
--- features = "default",
--- }
-
-local function initialize(goodies)
- local typefaces = goodies.typefaces
- if typefaces then
- local ft = fonts.typefaces
- for k, v in next, typefaces do
- ft[k] = v
- end
- end
-end
-
-fontgoodies.register("typefaces", initialize)
-
-local compositions = { }
-
-function fontgoodies.getcompositions(tfmdata)
- return compositions[file.nameonly(tfmdata.properties.filename or "")]
-end
-
-local function initialize(goodies)
- local gc = goodies.compositions
- if gc then
- for k, v in next, gc do
- compositions[k] = v
- end
- end
-end
-
-fontgoodies.register("compositions", initialize)
-
--- extra treatments (on top of defaults): \loadfontgoodies[mytreatments]
-
-local treatmentdata = fonts.treatments.data
-
-local function initialize(goodies)
- local treatments = goodies.treatments
- if treatments then
- for name, data in next, treatments do
- treatmentdata[name] = data -- always wins
- end
- end
-end
-
-fontgoodies.register("treatments", initialize)
-
-local filenames = fontgoodies.filenames or allocate()
-fontgoodies.filenames = filenames
-
-local filedata = filenames.data or allocate()
-filenames.data = filedata
-
-local function initialize(goodies) -- file names are registered global
- local fn = goodies.filenames
- if fn then
- for usedname, alternativenames in next, fn do
- filedata[usedname] = alternativenames
- end
- end
-end
-
-fontgoodies.register("filenames", initialize)
-
-function fontgoodies.filenames.resolve(name)
- local fd = filedata[name]
- if fd and findfile(name) == "" then
- for i=1,#fd do
- local fn = fd[i]
- if findfile(fn) ~= "" then
- return fn
- end
- end
- else
- -- no lookup, just use the regular mechanism
- end
- return name
-end
-
-local designsizes = fontgoodies.designsizes or allocate()
-fontgoodies.designsizes = designsizes
-
-local designdata = designsizes.data or allocate()
-designsizes.data = designdata
-
-local function initialize(goodies) -- design sizes are registered global
- local gd = goodies.designsizes
- if gd then
- for name, data in next, gd do
- local ranges = { }
- for size, file in next, data do
- if size ~= "default" then
- ranges[#ranges+1] = { texsp(size), file } -- also lower(file)
- end
- end
- table.sort(ranges,function(a,b) return a[1] < b[1] end)
- designdata[lower(name)] = { -- overloads, doesn't merge!
- default = data.default,
- ranges = ranges,
- }
- end
- end
-end
-
-fontgoodies.register("designsizes", initialize)
-
-function fontgoodies.designsizes.register(name,size,specification)
- local d = designdata[name]
- if not d then
- d = {
- ranges = { },
- default = nil, -- so we have no default set
- }
- designdata[name] = d
- end
- if size == "default" then
- d.default = specification
- else
- if type(size) == "string" then
- size = texsp(size)
- end
- local ranges = d.ranges
- ranges[#ranges+1] = { size, specification }
- end
-end
-
-function fontgoodies.designsizes.filename(name,spec,size) -- returns nil if no match
- if spec and spec ~= "" then
- local data = designdata[lower(name)]
- if data then
- if spec == "default" then
- return data.default
- elseif spec == "auto" then
- local ranges = data.ranges
- if ranges then
- for i=1,#ranges do
- local r = ranges[i]
- if r[1] >= size then -- todo: rounding so maybe size - 100
- return r[2]
- end
- end
- end
- return data.default or (ranges and ranges[#ranges][2])
- end
- end
- end
-end
-
--- The following file (husayni.lfg) is the experimental setup that we used
--- for the Idris font. For the moment we don't store this in the cache and quite
--- probably these files sit in one of the paths:
---
--- tex/context/fonts/goodies
--- tex/fonts/goodies/context
--- tex/fonts/data/foundry/collection
---
--- see lfg files in distribution
-
--- interface
-
-commands.loadfontgoodies = fontgoodies.load
-commands.enablefontcolorschemes = colorschemes.enable
-
--- weird place ... depends on math
-
-local function finalize(tfmdata,feature,value)
- mathematics.overloaddimensions(tfmdata,tfmdata,value)
-end
-
-registerotffeature {
- name = "mathdimensions",
- description = "manipulate math dimensions",
- -- default = true,
- manipulators = {
- base = finalize,
- node = finalize,
- }
-}
+if not modules then modules = { } end modules ['font-gds'] = {
+ version = 1.000,
+ comment = "companion to font-gds.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- depends on ctx
+
+local type, next, tonumber = type, next, tonumber
+local gmatch, format, lower, find, splitup = string.gmatch, string.format, string.lower, string.find, string.splitup
+local texsp = tex.sp
+
+local fonts, nodes, attributes, node = fonts, nodes, attributes, node
+
+local trace_goodies = false trackers.register("fonts.goodies", function(v) trace_goodies = v end)
+local report_goodies = logs.reporter("fonts","goodies")
+
+local allocate = utilities.storage.allocate
+
+local otf = fonts.handlers.otf
+local afm = fonts.handlers.afm
+local tfm = fonts.handlers.tfm
+
+local registerotffeature = otf.features.register
+local registerafmfeature = afm.features.register
+local registertfmfeature = tfm.features.register
+
+local fontgoodies = fonts.goodies or { }
+fonts.goodies = fontgoodies
+
+local typefaces = fonts.typefaces or allocate()
+fonts.typefaces = typefaces
+
+local data = fontgoodies.data or allocate()
+fontgoodies.data = data
+
+local list = fontgoodies.list or { }
+fontgoodies.list = list -- no allocate as we want to see what is there
+
+local addotffeature = otf.enhancers.addfeature
+
+local findfile = resolvers.findfile
+
+function fontgoodies.report(what,trace,goodies)
+ if trace_goodies or trace then
+ local whatever = goodies[what]
+ if whatever then
+ report_goodies("goodie %a found in %a",what,goodies.name)
+ end
+ end
+end
+
+local function loadgoodies(filename) -- maybe a merge is better
+ local goodies = data[filename] -- we assume no suffix is given
+ if goodies ~= nil then
+ -- found or tagged unfound
+ elseif type(filename) == "string" then
+ local fullname = findfile(file.addsuffix(filename,"lfg")) or "" -- preferred suffix
+ if fullname == "" then
+ fullname = findfile(file.addsuffix(filename,"lua")) or "" -- fallback suffix
+ end
+ if fullname == "" then
+ report_goodies("goodie file '%s.lfg' is not found",filename)
+ data[filename] = false -- signal for not found
+ else
+ goodies = dofile(fullname) or false
+ if not goodies then
+ report_goodies("goodie file %a is invalid",fullname)
+ return nil
+ elseif trace_goodies then
+ report_goodies("goodie file %a is loaded",fullname)
+ end
+ goodies.name = goodies.name or "no name"
+ for name, fnc in next, list do
+ fnc(goodies)
+ end
+ goodies.initialized = true
+ data[filename] = goodies
+ end
+ end
+ return goodies
+end
+
+function fontgoodies.register(name,fnc) -- will be a proper sequencer
+ list[name] = fnc
+end
+
+fontgoodies.load = loadgoodies
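+
+-- A hedged sketch (not part of this patch) of a minimal goodie file as the
+-- loader above expects it: a plain lua file, normally with the lfg suffix,
+-- that returns a table; the section names (featuresets, filenames,
+-- designsizes, ...) are picked up by the handlers registered further on
+-- with fontgoodies.register. All names used here are hypothetical.
+--
+-- -- mygoodies.lfg
+--
+-- return {
+--     name        = "mygoodies",
+--     comment     = "an example goodie file",
+--     author      = "whoever",
+--     featuresets = {
+--         default = { mode = "node", script = "latn" },
+--     },
+-- }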
+
+-- register goodies file
+
+local function setgoodies(tfmdata,value)
+ local goodies = tfmdata.goodies
+ if not goodies then -- actually an error
+ goodies = { }
+ tfmdata.goodies = goodies
+ end
+ for filename in gmatch(value,"[^, ]+") do
+ -- we need to check for duplicates
+ local ok = loadgoodies(filename)
+ if ok then
+ if trace_goodies then
+ report_goodies("assigning goodie %a",filename)
+ end
+ goodies[#goodies+1] = ok
+ end
+ end
+end
+
+-- this will be split into good-* files and this file might become good-ini.lua
+
+-- featuresets
+
+local function flattenedfeatures(t,tt)
+ -- first set value dominates
+ local tt = tt or { }
+ for i=1,#t do
+ local ti = t[i]
+ if type(ti) == "table" then
+ flattenedfeatures(ti,tt)
+ elseif tt[ti] == nil then
+ tt[ti] = true
+ end
+ end
+ for k, v in next, t do
+ if type(k) ~= "number" then -- not tonumber(k)
+ if type(v) == "table" then
+ flattenedfeatures(v,tt)
+ elseif tt[k] == nil then
+ tt[k] = v
+ end
+ end
+ end
+ return tt
+end
+
+-- fonts.features.flattened = flattenedfeatures
+
+local function prepare_features(goodies,name,set)
+ if set then
+ local ff = flattenedfeatures(set)
+ local fullname = goodies.name .. "::" .. name
+ local n, s = fonts.specifiers.presetcontext(fullname,"",ff)
+ goodies.featuresets[name] = s -- set
+ if trace_goodies then
+ report_goodies("feature set %a gets number %a and name %a",name,n,fullname)
+ end
+ return n
+ end
+end
+
+fontgoodies.prepare_features = prepare_features
+
+local function initialize(goodies,tfmdata)
+ local featuresets = goodies.featuresets
+ local goodiesname = goodies.name
+ if featuresets then
+ if trace_goodies then
+ report_goodies("checking featuresets in %a",goodies.name)
+ end
+ for name, set in next, featuresets do
+ prepare_features(goodies,name,set)
+ end
+ end
+end
+
+fontgoodies.register("featureset",initialize)
+
+local function setfeatureset(tfmdata,set,features)
+ local goodies = tfmdata.goodies -- shared ?
+ if goodies then
+ local properties = tfmdata.properties
+ local what
+ for i=1,#goodies do
+ -- last one wins
+ local g = goodies[i]
+ what = g.featuresets and g.featuresets[set] or what
+ end
+ if what then
+ for feature, value in next, what do
+ if features[feature] == nil then
+ features[feature] = value
+ end
+ end
+ properties.mode = what.mode or properties.mode
+ end
+ end
+end
+
+-- postprocessors (we could hash processor and share code)
+
+function fontgoodies.registerpostprocessor(tfmdata,f,prepend)
+ local postprocessors = tfmdata.postprocessors
+ if not postprocessors then
+ tfmdata.postprocessors = { f }
+ elseif prepend then
+ table.insert(postprocessors,1,f) -- insert at position 1 (prepend)
+ else
+ table.insert(postprocessors,f)
+ end
+end
+
+local function setpostprocessor(tfmdata,processor)
+ local goodies = tfmdata.goodies
+ if goodies and type(processor) == "string" then
+ local found = { }
+ local asked = utilities.parsers.settings_to_array(processor)
+ for i=1,#goodies do
+ local g = goodies[i]
+ local p = g.postprocessors
+ if p then
+ for i=1,#asked do
+ local a = asked[i]
+ local f = p[a]
+ if type(f) == "function" then
+ found[a] = f
+ end
+ end
+ end
+ end
+ local postprocessors = tfmdata.postprocessors or { }
+ for i=1,#asked do
+ local a = asked[i]
+ local f = found[a]
+ if f then
+ postprocessors[#postprocessors+1] = f
+ end
+ end
+ if #postprocessors > 0 then
+ tfmdata.postprocessors = postprocessors
+ end
+ end
+end
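+
+-- A hedged sketch of the goodie side of this mechanism: postprocessors is a
+-- table of named functions and setpostprocessor only picks up entries that
+-- are functions. The name "boldened" is hypothetical.
+--
+-- postprocessors = {
+--     boldened = function(tfmdata)
+--         -- inspect or patch the tfmdata passed in here
+--     end,
+-- },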
+
+-- colorschemes
+
+local colorschemes = fontgoodies.colorschemes or allocate { }
+fontgoodies.colorschemes = colorschemes
+colorschemes.data = colorschemes.data or { }
+
+local function setcolorscheme(tfmdata,scheme)
+ if type(scheme) == "string" then
+ local goodies = tfmdata.goodies
+ -- todo : check for already defined in shared
+ if goodies then
+ local what
+ for i=1,#goodies do
+ -- last one counts
+ local g = goodies[i]
+ what = g.colorschemes and g.colorschemes[scheme] or what
+ end
+ if type(what) == "table" then
+ -- this is font bound but we can share them if needed
+ -- just as we could hash the conversions (per font)
+ local hash = tfmdata.resources.unicodes
+ local reverse = { }
+ local characters = tfmdata.characters
+ for i=1,#what do
+ local w = what[i]
+ for j=1,#w do
+ local name = w[j]
+ if name == "*" then
+ -- inefficient but only used for tracing anyway
+ for _, unicode in next, hash do
+ reverse[unicode] = i
+ end
+ elseif type(name) == "number" then
+ reverse[name] = i
+ elseif find(name,":") then
+ local start, stop = splitup(name,":")
+ start = tonumber(start)
+ stop = tonumber(stop)
+ if start and stop then
+ -- limited usage: we only deal with non reassigned
+ -- maybe some day I'll also support the ones with a
+ -- tounicode in this range
+ for unicode=start,stop do
+ if characters[unicode] then
+ reverse[unicode] = i
+ end
+ end
+ end
+ else
+ local unicode = hash[name]
+ if unicode then
+ reverse[unicode] = i
+ end
+ end
+ end
+ end
+ tfmdata.properties.colorscheme = reverse
+ return
+ end
+ end
+ end
+ tfmdata.properties.colorscheme = false
+end
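+
+-- A hedged sketch of a colorschemes section as the parser above accepts it:
+-- each scheme is a list of groups, and a group can mention glyph names,
+-- unicode numbers, "start:stop" ranges, or "*" for all glyphs; the last group
+-- that mentions a glyph determines its class. A scheme is selected at
+-- definition time with the colorscheme feature.
+--
+-- colorschemes = {
+--     default = {
+--         [1] = { "*" },              -- everything
+--         [2] = { "a.sc", 0x0041 },   -- by glyph name or by unicode
+--         [3] = { "0x0060:0x007F" },  -- a range of unicodes
+--     },
+-- },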
+
+local fontdata = fonts.hashes.identifiers
+local setnodecolor = nodes.tracers.colors.set
+local traverse_id = node.traverse_id
+local a_colorscheme = attributes.private('colorscheme')
+local glyph = node.id("glyph")
+
+function colorschemes.coloring(head)
+ local lastfont, lastscheme
+ local done = false
+ for n in traverse_id(glyph,head) do
+ local a = n[a_colorscheme]
+ if a then
+ local f = n.font
+ if f ~= lastfont then
+ lastscheme, lastfont = fontdata[f].properties.colorscheme, f
+ end
+ if lastscheme then
+ local sc = lastscheme[n.char]
+ if sc then
+ done = true
+ setnodecolor(n,"colorscheme:"..a..":"..sc) -- slow
+ end
+ end
+ end
+ end
+ return head, done
+end
+
+function colorschemes.enable()
+ nodes.tasks.appendaction("processors","fonts","fonts.goodies.colorschemes.coloring")
+ function colorschemes.enable() end
+end
+
+local function setextrafeatures(tfmdata)
+ local goodies = tfmdata.goodies
+ if goodies then
+ for i=1,#goodies do
+ local g = goodies[i]
+ local f = g.features
+ if f then
+ for feature, specification in next, f do
+ addotffeature(tfmdata.shared.rawdata,feature,specification)
+ registerotffeature {
+ name = feature,
+ description = format("extra: %s",feature)
+ }
+ end
+ end
+ end
+ end
+end
+
+-- installation (collected to keep the overview) -- also for type 1
+
+registerotffeature {
+ name = "goodies",
+ description = "goodies on top of built in features",
+ initializers = {
+ position = 1,
+ base = setgoodies,
+ node = setgoodies,
+ }
+}
+
+registerotffeature {
+ name = "extrafeatures",
+ description = "extra features",
+ default = true,
+ initializers = {
+ position = 2,
+ base = setextrafeatures,
+ node = setextrafeatures,
+ }
+}
+
+registerotffeature {
+ name = "featureset",
+ description = "goodie feature set",
+ initializers = {
+ position = 3,
+ base = setfeatureset,
+ node = setfeatureset,
+ }
+}
+
+registerotffeature {
+ name = "colorscheme",
+ description = "goodie color scheme",
+ initializers = {
+ base = setcolorscheme,
+ node = setcolorscheme,
+ }
+}
+
+registerotffeature {
+ name = "postprocessor",
+ description = "goodie postprocessor",
+ initializers = {
+ base = setpostprocessor,
+ node = setpostprocessor,
+ }
+}
+
+-- afm
+
+registerafmfeature {
+ name = "goodies",
+ description = "goodies on top of built in features",
+ initializers = {
+ position = 1,
+ base = setgoodies,
+ node = setgoodies,
+ }
+}
+
+-- tfm
+
+registertfmfeature {
+ name = "goodies",
+ description = "goodies on top of built in features",
+ initializers = {
+ position = 1,
+ base = setgoodies,
+ node = setgoodies,
+ }
+}
+
+-- experiment: we have to load the definitions immediately as they precede
+-- the font definition, so they need to be initialized in the typescript
+
+local function initialize(goodies)
+ local mathgoodies = goodies.mathematics
+ if mathgoodies then
+ local virtuals = mathgoodies.virtuals
+ local mapfiles = mathgoodies.mapfiles
+ local maplines = mathgoodies.maplines
+ if virtuals then
+ for name, specification in next, virtuals do
+ -- beware, they are all constructed
+ mathematics.makefont(name,specification,goodies)
+ end
+ end
+ if mapfiles then
+ for i=1,#mapfiles do
+ fonts.mappings.loadfile(mapfiles[i]) -- todo: backend function
+ end
+ end
+ if maplines then
+ for i=1,#maplines do
+ fonts.mappings.loadline(maplines[i]) -- todo: backend function
+ end
+ end
+ end
+end
+
+fontgoodies.register("mathematics", initialize)
+
+-- the following takes care of explicit file specifications
+--
+-- files = {
+-- name = "antykwapoltawskiego",
+-- list = {
+-- ["AntPoltLtCond-Regular.otf"] = {
+-- -- name = "antykwapoltawskiego",
+-- style = "regular",
+-- weight = "light",
+-- width = "condensed",
+-- },
+-- },
+-- }
+
+-- math italics
+
+-- it would be nice to have a \noitalics\font option
+
+local function initialize(tfmdata)
+ local goodies = tfmdata.goodies
+ if goodies then
+ local shared = tfmdata.shared
+ for i=1,#goodies do
+ local mathgoodies = goodies[i].mathematics
+ local mathitalics = mathgoodies and mathgoodies.italics
+ if mathitalics then
+ local properties = tfmdata.properties
+ mathitalics = mathitalics[file.nameonly(properties.name)] or mathitalics
+ if mathitalics then
+ if trace_goodies then
+ report_goodies("loading mathitalics for font %a",properties.name)
+ end
+ local corrections = mathitalics.corrections
+ local defaultfactor = mathitalics.defaultfactor
+ local disableengine = mathitalics.disableengine
+ properties.hasitalics = true
+ properties.mathitalic_defaultfactor = defaultfactor -- we inherit outer one anyway (name will change)
+ if properties.mathitalics == nil then
+ properties.mathitalics = disableengine
+ end
+ if corrections then
+ -- As we want to set italic_correction (the context one) we need a
+ -- postprocessor instead of messing with the (unscaled) descriptions.
+ fontgoodies.registerpostprocessor(tfmdata, function(tfmdata) -- this is another tfmdata (a copy)
+ -- better make a helper so that we have less code being defined
+ local properties = tfmdata.properties
+ local parameters = tfmdata.parameters
+ local characters = tfmdata.characters
+ properties.hasitalics = true
+ properties.mathitalic_defaultfactor = defaultfactor
+ properties.mathitalic_defaultvalue = defaultfactor * parameters.quad
+ if properties.mathitalics == nil then
+ properties.mathitalics = disableengine
+ end
+ if trace_goodies then
+ report_goodies("assigning mathitalics for font %a",properties.name)
+ end
+ local mathitalics = properties.mathitalics
+ local quad = parameters.quad
+ local hfactor = parameters.hfactor
+ for k, v in next, corrections do
+ local c = characters[k]
+ if v > -1 and v < 1 then
+ v = v * quad
+ else
+ v = v * hfactor
+ end
+ c.italic_correction = v -- for context
+ if mathitalics then
+ c.italic = v -- for tex
+ else
+ c.italic = nil
+ end
+ end
+ end)
+ end
+ return -- maybe not as these can accumulate
+ end
+ end
+ end
+ end
+end
+
+registerotffeature {
+ name = "mathitalics",
+ description = "additional math italic corrections",
+ -- default = true,
+ initializers = {
+ base = initialize,
+ node = initialize,
+ }
+}
+
+-- fontgoodies.register("mathitalics", initialize)
+
+-- files
+
+local function initialize(goodies)
+ local files = goodies.files
+ if files then
+ fonts.names.register(files)
+ end
+end
+
+fontgoodies.register("files", initialize)
+
+-- some day we will have a define command and then we can also do some
+-- proper tracing
+--
+-- fonts.typefaces["antykwapoltawskiego-condensed"] = {
+-- shortcut = "rm",
+-- shape = "serif",
+-- fontname = "antykwapoltawskiego",
+-- normalweight = "light",
+-- boldweight = "medium",
+-- width = "condensed",
+-- size = "default",
+-- features = "default",
+-- }
+
+local function initialize(goodies)
+ local typefaces = goodies.typefaces
+ if typefaces then
+ local ft = fonts.typefaces
+ for k, v in next, typefaces do
+ ft[k] = v
+ end
+ end
+end
+
+fontgoodies.register("typefaces", initialize)
+
+local compositions = { }
+
+function fontgoodies.getcompositions(tfmdata)
+ return compositions[file.nameonly(tfmdata.properties.filename or "")]
+end
+
+local function initialize(goodies)
+ local gc = goodies.compositions
+ if gc then
+ for k, v in next, gc do
+ compositions[k] = v
+ end
+ end
+end
+
+fontgoodies.register("compositions", initialize)
+
+-- extra treatments (on top of defaults): \loadfontgoodies[mytreatments]
+
+local treatmentdata = fonts.treatments.data
+
+local function initialize(goodies)
+ local treatments = goodies.treatments
+ if treatments then
+ for name, data in next, treatments do
+ treatmentdata[name] = data -- always wins
+ end
+ end
+end
+
+fontgoodies.register("treatments", initialize)
+
+local filenames = fontgoodies.filenames or allocate()
+fontgoodies.filenames = filenames
+
+local filedata = filenames.data or allocate()
+filenames.data = filedata
+
+local function initialize(goodies) -- file names are registered global
+ local fn = goodies.filenames
+ if fn then
+ for usedname, alternativenames in next, fn do
+ filedata[usedname] = alternativenames
+ end
+ end
+end
+
+fontgoodies.register("filenames", initialize)
+
+function fontgoodies.filenames.resolve(name)
+ local fd = filedata[name]
+ if fd and findfile(name) == "" then
+ for i=1,#fd do
+ local fn = fd[i]
+ if findfile(fn) ~= "" then
+ return fn
+ end
+ end
+ else
+ -- no lookup, just use the regular mechanism
+ end
+ return name
+end
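+
+-- A hedged sketch of a filenames section: when the used name cannot be found
+-- by the resolver, resolve() returns the first alternative that can be. The
+-- names are hypothetical.
+--
+-- filenames = {
+--     ["somefont-regular.otf"] = {
+--         "somefont-regular.otf",
+--         "somefont.otf",
+--     },
+-- },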
+
+local designsizes = fontgoodies.designsizes or allocate()
+fontgoodies.designsizes = designsizes
+
+local designdata = designsizes.data or allocate()
+designsizes.data = designdata
+
+local function initialize(goodies) -- design sizes are registered global
+ local gd = goodies.designsizes
+ if gd then
+ for name, data in next, gd do
+ local ranges = { }
+ for size, file in next, data do
+ if size ~= "default" then
+ ranges[#ranges+1] = { texsp(size), file } -- also lower(file)
+ end
+ end
+ table.sort(ranges,function(a,b) return a[1] < b[1] end)
+ designdata[lower(name)] = { -- overloads, doesn't merge!
+ default = data.default,
+ ranges = ranges,
+ }
+ end
+ end
+end
+
+fontgoodies.register("designsizes", initialize)
+
+function fontgoodies.designsizes.register(name,size,specification)
+ local d = designdata[name]
+ if not d then
+ d = {
+ ranges = { },
+ default = nil, -- so we have no default set
+ }
+ designdata[name] = d
+ end
+ if size == "default" then
+ d.default = specification
+ else
+ if type(size) == "string" then
+ size = texsp(size)
+ end
+ local ranges = d.ranges
+ ranges[#ranges+1] = { size, specification }
+ end
+end
+
+function fontgoodies.designsizes.filename(name,spec,size) -- returns nil if no match
+ if spec and spec ~= "" then
+ local data = designdata[lower(name)]
+ if data then
+ if spec == "default" then
+ return data.default
+ elseif spec == "auto" then
+ local ranges = data.ranges
+ if ranges then
+ for i=1,#ranges do
+ local r = ranges[i]
+ if r[1] >= size then -- todo: rounding so maybe size - 100
+ return r[2]
+ end
+ end
+ end
+ return data.default or (ranges and ranges[#ranges][2])
+ end
+ end
+ end
+end
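+
+-- A hedged sketch of a designsizes section: keys are design sizes (strings
+-- fed to tex.sp), values are file names, and "default" is the fallback. With
+-- spec "auto" the first registered size not smaller than the asked size wins,
+-- otherwise the default (or the largest range) is used. Names are hypothetical.
+--
+-- designsizes = {
+--     ["SomeSerif"] = {
+--         ["9pt"]  = "someserif9.otf",
+--         ["10pt"] = "someserif10.otf",
+--         default  = "someserif10.otf",
+--     },
+-- },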
+
+-- The following file (husayni.lfg) is the experimental setup that we used
+-- for the Idris font. For the moment we don't store this in the cache and quite
+-- probably these files sit in one of the paths:
+--
+-- tex/context/fonts/goodies
+-- tex/fonts/goodies/context
+-- tex/fonts/data/foundry/collection
+--
+-- see lfg files in distribution
+
+-- interface
+
+commands.loadfontgoodies = fontgoodies.load
+commands.enablefontcolorschemes = colorschemes.enable
+
+-- weird place ... depends on math
+
+local function finalize(tfmdata,feature,value)
+ mathematics.overloaddimensions(tfmdata,tfmdata,value)
+end
+
+registerotffeature {
+ name = "mathdimensions",
+ description = "manipulate math dimensions",
+ -- default = true,
+ manipulators = {
+ base = finalize,
+ node = finalize,
+ }
+}
diff --git a/tex/context/base/font-hsh.lua b/tex/context/base/font-hsh.lua
index f5c80d705..c11f9a721 100644
--- a/tex/context/base/font-hsh.lua
+++ b/tex/context/base/font-hsh.lua
@@ -1,226 +1,226 @@
-if not modules then modules = { } end modules ['font-hsh'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local setmetatableindex = table.setmetatableindex
-local currentfont = font.current
-local allocate = utilities.storage.allocate
-
-local fonts = fonts
-local hashes = fonts.hashes or allocate()
-fonts.hashes = hashes
-
--- todo: autoallocate ... just create on the fly .. use constructors.keys (problem: plurals)
-
-local identifiers = hashes.identifiers or allocate()
-local characters = hashes.characters or allocate() -- chardata
-local descriptions = hashes.descriptions or allocate()
-local parameters = hashes.parameters or allocate()
-local properties = hashes.properties or allocate()
-local resources = hashes.resources or allocate()
-local spacings = hashes.spacings or allocate()
-local spaces = hashes.spaces or allocate()
-local quads = hashes.quads or allocate() -- maybe also spacedata
-local xheights = hashes.xheights or allocate()
-local csnames = hashes.csnames or allocate() -- namedata
-local marks = hashes.marks or allocate()
-local italics = hashes.italics or allocate()
-local lastmathids = hashes.lastmathids or allocate()
-local dynamics = hashes.dynamics or allocate()
-
-hashes.characters = characters
-hashes.descriptions = descriptions
-hashes.parameters = parameters
-hashes.properties = properties
-hashes.resources = resources
-hashes.spacings = spacings
-hashes.spaces = spaces
-hashes.quads = quads hashes.emwidths = quads
-hashes.xheights = xheights hashes.exheights = xheights
-hashes.csnames = csnames
-hashes.marks = marks
-hashes.italics = italics
-hashes.lastmathids = lastmathids
-hashes.dynamics = dynamics
-
-local nulldata = allocate {
- name = "nullfont",
- characters = { },
- descriptions = { },
- properties = { },
- parameters = { -- lmromanregular @ 12pt
- slantperpoint = 0,
- spacing = {
- width = 256377,
- stretch = 128188,
- shrink = 85459,
- extra = 85459,
- },
- quad = 786432,
- xheight = 338952,
- -- compatibility:
- slant = 0, -- 1
- space = 256377, -- 2
- space_stretch = 128188, -- 3
- space_shrink = 85459, -- 4
- x_height = 338952, -- 5
- quad = 786432, -- 6
- extra_space = 85459, -- 7
- },
-}
-
-fonts.nulldata = nulldata
-
-fonts.constructors.enhanceparameters(nulldata.parameters) -- official copies for us
-
-setmetatableindex(identifiers, function(t,k)
- return k == true and identifiers[currentfont()] or nulldata
-end)
-
-setmetatableindex(characters, function(t,k)
- if k == true then
- return characters[currentfont()]
- else
- local characters = identifiers[k].characters
- t[k] = characters
- return characters
- end
-end)
-
-setmetatableindex(descriptions, function(t,k)
- if k == true then
- return descriptions[currentfont()]
- else
- local descriptions = identifiers[k].descriptions
- t[k] = descriptions
- return descriptions
- end
-end)
-
-setmetatableindex(parameters, function(t,k)
- if k == true then
- return parameters[currentfont()]
- else
- local parameters = identifiers[k].parameters
- t[k] = parameters
- return parameters
- end
-end)
-
-setmetatableindex(properties, function(t,k)
- if k == true then
- return properties[currentfont()]
- else
- local properties = identifiers[k].properties
- t[k] = properties
- return properties
- end
-end)
-
-setmetatableindex(resources, function(t,k)
- if k == true then
- return resources[currentfont()]
- else
- local shared = identifiers[k].shared
- local rawdata = shared and shared.rawdata
- local resources = rawdata and rawdata.resources
- t[k] = resources or false -- better than resolving each time
- return resources
- end
-end)
-
-setmetatableindex(quads, function(t,k)
- if k == true then
- return quads[currentfont()]
- else
- local parameters = parameters[k]
- local quad = parameters and parameters.quad or 0
- t[k] = quad
- return quad
- end
-end)
-
-local nospacing = {
- width = 0,
- stretch = 0,
- shrink = 0,
- extra = 0,
-}
-
-setmetatableindex(spacings, function(t,k)
- if k == true then
- return spacings[currentfont()]
- else
- local parameters = parameters[k]
- local spacing = parameters and parameters.spacing or nospacing
- t[k] = spacing
- return spacing
- end
-end)
-
-setmetatableindex(spaces, function(t,k)
- if k == true then
- return spaces[currentfont()]
- else
- local space = spacings[k].width
- t[k] = space
- return space
- end
-end)
-
-setmetatableindex(marks, function(t,k)
- if k == true then
- return marks[currentfont()]
- else
- local resources = identifiers[k].resources or { }
- local marks = resources.marks or { }
- t[k] = marks
- return marks
- end
-end)
-
-setmetatableindex(xheights, function(t,k)
- if k == true then
- return xheights[currentfont()]
- else
- local parameters = parameters[k]
- local xheight = parameters and parameters.xheight or 0
- t[k] = xheight
- return xheight
- end
-end)
-
-setmetatableindex(italics, function(t,k) -- is test !
- if k == true then
- return italics[currentfont()]
- else
- local properties = identifiers[k].properties
- local hasitalics = properties and properties.hasitalics
- if hasitalics then
- hasitalics = characters[k] -- convenient return
- else
- hasitalics = false
- end
- t[k] = hasitalics
- return hasitalics
- end
-end)
-
-setmetatableindex(dynamics, function(t,k)
- if k == true then
- return dynamics[currentfont()]
- else
- local shared = identifiers[k].shared
- local dynamics = shared and shared.dynamics or false
- t[k] = dynamics
- return dynamics
- end
-end)
-
-function font.getfont(id)
- return identifiers[id]
-end
+if not modules then modules = { } end modules ['font-hsh'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local setmetatableindex = table.setmetatableindex
+local currentfont = font.current
+local allocate = utilities.storage.allocate
+
+local fonts = fonts
+local hashes = fonts.hashes or allocate()
+fonts.hashes = hashes
+
+-- todo: autoallocate ... just create on the fly .. use constructors.keys (problem: plurals)
+
+local identifiers = hashes.identifiers or allocate()
+local characters = hashes.characters or allocate() -- chardata
+local descriptions = hashes.descriptions or allocate()
+local parameters = hashes.parameters or allocate()
+local properties = hashes.properties or allocate()
+local resources = hashes.resources or allocate()
+local spacings = hashes.spacings or allocate()
+local spaces = hashes.spaces or allocate()
+local quads = hashes.quads or allocate() -- maybe also spacedata
+local xheights = hashes.xheights or allocate()
+local csnames = hashes.csnames or allocate() -- namedata
+local marks = hashes.marks or allocate()
+local italics = hashes.italics or allocate()
+local lastmathids = hashes.lastmathids or allocate()
+local dynamics = hashes.dynamics or allocate()
+
+hashes.characters = characters
+hashes.descriptions = descriptions
+hashes.parameters = parameters
+hashes.properties = properties
+hashes.resources = resources
+hashes.spacings = spacings
+hashes.spaces = spaces
+hashes.quads = quads hashes.emwidths = quads
+hashes.xheights = xheights hashes.exheights = xheights
+hashes.csnames = csnames
+hashes.marks = marks
+hashes.italics = italics
+hashes.lastmathids = lastmathids
+hashes.dynamics = dynamics
+
+local nulldata = allocate {
+ name = "nullfont",
+ characters = { },
+ descriptions = { },
+ properties = { },
+ parameters = { -- lmromanregular @ 12pt
+ slantperpoint = 0,
+ spacing = {
+ width = 256377,
+ stretch = 128188,
+ shrink = 85459,
+ extra = 85459,
+ },
+ quad = 786432,
+ xheight = 338952,
+ -- compatibility:
+ slant = 0, -- 1
+ space = 256377, -- 2
+ space_stretch = 128188, -- 3
+ space_shrink = 85459, -- 4
+ x_height = 338952, -- 5
+ quad = 786432, -- 6
+ extra_space = 85459, -- 7
+ },
+}
+
+fonts.nulldata = nulldata
+
+fonts.constructors.enhanceparameters(nulldata.parameters) -- official copies for us
+
+setmetatableindex(identifiers, function(t,k)
+ return k == true and identifiers[currentfont()] or nulldata
+end)
+
+setmetatableindex(characters, function(t,k)
+ if k == true then
+ return characters[currentfont()]
+ else
+ local characters = identifiers[k].characters
+ t[k] = characters
+ return characters
+ end
+end)
+
+setmetatableindex(descriptions, function(t,k)
+ if k == true then
+ return descriptions[currentfont()]
+ else
+ local descriptions = identifiers[k].descriptions
+ t[k] = descriptions
+ return descriptions
+ end
+end)
+
+setmetatableindex(parameters, function(t,k)
+ if k == true then
+ return parameters[currentfont()]
+ else
+ local parameters = identifiers[k].parameters
+ t[k] = parameters
+ return parameters
+ end
+end)
+
+setmetatableindex(properties, function(t,k)
+ if k == true then
+ return properties[currentfont()]
+ else
+ local properties = identifiers[k].properties
+ t[k] = properties
+ return properties
+ end
+end)
+
+setmetatableindex(resources, function(t,k)
+ if k == true then
+ return resources[currentfont()]
+ else
+ local shared = identifiers[k].shared
+ local rawdata = shared and shared.rawdata
+ local resources = rawdata and rawdata.resources
+ t[k] = resources or false -- better than resolving each time
+ return resources
+ end
+end)
+
+setmetatableindex(quads, function(t,k)
+ if k == true then
+ return quads[currentfont()]
+ else
+ local parameters = parameters[k]
+ local quad = parameters and parameters.quad or 0
+ t[k] = quad
+ return quad
+ end
+end)
+
+local nospacing = {
+ width = 0,
+ stretch = 0,
+ shrink = 0,
+ extra = 0,
+}
+
+setmetatableindex(spacings, function(t,k)
+ if k == true then
+ return spacings[currentfont()]
+ else
+ local parameters = parameters[k]
+ local spacing = parameters and parameters.spacing or nospacing
+ t[k] = spacing
+ return spacing
+ end
+end)
+
+setmetatableindex(spaces, function(t,k)
+ if k == true then
+ return spaces[currentfont()]
+ else
+ local space = spacings[k].width
+ t[k] = space
+ return space
+ end
+end)
+
+setmetatableindex(marks, function(t,k)
+ if k == true then
+ return marks[currentfont()]
+ else
+ local resources = identifiers[k].resources or { }
+ local marks = resources.marks or { }
+ t[k] = marks
+ return marks
+ end
+end)
+
+setmetatableindex(xheights, function(t,k)
+ if k == true then
+ return xheights[currentfont()]
+ else
+ local parameters = parameters[k]
+ local xheight = parameters and parameters.xheight or 0
+ t[k] = xheight
+ return xheight
+ end
+end)
+
+setmetatableindex(italics, function(t,k) -- is test !
+ if k == true then
+ return italics[currentfont()]
+ else
+ local properties = identifiers[k].properties
+ local hasitalics = properties and properties.hasitalics
+ if hasitalics then
+ hasitalics = characters[k] -- convenient return
+ else
+ hasitalics = false
+ end
+ t[k] = hasitalics
+ return hasitalics
+ end
+end)
+
+setmetatableindex(dynamics, function(t,k)
+ if k == true then
+ return dynamics[currentfont()]
+ else
+ local shared = identifiers[k].shared
+ local dynamics = shared and shared.dynamics or false
+ t[k] = dynamics
+ return dynamics
+ end
+end)
+
+function font.getfont(id)
+ return identifiers[id]
+end
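+
+-- A small usage sketch (not part of this patch): the hashes are lazy, so
+-- indexing with a font id fills the slot on first access, and indexing with
+-- true resolves against the current font.
+--
+-- local quads = fonts.hashes.quads     -- also available as hashes.emwidths
+-- local width = quads[font.current()]  -- quad (em width) of the current font
+-- local same  = quads[true]            -- shortcut for the same lookup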
diff --git a/tex/context/base/font-ini.lua b/tex/context/base/font-ini.lua
index 884b22474..e902eca03 100644
--- a/tex/context/base/font-ini.lua
+++ b/tex/context/base/font-ini.lua
@@ -1,32 +1,32 @@
-if not modules then modules = { } end modules ['font-ini'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-Not much is happening here.
---ldx]]--
-
-local allocate = utilities.storage.allocate
-
-local report_defining = logs.reporter("fonts","defining")
-
-fonts = fonts or { }
-local fonts = fonts
-
-fonts.hashes = { identifiers = allocate() }
-
-fonts.tables = fonts.tables or { }
-fonts.helpers = fonts.helpers or { }
-fonts.tracers = fonts.tracers or { } -- for the moment till we have moved to moduledata
-fonts.specifiers = fonts.specifiers or { } -- in format !
-
-fonts.analyzers = { } -- not needed here
-fonts.readers = { }
-fonts.definers = { methods = { } }
-fonts.loggers = { register = function() end }
-
-fontloader.totable = fontloader.to_table
+if not modules then modules = { } end modules ['font-ini'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[ldx--
+Not much is happening here.
+--ldx]]--
+
+local allocate = utilities.storage.allocate
+
+local report_defining = logs.reporter("fonts","defining")
+
+fonts = fonts or { }
+local fonts = fonts
+
+fonts.hashes = { identifiers = allocate() }
+
+fonts.tables = fonts.tables or { }
+fonts.helpers = fonts.helpers or { }
+fonts.tracers = fonts.tracers or { } -- for the moment till we have moved to moduledata
+fonts.specifiers = fonts.specifiers or { } -- in format !
+
+fonts.analyzers = { } -- not needed here
+fonts.readers = { }
+fonts.definers = { methods = { } }
+fonts.loggers = { register = function() end }
+
+fontloader.totable = fontloader.to_table
diff --git a/tex/context/base/font-ldr.lua b/tex/context/base/font-ldr.lua
index 175b4d0cc..46cd396f8 100644
--- a/tex/context/base/font-ldr.lua
+++ b/tex/context/base/font-ldr.lua
@@ -1,70 +1,70 @@
-if not modules then modules = { } end modules ['font-ldr'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This module provides an experimental replacement for fontloader.to_table
--- but is not used that much.
-
-local fields = fontloader.fields
-
-if fields then
-
- local glyphfields
-
- local function get_glyphs(r)
- local t = { }
- local g = r.glyphs
- for i=1,r.glyphmax-1 do
- local gi = g[i]
- if gi then
- if not glyphfields then
- glyphfields = fields(gi)
- end
- local h = { }
- for i=1,#glyphfields do
- local s = glyphfields[i]
- h[s] = gi[s]
- end
- t[i] = h
- end
- end
- return t
- end
-
- local function to_table(r)
- local f = fields(r)
- if f then
- local t = { }
- for i=1,#f do
- local fi = f[i]
- local ri = r[fi]
- if not ri then
- -- skip
- elseif fi == "glyphs" then
- t.glyphs = get_glyphs(r)
- elseif fi == "subfonts" then
- t[fi] = ri
- ri.glyphs = get_glyphs(ri)
- else
- t[fi] = r[fi]
- end
- end
- return t
- end
- end
-
- -- currently glyphs, subfont-glyphs and the main table are userdata
-
- function fonts.to_table(raw)
- return to_table(raw)
- end
-
-else
-
- fonts.to_table = fontloader.to_table
-
-end
+if not modules then modules = { } end modules ['font-ldr'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This module provides an experimental replacement for fontloader.to_table
+-- but is not used that much.
+
+local fields = fontloader.fields
+
+if fields then
+
+ local glyphfields
+
+ local function get_glyphs(r)
+ local t = { }
+ local g = r.glyphs
+ for i=1,r.glyphmax-1 do
+ local gi = g[i]
+ if gi then
+ if not glyphfields then
+ glyphfields = fields(gi)
+ end
+ local h = { }
+ for i=1,#glyphfields do
+ local s = glyphfields[i]
+ h[s] = gi[s]
+ end
+ t[i] = h
+ end
+ end
+ return t
+ end
+
+ local function to_table(r)
+ local f = fields(r)
+ if f then
+ local t = { }
+ for i=1,#f do
+ local fi = f[i]
+ local ri = r[fi]
+ if not ri then
+ -- skip
+ elseif fi == "glyphs" then
+ t.glyphs = get_glyphs(r)
+ elseif fi == "subfonts" then
+ t[fi] = ri
+ ri.glyphs = get_glyphs(ri)
+ else
+ t[fi] = r[fi]
+ end
+ end
+ return t
+ end
+ end
+
+ -- currently glyphs, subfont-glyphs and the main table are userdata
+
+ function fonts.to_table(raw)
+ return to_table(raw)
+ end
+
+else
+
+ fonts.to_table = fontloader.to_table
+
+end
diff --git a/tex/context/base/font-log.lua b/tex/context/base/font-log.lua
index 41da75378..3a2a1c5de 100644
--- a/tex/context/base/font-log.lua
+++ b/tex/context/base/font-log.lua
@@ -1,86 +1,86 @@
-if not modules then modules = { } end modules ['font-log'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local next, format, lower, concat = next, string.format, string.lower, table.concat
-
-local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
-local report_defining = logs.reporter("fonts","defining")
-
-local basename = file.basename
-
-local fonts = fonts
-local loggers = { }
-fonts.loggers = loggers
-local usedfonts = utilities.storage.allocate()
------ loadedfonts = utilities.storage.allocate()
-
---[[ldx--
-The following functions are used for reporting about the fonts
-used. The message itself is not that useful in regular runs but since
-we now have several readers it may be handy to know what reader is
-used for which font.
---ldx]]--
-
-function loggers.onetimemessage(font,char,message,reporter)
- local tfmdata = fonts.hashes.identifiers[font]
- local shared = tfmdata.shared
- local messages = shared.messages
- if not messages then
- messages = { }
- shared.messages = messages
- end
- local category = messages[message]
- if not category then
- category = { }
- messages[message] = category
- end
- if not category[char] then
- if not reporter then
- reporter = report_defining
- end
- reporter("char %U in font %a with id %s: %s",char,tfmdata.properties.fullname,font,message)
- category[char] = true
- end
-end
-
-function loggers.register(tfmdata,source,specification) -- save file name in spec here ! ! ! ! ! !
- if tfmdata and specification and specification.specification then
- local name = lower(specification.name)
- if trace_defining and not usedfonts[name] then
- report_defining("registering %a as %a, used %a",file.basename(specification.name),source,file.basename(specification.filename))
- end
- specification.source = source
- -- loadedfonts[lower(specification.specification)] = specification
- usedfonts[lower(specification.filename or specification.name)] = source
- end
-end
-
-function loggers.format(name) -- should be avoided
- return usedfonts[name] or "unknown"
-end
-
-statistics.register("loaded fonts", function()
- if next(usedfonts) then
- local t, n = { }, 0
- local treatmentdata = fonts.treatments.data
- for name, used in table.sortedhash(usedfonts) do
- n = n + 1
- local base = basename(name)
- if complete then
- t[n] = format("%s -> %s",used,base)
- else
- t[n] = base
- end
- local treatment = treatmentdata[base]
- if treatment and treatment.comment then
- t[n] = format("%s (%s)",t[n],treatment.comment)
- end
- end
- return n > 0 and format("%s files: %s",n,concat(t,", ")) or "none"
- end
-end)
+if not modules then modules = { } end modules ['font-log'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local next, format, lower, concat = next, string.format, string.lower, table.concat
+
+local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
+local report_defining = logs.reporter("fonts","defining")
+
+local basename = file.basename
+
+local fonts = fonts
+local loggers = { }
+fonts.loggers = loggers
+local usedfonts = utilities.storage.allocate()
+----- loadedfonts = utilities.storage.allocate()
+
+--[[ldx--
+The following functions are used for reporting about the fonts
+used. The message itself is not that useful in regular runs but since
+we now have several readers it may be handy to know what reader is
+used for which font.
+--ldx]]--
+
+function loggers.onetimemessage(font,char,message,reporter)
+ local tfmdata = fonts.hashes.identifiers[font]
+ local shared = tfmdata.shared
+ local messages = shared.messages
+ if not messages then
+ messages = { }
+ shared.messages = messages
+ end
+ local category = messages[message]
+ if not category then
+ category = { }
+ messages[message] = category
+ end
+ if not category[char] then
+ if not reporter then
+ reporter = report_defining
+ end
+ reporter("char %U in font %a with id %s: %s",char,tfmdata.properties.fullname,font,message)
+ category[char] = true
+ end
+end
+
+function loggers.register(tfmdata,source,specification) -- save file name in spec here ! ! ! ! ! !
+ if tfmdata and specification and specification.specification then
+ local name = lower(specification.name)
+ if trace_defining and not usedfonts[name] then
+ report_defining("registering %a as %a, used %a",file.basename(specification.name),source,file.basename(specification.filename))
+ end
+ specification.source = source
+ -- loadedfonts[lower(specification.specification)] = specification
+ usedfonts[lower(specification.filename or specification.name)] = source
+ end
+end
+
+function loggers.format(name) -- should be avoided
+ return usedfonts[name] or "unknown"
+end
+
+statistics.register("loaded fonts", function()
+ if next(usedfonts) then
+ local t, n = { }, 0
+ local treatmentdata = fonts.treatments.data
+ for name, used in table.sortedhash(usedfonts) do
+ n = n + 1
+ local base = basename(name)
+ if complete then
+ t[n] = format("%s -> %s",used,base)
+ else
+ t[n] = base
+ end
+ local treatment = treatmentdata[base]
+ if treatment and treatment.comment then
+ t[n] = format("%s (%s)",t[n],treatment.comment)
+ end
+ end
+ return n > 0 and format("%s files: %s",n,concat(t,", ")) or "none"
+ end
+end)
diff --git a/tex/context/base/font-lua.lua b/tex/context/base/font-lua.lua
index 6fbbcf17e..27b40e5b8 100644
--- a/tex/context/base/font-lua.lua
+++ b/tex/context/base/font-lua.lua
@@ -1,46 +1,46 @@
-if not modules then modules = { } end modules ['font-lua'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
-
-local report_lua = logs.reporter("fonts","lua loading")
-
-local fonts = fonts
-local readers = fonts.readers
-fonts.formats.lua = "lua"
-
--- we could add support for features here
-
-local function check_lua(specification,fullname)
- -- standard tex file lookup
- local fullname = resolvers.findfile(fullname) or ""
- if fullname ~= "" then
- local loader = loadfile(fullname)
- loader = loader and loader()
- return loader and loader(specification)
- end
-end
-
-readers.check_lua = check_lua
-
-function readers.lua(specification)
- local original = specification.specification
- if trace_defining then
- report_lua("using lua reader for %a",original)
- end
- local fullname = specification.filename or ""
- if fullname == "" then
- local forced = specification.forced or ""
- if forced ~= "" then
- fullname = specification.name .. "." .. forced
- else
- fullname = specification.name
- end
- end
- return check_lua(specification,fullname)
-end
+if not modules then modules = { } end modules ['font-lua'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
+
+local report_lua = logs.reporter("fonts","lua loading")
+
+local fonts = fonts
+local readers = fonts.readers
+fonts.formats.lua = "lua"
+
+-- we could add support for features here
+
+local function check_lua(specification,fullname)
+ -- standard tex file lookup
+ local fullname = resolvers.findfile(fullname) or ""
+ if fullname ~= "" then
+ local loader = loadfile(fullname)
+ loader = loader and loader()
+ return loader and loader(specification)
+ end
+end
+
+readers.check_lua = check_lua
+
+function readers.lua(specification)
+ local original = specification.specification
+ if trace_defining then
+ report_lua("using lua reader for %a",original)
+ end
+ local fullname = specification.filename or ""
+ if fullname == "" then
+ local forced = specification.forced or ""
+ if forced ~= "" then
+ fullname = specification.name .. "." .. forced
+ else
+ fullname = specification.name
+ end
+ end
+ return check_lua(specification,fullname)
+end
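+
+-- A hedged sketch of what such a lua "font file" boils down to for this
+-- reader: the file returns a function, that function gets the specification
+-- and returns a (tfm like) font table; which fields that table needs is
+-- beyond the scope of this file. The file name is hypothetical.
+--
+-- -- somefont.lua
+--
+-- return function(specification)
+--     return {
+--         name       = specification.name,
+--         parameters = { },
+--         characters = { },
+--     }
+-- end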
diff --git a/tex/context/base/font-map.lua b/tex/context/base/font-map.lua
index 6988b9b9e..864b43c24 100644
--- a/tex/context/base/font-map.lua
+++ b/tex/context/base/font-map.lua
@@ -1,329 +1,329 @@
-if not modules then modules = { } end modules ['font-map'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local tonumber = tonumber
-
-local match, format, find, concat, gsub, lower = string.match, string.format, string.find, table.concat, string.gsub, string.lower
-local P, R, S, C, Ct, Cc, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cc, lpeg.match
-local utfbyte = utf.byte
-local floor = math.floor
-
-local trace_loading = false trackers.register("fonts.loading", function(v) trace_loading = v end)
-local trace_mapping = false trackers.register("fonts.mapping", function(v) trace_mapping = v end)
-
-local report_fonts = logs.reporter("fonts","loading") -- not otf only
-
-local fonts = fonts or { }
-local mappings = fonts.mappings or { }
-fonts.mappings = mappings
-
---[[ldx--
-Eventually this code will disappear because map files are kind
-of obsolete. Some code may move to runtime or auxiliary modules.
-The name-to-unicode related code will stay of course.
---ldx]]--
-
-local function loadlumtable(filename) -- will move to font goodies
- local lumname = file.replacesuffix(file.basename(filename),"lum")
- local lumfile = resolvers.findfile(lumname,"map") or ""
- if lumfile ~= "" and lfs.isfile(lumfile) then
- if trace_loading or trace_mapping then
- report_fonts("loading map table %a",lumfile)
- end
- lumunic = dofile(lumfile)
- return lumunic, lumfile
- end
-end
-
-local hex = R("AF","09")
-local hexfour = (hex*hex*hex*hex) / function(s) return tonumber(s,16) end
-local hexsix = (hex*hex*hex*hex*hex*hex) / function(s) return tonumber(s,16) end
-local dec = (R("09")^1) / tonumber
-local period = P(".")
-local unicode = P("uni") * (hexfour * (period + P(-1)) * Cc(false) + Ct(hexfour^1) * Cc(true))
-local ucode = P("u") * (hexsix * (period + P(-1)) * Cc(false) + Ct(hexsix ^1) * Cc(true))
-local index = P("index") * dec * Cc(false)
-
-local parser = unicode + ucode + index
-
-local parsers = { }
-
-local function makenameparser(str)
- if not str or str == "" then
- return parser
- else
- local p = parsers[str]
- if not p then
- p = P(str) * period * dec * Cc(false)
- parsers[str] = p
- end
- return p
- end
-end
-
--- local parser = makenameparser("Japan1")
--- local parser = makenameparser()
--- local function test(str)
--- local b, a = lpegmatch(parser,str)
--- print((a and table.serialize(b)) or b)
--- end
--- test("a.sc")
--- test("a")
--- test("uni1234")
--- test("uni1234.xx")
--- test("uni12349876")
--- test("u123400987600")
--- test("index1234")
--- test("Japan1.123")
-
-local function tounicode16(unicode,name)
- if unicode < 0x10000 then
- return format("%04X",unicode)
- elseif unicode < 0x1FFFFFFFFF then
- return format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00)
- else
- report_fonts("can't convert %a in %a into tounicode",unicode,name)
- end
-end
-
-local function tounicode16sequence(unicodes,name)
- local t = { }
- for l=1,#unicodes do
- local unicode = unicodes[l]
- if unicode < 0x10000 then
- t[l] = format("%04X",unicode)
- elseif unicode < 0x1FFFFFFFFF then
- t[l] = format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00)
- else
- report_fonts ("can't convert %a in %a into tounicode",unicode,name)
- end
- end
- return concat(t)
-end
-
-local function fromunicode16(str)
- if #str == 4 then
- return tonumber(str,16)
- else
- local l, r = match(str,"(....)(....)")
- return (tonumber(l,16))*0x400 + tonumber(r,16) - 0xDC00
- end
-end
-
--- Slightly slower:
---
--- local p = C(4) * (C(4)^-1) / function(l,r)
--- if r then
--- return (tonumber(l,16))*0x400 + tonumber(r,16) - 0xDC00
--- else
--- return tonumber(l,16)
--- end
--- end
---
--- local function fromunicode16(str)
--- return lpegmatch(p,str)
--- end
-
--- This is quite a bit faster but at the cost of some memory but if we
--- do this we will also use it elsewhere so let's not follow this route
--- now. I might use this method in the plain variant (no caching there)
--- but then I need a flag that distinguishes between code branches.
---
--- local cache = { }
---
--- function mappings.tounicode16(unicode)
--- local s = cache[unicode]
--- if not s then
--- if unicode < 0x10000 then
--- s = format("%04X",unicode)
--- else
--- s = format("%04X%04X",unicode/0x400+0xD800,unicode%0x400+0xDC00)
--- end
--- cache[unicode] = s
--- end
--- return s
--- end
-
-mappings.loadlumtable = loadlumtable
-mappings.makenameparser = makenameparser
-mappings.tounicode16 = tounicode16
-mappings.tounicode16sequence = tounicode16sequence
-mappings.fromunicode16 = fromunicode16
-
-local separator = S("_.")
-local other = C((1 - separator)^1)
-local ligsplitter = Ct(other * (separator * other)^0)
-
---~ print(table.serialize(lpegmatch(ligsplitter,"this")))
---~ print(table.serialize(lpegmatch(ligsplitter,"this.that")))
---~ print(table.serialize(lpegmatch(ligsplitter,"japan1.123")))
---~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more")))
---~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more.that")))
-
-function mappings.addtounicode(data,filename)
- local resources = data.resources
- local properties = data.properties
- local descriptions = data.descriptions
- local unicodes = resources.unicodes
- if not unicodes then
- return
- end
- -- we need to move this code
- unicodes['space'] = unicodes['space'] or 32
- unicodes['hyphen'] = unicodes['hyphen'] or 45
- unicodes['zwj'] = unicodes['zwj'] or 0x200D
- unicodes['zwnj'] = unicodes['zwnj'] or 0x200C
- -- the tounicode mapping is sparse and only needed for alternatives
- local private = fonts.constructors.privateoffset
- local unknown = format("%04X",utfbyte("?"))
- local unicodevector = fonts.encodings.agl.unicodes -- loaded runtime in context
- local tounicode = { }
- local originals = { }
- resources.tounicode = tounicode
- resources.originals = originals
- local lumunic, uparser, oparser
- local cidinfo, cidnames, cidcodes, usedmap
- if false then -- will become an option
- lumunic = loadlumtable(filename)
- lumunic = lumunic and lumunic.tounicode
- end
- --
- cidinfo = properties.cidinfo
- usedmap = cidinfo and fonts.cid.getmap(cidinfo)
- --
- if usedmap then
- oparser = usedmap and makenameparser(cidinfo.ordering)
- cidnames = usedmap.names
- cidcodes = usedmap.unicodes
- end
- uparser = makenameparser()
- local ns, nl = 0, 0
- for unic, glyph in next, descriptions do
- local index = glyph.index
- local name = glyph.name
- if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then
- local unicode = lumunic and lumunic[name] or unicodevector[name]
- if unicode then
- originals[index] = unicode
- tounicode[index] = tounicode16(unicode,name)
- ns = ns + 1
- end
- -- cidmap heuristics, beware, there is no guarantee for a match unless
- -- the chain resolves
- if (not unicode) and usedmap then
- local foundindex = lpegmatch(oparser,name)
- if foundindex then
- unicode = cidcodes[foundindex] -- name to number
- if unicode then
- originals[index] = unicode
- tounicode[index] = tounicode16(unicode,name)
- ns = ns + 1
- else
- local reference = cidnames[foundindex] -- number to name
- if reference then
- local foundindex = lpegmatch(oparser,reference)
- if foundindex then
- unicode = cidcodes[foundindex]
- if unicode then
- originals[index] = unicode
- tounicode[index] = tounicode16(unicode,name)
- ns = ns + 1
- end
- end
- if not unicode or unicode == "" then
- local foundcodes, multiple = lpegmatch(uparser,reference)
- if foundcodes then
- originals[index] = foundcodes
- if multiple then
- tounicode[index] = tounicode16sequence(foundcodes)
- nl = nl + 1
- unicode = true
- else
- tounicode[index] = tounicode16(foundcodes,name)
- ns = ns + 1
- unicode = foundcodes
- end
- end
- end
- end
- end
- end
- end
- -- a.whatever or a_b_c.whatever or a_b_c (no numbers)
- if not unicode or unicode == "" then
- local split = lpegmatch(ligsplitter,name)
- local nplit = split and #split or 0
- if nplit >= 2 then
- local t, n = { }, 0
- for l=1,nplit do
- local base = split[l]
- local u = unicodes[base] or unicodevector[base]
- if not u then
- break
- elseif type(u) == "table" then
- n = n + 1
- t[n] = u[1]
- else
- n = n + 1
- t[n] = u
- end
- end
- if n == 0 then -- done then
- -- nothing
- elseif n == 1 then
- originals[index] = t[1]
- tounicode[index] = tounicode16(t[1],name)
- else
- originals[index] = t
- tounicode[index] = tounicode16sequence(t)
- end
- nl = nl + 1
- unicode = true
- else
- -- skip: already checked and we don't want privates here
- end
- end
- -- last resort (we might need to catch private here as well)
- if not unicode or unicode == "" then
- local foundcodes, multiple = lpegmatch(uparser,name)
- if foundcodes then
- if multiple then
- originals[index] = foundcodes
- tounicode[index] = tounicode16sequence(foundcodes,name)
- nl = nl + 1
- unicode = true
- else
- originals[index] = foundcodes
- tounicode[index] = tounicode16(foundcodes,name)
- ns = ns + 1
- unicode = foundcodes
- end
- end
- end
- -- if not unicode then
- -- originals[index] = 0xFFFD
- -- tounicode[index] = "FFFD"
- -- end
- end
- end
- if trace_mapping then
- for unic, glyph in table.sortedhash(descriptions) do
- local name = glyph.name
- local index = glyph.index
- local toun = tounicode[index]
- if toun then
- report_fonts("internal slot %U, name %a, unicode %U, tounicode %a",index,name,unic,toun)
- else
- report_fonts("internal slot %U, name %a, unicode %U",index,name,unic)
- end
- end
- end
- if trace_loading and (ns > 0 or nl > 0) then
- report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns)
- end
-end
+if not modules then modules = { } end modules ['font-map'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local tonumber = tonumber
+
+local match, format, find, concat, gsub, lower = string.match, string.format, string.find, table.concat, string.gsub, string.lower
+local P, R, S, C, Ct, Cc, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cc, lpeg.match
+local utfbyte = utf.byte
+local floor = math.floor
+
+local trace_loading = false trackers.register("fonts.loading", function(v) trace_loading = v end)
+local trace_mapping = false trackers.register("fonts.mapping", function(v) trace_mapping = v end)
+
+local report_fonts = logs.reporter("fonts","loading") -- not otf only
+
+local fonts = fonts or { }
+local mappings = fonts.mappings or { }
+fonts.mappings = mappings
+
+--[[ldx--
+
+Eventually this code will disappear because map files are kind
+of obsolete. Some code may move to runtime or auxiliary modules.
+
+The name to unicode related code will stay of course.
+--ldx]]--
+
+local function loadlumtable(filename) -- will move to font goodies
+ local lumname = file.replacesuffix(file.basename(filename),"lum")
+ local lumfile = resolvers.findfile(lumname,"map") or ""
+ if lumfile ~= "" and lfs.isfile(lumfile) then
+ if trace_loading or trace_mapping then
+ report_fonts("loading map table %a",lumfile)
+ end
+ lumunic = dofile(lumfile)
+ return lumunic, lumfile
+ end
+end
+
+local hex = R("AF","09")
+local hexfour = (hex*hex*hex*hex) / function(s) return tonumber(s,16) end
+local hexsix = (hex*hex*hex*hex*hex*hex) / function(s) return tonumber(s,16) end
+local dec = (R("09")^1) / tonumber
+local period = P(".")
+local unicode = P("uni") * (hexfour * (period + P(-1)) * Cc(false) + Ct(hexfour^1) * Cc(true))
+local ucode = P("u") * (hexsix * (period + P(-1)) * Cc(false) + Ct(hexsix ^1) * Cc(true))
+local index = P("index") * dec * Cc(false)
+
+local parser = unicode + ucode + index
+
+local parsers = { }
+
+local function makenameparser(str)
+ if not str or str == "" then
+ return parser
+ else
+ local p = parsers[str]
+ if not p then
+ p = P(str) * period * dec * Cc(false)
+ parsers[str] = p
+ end
+ return p
+ end
+end
+
+-- local parser = makenameparser("Japan1")
+-- local parser = makenameparser()
+-- local function test(str)
+-- local b, a = lpegmatch(parser,str)
+-- print((a and table.serialize(b)) or b)
+-- end
+-- test("a.sc")
+-- test("a")
+-- test("uni1234")
+-- test("uni1234.xx")
+-- test("uni12349876")
+-- test("u123400987600")
+-- test("index1234")
+-- test("Japan1.123")
+
+local function tounicode16(unicode,name)
+ if unicode < 0x10000 then
+ return format("%04X",unicode)
+ elseif unicode < 0x1FFFFFFFFF then
+ return format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00)
+ else
+ report_fonts("can't convert %a in %a into tounicode",unicode,name)
+ end
+end
+
+local function tounicode16sequence(unicodes,name)
+ local t = { }
+ for l=1,#unicodes do
+ local unicode = unicodes[l]
+ if unicode < 0x10000 then
+ t[l] = format("%04X",unicode)
+ elseif unicode < 0x1FFFFFFFFF then
+ t[l] = format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00)
+ else
+ report_fonts ("can't convert %a in %a into tounicode",unicode,name)
+ end
+ end
+ return concat(t)
+end
+
+local function fromunicode16(str)
+ if #str == 4 then
+ return tonumber(str,16)
+ else
+ local l, r = match(str,"(....)(....)")
+ return (tonumber(l,16))*0x400 + tonumber(r,16) - 0xDC00
+ end
+end
+
+-- Slightly slower:
+--
+-- local p = C(4) * (C(4)^-1) / function(l,r)
+-- if r then
+-- return (tonumber(l,16))*0x400 + tonumber(r,16) - 0xDC00
+-- else
+-- return tonumber(l,16)
+-- end
+-- end
+--
+-- local function fromunicode16(str)
+-- return lpegmatch(p,str)
+-- end
+
+-- This is quite a bit faster but at the cost of some memory but if we
+-- do this we will also use it elsewhere so let's not follow this route
+-- now. I might use this method in the plain variant (no caching there)
+-- but then I need a flag that distinguishes between code branches.
+--
+-- local cache = { }
+--
+-- function mappings.tounicode16(unicode)
+-- local s = cache[unicode]
+-- if not s then
+-- if unicode < 0x10000 then
+-- s = format("%04X",unicode)
+-- else
+-- s = format("%04X%04X",unicode/0x400+0xD800,unicode%0x400+0xDC00)
+-- end
+-- cache[unicode] = s
+-- end
+-- return s
+-- end
+
+mappings.loadlumtable = loadlumtable
+mappings.makenameparser = makenameparser
+mappings.tounicode16 = tounicode16
+mappings.tounicode16sequence = tounicode16sequence
+mappings.fromunicode16 = fromunicode16
+
+local separator = S("_.")
+local other = C((1 - separator)^1)
+local ligsplitter = Ct(other * (separator * other)^0)
+
+--~ print(table.serialize(lpegmatch(ligsplitter,"this")))
+--~ print(table.serialize(lpegmatch(ligsplitter,"this.that")))
+--~ print(table.serialize(lpegmatch(ligsplitter,"japan1.123")))
+--~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more")))
+--~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more.that")))
+
+function mappings.addtounicode(data,filename)
+ local resources = data.resources
+ local properties = data.properties
+ local descriptions = data.descriptions
+ local unicodes = resources.unicodes
+ if not unicodes then
+ return
+ end
+ -- we need to move this code
+ unicodes['space'] = unicodes['space'] or 32
+ unicodes['hyphen'] = unicodes['hyphen'] or 45
+ unicodes['zwj'] = unicodes['zwj'] or 0x200D
+ unicodes['zwnj'] = unicodes['zwnj'] or 0x200C
+ -- the tounicode mapping is sparse and only needed for alternatives
+ local private = fonts.constructors.privateoffset
+ local unknown = format("%04X",utfbyte("?"))
+ local unicodevector = fonts.encodings.agl.unicodes -- loaded runtime in context
+ local tounicode = { }
+ local originals = { }
+ resources.tounicode = tounicode
+ resources.originals = originals
+ local lumunic, uparser, oparser
+ local cidinfo, cidnames, cidcodes, usedmap
+ if false then -- will become an option
+ lumunic = loadlumtable(filename)
+ lumunic = lumunic and lumunic.tounicode
+ end
+ --
+ cidinfo = properties.cidinfo
+ usedmap = cidinfo and fonts.cid.getmap(cidinfo)
+ --
+ if usedmap then
+ oparser = usedmap and makenameparser(cidinfo.ordering)
+ cidnames = usedmap.names
+ cidcodes = usedmap.unicodes
+ end
+ uparser = makenameparser()
+ local ns, nl = 0, 0
+ for unic, glyph in next, descriptions do
+ local index = glyph.index
+ local name = glyph.name
+ if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then
+ local unicode = lumunic and lumunic[name] or unicodevector[name]
+ if unicode then
+ originals[index] = unicode
+ tounicode[index] = tounicode16(unicode,name)
+ ns = ns + 1
+ end
+ -- cidmap heuristics, beware, there is no guarantee for a match unless
+ -- the chain resolves
+ if (not unicode) and usedmap then
+ local foundindex = lpegmatch(oparser,name)
+ if foundindex then
+ unicode = cidcodes[foundindex] -- name to number
+ if unicode then
+ originals[index] = unicode
+ tounicode[index] = tounicode16(unicode,name)
+ ns = ns + 1
+ else
+ local reference = cidnames[foundindex] -- number to name
+ if reference then
+ local foundindex = lpegmatch(oparser,reference)
+ if foundindex then
+ unicode = cidcodes[foundindex]
+ if unicode then
+ originals[index] = unicode
+ tounicode[index] = tounicode16(unicode,name)
+ ns = ns + 1
+ end
+ end
+ if not unicode or unicode == "" then
+ local foundcodes, multiple = lpegmatch(uparser,reference)
+ if foundcodes then
+ originals[index] = foundcodes
+ if multiple then
+ tounicode[index] = tounicode16sequence(foundcodes)
+ nl = nl + 1
+ unicode = true
+ else
+ tounicode[index] = tounicode16(foundcodes,name)
+ ns = ns + 1
+ unicode = foundcodes
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ -- a.whatever or a_b_c.whatever or a_b_c (no numbers)
+ if not unicode or unicode == "" then
+ local split = lpegmatch(ligsplitter,name)
+ local nplit = split and #split or 0
+ if nplit >= 2 then
+ local t, n = { }, 0
+ for l=1,nplit do
+ local base = split[l]
+ local u = unicodes[base] or unicodevector[base]
+ if not u then
+ break
+ elseif type(u) == "table" then
+ n = n + 1
+ t[n] = u[1]
+ else
+ n = n + 1
+ t[n] = u
+ end
+ end
+ if n == 0 then -- done then
+ -- nothing
+ elseif n == 1 then
+ originals[index] = t[1]
+ tounicode[index] = tounicode16(t[1],name)
+ else
+ originals[index] = t
+ tounicode[index] = tounicode16sequence(t)
+ end
+ nl = nl + 1
+ unicode = true
+ else
+ -- skip: already checked and we don't want privates here
+ end
+ end
+ -- last resort (we might need to catch private here as well)
+ if not unicode or unicode == "" then
+ local foundcodes, multiple = lpegmatch(uparser,name)
+ if foundcodes then
+ if multiple then
+ originals[index] = foundcodes
+ tounicode[index] = tounicode16sequence(foundcodes,name)
+ nl = nl + 1
+ unicode = true
+ else
+ originals[index] = foundcodes
+ tounicode[index] = tounicode16(foundcodes,name)
+ ns = ns + 1
+ unicode = foundcodes
+ end
+ end
+ end
+ -- if not unicode then
+ -- originals[index] = 0xFFFD
+ -- tounicode[index] = "FFFD"
+ -- end
+ end
+ end
+ if trace_mapping then
+ for unic, glyph in table.sortedhash(descriptions) do
+ local name = glyph.name
+ local index = glyph.index
+ local toun = tounicode[index]
+ if toun then
+ report_fonts("internal slot %U, name %a, unicode %U, tounicode %a",index,name,unic,toun)
+ else
+ report_fonts("internal slot %U, name %a, unicode %U",index,name,unic)
+ end
+ end
+ end
+ if trace_loading and (ns > 0 or nl > 0) then
+ report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns)
+ end
+end
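
The helpers exported above can be exercised directly. The following is a minimal usage sketch (not part of the patch); it assumes a ConTeXt/LuaTeX run in which font-map.lua has been loaded, so that fonts.mappings is populated, and the glyph names mirror the commented tests in the module:

    -- minimal sketch, not part of the patch: exercises the exported mappings api
    local mappings = fonts.mappings
    print(mappings.tounicode16(0x00E9))                 -- "00E9", plain four digit hex below 0x10000
    -- tounicode16 and fromunicode16 are each other's inverse, so the
    -- round trip returns the original code point
    assert(mappings.fromunicode16(mappings.tounicode16(0x1F600)) == 0x1F600)
    -- glyph names following the uniXXXX convention resolve to numbers; the
    -- second return value signals a sequence of code points
    local parser = mappings.makenameparser()
    print(lpeg.match(parser,"uni1234"))                 -- 4660    false
    print(lpeg.match(parser,"uni00410042"))             -- (table) true
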
diff --git a/tex/context/base/font-mis.lua b/tex/context/base/font-mis.lua
index 83df65341..1915f7a82 100644
--- a/tex/context/base/font-mis.lua
+++ b/tex/context/base/font-mis.lua
@@ -1,111 +1,111 @@
-if not modules then modules = { } end modules ['font-mis'] = {
- version = 1.001,
- comment = "companion to mtx-fonts",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local next = next
-local lower, strip = string.lower, string.strip
-
--- also used in other scripts so we need to check some tables:
-
-fonts = fonts or { }
-
-fonts.helpers = fonts.helpers or { }
-local helpers = fonts.helpers
-
-fonts.handlers = fonts.handlers or { }
-local handlers = fonts.handlers
-
-handlers.otf = handlers.otf or { }
-local otf = handlers.otf
-
-otf.version = otf.version or 2.743
-otf.cache = otf.cache or containers.define("fonts", "otf", otf.version, true)
-
-function otf.loadcached(filename,format,sub)
- -- no recache when version mismatch
- local name = file.basename(file.removesuffix(filename))
- if sub == "" then sub = false end
- local hash = name
- if sub then
- hash = hash .. "-" .. sub
- end
- hash = containers.cleanname(hash)
- local data = containers.read(otf.cache, hash)
- if data and not data.verbose then
- otf.enhancers.unpack(data)
- return data
- else
- return nil
- end
-end
-
-local featuregroups = { "gsub", "gpos" }
-
-function fonts.helpers.getfeatures(name,t,script,language) -- maybe per font type
- local t = lower(t or (name and file.suffix(name)) or "")
- if t == "otf" or t == "ttf" or t == "ttc" or t == "dfont" then
- local filename = resolvers.findfile(name,t) or ""
- if filename ~= "" then
- local data = otf.loadcached(filename)
- if data and data.resources and data.resources.features then
- return data.resources.features
- else
- local ff = fontloader.open(filename)
- if ff then
- local data = fontloader.to_table(ff)
- fontloader.close(ff)
- local features = { }
- for k=1,#featuregroups do
- local what = featuregroups[k]
- local dw = data[what]
- if dw then
- local f = { }
- features[what] = f
- for i=1,#dw do
- local d = dw[i]
- local dfeatures = d.features
- if dfeatures then
- for i=1,#dfeatures do
- local df = dfeatures[i]
- local tag = strip(lower(df.tag))
- local ft = f[tag] if not ft then ft = {} f[tag] = ft end
- local dfscripts = df.scripts
- for i=1,#dfscripts do
- local ds = dfscripts[i]
- local scri = strip(lower(ds.script))
- local fts = ft[scri] if not fts then fts = {} ft[scri] = fts end
- local dslangs = ds.langs
- for i=1,#dslangs do
- local lang = dslangs[i]
- lang = strip(lower(lang))
- if scri == script then
- if lang == language then
- fts[lang] = 'sl'
- else
- fts[lang] = 's'
- end
- else
- if lang == language then
- fts[lang] = 'l'
- else
- fts[lang] = true
- end
- end
- end
- end
- end
- end
- end
- end
- end
- return features
- end
- end
- end
- end
- return nil, nil
-end
+if not modules then modules = { } end modules ['font-mis'] = {
+ version = 1.001,
+ comment = "companion to mtx-fonts",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local next = next
+local lower, strip = string.lower, string.strip
+
+-- also used in other scripts so we need to check some tables:
+
+fonts = fonts or { }
+
+fonts.helpers = fonts.helpers or { }
+local helpers = fonts.helpers
+
+fonts.handlers = fonts.handlers or { }
+local handlers = fonts.handlers
+
+handlers.otf = handlers.otf or { }
+local otf = handlers.otf
+
+otf.version = otf.version or 2.743
+otf.cache = otf.cache or containers.define("fonts", "otf", otf.version, true)
+
+function otf.loadcached(filename,format,sub)
+ -- no recache when version mismatch
+ local name = file.basename(file.removesuffix(filename))
+ if sub == "" then sub = false end
+ local hash = name
+ if sub then
+ hash = hash .. "-" .. sub
+ end
+ hash = containers.cleanname(hash)
+ local data = containers.read(otf.cache, hash)
+ if data and not data.verbose then
+ otf.enhancers.unpack(data)
+ return data
+ else
+ return nil
+ end
+end
+
+local featuregroups = { "gsub", "gpos" }
+
+function fonts.helpers.getfeatures(name,t,script,language) -- maybe per font type
+ local t = lower(t or (name and file.suffix(name)) or "")
+ if t == "otf" or t == "ttf" or t == "ttc" or t == "dfont" then
+ local filename = resolvers.findfile(name,t) or ""
+ if filename ~= "" then
+ local data = otf.loadcached(filename)
+ if data and data.resources and data.resources.features then
+ return data.resources.features
+ else
+ local ff = fontloader.open(filename)
+ if ff then
+ local data = fontloader.to_table(ff)
+ fontloader.close(ff)
+ local features = { }
+ for k=1,#featuregroups do
+ local what = featuregroups[k]
+ local dw = data[what]
+ if dw then
+ local f = { }
+ features[what] = f
+ for i=1,#dw do
+ local d = dw[i]
+ local dfeatures = d.features
+ if dfeatures then
+ for i=1,#dfeatures do
+ local df = dfeatures[i]
+ local tag = strip(lower(df.tag))
+ local ft = f[tag] if not ft then ft = {} f[tag] = ft end
+ local dfscripts = df.scripts
+ for i=1,#dfscripts do
+ local ds = dfscripts[i]
+ local scri = strip(lower(ds.script))
+ local fts = ft[scri] if not fts then fts = {} ft[scri] = fts end
+ local dslangs = ds.langs
+ for i=1,#dslangs do
+ local lang = dslangs[i]
+ lang = strip(lower(lang))
+ if scri == script then
+ if lang == language then
+ fts[lang] = 'sl'
+ else
+ fts[lang] = 's'
+ end
+ else
+ if lang == language then
+ fts[lang] = 'l'
+ else
+ fts[lang] = true
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ return features
+ end
+ end
+ end
+ end
+ return nil, nil
+end
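
For reference, a small usage sketch of the feature inspection above (not part of the patch); the font name and the script/language pair are arbitrary examples and the exact keys in the result depend on the font at hand:

    -- minimal sketch, not part of the patch: list the gsub features of a font
    local features = fonts.helpers.getfeatures("lmroman10-regular","otf","latn","dflt")
    if features and features.gsub then
        for tag, scripts in table.sortedhash(features.gsub) do
            -- values are 'sl' when both script and language match the request,
            -- 's' or 'l' for a partial match, and true otherwise
            print(tag, scripts.latn and scripts.latn.dflt)
        end
    end
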
diff --git a/tex/context/base/font-nod.lua b/tex/context/base/font-nod.lua
index f99130279..7c93e294c 100644
--- a/tex/context/base/font-nod.lua
+++ b/tex/context/base/font-nod.lua
@@ -1,434 +1,434 @@
-if not modules then modules = { } end modules ['font-nod'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-
-This is rather experimental. We need more control and some of this
-might become a runtime module instead. This module will be cleaned up!
---ldx]]--
-
-local tonumber, tostring = tonumber, tostring
-local utfchar = utf.char
-local concat = table.concat
-local match, gmatch, concat, rep = string.match, string.gmatch, table.concat, string.rep
-
-local report_nodes = logs.reporter("fonts","tracing")
-
-fonts = fonts or { }
-nodes = nodes or { }
-
-local fonts, nodes, node, context = fonts, nodes, node, context
-
-local tracers = nodes.tracers or { }
-nodes.tracers = tracers
-
-local tasks = nodes.tasks or { }
-nodes.tasks = tasks
-
-local handlers = nodes.handlers or { }
-nodes.handlers = handlers
-
-local injections = nodes.injections or { }
-nodes.injections = injections
-
-local char_tracers = tracers.characters or { }
-tracers.characters = char_tracers
-
-local step_tracers = tracers.steppers or { }
-tracers.steppers = step_tracers
-
-local copy_node_list = node.copy_list
-local hpack_node_list = node.hpack
-local free_node_list = node.flush_list
-local traverse_nodes = node.traverse
-
-local nodecodes = nodes.nodecodes
-local whatcodes = nodes.whatcodes
-
-local glyph_code = nodecodes.glyph
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local disc_code = nodecodes.disc
-local glue_code = nodecodes.glue
-local kern_code = nodecodes.kern
-local rule_code = nodecodes.rule
-local whatsit_code = nodecodes.whatsit
-local spec_code = nodecodes.glue_spec
-
-local localpar_code = whatcodes.localpar
-local dir_code = whatcodes.dir
-
-local nodepool = nodes.pool
-local new_glyph = nodepool.glyph
-
-local formatters = string.formatters
-local formatter = string.formatter
-
-local hashes = fonts.hashes
-
-local fontidentifiers = hashes.identifiers
-local fontdescriptions = hashes.descriptions
-local fontcharacters = hashes.characters
-local fontproperties = hashes.properties
-local fontparameters = hashes.parameters
-
-function char_tracers.collect(head,list,tag,n)
- n = n or 0
- local ok, fn = false, nil
- while head do
- local id = head.id
- if id == glyph_code then
- local f = head.font
- if f ~= fn then
- ok, fn = false, f
- end
- local c = head.char
- local i = fontidentifiers[f].indices[c] or 0
- if not ok then
- ok = true
- n = n + 1
- list[n] = list[n] or { }
- list[n][tag] = { }
- end
- local l = list[n][tag]
- l[#l+1] = { c, f, i }
- elseif id == disc_code then
- -- skip
- else
- ok = false
- end
- head = head.next
- end
-end
-
-function char_tracers.equal(ta, tb)
- if #ta ~= #tb then
- return false
- else
- for i=1,#ta do
- local a, b = ta[i], tb[i]
- if a[1] ~= b[1] or a[2] ~= b[2] or a[3] ~= b[3] then
- return false
- end
- end
- end
- return true
-end
-
-function char_tracers.string(t)
- local tt = { }
- for i=1,#t do
- tt[i] = utfchar(t[i][1])
- end
- return concat(tt,"")
-end
-
-local f_unicode = formatters["%U"]
-
-function char_tracers.unicodes(t,decimal)
- local tt = { }
- for i=1,#t do
- local n = t[i][1]
- if n == 0 then
- tt[i] = "-"
- elseif decimal then
- tt[i] = n
- else
- tt[i] = f_unicode(n)
- end
- end
- return concat(tt," ")
-end
-
-function char_tracers.indices(t,decimal)
- local tt = { }
- for i=1,#t do
- local n = t[i][3]
- if n == 0 then
- tt[i] = "-"
- elseif decimal then
- tt[i] = n
- else
- tt[i] = f_unicode(n)
- end
- end
- return concat(tt," ")
-end
-
-function char_tracers.start()
- local npc = handlers.characters
- local list = { }
- function handlers.characters(head)
- local n = #list
- char_tracers.collect(head,list,'before',n)
- local h, d = npc(head)
- char_tracers.collect(head,list,'after',n)
- if #list > n then
- list[#list+1] = { }
- end
- return h, d
- end
- function char_tracers.stop()
- tracers.list['characters'] = list
- local variables = {
- ['title'] = 'ConTeXt Character Processing Information',
- ['color-background-one'] = lmx.get('color-background-yellow'),
- ['color-background-two'] = lmx.get('color-background-purple'),
- }
- lmx.show('context-characters.lmx',variables)
- handlers.characters = npc
- tasks.restart("processors", "characters")
- end
- tasks.restart("processors", "characters")
-end
-
-local stack = { }
-
-function tracers.start(tag)
- stack[#stack+1] = tag
- local tracer = tracers[tag]
- if tracer and tracer.start then
- tracer.start()
- end
-end
-function tracers.stop()
- local tracer = stack[#stack]
- if tracer and tracer.stop then
- tracer.stop()
- end
- stack[#stack] = nil
-end
-
--- experimental
-
-local collection, collecting, messages = { }, false, { }
-
-function step_tracers.start()
- collecting = true
-end
-
-function step_tracers.stop()
- collecting = false
-end
-
-function step_tracers.reset()
- for i=1,#collection do
- local c = collection[i]
- if c then
- free_node_list(c)
- end
- end
- collection, messages = { }, { }
-end
-
-function step_tracers.nofsteps()
- return context(#collection)
-end
-
-function step_tracers.glyphs(n,i)
- local c = collection[i]
- if c then
- tex.box[n] = hpack_node_list(copy_node_list(c))
- end
-end
-
-function step_tracers.features()
- -- we cannot use first_glyph here as it only finds characters with subtype < 256
- local f = collection[1]
- while f do
- if f.id == glyph_code then
- local tfmdata, t = fontidentifiers[f.font], { }
- for feature, value in table.sortedhash(tfmdata.shared.features) do
- if feature == "number" or feature == "features" then
- -- private
- elseif type(value) == "boolean" then
- if value then
- t[#t+1] = formatters["%s=yes"](feature)
- else
- -- skip
- end
- else
- t[#t+1] = formatters["%s=%s"](feature,value)
- end
- end
- if #t > 0 then
- context(concat(t,", "))
- else
- context("no features")
- end
- return
- end
- f = f.next
- end
-end
-
-function tracers.fontchar(font,char)
- local n = new_glyph()
- n.font, n.char, n.subtype = font, char, 256
- context(n)
-end
-
-function step_tracers.font(command)
- local c = collection[1]
- while c do
- local id = c.id
- if id == glyph_code then
- local font = c.font
- local name = file.basename(fontproperties[font].filename or "unknown")
- local size = fontparameters[font].size or 0
- if command then
- context[command](font,name,size) -- size in sp
- else
- context("[%s: %s @ %p]",font,name,size)
- end
- return
- else
- c = c.next
- end
- end
-end
-
-function step_tracers.codes(i,command)
- local c = collection[i]
- while c do
- local id = c.id
- if id == glyph_code then
- if command then
- local f, c = c.font,c.char
- local d = fontdescriptions[f]
- local d = d and d[c]
- context[command](f,c,d and d.class or "")
- else
- context("[%s:U+%04X]",c.font,c.char)
- end
- elseif id == whatsit_code and (c.subtype == localpar_code or c.subtype == dir_code) then
- context("[%s]",c.dir)
- else
- context("[%s]",nodecodes[id])
- end
- c = c.next
- end
-end
-
-function step_tracers.messages(i,command,split)
- local list = messages[i] -- or { "no messages" }
- if list then
- for i=1,#list do
- local l = list[i]
- if not command then
- context("(%s)",l)
- elseif split then
- local a, b = match(l,"^(.-)%s*:%s*(.*)$")
- context[command](a or l or "",b or "")
- else
- context[command](l)
- end
- end
- end
-end
-
--- hooks into the node list processor (see otf)
-
-function step_tracers.check(head)
- if collecting then
- step_tracers.reset()
- local n = copy_node_list(head)
- injections.handler(n,nil,"trace",true)
- handlers.protectglyphs(n) -- can be option
- collection[1] = n
- end
-end
-
-function step_tracers.register(head)
- if collecting then
- local nc = #collection+1
- if messages[nc] then
- local n = copy_node_list(head)
- injections.handler(n,nil,"trace",true)
- handlers.protectglyphs(n) -- can be option
- collection[nc] = n
- end
- end
-end
-
-function step_tracers.message(str,...)
- str = formatter(str,...)
- if collecting then
- local n = #collection + 1
- local m = messages[n]
- if not m then m = { } messages[n] = m end
- m[#m+1] = str
- end
- return str -- saves an intermediate var in the caller
-end
-
---
-
-local threshold = 65536
-
-local function toutf(list,result,nofresult,stopcriterium)
- if list then
- for n in traverse_nodes(list) do
- local id = n.id
- if id == glyph_code then
- local components = n.components
- if components then
- result, nofresult = toutf(components,result,nofresult)
- else
- local c = n.char
- local fc = fontcharacters[n.font]
- if fc then
- local u = fc[c].tounicode
- if u then
- for s in gmatch(u,"....") do
- nofresult = nofresult + 1
- result[nofresult] = utfchar(tonumber(s,16))
- end
- else
- nofresult = nofresult + 1
- result[nofresult] = utfchar(c)
- end
- else
- nofresult = nofresult + 1
- result[nofresult] = utfchar(c)
- end
- end
- elseif id == disc_code then
- result, nofresult = toutf(n.replace,result,nofresult) -- needed?
- elseif id == hlist_code or id == vlist_code then
- -- if nofresult > 0 and result[nofresult] ~= " " then
- -- nofresult = nofresult + 1
- -- result[nofresult] = " "
- -- end
- result, nofresult = toutf(n.list,result,nofresult)
- elseif id == glue_code then
- if nofresult > 0 and result[nofresult] ~= " " then
- nofresult = nofresult + 1
- result[nofresult] = " "
- end
- elseif id == kern_code and n.kern > threshold then
- if nofresult > 0 and result[nofresult] ~= " " then
- nofresult = nofresult + 1
- result[nofresult] = " "
- end
- end
- if n == stopcriterium then
- break
- end
- end
- end
- if nofresult > 0 and result[nofresult] == " " then
- result[nofresult] = nil
- nofresult = nofresult - 1
- end
- return result, nofresult
-end
-
-function nodes.toutf(list,stopcriterium)
- local result, nofresult = toutf(list,{},0,stopcriterium)
- return concat(result)
-end
+if not modules then modules = { } end modules ['font-nod'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[ldx--
+
+This is rather experimental. We need more control and some of this
+might become a runtime module instead. This module will be cleaned up!
+--ldx]]--
+
+local tonumber, tostring = tonumber, tostring
+local utfchar = utf.char
+local concat = table.concat
+local match, gmatch, concat, rep = string.match, string.gmatch, table.concat, string.rep
+
+local report_nodes = logs.reporter("fonts","tracing")
+
+fonts = fonts or { }
+nodes = nodes or { }
+
+local fonts, nodes, node, context = fonts, nodes, node, context
+
+local tracers = nodes.tracers or { }
+nodes.tracers = tracers
+
+local tasks = nodes.tasks or { }
+nodes.tasks = tasks
+
+local handlers = nodes.handlers or { }
+nodes.handlers = handlers
+
+local injections = nodes.injections or { }
+nodes.injections = injections
+
+local char_tracers = tracers.characters or { }
+tracers.characters = char_tracers
+
+local step_tracers = tracers.steppers or { }
+tracers.steppers = step_tracers
+
+local copy_node_list = node.copy_list
+local hpack_node_list = node.hpack
+local free_node_list = node.flush_list
+local traverse_nodes = node.traverse
+
+local nodecodes = nodes.nodecodes
+local whatcodes = nodes.whatcodes
+
+local glyph_code = nodecodes.glyph
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local disc_code = nodecodes.disc
+local glue_code = nodecodes.glue
+local kern_code = nodecodes.kern
+local rule_code = nodecodes.rule
+local whatsit_code = nodecodes.whatsit
+local spec_code = nodecodes.glue_spec
+
+local localpar_code = whatcodes.localpar
+local dir_code = whatcodes.dir
+
+local nodepool = nodes.pool
+local new_glyph = nodepool.glyph
+
+local formatters = string.formatters
+local formatter = string.formatter
+
+local hashes = fonts.hashes
+
+local fontidentifiers = hashes.identifiers
+local fontdescriptions = hashes.descriptions
+local fontcharacters = hashes.characters
+local fontproperties = hashes.properties
+local fontparameters = hashes.parameters
+
+function char_tracers.collect(head,list,tag,n)
+ n = n or 0
+ local ok, fn = false, nil
+ while head do
+ local id = head.id
+ if id == glyph_code then
+ local f = head.font
+ if f ~= fn then
+ ok, fn = false, f
+ end
+ local c = head.char
+ local i = fontidentifiers[f].indices[c] or 0
+ if not ok then
+ ok = true
+ n = n + 1
+ list[n] = list[n] or { }
+ list[n][tag] = { }
+ end
+ local l = list[n][tag]
+ l[#l+1] = { c, f, i }
+ elseif id == disc_code then
+ -- skip
+ else
+ ok = false
+ end
+ head = head.next
+ end
+end
+
+function char_tracers.equal(ta, tb)
+ if #ta ~= #tb then
+ return false
+ else
+ for i=1,#ta do
+ local a, b = ta[i], tb[i]
+ if a[1] ~= b[1] or a[2] ~= b[2] or a[3] ~= b[3] then
+ return false
+ end
+ end
+ end
+ return true
+end
+
+function char_tracers.string(t)
+ local tt = { }
+ for i=1,#t do
+ tt[i] = utfchar(t[i][1])
+ end
+ return concat(tt,"")
+end
+
+local f_unicode = formatters["%U"]
+
+function char_tracers.unicodes(t,decimal)
+ local tt = { }
+ for i=1,#t do
+ local n = t[i][1]
+ if n == 0 then
+ tt[i] = "-"
+ elseif decimal then
+ tt[i] = n
+ else
+ tt[i] = f_unicode(n)
+ end
+ end
+ return concat(tt," ")
+end
+
+function char_tracers.indices(t,decimal)
+ local tt = { }
+ for i=1,#t do
+ local n = t[i][3]
+ if n == 0 then
+ tt[i] = "-"
+ elseif decimal then
+ tt[i] = n
+ else
+ tt[i] = f_unicode(n)
+ end
+ end
+ return concat(tt," ")
+end
+
+function char_tracers.start()
+ local npc = handlers.characters
+ local list = { }
+ function handlers.characters(head)
+ local n = #list
+ char_tracers.collect(head,list,'before',n)
+ local h, d = npc(head)
+ char_tracers.collect(head,list,'after',n)
+ if #list > n then
+ list[#list+1] = { }
+ end
+ return h, d
+ end
+ function char_tracers.stop()
+ tracers.list['characters'] = list
+ local variables = {
+ ['title'] = 'ConTeXt Character Processing Information',
+ ['color-background-one'] = lmx.get('color-background-yellow'),
+ ['color-background-two'] = lmx.get('color-background-purple'),
+ }
+ lmx.show('context-characters.lmx',variables)
+ handlers.characters = npc
+ tasks.restart("processors", "characters")
+ end
+ tasks.restart("processors", "characters")
+end
+
+local stack = { }
+
+function tracers.start(tag)
+ stack[#stack+1] = tag
+ local tracer = tracers[tag]
+ if tracer and tracer.start then
+ tracer.start()
+ end
+end
+function tracers.stop()
+ local tracer = stack[#stack]
+ if tracer and tracer.stop then
+ tracer.stop()
+ end
+ stack[#stack] = nil
+end
+
+-- experimental
+
+local collection, collecting, messages = { }, false, { }
+
+function step_tracers.start()
+ collecting = true
+end
+
+function step_tracers.stop()
+ collecting = false
+end
+
+function step_tracers.reset()
+ for i=1,#collection do
+ local c = collection[i]
+ if c then
+ free_node_list(c)
+ end
+ end
+ collection, messages = { }, { }
+end
+
+function step_tracers.nofsteps()
+ return context(#collection)
+end
+
+function step_tracers.glyphs(n,i)
+ local c = collection[i]
+ if c then
+ tex.box[n] = hpack_node_list(copy_node_list(c))
+ end
+end
+
+function step_tracers.features()
+ -- we cannot use first_glyph here as it only finds characters with subtype < 256
+ local f = collection[1]
+ while f do
+ if f.id == glyph_code then
+ local tfmdata, t = fontidentifiers[f.font], { }
+ for feature, value in table.sortedhash(tfmdata.shared.features) do
+ if feature == "number" or feature == "features" then
+ -- private
+ elseif type(value) == "boolean" then
+ if value then
+ t[#t+1] = formatters["%s=yes"](feature)
+ else
+ -- skip
+ end
+ else
+ t[#t+1] = formatters["%s=%s"](feature,value)
+ end
+ end
+ if #t > 0 then
+ context(concat(t,", "))
+ else
+ context("no features")
+ end
+ return
+ end
+ f = f.next
+ end
+end
+
+function tracers.fontchar(font,char)
+ local n = new_glyph()
+ n.font, n.char, n.subtype = font, char, 256
+ context(n)
+end
+
+function step_tracers.font(command)
+ local c = collection[1]
+ while c do
+ local id = c.id
+ if id == glyph_code then
+ local font = c.font
+ local name = file.basename(fontproperties[font].filename or "unknown")
+ local size = fontparameters[font].size or 0
+ if command then
+ context[command](font,name,size) -- size in sp
+ else
+ context("[%s: %s @ %p]",font,name,size)
+ end
+ return
+ else
+ c = c.next
+ end
+ end
+end
+
+function step_tracers.codes(i,command)
+ local c = collection[i]
+ while c do
+ local id = c.id
+ if id == glyph_code then
+ if command then
+ local f, c = c.font,c.char
+ local d = fontdescriptions[f]
+ local d = d and d[c]
+ context[command](f,c,d and d.class or "")
+ else
+ context("[%s:U+%04X]",c.font,c.char)
+ end
+ elseif id == whatsit_code and (c.subtype == localpar_code or c.subtype == dir_code) then
+ context("[%s]",c.dir)
+ else
+ context("[%s]",nodecodes[id])
+ end
+ c = c.next
+ end
+end
+
+function step_tracers.messages(i,command,split)
+ local list = messages[i] -- or { "no messages" }
+ if list then
+ for i=1,#list do
+ local l = list[i]
+ if not command then
+ context("(%s)",l)
+ elseif split then
+ local a, b = match(l,"^(.-)%s*:%s*(.*)$")
+ context[command](a or l or "",b or "")
+ else
+ context[command](l)
+ end
+ end
+ end
+end
+
+-- hooks into the node list processor (see otf)
+
+function step_tracers.check(head)
+ if collecting then
+ step_tracers.reset()
+ local n = copy_node_list(head)
+ injections.handler(n,nil,"trace",true)
+ handlers.protectglyphs(n) -- can be option
+ collection[1] = n
+ end
+end
+
+function step_tracers.register(head)
+ if collecting then
+ local nc = #collection+1
+ if messages[nc] then
+ local n = copy_node_list(head)
+ injections.handler(n,nil,"trace",true)
+ handlers.protectglyphs(n) -- can be option
+ collection[nc] = n
+ end
+ end
+end
+
+function step_tracers.message(str,...)
+ str = formatter(str,...)
+ if collecting then
+ local n = #collection + 1
+ local m = messages[n]
+ if not m then m = { } messages[n] = m end
+ m[#m+1] = str
+ end
+ return str -- saves an intermediate var in the caller
+end
+
+--
+
+local threshold = 65536
+
+local function toutf(list,result,nofresult,stopcriterium)
+ if list then
+ for n in traverse_nodes(list) do
+ local id = n.id
+ if id == glyph_code then
+ local components = n.components
+ if components then
+ result, nofresult = toutf(components,result,nofresult)
+ else
+ local c = n.char
+ local fc = fontcharacters[n.font]
+ if fc then
+ local u = fc[c].tounicode
+ if u then
+ for s in gmatch(u,"....") do
+ nofresult = nofresult + 1
+ result[nofresult] = utfchar(tonumber(s,16))
+ end
+ else
+ nofresult = nofresult + 1
+ result[nofresult] = utfchar(c)
+ end
+ else
+ nofresult = nofresult + 1
+ result[nofresult] = utfchar(c)
+ end
+ end
+ elseif id == disc_code then
+ result, nofresult = toutf(n.replace,result,nofresult) -- needed?
+ elseif id == hlist_code or id == vlist_code then
+ -- if nofresult > 0 and result[nofresult] ~= " " then
+ -- nofresult = nofresult + 1
+ -- result[nofresult] = " "
+ -- end
+ result, nofresult = toutf(n.list,result,nofresult)
+ elseif id == glue_code then
+ if nofresult > 0 and result[nofresult] ~= " " then
+ nofresult = nofresult + 1
+ result[nofresult] = " "
+ end
+ elseif id == kern_code and n.kern > threshold then
+ if nofresult > 0 and result[nofresult] ~= " " then
+ nofresult = nofresult + 1
+ result[nofresult] = " "
+ end
+ end
+ if n == stopcriterium then
+ break
+ end
+ end
+ end
+ if nofresult > 0 and result[nofresult] == " " then
+ result[nofresult] = nil
+ nofresult = nofresult - 1
+ end
+ return result, nofresult
+end
+
+function nodes.toutf(list,stopcriterium)
+ local result, nofresult = toutf(list,{},0,stopcriterium)
+ return concat(result)
+end
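
The toutf helper can be used to recover the text of already typeset material. A minimal sketch (not part of the patch), under the assumption that box 0 holds something in the current run:

    -- minimal sketch, not part of the patch: dump the utf text of box 0
    local b = tex.box[0]
    if b then
        -- nodes.toutf walks glyphs (via their tounicode data), discs, glue
        -- and large kerns and reassembles a utf string
        print(nodes.toutf(b.list))
    end
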
diff --git a/tex/context/base/font-odk.lua b/tex/context/base/font-odk.lua
index c34efc120..3ed562348 100644
--- a/tex/context/base/font-odk.lua
+++ b/tex/context/base/font-odk.lua
@@ -1,904 +1,904 @@
--- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
--- We keep the original around for a while so that we can check it --
--- when the above code does it wrong (data tables are not included). --
--- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
-
--- author : Kai Eigner, TAT Zetwerk
--- copyright : TAT Zetwerk
--- comment : see font-odv.lua for current implementation
-
--- local state = attributes.private('state')
--- local sylnr = attributes.private('syllabe')
---
--- local function install_dev(tfmdata)
--- local features = tfmdata.resources.features
--- local sequences = tfmdata.resources.sequences
---
--- local insertpos = 1
--- for s=1,#sequences do -- classify chars
--- for k in pairs(basic_shaping_forms) do
--- if sequences[s].features and ( sequences[s].features[k] or sequences[s].features.locl ) then insertpos = s + 1 end
--- end
--- end
---
--- features.gsub["dev2_reorder_matras"] = { ["dev2"] = { ["dflt"] = true } }
--- features.gsub["dev2_reorder_reph"] = { ["dev2"] = { ["dflt"] = true } }
--- features.gsub["dev2_reorder_pre_base_reordering_consonants"] = { ["dev2"] = { ["dflt"] = true } }
--- features.gsub["remove_joiners"] = { ["deva"] = { ["dflt"] = true }, ["dev2"] = { ["dflt"] = true } }
---
--- local sequence_dev2_reorder_matras = {
--- chain = 0,
--- features = { dev2_reorder_matras = { dev2 = { dflt = true } } },
--- flags = { false, false, false, false },
--- name = "dev2_reorder_matras",
--- subtables = { "dev2_reorder_matras" },
--- type = "dev2_reorder_matras",
--- }
--- local sequence_dev2_reorder_reph = {
--- chain = 0,
--- features = { dev2_reorder_reph = { dev2 = { dflt = true } } },
--- flags = { false, false, false, false },
--- name = "dev2_reorder_reph",
--- subtables = { "dev2_reorder_reph" },
--- type = "dev2_reorder_reph",
--- }
--- local sequence_dev2_reorder_pre_base_reordering_consonants = {
--- chain = 0,
--- features = { dev2_reorder_pre_base_reordering_consonants = { dev2 = { dflt = true } } },
--- flags = { false, false, false, false },
--- name = "dev2_reorder_pre_base_reordering_consonants",
--- subtables = { "dev2_reorder_pre_base_reordering_consonants" },
--- type = "dev2_reorder_pre_base_reordering_consonants",
--- }
--- local sequence_remove_joiners = {
--- chain = 0,
--- features = { remove_joiners = { deva = { dflt = true }, dev2 = { dflt = true } } },
--- flags = { false, false, false, false },
--- name = "remove_joiners",
--- subtables = { "remove_joiners" },
--- type = "remove_joiners",
--- }
--- table.insert(sequences, insertpos, sequence_dev2_reorder_pre_base_reordering_consonants)
--- table.insert(sequences, insertpos, sequence_dev2_reorder_reph)
--- table.insert(sequences, insertpos, sequence_dev2_reorder_matras)
--- table.insert(sequences, insertpos, sequence_remove_joiners)
--- end
---
--- local function deva_reorder(head,start,stop,font,attr)
--- local tfmdata = fontdata[font]
--- local lookuphash = tfmdata.resources.lookuphash
--- local sequences = tfmdata.resources.sequences
---
--- if not lookuphash["remove_joiners"] then install_dev(tfmdata) end --install Devanagari-features
---
--- local sharedfeatures = tfmdata.shared.features
--- sharedfeatures["remove_joiners"] = true
--- local datasets = otf.dataset(tfmdata,font,attr)
---
--- lookuphash["remove_joiners"] = { [0x200C] = true, [0x200D] = true }
---
--- local current, n, base, firstcons, lastcons, basefound = start, start.next, nil, nil, nil, false
--- local reph, vattu = false, false
--- for s=1,#sequences do
--- local dataset = datasets[s]
--- featurevalue = dataset and dataset[1]
--- if featurevalue and dataset[4] == "rphf" then reph = true end
--- if featurevalue and dataset[4] == "blwf" then vattu = true end
--- end
--- if ra[start.char] and halant[n.char] and reph then -- if syllable starts with Ra + H and script has 'Reph' then exclude Reph from candidates for base consonants
--- if n == stop then return head, stop end
--- if zwj[n.next.char] then
--- current = start
--- else
--- current = n.next
--- set_attribute(start,state,5) -- rphf
--- end
--- end
---
--- if nbsp[current.char] then --Stand Alone cluster
--- if current == stop then
--- stop = stop.prev
--- head = node.remove(head, current)
--- node.free(current)
--- return head, stop
--- else
--- base, firstcons, lastcons = current, current, current
--- current = current.next
--- if current ~= stop then
--- if nukta[current.char] then current = current.next end
--- if zwj[current.char] then
--- if current ~= stop and current.next ~= stop and halant[current.next.char] then
--- current = current.next
--- local tmp = current.next.next
--- local changestop = current.next == stop
--- local tempcurrent = node.copy(current.next)
--- tempcurrent.next = node.copy(current)
--- tempcurrent.next.prev = tempcurrent
--- set_attribute(tempcurrent,state,8) --blwf
--- tempcurrent = nodes.handlers.characters(tempcurrent)
--- unset_attribute(tempcurrent,state)
--- if current.next.char == tempcurrent.char then
--- node.flush_list(tempcurrent)
--- local n = node.copy(current)
--- current.char = dotted_circle
--- head = node.insert_after(head, current, n)
--- else
--- current.char = tempcurrent.char -- (assumes that result of blwf consists of one node)
--- local freenode = current.next
--- current.next = tmp
--- tmp.prev = current
--- node.free(freenode)
--- node.flush_list(tempcurrent)
--- if changestop then stop = current end
--- end
--- end
--- end
--- end
--- end
--- end
---
--- while not basefound do -- find base consonant
--- if consonant[current.char] then
--- set_attribute(current, state, 6) -- half
--- if not firstcons then firstcons = current end
--- lastcons = current
--- if not base then
--- base = current
--- else --check whether consonant has below-base (or post-base) form
--- local baseform = true
--- for s=1,#sequences do
--- local sequence = sequences[s]
--- local dataset = datasets[s]
--- featurevalue = dataset and dataset[1]
--- if featurevalue and dataset[4] == "blwf" then
--- local subtables = sequence.subtables
--- for i=1,#subtables do
--- local lookupname = subtables[i]
--- local lookupcache = lookuphash[lookupname]
--- if lookupcache then
--- local lookupmatch = lookupcache[current.char]
--- if lookupmatch then
--- set_attribute(current, state, 8) -- blwf
--- baseform = false
--- end
--- end
--- end
--- end
--- end
--- if baseform then base = current end
--- end
--- end
--- basefound = current == stop
--- current = current.next
--- end
--- if base ~= lastcons then -- if base consonant is not last one then move halant from base consonant to last one
--- n = base.next
--- if nukta[n.char] then n = n.next end
--- if halant[n.char] then
--- if lastcons ~= stop then
--- local ln = lastcons.next
--- if nukta[ln.char] then lastcons = ln end
--- end
--- local np, nn, ln = n.prev, n.next, lastcons.next
--- np.next = n.next
--- nn.prev = n.prev
--- lastcons.next = n
--- if ln then ln.prev = n end
--- n.next = ln
--- n.prev = lastcons
--- if lastcons == stop then stop = n end
--- end
--- end
---
--- n = start.next
--- if ra[start.char] and halant[n.char] and not ( n ~= stop and ( zwj[n.next.char] or zwnj[n.next.char] ) ) then -- if syllable starts with Ra + H then move this combination so that it follows either: the post-base 'matra' (if any) or the base consonant
--- local matra = base
--- if base ~= stop and dependent_vowel[base.next.char] then matra = base.next end
--- local sp, nn, mn = start.prev, n.next, matra.next
--- if sp then sp.next = nn end
--- nn.prev = sp
--- matra.next = start
--- start.prev = matra
--- n.next = mn
--- if mn then mn.prev = n end
--- if head == start then head = nn end
--- start = nn
--- if matra == stop then stop = n end
--- end
---
--- local current = start
--- while current ~= stop do
--- if halant[current.next.char] and current.next ~= stop and zwnj[current.next.next.char] then unset_attribute(current, state) end
--- current = current.next
--- end
---
--- if has_attribute(base, state) and base ~= stop and halant[base.next.char] and not ( base.next ~= stop and zwj[base.next.next.char] ) then unset_attribute(base, state) end
---
--- local current, allreordered, moved = start, false, { [base] = true }
--- local a, b, p, bn = base, base, base, base.next
--- if base ~= stop and nukta[bn.char] then a, b, p = bn, bn, bn end
--- while not allreordered do
--- local c, n, l = current, current.next, nil --current is always consonant
--- if c ~= stop and nukta[n.char] then c = n n = n.next end
--- if c ~= stop and halant[n.char] then c = n n = n.next end
--- while c ~= stop and dependent_vowel[n.char] do c = n n = n.next end
--- if c ~= stop and vowel_modifier[n.char] then c = n n = n.next end
--- if c ~= stop and stress_tone_mark[n.char] then c = n n = n.next end
--- local bp, cn = firstcons.prev, current.next
--- while cn ~= c.next do -- move pre-base matras...
--- if pre_mark[cn.char] then
--- if bp then bp.next = cn end
--- cn.prev.next = cn.next
--- if cn.next then cn.next.prev = cn.prev end
--- if cn == stop then stop = cn.prev end
--- cn.prev = bp
--- cn.next = firstcons
--- firstcons.prev = cn
--- if firstcons == start then
--- if head == start then head = cn end
--- start = cn
--- end
--- break
--- end
--- cn = cn.next
--- end
--- allreordered = c == stop
--- current = c.next
--- end
---
--- if reph or vattu then
--- local current, cns = start, nil
--- while current ~= stop do
--- local c, n = current, current.next
--- if ra[current.char] and halant[n.char] then
--- c, n = n, n.next
--- local b, bn = base, base
--- while bn ~= stop do
--- if dependent_vowel[bn.next.char] then b = bn.next end
--- bn = bn.next
--- end
--- if has_attribute(current,state,attribute) == 5 then -- position Reph (Ra + H) after post-base 'matra' (if any) since these become marks on the 'matra', not on the base glyph
--- if b ~= current then
--- if current == start then
--- if head == start then head = n end
--- start = n
--- end
--- if b == stop then stop = c end
--- if current.prev then current.prev.next = n end
--- if n then n.prev = current.prev end
--- c.next = b.next
--- if b.next then b.next.prev = c end
--- b.next = current
--- current.prev = b
--- end
--- elseif cns and cns.next ~= current then -- position below-base Ra (vattu) following the consonants on which it is placed (either the base consonant or one of the pre-base consonants)
--- local cp, cnsn = current.prev, cns.next
--- if cp then cp.next = n end
--- if n then n.prev = cp end
--- cns.next = current
--- current.prev = cns
--- c.next = cnsn
--- if cnsn then cnsn.prev = c end
--- if c == stop then stop = cp break end
--- current = n.prev
--- end
--- elseif consonant[current.char] or nbsp[current.char] then
--- cns = current
--- if halant[cns.next.char] then cns = cns.next end
--- end
--- current = current.next
--- end
--- end
---
--- if nbsp[base.char] then
--- head = node.remove(head, base)
--- node.free(base)
--- end
---
--- return head, stop
--- end
---
--- function dev2_reorder_matras(start,kind,lookupname,replacement)
--- local current = start
--- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do
--- if halant[current.char] and not has_attribute(current, state) then
--- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == start.font and has_attribute(current.next, sylnr) == has_attribute(start, sylnr) and ( zwj[current.next.char] or zwnj[current.next.char] ) then current = current.next end
--- local sn = start.next
--- start.next.prev = start.prev
--- if start.prev then start.prev.next = start.next end
--- if current.next then current.next.prev = start end
--- start.next = current.next
--- current.next = start
--- start.prev = current
--- start = sn
--- break
--- end
--- current = current.next
--- end
--- return start, true
--- end
---
--- function dev2_reorder_reph(start,kind,lookupname,replacement)
--- local current, sn = start.next, nil
--- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do --step 2
--- if halant[current.char] and not has_attribute(current, state) then
--- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == start.font and has_attribute(current.next, sylnr) == has_attribute(start, sylnr) and ( zwj[current.next.char] or zwnj[current.next.char] ) then current = current.next end
--- sn = start.next
--- start.next.prev = start.prev
--- if start.prev then start.prev.next = start.next end
--- if current.next then current.next.prev = start end
--- start.next = current.next
--- current.next = start
--- start.prev = current
--- start = sn
--- break
--- end
--- current = current.next
--- end
--- if not sn then
--- current = start.next
--- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do --step 4
--- if has_attribute(current, state) == 9 then --post-base
--- sn = start.next
--- start.next.prev = start.prev
--- if start.prev then start.prev.next = start.next end
--- start.prev = current.prev
--- current.prev.next = start
--- start.next = current
--- current.prev = start
--- start = sn
--- break
--- end
--- current = current.next
--- end
--- end
--- if not sn then
--- current = start.next
--- local c = nil
--- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do --step 5
--- if not c and ( above_mark[current.char] or below_mark[current.char] or post_mark[current.char] ) and ReorderClass[current.char] ~= "after subscript" then c = current end
--- current = current.next
--- end
--- if c then
--- sn = start.next
--- start.next.prev = start.prev
--- if start.prev then start.prev.next = start.next end
--- start.prev = c.prev
--- c.prev.next = start
--- start.next = c
--- c.prev = start
--- start = sn
--- end
--- end
--- if not sn then
--- current = start
--- while current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == start.font and has_attribute(current.next, sylnr) == has_attribute(start, sylnr) do --step 6
--- current = current.next
--- end
--- if start ~= current then
--- sn = start.next
--- start.next.prev = start.prev
--- if start.prev then start.prev.next = start.next end
--- if current.next then current.next.prev = start end
--- start.next = current.next
--- current.next = start
--- start.prev = current
--- start = sn
--- end
--- end
--- return start, true
--- end
---
--- function dev2_reorder_pre_base_reordering_consonants(start,kind,lookupname,replacement)
--- local current, sn = start, nil
--- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do
--- if halant[current.char] and not has_attribute(current, state) then
--- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == start.font and has_attribute(current.next, sylnr) == has_attribute(start, sylnr) and ( zwj[current.next.char] or zwnj[current.next.char] ) then current = current.next end
--- sn = start.next
--- start.next.prev = start.prev
--- if start.prev then start.prev.next = start.next end
--- if current.next then current.next.prev = start end
--- start.next = current.next
--- current.next = start
--- start.prev = current
--- start = sn
--- break
--- end
--- current = current.next
--- end
--- if not sn then
--- current = start.next
--- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do
--- if not consonant[current.char] and has_attribute(current, state) then --main
--- sn = start.next
--- start.next.prev = start.prev
--- if start.prev then start.prev.next = start.next end
--- start.prev = current.prev
--- current.prev.next = start
--- start.next = current
--- current.prev = start
--- start = sn
--- break
--- end
--- current = current.next
--- end
--- end
--- return start, true
--- end
---
--- function remove_joiners(start,kind,lookupname,replacement)
--- local stop = start.next
--- while stop and stop.id == glyph and stop.subtype<256 and stop.font == start.font and (zwj[stop.char] or zwnj[stop.char]) do stop = stop.next end
--- if stop then stop.prev.next = nil stop.prev = start.prev end
--- if start.prev then start.prev.next = stop end
--- node.flush_list(start)
--- return stop, true
--- end
---
--- local function dev2_reorder(head,start,stop,font,attr)
--- local tfmdata = fontdata[font]
--- local lookuphash = tfmdata.resources.lookuphash
--- local sequences = tfmdata.resources.sequences
---
--- if not lookuphash["remove_joiners"] then install_dev(tfmdata) end --install Devanagari-features
---
--- local sharedfeatures = tfmdata.shared.features
--- sharedfeatures["dev2_reorder_matras"] = true
--- sharedfeatures["dev2_reorder_reph"] = true
--- sharedfeatures["dev2_reorder_pre_base_reordering_consonants"] = true
--- sharedfeatures["remove_joiners"] = true
--- local datasets = otf.dataset(tfmdata,font,attr)
---
--- local reph, pre_base_reordering_consonants = false, nil
--- local halfpos, basepos, subpos, postpos = nil, nil, nil, nil
--- local locl = { }
---
--- for s=1,#sequences do -- classify chars
--- local sequence = sequences[s]
--- local dataset = datasets[s]
--- featurevalue = dataset and dataset[1]
--- if featurevalue and dataset[4] then
--- local subtables = sequence.subtables
--- for i=1,#subtables do
--- local lookupname = subtables[i]
--- local lookupcache = lookuphash[lookupname]
--- if lookupcache then
--- if dataset[4] == "rphf" then
--- if dataset[3] ~= 0 then --rphf is result of chain
--- else
--- reph = lookupcache[0x0930] and lookupcache[0x0930][0x094D] and lookupcache[0x0930][0x094D]["ligature"]
--- end
--- end
--- if dataset[4] == "pref" and not pre_base_reordering_consonants then
--- for k, v in pairs(lookupcache[0x094D]) do
--- pre_base_reordering_consonants[k] = v and v["ligature"] --ToDo: reph might also be result of chain
--- end
--- end
--- local current = start
--- while current ~= stop.next do
--- if dataset[4] == "locl" then locl[current] = lookupcache[current.char] end --ToDo: locl might also be result of chain
--- if current ~= stop then
--- local c, n = locl[current] or current.char, locl[current.next] or current.next.char
--- if dataset[4] == "rphf" and lookupcache[c] and lookupcache[c][n] then --above-base: rphf Consonant + Halant
--- if current.next ~= stop and ( zwj[current.next.next.char] or zwnj[current.next.next.char] ) then --ZWJ and ZWNJ prevent creation of reph
--- current = current.next
--- elseif current == start then
--- set_attribute(current,state,5)
--- end
--- current = current.next
--- end
--- if dataset[4] == "half" and lookupcache[c] and lookupcache[c][n] then --half forms: half Consonant + Halant
--- if current.next ~= stop and zwnj[current.next.next.char] then --ZWNJ prevent creation of half
--- current = current.next
--- else
--- set_attribute(current,state,6)
--- if not halfpos then halfpos = current end
--- end
--- current = current.next
--- end
--- if dataset[4] == "pref" and lookupcache[c] and lookupcache[c][n] then --pre-base: pref Halant + Consonant
--- set_attribute(current,state,7)
--- set_attribute(current.next,state,7)
--- current = current.next
--- end
--- if dataset[4] == "blwf" and lookupcache[c] and lookupcache[c][n] then --below-base: blwf Halant + Consonant
--- set_attribute(current,state,8)
--- set_attribute(current.next,state,8)
--- current = current.next
--- subpos = current
--- end
--- if dataset[4] == "pstf" and lookupcache[c] and lookupcache[c][n] then --post-base: pstf Halant + Consonant
--- set_attribute(current,state,9)
--- set_attribute(current.next,state,9)
--- current = current.next
--- postpos = current
--- end
--- end
--- current = current.next
--- end
--- end
--- end
--- end
--- end
---
--- lookuphash["dev2_reorder_matras"] = pre_mark
--- lookuphash["dev2_reorder_reph"] = { [reph] = true }
--- lookuphash["dev2_reorder_pre_base_reordering_consonants"] = pre_base_reordering_consonants or { }
--- lookuphash["remove_joiners"] = { [0x200C] = true, [0x200D] = true }
---
--- local current, base, firstcons = start, nil, nil
--- if has_attribute(start,state) == 5 then current = start.next.next end -- if syllable starts with Ra + H and script has 'Reph' then exclude Reph from candidates for base consonants
---
--- if current ~= stop.next and nbsp[current.char] then --Stand Alone cluster
--- if current == stop then
--- stop = stop.prev
--- head = node.remove(head, current)
--- node.free(current)
--- return head, stop
--- else
--- base = current
--- current = current.next
--- if current ~= stop then
--- if nukta[current.char] then current = current.next end
--- if zwj[current.char] then
--- if current ~= stop and current.next ~= stop and halant[current.next.char] then
--- current = current.next
--- local tmp = current.next.next
--- local changestop = current.next == stop
--- current.next.next = nil
--- set_attribute(current,state,7) --pref
--- current = nodes.handlers.characters(current)
--- set_attribute(current,state,8) --blwf
--- current = nodes.handlers.characters(current)
--- set_attribute(current,state,9) --pstf
--- current = nodes.handlers.characters(current)
--- unset_attribute(current,state)
--- if halant[current.char] then
--- current.next.next = tmp
--- local nc = node.copy(current)
--- current.char = dotted_circle
--- head = node.insert_after(head, current, nc)
--- else
--- current.next = tmp -- (assumes that result of pref, blwf, or pstf consists of one node)
--- if changestop then stop = current end
--- end
--- end
--- end
--- end
--- end
--- else --not Stand Alone cluster
--- while current ~= stop.next do -- find base consonant
--- if consonant[current.char] and not ( current ~= stop and halant[current.next.char] and current.next ~= stop and zwj[current.next.next.char] ) then
--- if not firstcons then firstcons = current end
--- if not ( has_attribute(current, state) == 7 or has_attribute(current, state) == 8 or has_attribute(current, state) == 9 ) then base = current end --check whether consonant has below-base or post-base form or is pre-base reordering Ra
--- end
--- current = current.next
--- end
--- if not base then
--- base = firstcons
--- end
--- end
---
--- if not base then
--- if has_attribute(start, state) == 5 then unset_attribute(start, state) end
--- return head, stop
--- else
--- if has_attribute(base, state) then unset_attribute(base, state) end
--- basepos = base
--- end
--- if not halfpos then halfpos = base end
--- if not subpos then subpos = base end
--- if not postpos then postpos = subpos or base end
---
--- --Matra characters are classified and reordered by which consonant in a conjunct they have affinity for
--- local moved = { }
--- current = start
--- while current ~= stop.next do
--- local char, target, cn = locl[current] or current.char, nil, current.next
--- if not moved[current] and dependent_vowel[char] then
--- if pre_mark[char] then -- Before first half form in the syllable
--- moved[current] = true
--- if current.prev then current.prev.next = current.next end
--- if current.next then current.next.prev = current.prev end
--- if current == stop then stop = current.prev end
--- if halfpos == start then
--- if head == start then head = current end
--- start = current
--- end
--- if halfpos.prev then halfpos.prev.next = current end
--- current.prev = halfpos.prev
--- halfpos.prev = current
--- current.next = halfpos
--- halfpos = current
--- elseif above_mark[char] then -- After main consonant
--- target = basepos
--- if subpos == basepos then subpos = current end
--- if postpos == basepos then postpos = current end
--- basepos = current
--- elseif below_mark[char] then -- After subjoined consonants
--- target = subpos
--- if postpos == subpos then postpos = current end
--- subpos = current
--- elseif post_mark[char] then -- After post-form consonant
--- target = postpos
--- postpos = current
--- end
--- if ( above_mark[char] or below_mark[char] or post_mark[char] ) and current.prev ~= target then
--- if current.prev then current.prev.next = current.next end
--- if current.next then current.next.prev = current.prev end
--- if current == stop then stop = current.prev end
--- if target.next then target.next.prev = current end
--- current.next = target.next
--- target.next = current
--- current.prev = target
--- end
--- end
--- current = cn
--- end
---
--- --Reorder marks to canonical order: Adjacent nukta and halant or nukta and vedic sign are always repositioned if necessary, so that the nukta is first.
--- local current, c = start, nil
--- while current ~= stop do
--- if halant[current.char] or stress_tone_mark[current.char] then
--- if not c then c = current end
--- else
--- c = nil
--- end
--- if c and nukta[current.next.char] then
--- if head == c then head = current.next end
--- if stop == current.next then stop = current end
--- if c.prev then c.prev.next = current.next end
--- current.next.prev = c.prev
--- current.next = current.next.next
--- if current.next.next then current.next.next.prev = current end
--- c.prev = current.next
--- current.next.next = c
--- end
--- if stop == current then break end
--- current = current.next
--- end
---
--- if nbsp[base.char] then
--- head = node.remove(head, base)
--- node.free(base)
--- end
---
--- return head, stop
--- end
---
--- function fonts.analyzers.methods.deva(head,font,attr)
--- local orighead = head
--- local current, start, done = head, true, false
--- while current do
--- if current.id == glyph and current.subtype<256 and current.font == font then
--- done = true
--- local syllablestart, syllableend = current, nil
---
--- local c = current --Checking Stand Alone cluster (this behavior is copied from dev2)
--- if ra[c.char] and c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] and c.next.next and c.next.next.id == glyph and c.next.next.subtype<256 and c.next.next.font == font then c = c.next.next end
--- if nbsp[c.char] and ( not current.prev or current.prev.id ~= glyph or current.prev.subtype>=256 or current.prev.font ~= font or
--- ( not consonant[current.prev.char] and not independent_vowel[current.prev.char] and not dependent_vowel[current.prev.char] and
--- not vowel_modifier[current.prev.char] and not stress_tone_mark[current.prev.char] and not nukta[current.prev.char] and not halant[current.prev.char] )
--- ) then --Stand Alone cluster (at the start of the word only): #[Ra+H]+NBSP+[N]+[<[]+H+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
--- local n = c.next
--- if n and n.id == glyph and n.subtype<256 and n.font == font then
--- local ni = n.next
--- if ( zwj[n.char] or zwnj[n.char] ) and ni and ni.id == glyph and ni.subtype<256 and ni.font == font then n = ni ni = ni.next end
--- if halant[n.char] and ni and ni.id == glyph and ni.subtype<256 and ni.font == font and consonant[ni.char] then c = ni end
--- end
--- while c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and dependent_vowel[c.next.char] do c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and vowel_modifier[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
--- current = c.next
--- syllableend = c
--- if syllablestart ~= syllableend then
--- head, current = deva_reorder(head, syllablestart,syllableend,font,attr)
--- current = current.next
--- end
--- elseif consonant[current.char] then -- syllable containing consonant
--- prevc = true
--- while prevc do
--- prevc = false
--- local n = current.next
--- if n and n.id == glyph and n.subtype<256 and n.font == font and nukta[n.char] then n = n.next end
--- if n and n.id == glyph and n.subtype<256 and n.font == font and halant[n.char] then
--- local n = n.next
--- if n and n.id == glyph and n.subtype<256 and n.font == font and ( zwj[n.char] or zwnj[n.char] ) then n = n.next end
--- if n and n.id == glyph and n.subtype<256 and n.font == font and consonant[n.char] then
--- prevc = true
--- current = n
--- end
--- end
--- end
--- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == font and nukta[current.next.char] then current = current.next end -- nukta (not specified in Microsft Devanagari OpenType specification)
--- syllableend = current
--- current = current.next
--- if current and current.id == glyph and current.subtype<256 and current.font == font and halant[current.char] then -- syllable containing consonant without vowels: {C + [Nukta] + H} + C + H
--- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == font and ( zwj[current.next.char] or zwnj[current.next.char] ) then current = current.next end
--- syllableend = current
--- current = current.next
--- else -- syllable containing consonant with vowels: {C + [Nukta] + H} + C + [M] + [VM] + [SM]
--- if current and current.id == glyph and current.subtype<256 and current.font == font and dependent_vowel[current.char] then
--- syllableend = current
--- current = current.next
--- end
--- if current and current.id == glyph and current.subtype<256 and current.font == font and vowel_modifier[current.char] then
--- syllableend = current
--- current = current.next
--- end
--- if current and current.id == glyph and current.subtype<256 and current.font == font and stress_tone_mark[current.char] then
--- syllableend = current
--- current = current.next
--- end
--- end
--- if syllablestart ~= syllableend then
--- head, current = deva_reorder(head,syllablestart,syllableend,font,attr)
--- current = current.next
--- end
--- elseif current.id == glyph and current.subtype<256 and current.font == font and independent_vowel[current.char] then -- syllable without consonants: VO + [VM] + [SM]
--- syllableend = current
--- current = current.next
--- if current and current.id == glyph and current.subtype<256 and current.font == font and vowel_modifier[current.char] then
--- syllableend = current
--- current = current.next
--- end
--- if current and current.id == glyph and current.subtype<256 and current.font == font and stress_tone_mark[current.char] then
--- syllableend = current
--- current = current.next
--- end
--- else -- Syntax error
--- if pre_mark[current.char] or above_mark[current.char] or below_mark[current.char] or post_mark[current.char] then
--- local n = node.copy(current)
--- if pre_mark[current.char] then
--- n.char = dotted_circle
--- else
--- current.char = dotted_circle
--- end
--- head, current = node.insert_after(head, current, n)
--- end
--- current = current.next
--- end
--- else
--- current = current.next
--- end
--- start = false
--- end
---
--- return head, done
--- end
---
--- function fonts.analyzers.methods.dev2(head,font,attr)
--- local current, start, done, syl_nr = head, true, false, 0
--- while current do
--- local syllablestart, syllableend = nil, nil
--- if current.id == glyph and current.subtype<256 and current.font == font then
--- syllablestart = current
--- done = true
--- local c, n = current, current.next
--- if ra[current.char] and n and n.id == glyph and n.subtype<256 and n.font == font and halant[n.char] and n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font then c = n.next end
--- if independent_vowel[c.char] then --Vowel-based syllable: [Ra+H]+V+[N]+[<[]+H+C|ZWJ+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
--- n = c.next
--- local ni, nii = nil, nil
--- if n and n.id == glyph and n.subtype<256 and n.font == font and nukta[n.char] then n = n.next end
--- if n and n.id == glyph and n.subtype<256 and n.font == font then local ni = n.next end
--- if ni and ni.id == glyph and ni.subtype<256 and ni.font == font and ni.next and ni.next.id == glyph and ni.next.subtype<256 and ni.next.font == font then
--- nii = ni.next
--- if zwj[ni.char] and consonant[nii.char] then
--- c = nii
--- elseif (zwj[ni.char] or zwnj[ni.char]) and halant[nii.char] and nii.next and nii.next.id == glyph and nii.next.subtype<256 and nii.next.font == font and consonant[nii.next.char] then
--- c = nii.next
--- end
--- end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and dependent_vowel[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and vowel_modifier[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
--- current = c
--- syllableend = c
--- elseif nbsp[c.char] and ( not current.prev or current.prev.id ~= glyph or current.prev.subtype>=256 or current.prev.font ~= font or
--- ( not consonant[current.prev.char] and not independent_vowel[current.prev.char] and not dependent_vowel[current.prev.char] and
--- not vowel_modifier[current.prev.char] and not stress_tone_mark[current.prev.char] and not nukta[current.prev.char] and not halant[current.prev.char] )
--- ) then --Stand Alone cluster (at the start of the word only): #[Ra+H]+NBSP+[N]+[<[]+H+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
--- n = c.next
--- if n and n.id == glyph and n.subtype<256 and n.font == font then
--- local ni = n.next
--- if ( zwj[n.char] or zwnj[n.char] ) and ni and ni.id == glyph and ni.subtype<256 and ni.font == font then n = ni ni = ni.next end
--- if halant[n.char] and ni and ni.id == glyph and ni.subtype<256 and ni.font == font and consonant[ni.char] then c = ni end
--- end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and dependent_vowel[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and vowel_modifier[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
--- current = c
--- syllableend = c
--- elseif consonant[current.char] then --Consonant syllable: {C+[N]+]|+H>} + C+[N]+[A] + [< H+[] | {M}+[N]+[H]>]+[SM]+[(VD)]
--- c = current
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
--- n = c
--- while n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font and ( halant[n.next.char] or zwnj[n.next.char] or zwj[n.next.char] ) do
--- if halant[n.next.char] then
--- n = n.next
--- if n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font and ( zwnj[n.next.char] or zwj[n.next.char] ) then n = n.next end
--- else
--- if n.next.next and n.next.next.id == glyph and n.next.next.subtype<256 and n.next.next.font == font and halant[n.next.next.char] then n = n.next.next end
--- end
--- if n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font and consonant[n.next.char] then
--- n = n.next
--- if n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font and nukta[n.next.char] then n = n.next end
--- c = n
--- else
--- break
--- end
--- end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and anudatta[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then
--- c = c.next
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and ( zwnj[c.next.char] or zwj[c.next.char] ) then c = c.next end
--- else
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and dependent_vowel[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then c = c.next end
--- end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and vowel_modifier[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
--- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
--- current = c
--- syllableend = c
--- end
--- end
---
--- if syllableend then
--- syl_nr = syl_nr + 1
--- c = syllablestart
--- while c ~= syllableend.next do
--- set_attribute(c,sylnr,syl_nr)
--- c = c.next
--- end
--- end
--- if syllableend and syllablestart ~= syllableend then
--- head, current = dev2_reorder(head,syllablestart,syllableend,font,attr)
--- end
---
--- if not syllableend and not has_attribute(current, state) and current.id == glyph and current.subtype<256 and current.font == font then -- Syntax error
--- if pre_mark[current.char] or above_mark[current.char] or below_mark[current.char] or post_mark[current.char] then
--- local n = node.copy(current)
--- if pre_mark[current.char] then
--- n.char = dotted_circle
--- else
--- current.char = dotted_circle
--- end
--- head, current = node.insert_after(head, current, n)
--- end
--- end
---
--- start = false
--- current = current.next
--- end
---
--- return head, done
--- end
---
--- function otf.handlers.dev2_reorder_matras(start,kind,lookupname,replacement)
--- return dev2_reorder_matras(start,kind,lookupname,replacement)
--- end
---
--- function otf.handlers.dev2_reorder_reph(start,kind,lookupname,replacement)
--- return dev2_reorder_reph(start,kind,lookupname,replacement)
--- end
---
--- function otf.handlers.dev2_reorder_pre_base_reordering_consonants(start,kind,lookupname,replacement)
--- return dev2_reorder_pre_base_reordering_consonants(start,kind,lookupname,replacement)
--- end
---
--- function otf.handlers.remove_joiners(start,kind,lookupname,replacement)
--- return remove_joiners(start,kind,lookupname,replacement)
--- end
+-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
+-- We keep the original around for a while so that we can check it --
+-- when the above code does it wrong (data tables are not included). --
+-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
+
+-- author : Kai Eigner, TAT Zetwerk
+-- copyright : TAT Zetwerk
+-- comment : see font-odv.lua for current implementation
+
+-- local state = attributes.private('state')
+-- local sylnr = attributes.private('syllabe')
+--
+-- local function install_dev(tfmdata)
+-- local features = tfmdata.resources.features
+-- local sequences = tfmdata.resources.sequences
+--
+-- local insertpos = 1
+-- for s=1,#sequences do -- classify chars
+-- for k in pairs(basic_shaping_forms) do
+-- if sequences[s].features and ( sequences[s].features[k] or sequences[s].features.locl ) then insertpos = s + 1 end
+-- end
+-- end
+--
+-- features.gsub["dev2_reorder_matras"] = { ["dev2"] = { ["dflt"] = true } }
+-- features.gsub["dev2_reorder_reph"] = { ["dev2"] = { ["dflt"] = true } }
+-- features.gsub["dev2_reorder_pre_base_reordering_consonants"] = { ["dev2"] = { ["dflt"] = true } }
+-- features.gsub["remove_joiners"] = { ["deva"] = { ["dflt"] = true }, ["dev2"] = { ["dflt"] = true } }
+--
+-- local sequence_dev2_reorder_matras = {
+-- chain = 0,
+-- features = { dev2_reorder_matras = { dev2 = { dflt = true } } },
+-- flags = { false, false, false, false },
+-- name = "dev2_reorder_matras",
+-- subtables = { "dev2_reorder_matras" },
+-- type = "dev2_reorder_matras",
+-- }
+-- local sequence_dev2_reorder_reph = {
+-- chain = 0,
+-- features = { dev2_reorder_reph = { dev2 = { dflt = true } } },
+-- flags = { false, false, false, false },
+-- name = "dev2_reorder_reph",
+-- subtables = { "dev2_reorder_reph" },
+-- type = "dev2_reorder_reph",
+-- }
+-- local sequence_dev2_reorder_pre_base_reordering_consonants = {
+-- chain = 0,
+-- features = { dev2_reorder_pre_base_reordering_consonants = { dev2 = { dflt = true } } },
+-- flags = { false, false, false, false },
+-- name = "dev2_reorder_pre_base_reordering_consonants",
+-- subtables = { "dev2_reorder_pre_base_reordering_consonants" },
+-- type = "dev2_reorder_pre_base_reordering_consonants",
+-- }
+-- local sequence_remove_joiners = {
+-- chain = 0,
+-- features = { remove_joiners = { deva = { dflt = true }, dev2 = { dflt = true } } },
+-- flags = { false, false, false, false },
+-- name = "remove_joiners",
+-- subtables = { "remove_joiners" },
+-- type = "remove_joiners",
+-- }
+-- table.insert(sequences, insertpos, sequence_dev2_reorder_pre_base_reordering_consonants)
+-- table.insert(sequences, insertpos, sequence_dev2_reorder_reph)
+-- table.insert(sequences, insertpos, sequence_dev2_reorder_matras)
+-- table.insert(sequences, insertpos, sequence_remove_joiners)
+-- end
+--
+-- local function deva_reorder(head,start,stop,font,attr)
+-- local tfmdata = fontdata[font]
+-- local lookuphash = tfmdata.resources.lookuphash
+-- local sequences = tfmdata.resources.sequences
+--
+-- if not lookuphash["remove_joiners"] then install_dev(tfmdata) end --install Devanagari-features
+--
+-- local sharedfeatures = tfmdata.shared.features
+-- sharedfeatures["remove_joiners"] = true
+-- local datasets = otf.dataset(tfmdata,font,attr)
+--
+-- lookuphash["remove_joiners"] = { [0x200C] = true, [0x200D] = true }
+--
+-- local current, n, base, firstcons, lastcons, basefound = start, start.next, nil, nil, nil, false
+-- local reph, vattu = false, false
+-- for s=1,#sequences do
+-- local dataset = datasets[s]
+-- featurevalue = dataset and dataset[1]
+-- if featurevalue and dataset[4] == "rphf" then reph = true end
+-- if featurevalue and dataset[4] == "blwf" then vattu = true end
+-- end
+-- if ra[start.char] and halant[n.char] and reph then -- if syllable starts with Ra + H and script has 'Reph' then exclude Reph from candidates for base consonants
+-- if n == stop then return head, stop end
+-- if zwj[n.next.char] then
+-- current = start
+-- else
+-- current = n.next
+-- set_attribute(start,state,5) -- rphf
+-- end
+-- end
+--
+-- if nbsp[current.char] then --Stand Alone cluster
+-- if current == stop then
+-- stop = stop.prev
+-- head = node.remove(head, current)
+-- node.free(current)
+-- return head, stop
+-- else
+-- base, firstcons, lastcons = current, current, current
+-- current = current.next
+-- if current ~= stop then
+-- if nukta[current.char] then current = current.next end
+-- if zwj[current.char] then
+-- if current ~= stop and current.next ~= stop and halant[current.next.char] then
+-- current = current.next
+-- local tmp = current.next.next
+-- local changestop = current.next == stop
+-- local tempcurrent = node.copy(current.next)
+-- tempcurrent.next = node.copy(current)
+-- tempcurrent.next.prev = tempcurrent
+-- set_attribute(tempcurrent,state,8) --blwf
+-- tempcurrent = nodes.handlers.characters(tempcurrent)
+-- unset_attribute(tempcurrent,state)
+-- if current.next.char == tempcurrent.char then
+-- node.flush_list(tempcurrent)
+-- local n = node.copy(current)
+-- current.char = dotted_circle
+-- head = node.insert_after(head, current, n)
+-- else
+-- current.char = tempcurrent.char -- (assumes that result of blwf consists of one node)
+-- local freenode = current.next
+-- current.next = tmp
+-- tmp.prev = current
+-- node.free(freenode)
+-- node.flush_list(tempcurrent)
+-- if changestop then stop = current end
+-- end
+-- end
+-- end
+-- end
+-- end
+-- end
+--
+-- while not basefound do -- find base consonant
+-- if consonant[current.char] then
+-- set_attribute(current, state, 6) -- half
+-- if not firstcons then firstcons = current end
+-- lastcons = current
+-- if not base then
+-- base = current
+-- else --check whether consonant has below-base (or post-base) form
+-- local baseform = true
+-- for s=1,#sequences do
+-- local sequence = sequences[s]
+-- local dataset = datasets[s]
+-- featurevalue = dataset and dataset[1]
+-- if featurevalue and dataset[4] == "blwf" then
+-- local subtables = sequence.subtables
+-- for i=1,#subtables do
+-- local lookupname = subtables[i]
+-- local lookupcache = lookuphash[lookupname]
+-- if lookupcache then
+-- local lookupmatch = lookupcache[current.char]
+-- if lookupmatch then
+-- set_attribute(current, state, 8) -- blwf
+-- baseform = false
+-- end
+-- end
+-- end
+-- end
+-- end
+-- if baseform then base = current end
+-- end
+-- end
+-- basefound = current == stop
+-- current = current.next
+-- end
+-- if base ~= lastcons then -- if base consonant is not last one then move halant from base consonant to last one
+-- n = base.next
+-- if nukta[n.char] then n = n.next end
+-- if halant[n.char] then
+-- if lastcons ~= stop then
+-- local ln = lastcons.next
+-- if nukta[ln.char] then lastcons = ln end
+-- end
+-- local np, nn, ln = n.prev, n.next, lastcons.next
+-- np.next = n.next
+-- nn.prev = n.prev
+-- lastcons.next = n
+-- if ln then ln.prev = n end
+-- n.next = ln
+-- n.prev = lastcons
+-- if lastcons == stop then stop = n end
+-- end
+-- end
+--
+-- n = start.next
+-- if ra[start.char] and halant[n.char] and not ( n ~= stop and ( zwj[n.next.char] or zwnj[n.next.char] ) ) then -- if syllable starts with Ra + H then move this combination so that it follows either: the post-base 'matra' (if any) or the base consonant
+-- local matra = base
+-- if base ~= stop and dependent_vowel[base.next.char] then matra = base.next end
+-- local sp, nn, mn = start.prev, n.next, matra.next
+-- if sp then sp.next = nn end
+-- nn.prev = sp
+-- matra.next = start
+-- start.prev = matra
+-- n.next = mn
+-- if mn then mn.prev = n end
+-- if head == start then head = nn end
+-- start = nn
+-- if matra == stop then stop = n end
+-- end
+--
+-- local current = start
+-- while current ~= stop do
+-- if halant[current.next.char] and current.next ~= stop and zwnj[current.next.next.char] then unset_attribute(current, state) end
+-- current = current.next
+-- end
+--
+-- if has_attribute(base, state) and base ~= stop and halant[base.next.char] and not ( base.next ~= stop and zwj[base.next.next.char] ) then unset_attribute(base, state) end
+--
+-- local current, allreordered, moved = start, false, { [base] = true }
+-- local a, b, p, bn = base, base, base, base.next
+-- if base ~= stop and nukta[bn.char] then a, b, p = bn, bn, bn end
+-- while not allreordered do
+-- local c, n, l = current, current.next, nil --current is always consonant
+-- if c ~= stop and nukta[n.char] then c = n n = n.next end
+-- if c ~= stop and halant[n.char] then c = n n = n.next end
+-- while c ~= stop and dependent_vowel[n.char] do c = n n = n.next end
+-- if c ~= stop and vowel_modifier[n.char] then c = n n = n.next end
+-- if c ~= stop and stress_tone_mark[n.char] then c = n n = n.next end
+-- local bp, cn = firstcons.prev, current.next
+-- while cn ~= c.next do -- move pre-base matras...
+-- if pre_mark[cn.char] then
+-- if bp then bp.next = cn end
+-- cn.prev.next = cn.next
+-- if cn.next then cn.next.prev = cn.prev end
+-- if cn == stop then stop = cn.prev end
+-- cn.prev = bp
+-- cn.next = firstcons
+-- firstcons.prev = cn
+-- if firstcons == start then
+-- if head == start then head = cn end
+-- start = cn
+-- end
+-- break
+-- end
+-- cn = cn.next
+-- end
+-- allreordered = c == stop
+-- current = c.next
+-- end
+--
+-- if reph or vattu then
+-- local current, cns = start, nil
+-- while current ~= stop do
+-- local c, n = current, current.next
+-- if ra[current.char] and halant[n.char] then
+-- c, n = n, n.next
+-- local b, bn = base, base
+-- while bn ~= stop do
+-- if dependent_vowel[bn.next.char] then b = bn.next end
+-- bn = bn.next
+-- end
+-- if has_attribute(current,state,attribute) == 5 then -- position Reph (Ra + H) after post-base 'matra' (if any) since these become marks on the 'matra', not on the base glyph
+-- if b ~= current then
+-- if current == start then
+-- if head == start then head = n end
+-- start = n
+-- end
+-- if b == stop then stop = c end
+-- if current.prev then current.prev.next = n end
+-- if n then n.prev = current.prev end
+-- c.next = b.next
+-- if b.next then b.next.prev = c end
+-- b.next = current
+-- current.prev = b
+-- end
+-- elseif cns and cns.next ~= current then -- position below-base Ra (vattu) following the consonants on which it is placed (either the base consonant or one of the pre-base consonants)
+-- local cp, cnsn = current.prev, cns.next
+-- if cp then cp.next = n end
+-- if n then n.prev = cp end
+-- cns.next = current
+-- current.prev = cns
+-- c.next = cnsn
+-- if cnsn then cnsn.prev = c end
+-- if c == stop then stop = cp break end
+-- current = n.prev
+-- end
+-- elseif consonant[current.char] or nbsp[current.char] then
+-- cns = current
+-- if halant[cns.next.char] then cns = cns.next end
+-- end
+-- current = current.next
+-- end
+-- end
+--
+-- if nbsp[base.char] then
+-- head = node.remove(head, base)
+-- node.free(base)
+-- end
+--
+-- return head, stop
+-- end
+--
+-- function dev2_reorder_matras(start,kind,lookupname,replacement)
+-- local current = start
+-- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do
+-- if halant[current.char] and not has_attribute(current, state) then
+-- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == start.font and has_attribute(current.next, sylnr) == has_attribute(start, sylnr) and ( zwj[current.next.char] or zwnj[current.next.char] ) then current = current.next end
+-- local sn = start.next
+-- start.next.prev = start.prev
+-- if start.prev then start.prev.next = start.next end
+-- if current.next then current.next.prev = start end
+-- start.next = current.next
+-- current.next = start
+-- start.prev = current
+-- start = sn
+-- break
+-- end
+-- current = current.next
+-- end
+-- return start, true
+-- end
+--
+-- function dev2_reorder_reph(start,kind,lookupname,replacement)
+-- local current, sn = start.next, nil
+-- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do --step 2
+-- if halant[current.char] and not has_attribute(current, state) then
+-- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == start.font and has_attribute(current.next, sylnr) == has_attribute(start, sylnr) and ( zwj[current.next.char] or zwnj[current.next.char] ) then current = current.next end
+-- sn = start.next
+-- start.next.prev = start.prev
+-- if start.prev then start.prev.next = start.next end
+-- if current.next then current.next.prev = start end
+-- start.next = current.next
+-- current.next = start
+-- start.prev = current
+-- start = sn
+-- break
+-- end
+-- current = current.next
+-- end
+-- if not sn then
+-- current = start.next
+-- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do --step 4
+-- if has_attribute(current, state) == 9 then --post-base
+-- sn = start.next
+-- start.next.prev = start.prev
+-- if start.prev then start.prev.next = start.next end
+-- start.prev = current.prev
+-- current.prev.next = start
+-- start.next = current
+-- current.prev = start
+-- start = sn
+-- break
+-- end
+-- current = current.next
+-- end
+-- end
+-- if not sn then
+-- current = start.next
+-- local c = nil
+-- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do --step 5
+-- if not c and ( above_mark[current.char] or below_mark[current.char] or post_mark[current.char] ) and ReorderClass[current.char] ~= "after subscript" then c = current end
+-- current = current.next
+-- end
+-- if c then
+-- sn = start.next
+-- start.next.prev = start.prev
+-- if start.prev then start.prev.next = start.next end
+-- start.prev = c.prev
+-- c.prev.next = start
+-- start.next = c
+-- c.prev = start
+-- start = sn
+-- end
+-- end
+-- if not sn then
+-- current = start
+-- while current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == start.font and has_attribute(current.next, sylnr) == has_attribute(start, sylnr) do --step 6
+-- current = current.next
+-- end
+-- if start ~= current then
+-- sn = start.next
+-- start.next.prev = start.prev
+-- if start.prev then start.prev.next = start.next end
+-- if current.next then current.next.prev = start end
+-- start.next = current.next
+-- current.next = start
+-- start.prev = current
+-- start = sn
+-- end
+-- end
+-- return start, true
+-- end
+--
+-- function dev2_reorder_pre_base_reordering_consonants(start,kind,lookupname,replacement)
+-- local current, sn = start, nil
+-- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do
+-- if halant[current.char] and not has_attribute(current, state) then
+-- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == start.font and has_attribute(current.next, sylnr) == has_attribute(start, sylnr) and ( zwj[current.next.char] or zwnj[current.next.char] ) then current = current.next end
+-- sn = start.next
+-- start.next.prev = start.prev
+-- if start.prev then start.prev.next = start.next end
+-- if current.next then current.next.prev = start end
+-- start.next = current.next
+-- current.next = start
+-- start.prev = current
+-- start = sn
+-- break
+-- end
+-- current = current.next
+-- end
+-- if not sn then
+-- current = start.next
+-- while current and current.id == glyph and current.subtype<256 and current.font == start.font and has_attribute(current, sylnr) == has_attribute(start, sylnr) do
+-- if not consonant[current.char] and has_attribute(current, state) then --main
+-- sn = start.next
+-- start.next.prev = start.prev
+-- if start.prev then start.prev.next = start.next end
+-- start.prev = current.prev
+-- current.prev.next = start
+-- start.next = current
+-- current.prev = start
+-- start = sn
+-- break
+-- end
+-- current = current.next
+-- end
+-- end
+-- return start, true
+-- end
+--
+-- function remove_joiners(start,kind,lookupname,replacement)
+-- local stop = start.next
+-- while stop and stop.id == glyph and stop.subtype<256 and stop.font == start.font and (zwj[stop.char] or zwnj[stop.char]) do stop = stop.next end
+-- if stop then stop.prev.next = nil stop.prev = start.prev end
+-- if start.prev then start.prev.next = stop end
+-- node.flush_list(start)
+-- return stop, true
+-- end
+--
+-- local function dev2_reorder(head,start,stop,font,attr)
+-- local tfmdata = fontdata[font]
+-- local lookuphash = tfmdata.resources.lookuphash
+-- local sequences = tfmdata.resources.sequences
+--
+-- if not lookuphash["remove_joiners"] then install_dev(tfmdata) end --install Devanagari-features
+--
+-- local sharedfeatures = tfmdata.shared.features
+-- sharedfeatures["dev2_reorder_matras"] = true
+-- sharedfeatures["dev2_reorder_reph"] = true
+-- sharedfeatures["dev2_reorder_pre_base_reordering_consonants"] = true
+-- sharedfeatures["remove_joiners"] = true
+-- local datasets = otf.dataset(tfmdata,font,attr)
+--
+-- local reph, pre_base_reordering_consonants = false, nil
+-- local halfpos, basepos, subpos, postpos = nil, nil, nil, nil
+-- local locl = { }
+--
+-- for s=1,#sequences do -- classify chars
+-- local sequence = sequences[s]
+-- local dataset = datasets[s]
+-- featurevalue = dataset and dataset[1]
+-- if featurevalue and dataset[4] then
+-- local subtables = sequence.subtables
+-- for i=1,#subtables do
+-- local lookupname = subtables[i]
+-- local lookupcache = lookuphash[lookupname]
+-- if lookupcache then
+-- if dataset[4] == "rphf" then
+-- if dataset[3] ~= 0 then --rphf is result of chain
+-- else
+-- reph = lookupcache[0x0930] and lookupcache[0x0930][0x094D] and lookupcache[0x0930][0x094D]["ligature"]
+-- end
+-- end
+-- if dataset[4] == "pref" and not pre_base_reordering_consonants then
+-- for k, v in pairs(lookupcache[0x094D]) do
+-- pre_base_reordering_consonants[k] = v and v["ligature"] --ToDo: reph might also be result of chain
+-- end
+-- end
+-- local current = start
+-- while current ~= stop.next do
+-- if dataset[4] == "locl" then locl[current] = lookupcache[current.char] end --ToDo: locl might also be result of chain
+-- if current ~= stop then
+-- local c, n = locl[current] or current.char, locl[current.next] or current.next.char
+-- if dataset[4] == "rphf" and lookupcache[c] and lookupcache[c][n] then --above-base: rphf Consonant + Halant
+-- if current.next ~= stop and ( zwj[current.next.next.char] or zwnj[current.next.next.char] ) then --ZWJ and ZWNJ prevent creation of reph
+-- current = current.next
+-- elseif current == start then
+-- set_attribute(current,state,5)
+-- end
+-- current = current.next
+-- end
+-- if dataset[4] == "half" and lookupcache[c] and lookupcache[c][n] then --half forms: half Consonant + Halant
+-- if current.next ~= stop and zwnj[current.next.next.char] then --ZWNJ prevents creation of half
+-- current = current.next
+-- else
+-- set_attribute(current,state,6)
+-- if not halfpos then halfpos = current end
+-- end
+-- current = current.next
+-- end
+-- if dataset[4] == "pref" and lookupcache[c] and lookupcache[c][n] then --pre-base: pref Halant + Consonant
+-- set_attribute(current,state,7)
+-- set_attribute(current.next,state,7)
+-- current = current.next
+-- end
+-- if dataset[4] == "blwf" and lookupcache[c] and lookupcache[c][n] then --below-base: blwf Halant + Consonant
+-- set_attribute(current,state,8)
+-- set_attribute(current.next,state,8)
+-- current = current.next
+-- subpos = current
+-- end
+-- if dataset[4] == "pstf" and lookupcache[c] and lookupcache[c][n] then --post-base: pstf Halant + Consonant
+-- set_attribute(current,state,9)
+-- set_attribute(current.next,state,9)
+-- current = current.next
+-- postpos = current
+-- end
+-- end
+-- current = current.next
+-- end
+-- end
+-- end
+-- end
+-- end
+--
+-- lookuphash["dev2_reorder_matras"] = pre_mark
+-- lookuphash["dev2_reorder_reph"] = { [reph] = true }
+-- lookuphash["dev2_reorder_pre_base_reordering_consonants"] = pre_base_reordering_consonants or { }
+-- lookuphash["remove_joiners"] = { [0x200C] = true, [0x200D] = true }
+--
+-- local current, base, firstcons = start, nil, nil
+-- if has_attribute(start,state) == 5 then current = start.next.next end -- if syllable starts with Ra + H and script has 'Reph' then exclude Reph from candidates for base consonants
+--
+-- if current ~= stop.next and nbsp[current.char] then --Stand Alone cluster
+-- if current == stop then
+-- stop = stop.prev
+-- head = node.remove(head, current)
+-- node.free(current)
+-- return head, stop
+-- else
+-- base = current
+-- current = current.next
+-- if current ~= stop then
+-- if nukta[current.char] then current = current.next end
+-- if zwj[current.char] then
+-- if current ~= stop and current.next ~= stop and halant[current.next.char] then
+-- current = current.next
+-- local tmp = current.next.next
+-- local changestop = current.next == stop
+-- current.next.next = nil
+-- set_attribute(current,state,7) --pref
+-- current = nodes.handlers.characters(current)
+-- set_attribute(current,state,8) --blwf
+-- current = nodes.handlers.characters(current)
+-- set_attribute(current,state,9) --pstf
+-- current = nodes.handlers.characters(current)
+-- unset_attribute(current,state)
+-- if halant[current.char] then
+-- current.next.next = tmp
+-- local nc = node.copy(current)
+-- current.char = dotted_circle
+-- head = node.insert_after(head, current, nc)
+-- else
+-- current.next = tmp -- (assumes that result of pref, blwf, or pstf consists of one node)
+-- if changestop then stop = current end
+-- end
+-- end
+-- end
+-- end
+-- end
+-- else --not Stand Alone cluster
+-- while current ~= stop.next do -- find base consonant
+-- if consonant[current.char] and not ( current ~= stop and halant[current.next.char] and current.next ~= stop and zwj[current.next.next.char] ) then
+-- if not firstcons then firstcons = current end
+-- if not ( has_attribute(current, state) == 7 or has_attribute(current, state) == 8 or has_attribute(current, state) == 9 ) then base = current end --check whether consonant has below-base or post-base form or is pre-base reordering Ra
+-- end
+-- current = current.next
+-- end
+-- if not base then
+-- base = firstcons
+-- end
+-- end
+--
+-- if not base then
+-- if has_attribute(start, state) == 5 then unset_attribute(start, state) end
+-- return head, stop
+-- else
+-- if has_attribute(base, state) then unset_attribute(base, state) end
+-- basepos = base
+-- end
+-- if not halfpos then halfpos = base end
+-- if not subpos then subpos = base end
+-- if not postpos then postpos = subpos or base end
+--
+-- --Matra characters are classified and reordered by which consonant in a conjunct they have affinity for
+-- local moved = { }
+-- current = start
+-- while current ~= stop.next do
+-- local char, target, cn = locl[current] or current.char, nil, current.next
+-- if not moved[current] and dependent_vowel[char] then
+-- if pre_mark[char] then -- Before first half form in the syllable
+-- moved[current] = true
+-- if current.prev then current.prev.next = current.next end
+-- if current.next then current.next.prev = current.prev end
+-- if current == stop then stop = current.prev end
+-- if halfpos == start then
+-- if head == start then head = current end
+-- start = current
+-- end
+-- if halfpos.prev then halfpos.prev.next = current end
+-- current.prev = halfpos.prev
+-- halfpos.prev = current
+-- current.next = halfpos
+-- halfpos = current
+-- elseif above_mark[char] then -- After main consonant
+-- target = basepos
+-- if subpos == basepos then subpos = current end
+-- if postpos == basepos then postpos = current end
+-- basepos = current
+-- elseif below_mark[char] then -- After subjoined consonants
+-- target = subpos
+-- if postpos == subpos then postpos = current end
+-- subpos = current
+-- elseif post_mark[char] then -- After post-form consonant
+-- target = postpos
+-- postpos = current
+-- end
+-- if ( above_mark[char] or below_mark[char] or post_mark[char] ) and current.prev ~= target then
+-- if current.prev then current.prev.next = current.next end
+-- if current.next then current.next.prev = current.prev end
+-- if current == stop then stop = current.prev end
+-- if target.next then target.next.prev = current end
+-- current.next = target.next
+-- target.next = current
+-- current.prev = target
+-- end
+-- end
+-- current = cn
+-- end
+--
+-- --Reorder marks to canonical order: Adjacent nukta and halant or nukta and vedic sign are always repositioned if necessary, so that the nukta is first.
+-- local current, c = start, nil
+-- while current ~= stop do
+-- if halant[current.char] or stress_tone_mark[current.char] then
+-- if not c then c = current end
+-- else
+-- c = nil
+-- end
+-- if c and nukta[current.next.char] then
+-- if head == c then head = current.next end
+-- if stop == current.next then stop = current end
+-- if c.prev then c.prev.next = current.next end
+-- current.next.prev = c.prev
+-- current.next = current.next.next
+-- if current.next.next then current.next.next.prev = current end
+-- c.prev = current.next
+-- current.next.next = c
+-- end
+-- if stop == current then break end
+-- current = current.next
+-- end
+--
+-- if nbsp[base.char] then
+-- head = node.remove(head, base)
+-- node.free(base)
+-- end
+--
+-- return head, stop
+-- end
+--
+-- function fonts.analyzers.methods.deva(head,font,attr)
+-- local orighead = head
+-- local current, start, done = head, true, false
+-- while current do
+-- if current.id == glyph and current.subtype<256 and current.font == font then
+-- done = true
+-- local syllablestart, syllableend = current, nil
+--
+-- local c = current --Checking Stand Alone cluster (this behavior is copied from dev2)
+-- if ra[c.char] and c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] and c.next.next and c.next.next.id == glyph and c.next.next.subtype<256 and c.next.next.font == font then c = c.next.next end
+-- if nbsp[c.char] and ( not current.prev or current.prev.id ~= glyph or current.prev.subtype>=256 or current.prev.font ~= font or
+-- ( not consonant[current.prev.char] and not independent_vowel[current.prev.char] and not dependent_vowel[current.prev.char] and
+-- not vowel_modifier[current.prev.char] and not stress_tone_mark[current.prev.char] and not nukta[current.prev.char] and not halant[current.prev.char] )
+-- ) then --Stand Alone cluster (at the start of the word only): #[Ra+H]+NBSP+[N]+[<[]+H+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
+-- local n = c.next
+-- if n and n.id == glyph and n.subtype<256 and n.font == font then
+-- local ni = n.next
+-- if ( zwj[n.char] or zwnj[n.char] ) and ni and ni.id == glyph and ni.subtype<256 and ni.font == font then n = ni ni = ni.next end
+-- if halant[n.char] and ni and ni.id == glyph and ni.subtype<256 and ni.font == font and consonant[ni.char] then c = ni end
+-- end
+-- while c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and dependent_vowel[c.next.char] do c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and vowel_modifier[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
+-- current = c.next
+-- syllableend = c
+-- if syllablestart ~= syllableend then
+-- head, current = deva_reorder(head, syllablestart,syllableend,font,attr)
+-- current = current.next
+-- end
+-- elseif consonant[current.char] then -- syllable containing consonant
+-- prevc = true
+-- while prevc do
+-- prevc = false
+-- local n = current.next
+-- if n and n.id == glyph and n.subtype<256 and n.font == font and nukta[n.char] then n = n.next end
+-- if n and n.id == glyph and n.subtype<256 and n.font == font and halant[n.char] then
+-- local n = n.next
+-- if n and n.id == glyph and n.subtype<256 and n.font == font and ( zwj[n.char] or zwnj[n.char] ) then n = n.next end
+-- if n and n.id == glyph and n.subtype<256 and n.font == font and consonant[n.char] then
+-- prevc = true
+-- current = n
+-- end
+-- end
+-- end
+-- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == font and nukta[current.next.char] then current = current.next end -- nukta (not specified in Microsoft Devanagari OpenType specification)
+-- syllableend = current
+-- current = current.next
+-- if current and current.id == glyph and current.subtype<256 and current.font == font and halant[current.char] then -- syllable containing consonant without vowels: {C + [Nukta] + H} + C + H
+-- if current.next and current.next.id == glyph and current.next.subtype<256 and current.next.font == font and ( zwj[current.next.char] or zwnj[current.next.char] ) then current = current.next end
+-- syllableend = current
+-- current = current.next
+-- else -- syllable containing consonant with vowels: {C + [Nukta] + H} + C + [M] + [VM] + [SM]
+-- if current and current.id == glyph and current.subtype<256 and current.font == font and dependent_vowel[current.char] then
+-- syllableend = current
+-- current = current.next
+-- end
+-- if current and current.id == glyph and current.subtype<256 and current.font == font and vowel_modifier[current.char] then
+-- syllableend = current
+-- current = current.next
+-- end
+-- if current and current.id == glyph and current.subtype<256 and current.font == font and stress_tone_mark[current.char] then
+-- syllableend = current
+-- current = current.next
+-- end
+-- end
+-- if syllablestart ~= syllableend then
+-- head, current = deva_reorder(head,syllablestart,syllableend,font,attr)
+-- current = current.next
+-- end
+-- elseif current.id == glyph and current.subtype<256 and current.font == font and independent_vowel[current.char] then -- syllable without consonants: VO + [VM] + [SM]
+-- syllableend = current
+-- current = current.next
+-- if current and current.id == glyph and current.subtype<256 and current.font == font and vowel_modifier[current.char] then
+-- syllableend = current
+-- current = current.next
+-- end
+-- if current and current.id == glyph and current.subtype<256 and current.font == font and stress_tone_mark[current.char] then
+-- syllableend = current
+-- current = current.next
+-- end
+-- else -- Syntax error
+-- if pre_mark[current.char] or above_mark[current.char] or below_mark[current.char] or post_mark[current.char] then
+-- local n = node.copy(current)
+-- if pre_mark[current.char] then
+-- n.char = dotted_circle
+-- else
+-- current.char = dotted_circle
+-- end
+-- head, current = node.insert_after(head, current, n)
+-- end
+-- current = current.next
+-- end
+-- else
+-- current = current.next
+-- end
+-- start = false
+-- end
+--
+-- return head, done
+-- end
+--
+-- function fonts.analyzers.methods.dev2(head,font,attr)
+-- local current, start, done, syl_nr = head, true, false, 0
+-- while current do
+-- local syllablestart, syllableend = nil, nil
+-- if current.id == glyph and current.subtype<256 and current.font == font then
+-- syllablestart = current
+-- done = true
+-- local c, n = current, current.next
+-- if ra[current.char] and n and n.id == glyph and n.subtype<256 and n.font == font and halant[n.char] and n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font then c = n.next end
+-- if independent_vowel[c.char] then --Vowel-based syllable: [Ra+H]+V+[N]+[<[]+H+C|ZWJ+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
+-- n = c.next
+-- local ni, nii = nil, nil
+-- if n and n.id == glyph and n.subtype<256 and n.font == font and nukta[n.char] then n = n.next end
+-- if n and n.id == glyph and n.subtype<256 and n.font == font then local ni = n.next end
+-- if ni and ni.id == glyph and ni.subtype<256 and ni.font == font and ni.next and ni.next.id == glyph and ni.next.subtype<256 and ni.next.font == font then
+-- nii = ni.next
+-- if zwj[ni.char] and consonant[nii.char] then
+-- c = nii
+-- elseif (zwj[ni.char] or zwnj[ni.char]) and halant[nii.char] and nii.next and nii.next.id == glyph and nii.next.subtype<256 and nii.next.font == font and consonant[nii.next.char] then
+-- c = nii.next
+-- end
+-- end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and dependent_vowel[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and vowel_modifier[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
+-- current = c
+-- syllableend = c
+-- elseif nbsp[c.char] and ( not current.prev or current.prev.id ~= glyph or current.prev.subtype>=256 or current.prev.font ~= font or
+-- ( not consonant[current.prev.char] and not independent_vowel[current.prev.char] and not dependent_vowel[current.prev.char] and
+-- not vowel_modifier[current.prev.char] and not stress_tone_mark[current.prev.char] and not nukta[current.prev.char] and not halant[current.prev.char] )
+-- ) then --Stand Alone cluster (at the start of the word only): #[Ra+H]+NBSP+[N]+[<[]+H+C>]+[{M}+[N]+[H]]+[SM]+[(VD)]
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
+-- n = c.next
+-- if n and n.id == glyph and n.subtype<256 and n.font == font then
+-- local ni = n.next
+-- if ( zwj[n.char] or zwnj[n.char] ) and ni and ni.id == glyph and ni.subtype<256 and ni.font == font then n = ni ni = ni.next end
+-- if halant[n.char] and ni and ni.id == glyph and ni.subtype<256 and ni.font == font and consonant[ni.char] then c = ni end
+-- end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and dependent_vowel[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and vowel_modifier[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
+-- current = c
+-- syllableend = c
+-- elseif consonant[current.char] then --Consonant syllable: {C+[N]+]|+H>} + C+[N]+[A] + [< H+[] | {M}+[N]+[H]>]+[SM]+[(VD)]
+-- c = current
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
+-- n = c
+-- while n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font and ( halant[n.next.char] or zwnj[n.next.char] or zwj[n.next.char] ) do
+-- if halant[n.next.char] then
+-- n = n.next
+-- if n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font and ( zwnj[n.next.char] or zwj[n.next.char] ) then n = n.next end
+-- else
+-- if n.next.next and n.next.next.id == glyph and n.next.next.subtype<256 and n.next.next.font == font and halant[n.next.next.char] then n = n.next.next end
+-- end
+-- if n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font and consonant[n.next.char] then
+-- n = n.next
+-- if n.next and n.next.id == glyph and n.next.subtype<256 and n.next.font == font and nukta[n.next.char] then n = n.next end
+-- c = n
+-- else
+-- break
+-- end
+-- end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and anudatta[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then
+-- c = c.next
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and ( zwnj[c.next.char] or zwj[c.next.char] ) then c = c.next end
+-- else
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and dependent_vowel[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and nukta[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and halant[c.next.char] then c = c.next end
+-- end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and vowel_modifier[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
+-- if c.next and c.next.id == glyph and c.next.subtype<256 and c.next.font == font and stress_tone_mark[c.next.char] then c = c.next end
+-- current = c
+-- syllableend = c
+-- end
+-- end
+--
+-- if syllableend then
+-- syl_nr = syl_nr + 1
+-- c = syllablestart
+-- while c ~= syllableend.next do
+-- set_attribute(c,sylnr,syl_nr)
+-- c = c.next
+-- end
+-- end
+-- if syllableend and syllablestart ~= syllableend then
+-- head, current = dev2_reorder(head,syllablestart,syllableend,font,attr)
+-- end
+--
+-- if not syllableend and not has_attribute(current, state) and current.id == glyph and current.subtype<256 and current.font == font then -- Syntax error
+-- if pre_mark[current.char] or above_mark[current.char] or below_mark[current.char] or post_mark[current.char] then
+-- local n = node.copy(current)
+-- if pre_mark[current.char] then
+-- n.char = dotted_circle
+-- else
+-- current.char = dotted_circle
+-- end
+-- head, current = node.insert_after(head, current, n)
+-- end
+-- end
+--
+-- start = false
+-- current = current.next
+-- end
+--
+-- return head, done
+-- end
+--
+-- function otf.handlers.dev2_reorder_matras(start,kind,lookupname,replacement)
+-- return dev2_reorder_matras(start,kind,lookupname,replacement)
+-- end
+--
+-- function otf.handlers.dev2_reorder_reph(start,kind,lookupname,replacement)
+-- return dev2_reorder_reph(start,kind,lookupname,replacement)
+-- end
+--
+-- function otf.handlers.dev2_reorder_pre_base_reordering_consonants(start,kind,lookupname,replacement)
+-- return dev2_reorder_pre_base_reordering_consonants(start,kind,lookupname,replacement)
+-- end
+--
+-- function otf.handlers.remove_joiners(start,kind,lookupname,replacement)
+-- return remove_joiners(start,kind,lookupname,replacement)
+-- end
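The commented-out block above ends with a "syntax error" fallback: when no syllable could be matched and the current glyph is a loose dependent mark, a dotted circle (U+25CC) is inserted so the mark has a base to attach to; pre-base marks keep their place before the circle, all other marks are placed after it. A minimal standalone sketch of that idea, using plain Lua tables instead of luatex nodes (the mark/pre_mark classification tables and the insyllable flag are stand-ins for the real character data):

local dotted_circle = 0x25CC
local pre_mark = { [0x093F] = true }                  -- e.g. DEVANAGARI VOWEL SIGN I
local mark     = { [0x093F] = true, [0x0941] = true } -- tiny stand-in classification

local function add_dotted_circles(glyphs)
    local result = { }
    for i=1,#glyphs do
        local g = glyphs[i]
        if mark[g.char] and not g.insyllable then
            if pre_mark[g.char] then
                -- a pre-base mark stays first, the circle follows it
                result[#result+1] = { char = g.char }
                result[#result+1] = { char = dotted_circle }
            else
                -- other marks attach to the circle, so the circle comes first
                result[#result+1] = { char = dotted_circle }
                result[#result+1] = { char = g.char }
            end
        else
            result[#result+1] = g
        end
    end
    return result
end

local fixed = add_dotted_circles { { char = 0x093F }, { char = 0x0915, insyllable = true } }
print(#fixed) -- 3: the loose matra now has a dotted circle next to it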
diff --git a/tex/context/base/font-otb.lua b/tex/context/base/font-otb.lua
index 2a7b821ea..75bda383e 100644
--- a/tex/context/base/font-otb.lua
+++ b/tex/context/base/font-otb.lua
@@ -1,657 +1,657 @@
-if not modules then modules = { } end modules ['font-otb'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-local concat = table.concat
-local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
-local type, next, tonumber, tostring = type, next, tonumber, tostring
-local lpegmatch = lpeg.match
-local utfchar = utf.char
-
-local trace_baseinit = false trackers.register("otf.baseinit", function(v) trace_baseinit = v end)
-local trace_singles = false trackers.register("otf.singles", function(v) trace_singles = v end)
-local trace_multiples = false trackers.register("otf.multiples", function(v) trace_multiples = v end)
-local trace_alternatives = false trackers.register("otf.alternatives", function(v) trace_alternatives = v end)
-local trace_ligatures = false trackers.register("otf.ligatures", function(v) trace_ligatures = v end)
-local trace_ligatures_detail = false trackers.register("otf.ligatures.detail", function(v) trace_ligatures_detail = v end)
-local trace_kerns = false trackers.register("otf.kerns", function(v) trace_kerns = v end)
-local trace_preparing = false trackers.register("otf.preparing", function(v) trace_preparing = v end)
-
-local report_prepare = logs.reporter("fonts","otf prepare")
-
-local fonts = fonts
-local otf = fonts.handlers.otf
-
-local otffeatures = otf.features
-local registerotffeature = otffeatures.register
-
-otf.defaultbasealternate = "none" -- first last
-
-local wildcard = "*"
-local default = "dflt"
-
-local formatters = string.formatters
-local f_unicode = formatters["%U"]
-local f_uniname = formatters["%U (%s)"]
-local f_unilist = formatters["% t (% t)"]
-
-local function gref(descriptions,n)
- if type(n) == "number" then
- local name = descriptions[n].name
- if name then
- return f_uniname(n,name)
- else
- return f_unicode(n)
- end
- elseif n then
- local num, nam = { }, { }
- for i=2,#n do
- local ni = n[i]
- if tonumber(ni) then -- first is likely a key
- local di = descriptions[ni]
- num[i] = f_unicode(ni)
- nam[i] = di and di.name or "-"
- end
- end
- return f_unilist(num,nam)
- else
- return ""
- end
-end
-
-local function cref(feature,lookupname)
- if lookupname then
- return formatters["feature %a, lookup %a"](feature,lookupname)
- else
- return formatters["feature %a"](feature)
- end
-end
-
-local function report_alternate(feature,lookupname,descriptions,unicode,replacement,value,comment)
- report_prepare("%s: base alternate %s => %s (%S => %S)",
- cref(feature,lookupname),
- gref(descriptions,unicode),
- replacement and gref(descriptions,replacement),
- value,
- comment)
-end
-
-local function report_substitution(feature,lookupname,descriptions,unicode,substitution)
- report_prepare("%s: base substitution %s => %S",
- cref(feature,lookupname),
- gref(descriptions,unicode),
- gref(descriptions,substitution))
-end
-
-local function report_ligature(feature,lookupname,descriptions,unicode,ligature)
- report_prepare("%s: base ligature %s => %S",
- cref(feature,lookupname),
- gref(descriptions,ligature),
- gref(descriptions,unicode))
-end
-
-local function report_kern(feature,lookupname,descriptions,unicode,otherunicode,value)
- report_prepare("%s: base kern %s + %s => %S",
- cref(feature,lookupname),
- gref(descriptions,unicode),
- gref(descriptions,otherunicode),
- value)
-end
-
-local basemethods = { }
-local basemethod = ""
-
-local function applybasemethod(what,...)
- local m = basemethods[basemethod][what]
- if m then
- return m(...)
- end
-end
-
--- We need to make sure that luatex sees the difference between
--- base fonts that have different glyphs in the same slots in fonts
--- that have the same fullname (or filename). LuaTeX will merge fonts
--- eventually (and subset later on). If needed we can use a more
--- verbose name as long as we don't use <()<>[]{}/%> and the length
--- is < 128.
-
-local basehash, basehashes, applied = { }, 1, { }
-
-local function registerbasehash(tfmdata)
- local properties = tfmdata.properties
- local hash = concat(applied," ")
- local base = basehash[hash]
- if not base then
- basehashes = basehashes + 1
- base = basehashes
- basehash[hash] = base
- end
- properties.basehash = base
- properties.fullname = properties.fullname .. "-" .. base
- -- report_prepare("fullname base hash '%a, featureset %a",tfmdata.properties.fullname,hash)
- applied = { }
-end
-
-local function registerbasefeature(feature,value)
- applied[#applied+1] = feature .. "=" .. tostring(value)
-end
-
--- The original basemode ligature builder used the names of components
--- and did some expression juggling to get the chain right. The current
--- variant starts with unicodes but still uses names to make the chain.
--- This is needed because we have to create intermediates when needed
--- but use predefined snippets when available. To some extent the
--- current builder is more stupid, but I don't worry that much about it
--- as ligatures are rather predictable.
---
--- Personally I think that an ff + i == ffi rule as used in for instance
--- latin modern is pretty weird as no sane person will key that in and
--- expect a glyph for that ligature plus the following character. Anyhow,
--- as we need to deal with this, we do, but no guarantees are given.
---
--- latin modern dejavu
---
--- f+f 102 102 102 102
--- f+i 102 105 102 105
--- f+l 102 108 102 108
--- f+f+i 102 102 105
--- f+f+l 102 102 108 102 102 108
--- ff+i 64256 105 64256 105
--- ff+l 64256 108
---
--- As you can see here, latin modern is less complete than dejavu but
--- in practice one will not notice it.
---
--- The while loop is needed because we need to resolve for instance
--- pseudo names like hyphen_hyphen to endash so in practice we end
--- up with a bit too many definitions but the overhead is negligible.
---
--- Todo: if changed[first] or changed[second] then ... end
-
-local trace = false
-
-local function finalize_ligatures(tfmdata,ligatures)
- local nofligatures = #ligatures
- if nofligatures > 0 then
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local unicodes = resources.unicodes
- local private = resources.private
- local alldone = false
- while not alldone do
- local done = 0
- for i=1,nofligatures do
- local ligature = ligatures[i]
- if ligature then
- local unicode, lookupdata = ligature[1], ligature[2]
- if trace then
- trace_ligatures_detail("building % a into %a",lookupdata,unicode)
- end
- local size = #lookupdata
- local firstcode = lookupdata[1] -- [2]
- local firstdata = characters[firstcode]
- local okay = false
- if firstdata then
- local firstname = "ctx_" .. firstcode
- for i=1,size-1 do -- for i=2,size-1 do
- local firstdata = characters[firstcode]
- if not firstdata then
- firstcode = private
- if trace then
- trace_ligatures_detail("defining %a as %a",firstname,firstcode)
- end
- unicodes[firstname] = firstcode
- firstdata = { intermediate = true, ligatures = { } }
- characters[firstcode] = firstdata
- descriptions[firstcode] = { name = firstname }
- private = private + 1
- end
- local target
- local secondcode = lookupdata[i+1]
- local secondname = firstname .. "_" .. secondcode
- if i == size - 1 then
- target = unicode
- if not unicodes[secondname] then
- unicodes[secondname] = unicode -- map final ligature onto intermediates
- end
- okay = true
- else
- target = unicodes[secondname]
- if not target then
- break
- end
- end
- if trace then
- trace_ligatures_detail("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target)
- end
- local firstligs = firstdata.ligatures
- if firstligs then
- firstligs[secondcode] = { char = target }
- else
- firstdata.ligatures = { [secondcode] = { char = target } }
- end
- firstcode = target
- firstname = secondname
- end
- end
- if okay then
- ligatures[i] = false
- done = done + 1
- end
- end
- end
- alldone = done == 0
- end
- if trace then
- for k, v in next, characters do
- if v.ligatures then table.print(v,k) end
- end
- end
- tfmdata.resources.private = private
- end
-end
-
-local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local changed = tfmdata.changed
- local unicodes = resources.unicodes
- local lookuphash = resources.lookuphash
- local lookuptypes = resources.lookuptypes
-
- local ligatures = { }
- local alternate = tonumber(value)
- local defaultalt = otf.defaultbasealternate
-
- local trace_singles = trace_baseinit and trace_singles
- local trace_alternatives = trace_baseinit and trace_alternatives
- local trace_ligatures = trace_baseinit and trace_ligatures
-
- local actions = {
- substitution = function(lookupdata,lookupname,description,unicode)
- if trace_singles then
- report_substitution(feature,lookupname,descriptions,unicode,lookupdata)
- end
- changed[unicode] = lookupdata
- end,
- alternate = function(lookupdata,lookupname,description,unicode)
- local replacement = lookupdata[alternate]
- if replacement then
- changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
- end
- elseif defaultalt == "first" then
- replacement = lookupdata[1]
- changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- elseif defaultalt == "last" then
-                replacement = lookupdata[#lookupdata]
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- else
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
- end
- end
- end,
- ligature = function(lookupdata,lookupname,description,unicode)
- if trace_ligatures then
- report_ligature(feature,lookupname,descriptions,unicode,lookupdata)
- end
- ligatures[#ligatures+1] = { unicode, lookupdata }
- end,
- }
-
- for unicode, character in next, characters do
- local description = descriptions[unicode]
- local lookups = description.slookups
- if lookups then
- for l=1,#lookuplist do
- local lookupname = lookuplist[l]
- local lookupdata = lookups[lookupname]
- if lookupdata then
- local lookuptype = lookuptypes[lookupname]
- local action = actions[lookuptype]
- if action then
- action(lookupdata,lookupname,description,unicode)
- end
- end
- end
- end
- local lookups = description.mlookups
- if lookups then
- for l=1,#lookuplist do
- local lookupname = lookuplist[l]
- local lookuplist = lookups[lookupname]
- if lookuplist then
- local lookuptype = lookuptypes[lookupname]
- local action = actions[lookuptype]
- if action then
- for i=1,#lookuplist do
- action(lookuplist[i],lookupname,description,unicode)
- end
- end
- end
- end
- end
- end
-
- finalize_ligatures(tfmdata,ligatures)
-end
-
-local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist) -- todo what kind of kerns, currently all
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local unicodes = resources.unicodes
- local sharedkerns = { }
- local traceindeed = trace_baseinit and trace_kerns
- for unicode, character in next, characters do
- local description = descriptions[unicode]
- local rawkerns = description.kerns -- shared
- if rawkerns then
- local s = sharedkerns[rawkerns]
- if s == false then
- -- skip
- elseif s then
- character.kerns = s
- else
- local newkerns = character.kerns
- local done = false
- for l=1,#lookuplist do
- local lookup = lookuplist[l]
- local kerns = rawkerns[lookup]
- if kerns then
- for otherunicode, value in next, kerns do
- if value == 0 then
- -- maybe no 0 test here
- elseif not newkerns then
- newkerns = { [otherunicode] = value }
- done = true
- if traceindeed then
- report_kern(feature,lookup,descriptions,unicode,otherunicode,value)
- end
- elseif not newkerns[otherunicode] then -- first wins
- newkerns[otherunicode] = value
- done = true
- if traceindeed then
- report_kern(feature,lookup,descriptions,unicode,otherunicode,value)
- end
- end
- end
- end
- end
- if done then
- sharedkerns[rawkerns] = newkerns
- character.kerns = newkerns -- no empty assignments
- else
- sharedkerns[rawkerns] = false
- end
- end
- end
- end
-end
-
-basemethods.independent = {
- preparesubstitutions = preparesubstitutions,
- preparepositionings = preparepositionings,
-}
-
-local function makefake(tfmdata,name,present)
- local resources = tfmdata.resources
- local private = resources.private
- local character = { intermediate = true, ligatures = { } }
- resources.unicodes[name] = private
- tfmdata.characters[private] = character
- tfmdata.descriptions[private] = { name = name }
- resources.private = private + 1
- present[name] = private
- return character
-end
-
-local function make_1(present,tree,name)
- for k, v in next, tree do
- if k == "ligature" then
- present[name] = v
- else
- make_1(present,v,name .. "_" .. k)
- end
- end
-end
-
-local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookupname)
- for k, v in next, tree do
- if k == "ligature" then
- local character = characters[preceding]
- if not character then
- if trace_baseinit then
- report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookupname,v,preceding)
- end
- character = makefake(tfmdata,name,present)
- end
- local ligatures = character.ligatures
- if ligatures then
- ligatures[unicode] = { char = v }
- else
- character.ligatures = { [unicode] = { char = v } }
- end
- if done then
- local d = done[lookupname]
- if not d then
- done[lookupname] = { "dummy", v }
- else
- d[#d+1] = v
- end
- end
- else
- local code = present[name] or unicode
- local name = name .. "_" .. k
- make_2(present,tfmdata,characters,v,name,code,k,done,lookupname)
- end
- end
-end
-
-local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local changed = tfmdata.changed
- local lookuphash = resources.lookuphash
- local lookuptypes = resources.lookuptypes
-
- local ligatures = { }
- local alternate = tonumber(value)
- local defaultalt = otf.defaultbasealternate
-
- local trace_singles = trace_baseinit and trace_singles
- local trace_alternatives = trace_baseinit and trace_alternatives
- local trace_ligatures = trace_baseinit and trace_ligatures
-
- for l=1,#lookuplist do
- local lookupname = lookuplist[l]
- local lookupdata = lookuphash[lookupname]
- local lookuptype = lookuptypes[lookupname]
- for unicode, data in next, lookupdata do
- if lookuptype == "substitution" then
- if trace_singles then
- report_substitution(feature,lookupname,descriptions,unicode,data)
- end
- changed[unicode] = data
- elseif lookuptype == "alternate" then
- local replacement = data[alternate]
- if replacement then
- changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
- end
- elseif defaultalt == "first" then
- replacement = data[1]
- changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- elseif defaultalt == "last" then
- replacement = data[#data]
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- else
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
- end
- end
- elseif lookuptype == "ligature" then
- ligatures[#ligatures+1] = { unicode, data, lookupname }
- if trace_ligatures then
- report_ligature(feature,lookupname,descriptions,unicode,data)
- end
- end
- end
- end
-
- local nofligatures = #ligatures
-
- if nofligatures > 0 then
-
- local characters = tfmdata.characters
- local present = { }
- local done = trace_baseinit and trace_ligatures and { }
-
- for i=1,nofligatures do
- local ligature = ligatures[i]
- local unicode, tree = ligature[1], ligature[2]
- make_1(present,tree,"ctx_"..unicode)
- end
-
- for i=1,nofligatures do
- local ligature = ligatures[i]
- local unicode, tree, lookupname = ligature[1], ligature[2], ligature[3]
- make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookupname)
- end
-
- end
-
-end
-
-local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local lookuphash = resources.lookuphash
- local traceindeed = trace_baseinit and trace_kerns
-
- -- check out this sharedkerns trickery
-
- for l=1,#lookuplist do
- local lookupname = lookuplist[l]
- local lookupdata = lookuphash[lookupname]
- for unicode, data in next, lookupdata do
- local character = characters[unicode]
- local kerns = character.kerns
- if not kerns then
- kerns = { }
- character.kerns = kerns
- end
- if traceindeed then
- for otherunicode, kern in next, data do
- if not kerns[otherunicode] and kern ~= 0 then
- kerns[otherunicode] = kern
-                        report_kern(feature,lookupname,descriptions,unicode,otherunicode,kern)
- end
- end
- else
- for otherunicode, kern in next, data do
- if not kerns[otherunicode] and kern ~= 0 then
- kerns[otherunicode] = kern
- end
- end
- end
- end
- end
-
-end
-
-local function initializehashes(tfmdata)
- nodeinitializers.features(tfmdata)
-end
-
-basemethods.shared = {
- initializehashes = initializehashes,
- preparesubstitutions = preparesubstitutions,
- preparepositionings = preparepositionings,
-}
-
-basemethod = "independent"
-
-local function featuresinitializer(tfmdata,value)
- if true then -- value then
- local t = trace_preparing and os.clock()
- local features = tfmdata.shared.features
- if features then
- applybasemethod("initializehashes",tfmdata)
- local collectlookups = otf.collectlookups
- local rawdata = tfmdata.shared.rawdata
- local properties = tfmdata.properties
- local script = properties.script
- local language = properties.language
- local basesubstitutions = rawdata.resources.features.gsub
- local basepositionings = rawdata.resources.features.gpos
- if basesubstitutions then
- for feature, data in next, basesubstitutions do
- local value = features[feature]
- if value then
- local validlookups, lookuplist = collectlookups(rawdata,feature,script,language)
- if validlookups then
- applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
- registerbasefeature(feature,value)
- end
- end
- end
- end
- if basepositionings then
- for feature, data in next, basepositionings do
- local value = features[feature]
- if value then
- local validlookups, lookuplist = collectlookups(rawdata,feature,script,language)
- if validlookups then
- applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist)
- registerbasefeature(feature,value)
- end
- end
- end
- end
- registerbasehash(tfmdata)
- end
- if trace_preparing then
- report_prepare("preparation time is %0.3f seconds for %a",os.clock()-t,tfmdata.properties.fullname)
- end
- end
-end
-
-registerotffeature {
- name = "features",
- description = "features",
- default = true,
- initializers = {
- -- position = 1, -- after setscript (temp hack ... we need to force script / language to 1
- base = featuresinitializer,
- }
-}
-
--- independent : collect lookups independently (takes more runtime ... negligible)
--- shared : shares lookups with node mode (takes more memory unless also a node mode variant is used ... noticeable)
-
-directives.register("fonts.otf.loader.basemethod", function(v)
- if basemethods[v] then
- basemethod = v
- end
-end)
+if not modules then modules = { } end modules ['font-otb'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+local concat = table.concat
+local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
+local type, next, tonumber, tostring = type, next, tonumber, tostring
+local lpegmatch = lpeg.match
+local utfchar = utf.char
+
+local trace_baseinit = false trackers.register("otf.baseinit", function(v) trace_baseinit = v end)
+local trace_singles = false trackers.register("otf.singles", function(v) trace_singles = v end)
+local trace_multiples = false trackers.register("otf.multiples", function(v) trace_multiples = v end)
+local trace_alternatives = false trackers.register("otf.alternatives", function(v) trace_alternatives = v end)
+local trace_ligatures = false trackers.register("otf.ligatures", function(v) trace_ligatures = v end)
+local trace_ligatures_detail = false trackers.register("otf.ligatures.detail", function(v) trace_ligatures_detail = v end)
+local trace_kerns = false trackers.register("otf.kerns", function(v) trace_kerns = v end)
+local trace_preparing = false trackers.register("otf.preparing", function(v) trace_preparing = v end)
+
+local report_prepare = logs.reporter("fonts","otf prepare")
+
+local fonts = fonts
+local otf = fonts.handlers.otf
+
+local otffeatures = otf.features
+local registerotffeature = otffeatures.register
+
+otf.defaultbasealternate = "none" -- first last
+
+local wildcard = "*"
+local default = "dflt"
+
+local formatters = string.formatters
+local f_unicode = formatters["%U"]
+local f_uniname = formatters["%U (%s)"]
+local f_unilist = formatters["% t (% t)"]
+
+local function gref(descriptions,n)
+ if type(n) == "number" then
+ local name = descriptions[n].name
+ if name then
+ return f_uniname(n,name)
+ else
+ return f_unicode(n)
+ end
+ elseif n then
+ local num, nam = { }, { }
+ for i=2,#n do
+ local ni = n[i]
+ if tonumber(ni) then -- first is likely a key
+ local di = descriptions[ni]
+ num[i] = f_unicode(ni)
+ nam[i] = di and di.name or "-"
+ end
+ end
+ return f_unilist(num,nam)
+ else
+ return ""
+ end
+end
+
+local function cref(feature,lookupname)
+ if lookupname then
+ return formatters["feature %a, lookup %a"](feature,lookupname)
+ else
+ return formatters["feature %a"](feature)
+ end
+end
+
+local function report_alternate(feature,lookupname,descriptions,unicode,replacement,value,comment)
+ report_prepare("%s: base alternate %s => %s (%S => %S)",
+ cref(feature,lookupname),
+ gref(descriptions,unicode),
+ replacement and gref(descriptions,replacement),
+ value,
+ comment)
+end
+
+local function report_substitution(feature,lookupname,descriptions,unicode,substitution)
+ report_prepare("%s: base substitution %s => %S",
+ cref(feature,lookupname),
+ gref(descriptions,unicode),
+ gref(descriptions,substitution))
+end
+
+local function report_ligature(feature,lookupname,descriptions,unicode,ligature)
+ report_prepare("%s: base ligature %s => %S",
+ cref(feature,lookupname),
+ gref(descriptions,ligature),
+ gref(descriptions,unicode))
+end
+
+local function report_kern(feature,lookupname,descriptions,unicode,otherunicode,value)
+ report_prepare("%s: base kern %s + %s => %S",
+ cref(feature,lookupname),
+ gref(descriptions,unicode),
+ gref(descriptions,otherunicode),
+ value)
+end
+
+local basemethods = { }
+local basemethod = ""
+
+local function applybasemethod(what,...)
+ local m = basemethods[basemethod][what]
+ if m then
+ return m(...)
+ end
+end
+
+-- We need to make sure that luatex sees the difference between
+-- base fonts that have different glyphs in the same slots in fonts
+-- that have the same fullname (or filename). LuaTeX will merge fonts
+-- eventually (and subset later on). If needed we can use a more
+-- verbose name as long as we don't use <()<>[]{}/%> and the length
+-- is < 128.
+
+local basehash, basehashes, applied = { }, 1, { }
+
+local function registerbasehash(tfmdata)
+ local properties = tfmdata.properties
+ local hash = concat(applied," ")
+ local base = basehash[hash]
+ if not base then
+ basehashes = basehashes + 1
+ base = basehashes
+ basehash[hash] = base
+ end
+ properties.basehash = base
+ properties.fullname = properties.fullname .. "-" .. base
+ -- report_prepare("fullname base hash '%a, featureset %a",tfmdata.properties.fullname,hash)
+ applied = { }
+end
+
+local function registerbasefeature(feature,value)
+ applied[#applied+1] = feature .. "=" .. tostring(value)
+end
+
+-- The original basemode ligature builder used the names of components
+-- and did some expression juggling to get the chain right. The current
+-- variant starts with unicodes but still uses names to make the chain.
+-- This is needed because we have to create intermediates when needed
+-- but use predefined snippets when available. To some extent the
+-- current builder is more stupid, but I don't worry that much about it
+-- as ligatures are rather predictable.
+--
+-- Personally I think that an ff + i == ffi rule as used in for instance
+-- latin modern is pretty weird as no sane person will key that in and
+-- expect a glyph for that ligature plus the following character. Anyhow,
+-- as we need to deal with this, we do, but no guarantees are given.
+--
+-- latin modern dejavu
+--
+-- f+f 102 102 102 102
+-- f+i 102 105 102 105
+-- f+l 102 108 102 108
+-- f+f+i 102 102 105
+-- f+f+l 102 102 108 102 102 108
+-- ff+i 64256 105 64256 105
+-- ff+l 64256 108
+--
+-- As you can see here, latin modern is less complete than dejavu but
+-- in practice one will not notice it.
+--
+-- The while loop is needed because we need to resolve for instance
+-- pseudo names like hyphen_hyphen to endash so in practice we end
+-- up with a bit too many definitions but the overhead is negligible.
+--
+-- Todo: if changed[first] or changed[second] then ... end
+
+local trace = false
+
+local function finalize_ligatures(tfmdata,ligatures)
+ local nofligatures = #ligatures
+ if nofligatures > 0 then
+ local characters = tfmdata.characters
+ local descriptions = tfmdata.descriptions
+ local resources = tfmdata.resources
+ local unicodes = resources.unicodes
+ local private = resources.private
+ local alldone = false
+ while not alldone do
+ local done = 0
+ for i=1,nofligatures do
+ local ligature = ligatures[i]
+ if ligature then
+ local unicode, lookupdata = ligature[1], ligature[2]
+ if trace then
+ trace_ligatures_detail("building % a into %a",lookupdata,unicode)
+ end
+ local size = #lookupdata
+ local firstcode = lookupdata[1] -- [2]
+ local firstdata = characters[firstcode]
+ local okay = false
+ if firstdata then
+ local firstname = "ctx_" .. firstcode
+ for i=1,size-1 do -- for i=2,size-1 do
+ local firstdata = characters[firstcode]
+ if not firstdata then
+ firstcode = private
+ if trace then
+ trace_ligatures_detail("defining %a as %a",firstname,firstcode)
+ end
+ unicodes[firstname] = firstcode
+ firstdata = { intermediate = true, ligatures = { } }
+ characters[firstcode] = firstdata
+ descriptions[firstcode] = { name = firstname }
+ private = private + 1
+ end
+ local target
+ local secondcode = lookupdata[i+1]
+ local secondname = firstname .. "_" .. secondcode
+ if i == size - 1 then
+ target = unicode
+ if not unicodes[secondname] then
+ unicodes[secondname] = unicode -- map final ligature onto intermediates
+ end
+ okay = true
+ else
+ target = unicodes[secondname]
+ if not target then
+ break
+ end
+ end
+ if trace then
+ trace_ligatures_detail("codes (%a,%a) + (%a,%a) -> %a",firstname,firstcode,secondname,secondcode,target)
+ end
+ local firstligs = firstdata.ligatures
+ if firstligs then
+ firstligs[secondcode] = { char = target }
+ else
+ firstdata.ligatures = { [secondcode] = { char = target } }
+ end
+ firstcode = target
+ firstname = secondname
+ end
+ end
+ if okay then
+ ligatures[i] = false
+ done = done + 1
+ end
+ end
+ end
+ alldone = done == 0
+ end
+ if trace then
+ for k, v in next, characters do
+ if v.ligatures then table.print(v,k) end
+ end
+ end
+ tfmdata.resources.private = private
+ end
+end
+
+local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
+ local characters = tfmdata.characters
+ local descriptions = tfmdata.descriptions
+ local resources = tfmdata.resources
+ local changed = tfmdata.changed
+ local unicodes = resources.unicodes
+ local lookuphash = resources.lookuphash
+ local lookuptypes = resources.lookuptypes
+
+ local ligatures = { }
+ local alternate = tonumber(value)
+ local defaultalt = otf.defaultbasealternate
+
+ local trace_singles = trace_baseinit and trace_singles
+ local trace_alternatives = trace_baseinit and trace_alternatives
+ local trace_ligatures = trace_baseinit and trace_ligatures
+
+ local actions = {
+ substitution = function(lookupdata,lookupname,description,unicode)
+ if trace_singles then
+ report_substitution(feature,lookupname,descriptions,unicode,lookupdata)
+ end
+ changed[unicode] = lookupdata
+ end,
+ alternate = function(lookupdata,lookupname,description,unicode)
+ local replacement = lookupdata[alternate]
+ if replacement then
+ changed[unicode] = replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
+ end
+ elseif defaultalt == "first" then
+ replacement = lookupdata[1]
+ changed[unicode] = replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ elseif defaultalt == "last" then
+                replacement = lookupdata[#lookupdata]
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ else
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
+ end
+ end
+ end,
+ ligature = function(lookupdata,lookupname,description,unicode)
+ if trace_ligatures then
+ report_ligature(feature,lookupname,descriptions,unicode,lookupdata)
+ end
+ ligatures[#ligatures+1] = { unicode, lookupdata }
+ end,
+ }
+
+ for unicode, character in next, characters do
+ local description = descriptions[unicode]
+ local lookups = description.slookups
+ if lookups then
+ for l=1,#lookuplist do
+ local lookupname = lookuplist[l]
+ local lookupdata = lookups[lookupname]
+ if lookupdata then
+ local lookuptype = lookuptypes[lookupname]
+ local action = actions[lookuptype]
+ if action then
+ action(lookupdata,lookupname,description,unicode)
+ end
+ end
+ end
+ end
+ local lookups = description.mlookups
+ if lookups then
+ for l=1,#lookuplist do
+ local lookupname = lookuplist[l]
+ local lookuplist = lookups[lookupname]
+ if lookuplist then
+ local lookuptype = lookuptypes[lookupname]
+ local action = actions[lookuptype]
+ if action then
+ for i=1,#lookuplist do
+ action(lookuplist[i],lookupname,description,unicode)
+ end
+ end
+ end
+ end
+ end
+ end
+
+ finalize_ligatures(tfmdata,ligatures)
+end
+
+local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist) -- todo what kind of kerns, currently all
+ local characters = tfmdata.characters
+ local descriptions = tfmdata.descriptions
+ local resources = tfmdata.resources
+ local unicodes = resources.unicodes
+ local sharedkerns = { }
+ local traceindeed = trace_baseinit and trace_kerns
+ for unicode, character in next, characters do
+ local description = descriptions[unicode]
+ local rawkerns = description.kerns -- shared
+ if rawkerns then
+ local s = sharedkerns[rawkerns]
+ if s == false then
+ -- skip
+ elseif s then
+ character.kerns = s
+ else
+ local newkerns = character.kerns
+ local done = false
+ for l=1,#lookuplist do
+ local lookup = lookuplist[l]
+ local kerns = rawkerns[lookup]
+ if kerns then
+ for otherunicode, value in next, kerns do
+ if value == 0 then
+ -- maybe no 0 test here
+ elseif not newkerns then
+ newkerns = { [otherunicode] = value }
+ done = true
+ if traceindeed then
+ report_kern(feature,lookup,descriptions,unicode,otherunicode,value)
+ end
+ elseif not newkerns[otherunicode] then -- first wins
+ newkerns[otherunicode] = value
+ done = true
+ if traceindeed then
+ report_kern(feature,lookup,descriptions,unicode,otherunicode,value)
+ end
+ end
+ end
+ end
+ end
+ if done then
+ sharedkerns[rawkerns] = newkerns
+ character.kerns = newkerns -- no empty assignments
+ else
+ sharedkerns[rawkerns] = false
+ end
+ end
+ end
+ end
+end
+
+basemethods.independent = {
+ preparesubstitutions = preparesubstitutions,
+ preparepositionings = preparepositionings,
+}
+
+local function makefake(tfmdata,name,present)
+ local resources = tfmdata.resources
+ local private = resources.private
+ local character = { intermediate = true, ligatures = { } }
+ resources.unicodes[name] = private
+ tfmdata.characters[private] = character
+ tfmdata.descriptions[private] = { name = name }
+ resources.private = private + 1
+ present[name] = private
+ return character
+end
+
+local function make_1(present,tree,name)
+ for k, v in next, tree do
+ if k == "ligature" then
+ present[name] = v
+ else
+ make_1(present,v,name .. "_" .. k)
+ end
+ end
+end
+
+local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookupname)
+ for k, v in next, tree do
+ if k == "ligature" then
+ local character = characters[preceding]
+ if not character then
+ if trace_baseinit then
+ report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookupname,v,preceding)
+ end
+ character = makefake(tfmdata,name,present)
+ end
+ local ligatures = character.ligatures
+ if ligatures then
+ ligatures[unicode] = { char = v }
+ else
+ character.ligatures = { [unicode] = { char = v } }
+ end
+ if done then
+ local d = done[lookupname]
+ if not d then
+ done[lookupname] = { "dummy", v }
+ else
+ d[#d+1] = v
+ end
+ end
+ else
+ local code = present[name] or unicode
+ local name = name .. "_" .. k
+ make_2(present,tfmdata,characters,v,name,code,k,done,lookupname)
+ end
+ end
+end
+
+local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
+ local characters = tfmdata.characters
+ local descriptions = tfmdata.descriptions
+ local resources = tfmdata.resources
+ local changed = tfmdata.changed
+ local lookuphash = resources.lookuphash
+ local lookuptypes = resources.lookuptypes
+
+ local ligatures = { }
+ local alternate = tonumber(value)
+ local defaultalt = otf.defaultbasealternate
+
+ local trace_singles = trace_baseinit and trace_singles
+ local trace_alternatives = trace_baseinit and trace_alternatives
+ local trace_ligatures = trace_baseinit and trace_ligatures
+
+ for l=1,#lookuplist do
+ local lookupname = lookuplist[l]
+ local lookupdata = lookuphash[lookupname]
+ local lookuptype = lookuptypes[lookupname]
+ for unicode, data in next, lookupdata do
+ if lookuptype == "substitution" then
+ if trace_singles then
+ report_substitution(feature,lookupname,descriptions,unicode,data)
+ end
+ changed[unicode] = data
+ elseif lookuptype == "alternate" then
+ local replacement = data[alternate]
+ if replacement then
+ changed[unicode] = replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
+ end
+ elseif defaultalt == "first" then
+ replacement = data[1]
+ changed[unicode] = replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ elseif defaultalt == "last" then
+ replacement = data[#data]
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ else
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
+ end
+ end
+ elseif lookuptype == "ligature" then
+ ligatures[#ligatures+1] = { unicode, data, lookupname }
+ if trace_ligatures then
+ report_ligature(feature,lookupname,descriptions,unicode,data)
+ end
+ end
+ end
+ end
+
+ local nofligatures = #ligatures
+
+ if nofligatures > 0 then
+
+ local characters = tfmdata.characters
+ local present = { }
+ local done = trace_baseinit and trace_ligatures and { }
+
+ for i=1,nofligatures do
+ local ligature = ligatures[i]
+ local unicode, tree = ligature[1], ligature[2]
+ make_1(present,tree,"ctx_"..unicode)
+ end
+
+ for i=1,nofligatures do
+ local ligature = ligatures[i]
+ local unicode, tree, lookupname = ligature[1], ligature[2], ligature[3]
+ make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookupname)
+ end
+
+ end
+
+end
+
+local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
+ local characters = tfmdata.characters
+ local descriptions = tfmdata.descriptions
+ local resources = tfmdata.resources
+ local lookuphash = resources.lookuphash
+ local traceindeed = trace_baseinit and trace_kerns
+
+ -- check out this sharedkerns trickery
+
+ for l=1,#lookuplist do
+ local lookupname = lookuplist[l]
+ local lookupdata = lookuphash[lookupname]
+ for unicode, data in next, lookupdata do
+ local character = characters[unicode]
+ local kerns = character.kerns
+ if not kerns then
+ kerns = { }
+ character.kerns = kerns
+ end
+ if traceindeed then
+ for otherunicode, kern in next, data do
+ if not kerns[otherunicode] and kern ~= 0 then
+ kerns[otherunicode] = kern
+                        report_kern(feature,lookupname,descriptions,unicode,otherunicode,kern)
+ end
+ end
+ else
+ for otherunicode, kern in next, data do
+ if not kerns[otherunicode] and kern ~= 0 then
+ kerns[otherunicode] = kern
+ end
+ end
+ end
+ end
+ end
+
+end
+
+local function initializehashes(tfmdata)
+ nodeinitializers.features(tfmdata)
+end
+
+basemethods.shared = {
+ initializehashes = initializehashes,
+ preparesubstitutions = preparesubstitutions,
+ preparepositionings = preparepositionings,
+}
+
+basemethod = "independent"
+
+local function featuresinitializer(tfmdata,value)
+ if true then -- value then
+ local t = trace_preparing and os.clock()
+ local features = tfmdata.shared.features
+ if features then
+ applybasemethod("initializehashes",tfmdata)
+ local collectlookups = otf.collectlookups
+ local rawdata = tfmdata.shared.rawdata
+ local properties = tfmdata.properties
+ local script = properties.script
+ local language = properties.language
+ local basesubstitutions = rawdata.resources.features.gsub
+ local basepositionings = rawdata.resources.features.gpos
+ if basesubstitutions then
+ for feature, data in next, basesubstitutions do
+ local value = features[feature]
+ if value then
+ local validlookups, lookuplist = collectlookups(rawdata,feature,script,language)
+ if validlookups then
+ applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ end
+ end
+ end
+ end
+ if basepositionings then
+ for feature, data in next, basepositionings do
+ local value = features[feature]
+ if value then
+ local validlookups, lookuplist = collectlookups(rawdata,feature,script,language)
+ if validlookups then
+ applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ end
+ end
+ end
+ end
+ registerbasehash(tfmdata)
+ end
+ if trace_preparing then
+ report_prepare("preparation time is %0.3f seconds for %a",os.clock()-t,tfmdata.properties.fullname)
+ end
+ end
+end
+
+registerotffeature {
+ name = "features",
+ description = "features",
+ default = true,
+ initializers = {
+ -- position = 1, -- after setscript (temp hack ... we need to force script / language to 1
+ base = featuresinitializer,
+ }
+}
+
+-- independent : collect lookups independently (takes more runtime ... negligible)
+-- shared : shares lookups with node mode (takes more memory unless also a node mode variant is used ... noticeable)
+
+directives.register("fonts.otf.loader.basemethod", function(v)
+ if basemethods[v] then
+ basemethod = v
+ end
+end)
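In base mode the prepared gsub/gpos features end up as plain tfm-style ligature and kern tables on the characters themselves, and registerbasehash above appends a number derived from the applied feature set to the fullname so that LuaTeX does not merge fonts that were prepared differently. A rough sketch of both results, with hypothetical glyph data (only the fi ligature mapping and the hashing scheme are taken from the code above; the font name and kern value are made up):

-- shape of a prepared character (f + i -> fi ligature, a kern after f):
local characters = {
    [0x0066] = {                                      -- "f"
        ligatures = { [0x0069] = { char = 0xFB01 } }, -- U+FB01 LATIN SMALL LIGATURE FI
        kerns     = { [0x002E] = -30 },               -- made-up kern against "."
    },
}

-- the fullname disambiguation used by registerbasehash, in isolation:
local basehash, basehashes = { }, 1
local function disambiguate(fullname, applied)
    local hash = table.concat(applied, " ")           -- e.g. "liga=true kern=true"
    local base = basehash[hash]
    if not base then
        basehashes = basehashes + 1
        base = basehashes
        basehash[hash] = base
    end
    return fullname .. "-" .. base
end

print(disambiguate("SomeFont", { "liga=true", "kern=true" })) -- SomeFont-2
print(disambiguate("SomeFont", { "liga=true", "kern=true" })) -- SomeFont-2 (same set, same number)
print(disambiguate("SomeFont", { "smcp=true" }))              -- SomeFont-3 (different set)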
diff --git a/tex/context/base/font-otc.lua b/tex/context/base/font-otc.lua
index a87dcadf8..0ea900008 100644
--- a/tex/context/base/font-otc.lua
+++ b/tex/context/base/font-otc.lua
@@ -1,333 +1,333 @@
-if not modules then modules = { } end modules ['font-otc'] = {
- version = 1.001,
- comment = "companion to font-otf.lua (context)",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format, insert = string.format, table.insert
-local type, next = type, next
-local lpegmatch = lpeg.match
-
--- we assume that the other otf stuff is loaded already
-
-local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
-local report_otf = logs.reporter("fonts","otf loading")
-
-local fonts = fonts
-local otf = fonts.handlers.otf
-local registerotffeature = otf.features.register
-local setmetatableindex = table.setmetatableindex
-
--- In the userdata interface we can no longer tweak the loaded font as
--- conveniently as before. For instance, instead of pushing extra data
--- into the table using the original structure, we now have to operate on
--- the mkiv representation. And as the fontloader interface is modelled
--- after fontforge we cannot change that one too much either.
-
-local types = {
- substitution = "gsub_single",
- ligature = "gsub_ligature",
- alternate = "gsub_alternate",
-}
-
-setmetatableindex(types, function(t,k) t[k] = k return k end) -- "key"
-
-local everywhere = { ["*"] = { ["*"] = true } } -- or: { ["*"] = { "*" } }
-local noflags = { }
-
-local function addfeature(data,feature,specifications)
- local descriptions = data.descriptions
- local resources = data.resources
- local lookups = resources.lookups
- local gsubfeatures = resources.features.gsub
- if gsubfeatures and gsubfeatures[feature] then
- -- already present
- else
- local sequences = resources.sequences
- local fontfeatures = resources.features
- local unicodes = resources.unicodes
- local lookuptypes = resources.lookuptypes
- local splitter = lpeg.splitter(" ",unicodes)
- local done = 0
- local skip = 0
- if not specifications[1] then
- -- so we accept a one entry specification
- specifications = { specifications }
- end
- -- subtables are tables themselves but we also accept flattened singular subtables
- for s=1,#specifications do
- local specification = specifications[s]
- local valid = specification.valid
- if not valid or valid(data,specification,feature) then
- local initialize = specification.initialize
- if initialize then
- -- when false is returned we initialize only once
- specification.initialize = initialize(specification) and initialize or nil
- end
- local askedfeatures = specification.features or everywhere
- local subtables = specification.subtables or { specification.data } or { }
- local featuretype = types[specification.type or "substitution"]
- local featureflags = specification.flags or noflags
- local added = false
- local featurename = format("ctx_%s_%s",feature,s)
- local st = { }
- for t=1,#subtables do
- local list = subtables[t]
- local full = format("%s_%s",featurename,t)
- st[t] = full
- if featuretype == "gsub_ligature" then
- lookuptypes[full] = "ligature"
- for code, ligature in next, list do
- local unicode = tonumber(code) or unicodes[code]
- local description = descriptions[unicode]
- if description then
- local slookups = description.slookups
- if type(ligature) == "string" then
- ligature = { lpegmatch(splitter,ligature) }
- end
- local present = true
- for i=1,#ligature do
- if not descriptions[ligature[i]] then
- present = false
- break
- end
- end
- if present then
- if slookups then
- slookups[full] = ligature
- else
- description.slookups = { [full] = ligature }
- end
- done, added = done + 1, true
- else
- skip = skip + 1
- end
- end
- end
- elseif featuretype == "gsub_single" then
- lookuptypes[full] = "substitution"
- for code, replacement in next, list do
- local unicode = tonumber(code) or unicodes[code]
- local description = descriptions[unicode]
- if description then
- local slookups = description.slookups
- replacement = tonumber(replacement) or unicodes[replacement]
- if descriptions[replacement] then
- if slookups then
- slookups[full] = replacement
- else
- description.slookups = { [full] = replacement }
- end
- done, added = done + 1, true
- end
- end
- end
- end
- end
- if added then
- -- script = { lang1, lang2, lang3 } or script = { lang1 = true, ... }
- for k, v in next, askedfeatures do
- if v[1] then
- askedfeatures[k] = table.tohash(v)
- end
- end
- sequences[#sequences+1] = {
- chain = 0,
- features = { [feature] = askedfeatures },
- flags = featureflags,
- name = featurename,
- subtables = st,
- type = featuretype,
- }
- -- register in metadata (merge as there can be a few)
- if not gsubfeatures then
- gsubfeatures = { }
- fontfeatures.gsub = gsubfeatures
- end
- local k = gsubfeatures[feature]
- if not k then
- k = { }
- gsubfeatures[feature] = k
- end
- for script, languages in next, askedfeatures do
- local kk = k[script]
- if not kk then
- kk = { }
- k[script] = kk
- end
- for language, value in next, languages do
- kk[language] = value
- end
- end
- end
- end
- end
- if trace_loading then
- report_otf("registering feature %a, affected glyphs %a, skipped glyphs %a",feature,done,skip)
- end
- end
-end
-
-otf.enhancers.addfeature = addfeature
-
-local extrafeatures = { }
-
-function otf.addfeature(name,specification)
- extrafeatures[name] = specification
-end
-
-local function enhance(data,filename,raw)
- for feature, specification in next, extrafeatures do
- addfeature(data,feature,specification)
- end
-end
-
-otf.enhancers.register("check extra features",enhance)
-
--- tlig --
-
-local tlig = {
- endash = "hyphen hyphen",
- emdash = "hyphen hyphen hyphen",
- -- quotedblleft = "quoteleft quoteleft",
- -- quotedblright = "quoteright quoteright",
- -- quotedblleft = "grave grave",
- -- quotedblright = "quotesingle quotesingle",
- -- quotedblbase = "comma comma",
-}
-
-local tlig_specification = {
- type = "ligature",
- features = everywhere,
- data = tlig,
- flags = noflags,
-}
-
-otf.addfeature("tlig",tlig_specification)
-
-registerotffeature {
- name = 'tlig',
- description = 'tex ligatures',
-}
-
--- trep
-
-local trep = {
- -- [0x0022] = 0x201D,
- [0x0027] = 0x2019,
- -- [0x0060] = 0x2018,
-}
-
-local trep_specification = {
- type = "substitution",
- features = everywhere,
- data = trep,
- flags = noflags,
-}
-
-otf.addfeature("trep",trep_specification)
-
-registerotffeature {
- name = 'trep',
- description = 'tex replacements',
-}
-
--- tcom
-
-if characters.combined then
-
- local tcom = { }
-
- local function initialize()
- characters.initialize()
- for first, seconds in next, characters.combined do
- for second, combination in next, seconds do
- tcom[combination] = { first, second }
- end
- end
- -- return false
- end
-
- local tcom_specification = {
- type = "ligature",
- features = everywhere,
- data = tcom,
- flags = noflags,
- initialize = initialize,
- }
-
- otf.addfeature("tcom",tcom_specification)
-
- registerotffeature {
- name = 'tcom',
- description = 'tex combinations',
- }
-
-end
-
--- anum
-
-local anum_arabic = {
- [0x0030] = 0x0660,
- [0x0031] = 0x0661,
- [0x0032] = 0x0662,
- [0x0033] = 0x0663,
- [0x0034] = 0x0664,
- [0x0035] = 0x0665,
- [0x0036] = 0x0666,
- [0x0037] = 0x0667,
- [0x0038] = 0x0668,
- [0x0039] = 0x0669,
-}
-
-local anum_persian = {
- [0x0030] = 0x06F0,
- [0x0031] = 0x06F1,
- [0x0032] = 0x06F2,
- [0x0033] = 0x06F3,
- [0x0034] = 0x06F4,
- [0x0035] = 0x06F5,
- [0x0036] = 0x06F6,
- [0x0037] = 0x06F7,
- [0x0038] = 0x06F8,
- [0x0039] = 0x06F9,
-}
-
-local function valid(data)
- local features = data.resources.features
- if features then
- for k, v in next, features do
- for k, v in next, v do
- if v.arab then
- return true
- end
- end
- end
- end
-end
-
-local anum_specification = {
- {
- type = "substitution",
- features = { arab = { urd = true, dflt = true } },
- data = anum_arabic,
- flags = noflags, -- { },
- valid = valid,
- },
- {
- type = "substitution",
- features = { arab = { urd = true } },
- data = anum_persian,
- flags = noflags, -- { },
- valid = valid,
- },
-}
-
-otf.addfeature("anum",anum_specification) -- todo: only when there is already an arab script feature
-
-registerotffeature {
- name = 'anum',
- description = 'arabic digits',
-}
+if not modules then modules = { } end modules ['font-otc'] = {
+ version = 1.001,
+ comment = "companion to font-otf.lua (context)",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format, insert = string.format, table.insert
+local type, next = type, next
+local lpegmatch = lpeg.match
+
+-- we assume that the other otf stuff is loaded already
+
+local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
+local report_otf = logs.reporter("fonts","otf loading")
+
+local fonts = fonts
+local otf = fonts.handlers.otf
+local registerotffeature = otf.features.register
+local setmetatableindex = table.setmetatableindex
+
+-- In the userdata interface we can no longer tweak the loaded font as
+-- conveniently as before. For instance, instead of pushing extra data
+-- into the table using the original structure, we now have to operate on
+-- the mkiv representation. And as the fontloader interface is modelled
+-- after fontforge we cannot change that one too much either.
+
+local types = {
+ substitution = "gsub_single",
+ ligature = "gsub_ligature",
+ alternate = "gsub_alternate",
+}
+
+setmetatableindex(types, function(t,k) t[k] = k return k end) -- "key"
+
+local everywhere = { ["*"] = { ["*"] = true } } -- or: { ["*"] = { "*" } }
+local noflags = { }
+
+local function addfeature(data,feature,specifications)
+ local descriptions = data.descriptions
+ local resources = data.resources
+ local lookups = resources.lookups
+ local gsubfeatures = resources.features.gsub
+ if gsubfeatures and gsubfeatures[feature] then
+ -- already present
+ else
+ local sequences = resources.sequences
+ local fontfeatures = resources.features
+ local unicodes = resources.unicodes
+ local lookuptypes = resources.lookuptypes
+ local splitter = lpeg.splitter(" ",unicodes)
+ local done = 0
+ local skip = 0
+ if not specifications[1] then
+ -- so we accept a one entry specification
+ specifications = { specifications }
+ end
+ -- subtables are tables themselves but we also accept flattened singular subtables
+ for s=1,#specifications do
+ local specification = specifications[s]
+ local valid = specification.valid
+ if not valid or valid(data,specification,feature) then
+ local initialize = specification.initialize
+ if initialize then
+ -- when false is returned we initialize only once
+ specification.initialize = initialize(specification) and initialize or nil
+ end
+ local askedfeatures = specification.features or everywhere
+ local subtables = specification.subtables or { specification.data } or { }
+ local featuretype = types[specification.type or "substitution"]
+ local featureflags = specification.flags or noflags
+ local added = false
+ local featurename = format("ctx_%s_%s",feature,s)
+ local st = { }
+ for t=1,#subtables do
+ local list = subtables[t]
+ local full = format("%s_%s",featurename,t)
+ st[t] = full
+ if featuretype == "gsub_ligature" then
+ lookuptypes[full] = "ligature"
+ for code, ligature in next, list do
+ local unicode = tonumber(code) or unicodes[code]
+ local description = descriptions[unicode]
+ if description then
+ local slookups = description.slookups
+ if type(ligature) == "string" then
+ ligature = { lpegmatch(splitter,ligature) }
+ end
+ local present = true
+ for i=1,#ligature do
+ if not descriptions[ligature[i]] then
+ present = false
+ break
+ end
+ end
+ if present then
+ if slookups then
+ slookups[full] = ligature
+ else
+ description.slookups = { [full] = ligature }
+ end
+ done, added = done + 1, true
+ else
+ skip = skip + 1
+ end
+ end
+ end
+ elseif featuretype == "gsub_single" then
+ lookuptypes[full] = "substitution"
+ for code, replacement in next, list do
+ local unicode = tonumber(code) or unicodes[code]
+ local description = descriptions[unicode]
+ if description then
+ local slookups = description.slookups
+ replacement = tonumber(replacement) or unicodes[replacement]
+ if descriptions[replacement] then
+ if slookups then
+ slookups[full] = replacement
+ else
+ description.slookups = { [full] = replacement }
+ end
+ done, added = done + 1, true
+ end
+ end
+ end
+ end
+ end
+ if added then
+ -- script = { lang1, lang2, lang3 } or script = { lang1 = true, ... }
+ for k, v in next, askedfeatures do
+ if v[1] then
+ askedfeatures[k] = table.tohash(v)
+ end
+ end
+ sequences[#sequences+1] = {
+ chain = 0,
+ features = { [feature] = askedfeatures },
+ flags = featureflags,
+ name = featurename,
+ subtables = st,
+ type = featuretype,
+ }
+ -- register in metadata (merge as there can be a few)
+ if not gsubfeatures then
+ gsubfeatures = { }
+ fontfeatures.gsub = gsubfeatures
+ end
+ local k = gsubfeatures[feature]
+ if not k then
+ k = { }
+ gsubfeatures[feature] = k
+ end
+ for script, languages in next, askedfeatures do
+ local kk = k[script]
+ if not kk then
+ kk = { }
+ k[script] = kk
+ end
+ for language, value in next, languages do
+ kk[language] = value
+ end
+ end
+ end
+ end
+ end
+ if trace_loading then
+ report_otf("registering feature %a, affected glyphs %a, skipped glyphs %a",feature,done,skip)
+ end
+ end
+end
+
+otf.enhancers.addfeature = addfeature
+
+local extrafeatures = { }
+
+function otf.addfeature(name,specification)
+ extrafeatures[name] = specification
+end
+
+local function enhance(data,filename,raw)
+ for feature, specification in next, extrafeatures do
+ addfeature(data,feature,specification)
+ end
+end
+
+otf.enhancers.register("check extra features",enhance)
+
+-- tlig --
+
+local tlig = {
+ endash = "hyphen hyphen",
+ emdash = "hyphen hyphen hyphen",
+ -- quotedblleft = "quoteleft quoteleft",
+ -- quotedblright = "quoteright quoteright",
+ -- quotedblleft = "grave grave",
+ -- quotedblright = "quotesingle quotesingle",
+ -- quotedblbase = "comma comma",
+}
+
+local tlig_specification = {
+ type = "ligature",
+ features = everywhere,
+ data = tlig,
+ flags = noflags,
+}
+
+otf.addfeature("tlig",tlig_specification)
+
+registerotffeature {
+ name = 'tlig',
+ description = 'tex ligatures',
+}
+
+-- trep
+
+local trep = {
+ -- [0x0022] = 0x201D,
+ [0x0027] = 0x2019,
+ -- [0x0060] = 0x2018,
+}
+
+local trep_specification = {
+ type = "substitution",
+ features = everywhere,
+ data = trep,
+ flags = noflags,
+}
+
+otf.addfeature("trep",trep_specification)
+
+registerotffeature {
+ name = 'trep',
+ description = 'tex replacements',
+}
+
+-- tcom
+
+if characters.combined then
+
+ local tcom = { }
+
+ local function initialize()
+ characters.initialize()
+ for first, seconds in next, characters.combined do
+ for second, combination in next, seconds do
+ tcom[combination] = { first, second }
+ end
+ end
+ -- return false
+ end
+
+ local tcom_specification = {
+ type = "ligature",
+ features = everywhere,
+ data = tcom,
+ flags = noflags,
+ initialize = initialize,
+ }
+
+ otf.addfeature("tcom",tcom_specification)
+
+ registerotffeature {
+ name = 'tcom',
+ description = 'tex combinations',
+ }
+
+end
+
+-- anum
+
+local anum_arabic = {
+ [0x0030] = 0x0660,
+ [0x0031] = 0x0661,
+ [0x0032] = 0x0662,
+ [0x0033] = 0x0663,
+ [0x0034] = 0x0664,
+ [0x0035] = 0x0665,
+ [0x0036] = 0x0666,
+ [0x0037] = 0x0667,
+ [0x0038] = 0x0668,
+ [0x0039] = 0x0669,
+}
+
+local anum_persian = {
+ [0x0030] = 0x06F0,
+ [0x0031] = 0x06F1,
+ [0x0032] = 0x06F2,
+ [0x0033] = 0x06F3,
+ [0x0034] = 0x06F4,
+ [0x0035] = 0x06F5,
+ [0x0036] = 0x06F6,
+ [0x0037] = 0x06F7,
+ [0x0038] = 0x06F8,
+ [0x0039] = 0x06F9,
+}
+
+local function valid(data)
+ local features = data.resources.features
+ if features then
+ for k, v in next, features do
+ for k, v in next, v do
+ if v.arab then
+ return true
+ end
+ end
+ end
+ end
+end
+
+local anum_specification = {
+ {
+ type = "substitution",
+ features = { arab = { urd = true, dflt = true } },
+ data = anum_arabic,
+ flags = noflags, -- { },
+ valid = valid,
+ },
+ {
+ type = "substitution",
+ features = { arab = { urd = true } },
+ data = anum_persian,
+ flags = noflags, -- { },
+ valid = valid,
+ },
+}
+
+otf.addfeature("anum",anum_specification) -- todo: only when there is already an arab script feature
+
+registerotffeature {
+ name = 'anum',
+ description = 'arabic digits',
+}
diff --git a/tex/context/base/font-otd.lua b/tex/context/base/font-otd.lua
index a9d093d6d..12e2da55f 100644
--- a/tex/context/base/font-otd.lua
+++ b/tex/context/base/font-otd.lua
@@ -1,261 +1,261 @@
-if not modules then modules = { } end modules ['font-otd'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local type = type
-local match = string.match
-local sequenced = table.sequenced
-
-local trace_dynamics = false trackers.register("otf.dynamics", function(v) trace_dynamics = v end)
-local trace_applied = false trackers.register("otf.applied", function(v) trace_applied = v end)
-
-local report_otf = logs.reporter("fonts","otf loading")
-local report_process = logs.reporter("fonts","otf process")
-
-local allocate = utilities.storage.allocate
-
-local fonts = fonts
-local otf = fonts.handlers.otf
-local hashes = fonts.hashes
-local definers = fonts.definers
-local constructors = fonts.constructors
-local specifiers = fonts.specifiers
-
-local fontidentifiers = hashes.identifiers
-local fontresources = hashes.resources
-local fontproperties = hashes.properties
-local fontdynamics = hashes.dynamics
-
-local contextsetups = specifiers.contextsetups
-local contextnumbers = specifiers.contextnumbers
-local contextmerged = specifiers.contextmerged
-
-local setmetatableindex = table.setmetatableindex
-
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
-
-local a_to_script = { }
-local a_to_language = { }
-
--- we can have a scripts hash in fonts.hashes
-
-function otf.setdynamics(font,attribute)
- -- local features = contextsetups[contextnumbers[attribute]] -- can be moved to caller
- local features = contextsetups[attribute]
- if features then
- local dynamics = fontdynamics[font]
- dynamic = contextmerged[attribute] or 0
- local script, language
- if dynamic == 2 then -- merge
- language = features.language or fontproperties[font].language or "dflt"
- script = features.script or fontproperties[font].script or "dflt"
- else -- if dynamic == 1 then -- replace
- language = features.language or "dflt"
- script = features.script or "dflt"
- end
- if script == "auto" then
- -- checkedscript and resources are defined later so we cannot shortcut them -- todo: make installer
- script = definers.checkedscript(fontidentifiers[font],fontresources[font],features)
- end
- local ds = dynamics[script] -- can be metatable magic (less testing)
- if not ds then
- ds = { }
- dynamics[script] = ds
- end
- local dsl = ds[language]
- if not dsl then
- dsl = { }
- ds[language] = dsl
- end
- local dsla = dsl[attribute]
- if not dsla then
- local tfmdata = fontidentifiers[font]
- a_to_script [attribute] = script
- a_to_language[attribute] = language
- -- we need to save some values .. quite messy
- local properties = tfmdata.properties
- local shared = tfmdata.shared
- local s_script = properties.script
- local s_language = properties.language
- local s_mode = properties.mode
- local s_features = shared.features
- properties.mode = "node"
- properties.language = language
- properties.script = script
- properties.dynamics = true -- handy for tracing
- shared.features = { }
- -- end of save
- local set = constructors.checkedfeatures("otf",features)
- set.mode = "node" -- really needed
- dsla = otf.setfeatures(tfmdata,set)
- if trace_dynamics then
- report_otf("setting dynamics %s: attribute %a, script %a, language %a, set %a",contextnumbers[attribute],attribute,script,language,set)
- end
- -- we need to restore some values
- properties.script = s_script
- properties.language = s_language
- properties.mode = s_mode
- shared.features = s_features
- -- end of restore
- dynamics[script][language][attribute] = dsla -- cache
- elseif trace_dynamics then
- -- report_otf("using dynamics %s: attribute %a, script %a, language %a",contextnumbers[attribute],attribute,script,language)
- end
- return dsla
- end
-end
-
-function otf.scriptandlanguage(tfmdata,attr)
- local properties = tfmdata.properties
- if attr and attr > 0 then
- return a_to_script[attr] or properties.script or "dflt", a_to_language[attr] or properties.language or "dflt"
- else
- return properties.script or "dflt", properties.language or "dflt"
- end
-end
-
--- we reimplement the dataset resolver
-
-local autofeatures = fonts.analyzers.features -- was: constants
-
-local resolved = { } -- we only resolve a font,script,language,attribute pair once
-local wildcard = "*"
-local default = "dflt"
-
--- what about analyze in local and not in font
-
-local function initialize(sequence,script,language,s_enabled,a_enabled,font,attr,dynamic)
- local features = sequence.features
- if features then
- for kind, scripts in next, features do
- local e_e
- local a_e = a_enabled and a_enabled[kind] -- the value (location)
- if a_e ~= nil then
- e_e = a_e
- else
- e_e = s_enabled and s_enabled[kind] -- the value (font)
- end
- if e_e then
- local languages = scripts[script] or scripts[wildcard]
- if languages then
- -- local valid, what = false
- local valid = false
- -- not languages[language] or languages[default] or languages[wildcard] because we want tracing
- -- only first attribute match check, so we assume simple fina's
- -- default can become a font feature itself
- if languages[language] then
- valid = e_e -- was true
- -- what = language
- -- elseif languages[default] then
- -- valid = true
- -- what = default
- elseif languages[wildcard] then
- valid = e_e -- was true
- -- what = wildcard
- end
- if valid then
- local attribute = autofeatures[kind] or false
- -- if a_e and dynamic < 0 then
- -- valid = false
- -- end
- -- if trace_applied then
- -- local typ, action = match(sequence.type,"(.*)_(.*)") -- brrr
- -- report_process(
- -- "%s font: %03i, dynamic: %03i, kind: %s, script: %-4s, language: %-4s (%-4s), type: %s, action: %s, name: %s",
- -- (valid and "+") or "-",font,attr or 0,kind,script,language,what,typ,action,sequence.name)
- -- end
- if trace_applied then
- report_process(
- "font %s, dynamic %a (%a), feature %a, script %a, language %a, lookup %a, value %a",
- font,attr or 0,dynamic,kind,script,language,sequence.name,valid)
- end
- return { valid, attribute, sequence.chain or 0, kind, sequence }
- end
- end
- end
- end
- return false -- { valid, attribute, chain, "generic", sequence } -- false anyway, could be flag instead of table
- else
- return false -- { false, false, chain, false, sequence } -- indirect lookup, part of chain (todo: make this a separate table)
- end
-end
-
--- there is some fuzzy language/script state stuff in properties (temporary)
-
-function otf.dataset(tfmdata,font,attr) -- attr only when explicit (as in special parbuilder)
-
- local script, language, s_enabled, a_enabled, dynamic
-
- if attr and attr ~= 0 then
- dynamic = contextmerged[attr] or 0
- -- local features = contextsetups[contextnumbers[attr]] -- could be a direct list
- local features = contextsetups[attr]
- a_enabled = features -- location based
- if dynamic == 1 then -- or dynamic == -1 then
- -- replace
- language = features.language or "dflt"
- script = features.script or "dflt"
- elseif dynamic == 2 then -- or dynamic == -2 then
- -- merge
- local properties = tfmdata.properties
- s_enabled = tfmdata.shared.features -- font based
- language = features.language or properties.language or "dflt"
- script = features.script or properties.script or "dflt"
- else
- -- error
- local properties = tfmdata.properties
- language = properties.language or "dflt"
- script = properties.script or "dflt"
- end
- else
- local properties = tfmdata.properties
- language = properties.language or "dflt"
- script = properties.script or "dflt"
- s_enabled = tfmdata.shared.features -- can be made local to the resolver
- dynamic = 0
- end
-
- local res = resolved[font]
- if not res then
- res = { }
- resolved[font] = res
- end
- local rs = res[script]
- if not rs then
- rs = { }
- res[script] = rs
- end
- local rl = rs[language]
- if not rl then
- rl = { }
- rs[language] = rl
- end
- local ra = rl[attr]
- if ra == nil then -- attr can be false
- ra = {
- -- indexed but we can also add specific data by key in:
- }
- rl[attr] = ra
- local sequences = tfmdata.resources.sequences
--- setmetatableindex(ra, function(t,k)
--- if type(k) == "number" then
--- local v = initialize(sequences[k],script,language,s_enabled,a_enabled,font,attr,dynamic)
--- t[k] = v or false
--- return v
--- end
--- end)
-for s=1,#sequences do
- local v = initialize(sequences[s],script,language,s_enabled,a_enabled,font,attr,dynamic)
- if v then
- ra[#ra+1] = v
- end
-end
- end
- return ra
-
-end
+if not modules then modules = { } end modules ['font-otd'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local type = type
+local match = string.match
+local sequenced = table.sequenced
+
+local trace_dynamics = false trackers.register("otf.dynamics", function(v) trace_dynamics = v end)
+local trace_applied = false trackers.register("otf.applied", function(v) trace_applied = v end)
+
+local report_otf = logs.reporter("fonts","otf loading")
+local report_process = logs.reporter("fonts","otf process")
+
+local allocate = utilities.storage.allocate
+
+local fonts = fonts
+local otf = fonts.handlers.otf
+local hashes = fonts.hashes
+local definers = fonts.definers
+local constructors = fonts.constructors
+local specifiers = fonts.specifiers
+
+local fontidentifiers = hashes.identifiers
+local fontresources = hashes.resources
+local fontproperties = hashes.properties
+local fontdynamics = hashes.dynamics
+
+local contextsetups = specifiers.contextsetups
+local contextnumbers = specifiers.contextnumbers
+local contextmerged = specifiers.contextmerged
+
+local setmetatableindex = table.setmetatableindex
+
+local otffeatures = fonts.constructors.newfeatures("otf")
+local registerotffeature = otffeatures.register
+
+local a_to_script = { }
+local a_to_language = { }
+
+-- we can have a scripts hash in fonts.hashes
+
+function otf.setdynamics(font,attribute)
+ -- local features = contextsetups[contextnumbers[attribute]] -- can be moved to caller
+ local features = contextsetups[attribute]
+ if features then
+ local dynamics = fontdynamics[font]
+        local dynamic = contextmerged[attribute] or 0
+ local script, language
+ if dynamic == 2 then -- merge
+ language = features.language or fontproperties[font].language or "dflt"
+ script = features.script or fontproperties[font].script or "dflt"
+ else -- if dynamic == 1 then -- replace
+ language = features.language or "dflt"
+ script = features.script or "dflt"
+ end
+ if script == "auto" then
+ -- checkedscript and resources are defined later so we cannot shortcut them -- todo: make installer
+ script = definers.checkedscript(fontidentifiers[font],fontresources[font],features)
+ end
+ local ds = dynamics[script] -- can be metatable magic (less testing)
+ if not ds then
+ ds = { }
+ dynamics[script] = ds
+ end
+ local dsl = ds[language]
+ if not dsl then
+ dsl = { }
+ ds[language] = dsl
+ end
+ local dsla = dsl[attribute]
+ if not dsla then
+ local tfmdata = fontidentifiers[font]
+ a_to_script [attribute] = script
+ a_to_language[attribute] = language
+ -- we need to save some values .. quite messy
+ local properties = tfmdata.properties
+ local shared = tfmdata.shared
+ local s_script = properties.script
+ local s_language = properties.language
+ local s_mode = properties.mode
+ local s_features = shared.features
+ properties.mode = "node"
+ properties.language = language
+ properties.script = script
+ properties.dynamics = true -- handy for tracing
+ shared.features = { }
+ -- end of save
+ local set = constructors.checkedfeatures("otf",features)
+ set.mode = "node" -- really needed
+ dsla = otf.setfeatures(tfmdata,set)
+ if trace_dynamics then
+ report_otf("setting dynamics %s: attribute %a, script %a, language %a, set %a",contextnumbers[attribute],attribute,script,language,set)
+ end
+ -- we need to restore some values
+ properties.script = s_script
+ properties.language = s_language
+ properties.mode = s_mode
+ shared.features = s_features
+ -- end of restore
+ dynamics[script][language][attribute] = dsla -- cache
+ elseif trace_dynamics then
+ -- report_otf("using dynamics %s: attribute %a, script %a, language %a",contextnumbers[attribute],attribute,script,language)
+ end
+ return dsla
+ end
+end
+
+function otf.scriptandlanguage(tfmdata,attr)
+ local properties = tfmdata.properties
+ if attr and attr > 0 then
+ return a_to_script[attr] or properties.script or "dflt", a_to_language[attr] or properties.language or "dflt"
+ else
+ return properties.script or "dflt", properties.language or "dflt"
+ end
+end
+
+-- we reimplement the dataset resolver
+
+local autofeatures = fonts.analyzers.features -- was: constants
+
+local resolved = { } -- we only resolve a font,script,language,attribute pair once
+local wildcard = "*"
+local default = "dflt"
+
+-- what about analyze in local and not in font
+
+local function initialize(sequence,script,language,s_enabled,a_enabled,font,attr,dynamic)
+ local features = sequence.features
+ if features then
+ for kind, scripts in next, features do
+ local e_e
+ local a_e = a_enabled and a_enabled[kind] -- the value (location)
+ if a_e ~= nil then
+ e_e = a_e
+ else
+ e_e = s_enabled and s_enabled[kind] -- the value (font)
+ end
+ if e_e then
+ local languages = scripts[script] or scripts[wildcard]
+ if languages then
+ -- local valid, what = false
+ local valid = false
+ -- not languages[language] or languages[default] or languages[wildcard] because we want tracing
+ -- only first attribute match check, so we assume simple fina's
+ -- default can become a font feature itself
+ if languages[language] then
+ valid = e_e -- was true
+ -- what = language
+ -- elseif languages[default] then
+ -- valid = true
+ -- what = default
+ elseif languages[wildcard] then
+ valid = e_e -- was true
+ -- what = wildcard
+ end
+ if valid then
+ local attribute = autofeatures[kind] or false
+ -- if a_e and dynamic < 0 then
+ -- valid = false
+ -- end
+ -- if trace_applied then
+ -- local typ, action = match(sequence.type,"(.*)_(.*)") -- brrr
+ -- report_process(
+ -- "%s font: %03i, dynamic: %03i, kind: %s, script: %-4s, language: %-4s (%-4s), type: %s, action: %s, name: %s",
+ -- (valid and "+") or "-",font,attr or 0,kind,script,language,what,typ,action,sequence.name)
+ -- end
+ if trace_applied then
+ report_process(
+ "font %s, dynamic %a (%a), feature %a, script %a, language %a, lookup %a, value %a",
+ font,attr or 0,dynamic,kind,script,language,sequence.name,valid)
+ end
+ return { valid, attribute, sequence.chain or 0, kind, sequence }
+ end
+ end
+ end
+ end
+ return false -- { valid, attribute, chain, "generic", sequence } -- false anyway, could be flag instead of table
+ else
+ return false -- { false, false, chain, false, sequence } -- indirect lookup, part of chain (todo: make this a separate table)
+ end
+end
+
+-- there is some fuzzy language/script state stuff in properties (temporary)
+
+function otf.dataset(tfmdata,font,attr) -- attr only when explicit (as in special parbuilder)
+
+ local script, language, s_enabled, a_enabled, dynamic
+
+ if attr and attr ~= 0 then
+ dynamic = contextmerged[attr] or 0
+ -- local features = contextsetups[contextnumbers[attr]] -- could be a direct list
+ local features = contextsetups[attr]
+ a_enabled = features -- location based
+ if dynamic == 1 then -- or dynamic == -1 then
+ -- replace
+ language = features.language or "dflt"
+ script = features.script or "dflt"
+ elseif dynamic == 2 then -- or dynamic == -2 then
+ -- merge
+ local properties = tfmdata.properties
+ s_enabled = tfmdata.shared.features -- font based
+ language = features.language or properties.language or "dflt"
+ script = features.script or properties.script or "dflt"
+ else
+ -- error
+ local properties = tfmdata.properties
+ language = properties.language or "dflt"
+ script = properties.script or "dflt"
+ end
+ else
+ local properties = tfmdata.properties
+ language = properties.language or "dflt"
+ script = properties.script or "dflt"
+ s_enabled = tfmdata.shared.features -- can be made local to the resolver
+ dynamic = 0
+ end
+
+ local res = resolved[font]
+ if not res then
+ res = { }
+ resolved[font] = res
+ end
+ local rs = res[script]
+ if not rs then
+ rs = { }
+ res[script] = rs
+ end
+ local rl = rs[language]
+ if not rl then
+ rl = { }
+ rs[language] = rl
+ end
+ local ra = rl[attr]
+ if ra == nil then -- attr can be false
+ ra = {
+ -- indexed but we can also add specific data by key in:
+ }
+ rl[attr] = ra
+ local sequences = tfmdata.resources.sequences
+-- setmetatableindex(ra, function(t,k)
+-- if type(k) == "number" then
+-- local v = initialize(sequences[k],script,language,s_enabled,a_enabled,font,attr,dynamic)
+-- t[k] = v or false
+-- return v
+-- end
+-- end)
+for s=1,#sequences do
+ local v = initialize(sequences[s],script,language,s_enabled,a_enabled,font,attr,dynamic)
+ if v then
+ ra[#ra+1] = v
+ end
+end
+ end
+ return ra
+
+end
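+
+-- The resolver caches its result per font, script, language and attribute, so
+-- repeated calls are cheap. A sketch of the resulting structure (hypothetical
+-- entries, field order as returned by initialize above):
+--
+-- resolved[font][script][language][attr] = {
+--     { value, analyzer_attribute, chain, "liga", sequence },
+--     { value, analyzer_attribute, chain, "kern", sequence },
+-- }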
diff --git a/tex/context/base/font-otf.lua b/tex/context/base/font-otf.lua
index c1f2f14fc..737dc9927 100644
--- a/tex/context/base/font-otf.lua
+++ b/tex/context/base/font-otf.lua
@@ -1,2155 +1,2155 @@
-if not modules then modules = { } end modules ['font-otf'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- langs -> languages enz
--- anchor_classes vs kernclasses
--- modification/creationtime in subfont is runtime dus zinloos
--- to_table -> totable
--- ascent descent
-
--- more checking against low level calls of functions
-
-local utfbyte = utf.byte
-local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
-local type, next, tonumber, tostring = type, next, tonumber, tostring
-local abs = math.abs
-local getn = table.getn
-local lpegmatch = lpeg.match
-local reversed, concat, remove = table.reversed, table.concat, table.remove
-local ioflush = io.flush
-local fastcopy, tohash, derivetable = table.fastcopy, table.tohash, table.derive
-local formatters = string.formatters
-
-local allocate = utilities.storage.allocate
-local registertracker = trackers.register
-local registerdirective = directives.register
-local starttiming = statistics.starttiming
-local stoptiming = statistics.stoptiming
-local elapsedtime = statistics.elapsedtime
-local findbinfile = resolvers.findbinfile
-
-local trace_private = false registertracker("otf.private", function(v) trace_private = v end)
-local trace_loading = false registertracker("otf.loading", function(v) trace_loading = v end)
-local trace_features = false registertracker("otf.features", function(v) trace_features = v end)
-local trace_dynamics = false registertracker("otf.dynamics", function(v) trace_dynamics = v end)
-local trace_sequences = false registertracker("otf.sequences", function(v) trace_sequences = v end)
-local trace_markwidth = false registertracker("otf.markwidth", function(v) trace_markwidth = v end)
-local trace_defining = false registertracker("fonts.defining", function(v) trace_defining = v end)
-
-local report_otf = logs.reporter("fonts","otf loading")
-
-local fonts = fonts
-local otf = fonts.handlers.otf
-
-otf.glists = { "gsub", "gpos" }
-
-otf.version = 2.743 -- beware: also sync font-mis.lua
-otf.cache = containers.define("fonts", "otf", otf.version, true)
-
-local fontdata = fonts.hashes.identifiers
-local chardata = characters and characters.data -- not used
-
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
-
-local enhancers = allocate()
-otf.enhancers = enhancers
-local patches = { }
-enhancers.patches = patches
-
-local definers = fonts.definers
-local readers = fonts.readers
-local constructors = fonts.constructors
-
-local forceload = false
-local cleanup = 0 -- mk: 0=885M 1=765M 2=735M (regular run 730M)
-local usemetatables = false -- .4 slower on mk but 30 M less mem so we might change the default -- will be directive
-local packdata = true
-local syncspace = true
-local forcenotdef = false
-local includesubfonts = false
-
-local wildcard = "*"
-local default = "dflt"
-
-local fontloaderfields = fontloader.fields
-local mainfields = nil
-local glyphfields = nil -- not used yet
-
-registerdirective("fonts.otf.loader.cleanup", function(v) cleanup = tonumber(v) or (v and 1) or 0 end)
-registerdirective("fonts.otf.loader.force", function(v) forceload = v end)
-registerdirective("fonts.otf.loader.usemetatables", function(v) usemetatables = v end)
-registerdirective("fonts.otf.loader.pack", function(v) packdata = v end)
-registerdirective("fonts.otf.loader.syncspace", function(v) syncspace = v end)
-registerdirective("fonts.otf.loader.forcenotdef", function(v) forcenotdef = v end)
-
-local function load_featurefile(raw,featurefile)
- if featurefile and featurefile ~= "" then
- if trace_loading then
- report_otf("using featurefile %a", featurefile)
- end
- fontloader.apply_featurefile(raw, featurefile)
- end
-end
-
-local function showfeatureorder(rawdata,filename)
- local sequences = rawdata.resources.sequences
- if sequences and #sequences > 0 then
- if trace_loading then
- report_otf("font %a has %s sequences",filename,#sequences)
- report_otf(" ")
- end
- for nos=1,#sequences do
- local sequence = sequences[nos]
- local typ = sequence.type or "no-type"
- local name = sequence.name or "no-name"
- local subtables = sequence.subtables or { "no-subtables" }
- local features = sequence.features
- if trace_loading then
- report_otf("%3i %-15s %-20s [% t]",nos,name,typ,subtables)
- end
- if features then
- for feature, scripts in next, features do
- local tt = { }
- if type(scripts) == "table" then
- for script, languages in next, scripts do
- local ttt = { }
- for language, _ in next, languages do
- ttt[#ttt+1] = language
- end
- tt[#tt+1] = formatters["[%s: % t]"](script,ttt)
- end
- if trace_loading then
- report_otf(" %s: % t",feature,tt)
- end
- else
- if trace_loading then
- report_otf(" %s: %S",feature,scripts)
- end
- end
- end
- end
- end
- if trace_loading then
- report_otf("\n")
- end
- elseif trace_loading then
- report_otf("font %a has no sequences",filename)
- end
-end
-
---[[ldx--
-<p>We start with a lot of tables and related functions.</p>
---ldx]]--
-
-local actions = allocate()
-local before = allocate()
-local after = allocate()
-
-patches.before = before
-patches.after = after
-
-local function enhance(name,data,filename,raw)
- local enhancer = actions[name]
- if enhancer then
- if trace_loading then
- report_otf("apply enhancement %a to file %a",name,filename)
- ioflush()
- end
- enhancer(data,filename,raw)
- else
- -- no message as we can have private ones
- end
-end
-
-function enhancers.apply(data,filename,raw)
- local basename = file.basename(lower(filename))
- if trace_loading then
- report_otf("%s enhancing file %a","start",filename)
- end
- ioflush() -- we want instant messages
- for e=1,#ordered_enhancers do
- local enhancer = ordered_enhancers[e]
- local b = before[enhancer]
- if b then
- for pattern, action in next, b do
- if find(basename,pattern) then
- action(data,filename,raw)
- end
- end
- end
- enhance(enhancer,data,filename,raw)
- local a = after[enhancer]
- if a then
- for pattern, action in next, a do
- if find(basename,pattern) then
- action(data,filename,raw)
- end
- end
- end
- ioflush() -- we want instant messages
- end
- if trace_loading then
- report_otf("%s enhancing file %a","stop",filename)
- end
- ioflush() -- we want instant messages
-end
-
--- patches.register("before","migrate metadata","cambria",function() end)
-
-function patches.register(what,where,pattern,action)
- local pw = patches[what]
- if pw then
- local ww = pw[where]
- if ww then
- ww[pattern] = action
- else
- pw[where] = { [pattern] = action}
- end
- end
-end
-
-function patches.report(fmt,...)
- if trace_loading then
- report_otf("patching: %s",formatters[fmt](...))
- end
-end
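-
--- A usage sketch (hypothetical pattern and action; the enhancer name has to
--- match one of the registered actions, here "check metadata"):
---
--- patches.register("after","check metadata","lmroman", function(data,filename,raw)
---     patches.report("checked metadata of %s",filename)
--- end)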
-
-function enhancers.register(what,action) -- only already registered can be overloaded
- actions[what] = action
-end
-
-function otf.load(filename,format,sub,featurefile)
- local base = file.basename(file.removesuffix(filename))
- local name = file.removesuffix(base)
- local attr = lfs.attributes(filename)
- local size = attr and attr.size or 0
- local time = attr and attr.modification or 0
- if featurefile then
- name = name .. "@" .. file.removesuffix(file.basename(featurefile))
- end
- if sub == "" then
- sub = false
- end
- local hash = name
- if sub then
- hash = hash .. "-" .. sub
- end
- hash = containers.cleanname(hash)
- local featurefiles
- if featurefile then
- featurefiles = { }
- for s in gmatch(featurefile,"[^,]+") do
- local name = resolvers.findfile(file.addsuffix(s,'fea'),'fea') or ""
- if name == "" then
- report_otf("loading error, no featurefile %a",s)
- else
- local attr = lfs.attributes(name)
- featurefiles[#featurefiles+1] = {
- name = name,
- size = attr and attr.size or 0,
- time = attr and attr.modification or 0,
- }
- end
- end
- if #featurefiles == 0 then
- featurefiles = nil
- end
- end
- local data = containers.read(otf.cache,hash)
- local reload = not data or data.size ~= size or data.time ~= time
- if forceload then
- report_otf("forced reload of %a due to hard coded flag",filename)
- reload = true
- end
- if not reload then
- local featuredata = data.featuredata
- if featurefiles then
- if not featuredata or #featuredata ~= #featurefiles then
- reload = true
- else
- for i=1,#featurefiles do
- local fi, fd = featurefiles[i], featuredata[i]
- if fi.name ~= fd.name or fi.size ~= fd.size or fi.time ~= fd.time then
- reload = true
- break
- end
- end
- end
- elseif featuredata then
- reload = true
- end
- if reload then
- report_otf("loading: forced reload due to changed featurefile specification %a",featurefile)
- end
- end
- if reload then
- report_otf("loading %a, hash %a",filename,hash)
- local fontdata, messages
- if sub then
- fontdata, messages = fontloader.open(filename,sub)
- else
- fontdata, messages = fontloader.open(filename)
- end
- if fontdata then
- mainfields = mainfields or (fontloaderfields and fontloaderfields(fontdata))
- end
- if trace_loading and messages and #messages > 0 then
- if type(messages) == "string" then
- report_otf("warning: %s",messages)
- else
- for m=1,#messages do
- report_otf("warning: %S",messages[m])
- end
- end
- else
- report_otf("loading done")
- end
- if fontdata then
- if featurefiles then
- for i=1,#featurefiles do
- load_featurefile(fontdata,featurefiles[i].name)
- end
- end
- local unicodes = {
- -- names to unicodes
- }
- local splitter = lpeg.splitter(" ",unicodes)
- data = {
- size = size,
- time = time,
- format = format,
- featuredata = featurefiles,
- resources = {
- filename = resolvers.unresolve(filename), -- no shortcut
- version = otf.version,
- creator = "context mkiv",
- unicodes = unicodes,
- indices = {
- -- index to unicodes
- },
- duplicates = {
- -- alternative unicodes
- },
- variants = {
- -- alternative unicodes (variants)
- },
- lookuptypes = {
- },
- },
- metadata = {
- -- raw metadata, not to be used
- },
- properties = {
- -- normalized metadata
- },
- descriptions = {
- },
- goodies = {
- },
- helpers = {
- tounicodelist = splitter,
- tounicodetable = lpeg.Ct(splitter),
- },
- }
- starttiming(data)
- report_otf("file size: %s", size)
- enhancers.apply(data,filename,fontdata)
- local packtime = { }
- if packdata then
- if cleanup > 0 then
- collectgarbage("collect")
- end
- starttiming(packtime)
- enhance("pack",data,filename,nil)
- stoptiming(packtime)
- end
- report_otf("saving %a in cache",filename)
- data = containers.write(otf.cache, hash, data)
- if cleanup > 1 then
- collectgarbage("collect")
- end
- stoptiming(data)
- if elapsedtime then -- not in generic
- report_otf("preprocessing and caching time %s, packtime %s",
- elapsedtime(data),packdata and elapsedtime(packtime) or 0)
- end
- fontloader.close(fontdata) -- free memory
- if cleanup > 3 then
- collectgarbage("collect")
- end
-            data = containers.read(otf.cache, hash) -- this frees the old table and loads the sparse one
- if cleanup > 2 then
- collectgarbage("collect")
- end
- else
- data = nil
- report_otf("loading failed due to read error")
- end
- end
- if data then
- if trace_defining then
- report_otf("loading from cache using hash %a",hash)
- end
- enhance("unpack",data,filename,nil,false)
- enhance("add dimensions",data,filename,nil,false)
- if trace_sequences then
- showfeatureorder(data,filename)
- end
- end
- return data
-end
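-
--- The cache key combines the font's basename, an optional "@featurefile" part
--- and an optional "-subfont" part, cleaned by containers.cleanname. A sketch
--- with made-up names:
---
--- local data = otf.load("texgyrepagella-regular.otf","otf",false,"oldstyle.fea")
--- -- the hash then looks like "texgyrepagella-regular@oldstyle"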
-
-local mt = {
- __index = function(t,k) -- maybe set it
- if k == "height" then
- local ht = t.boundingbox[4]
- return ht < 0 and 0 or ht
- elseif k == "depth" then
- local dp = -t.boundingbox[2]
- return dp < 0 and 0 or dp
- elseif k == "width" then
- return 0
- elseif k == "name" then -- or maybe uni*
- return forcenotdef and ".notdef"
- end
- end
-}
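-
--- The boundingbox is { llx, lly, urx, ury }, so for a hypothetical glyph with
--- boundingbox = { 10, -200, 600, 700 } this metatable yields height 700 and
--- depth 200; negative results are clipped to zero and width defaults to 0.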
-
-actions["prepare tables"] = function(data,filename,raw)
- data.properties.hasitalics = false
-end
-
-actions["add dimensions"] = function(data,filename)
- -- todo: forget about the width if it's the defaultwidth (saves mem)
- -- we could also build the marks hash here (instead of storing it)
- if data then
- local descriptions = data.descriptions
- local resources = data.resources
- local defaultwidth = resources.defaultwidth or 0
- local defaultheight = resources.defaultheight or 0
- local defaultdepth = resources.defaultdepth or 0
- local basename = trace_markwidth and file.basename(filename)
- if usemetatables then
- for _, d in next, descriptions do
- local wd = d.width
- if not wd then
- d.width = defaultwidth
- elseif trace_markwidth and wd ~= 0 and d.class == "mark" then
- report_otf("mark %a with width %b found in %a",d.name or "",wd,basename)
- -- d.width = -wd
- end
- setmetatable(d,mt)
- end
- else
- for _, d in next, descriptions do
- local bb, wd = d.boundingbox, d.width
- if not wd then
- d.width = defaultwidth
- elseif trace_markwidth and wd ~= 0 and d.class == "mark" then
- report_otf("mark %a with width %b found in %a",d.name or "",wd,basename)
- -- d.width = -wd
- end
- -- if forcenotdef and not d.name then
- -- d.name = ".notdef"
- -- end
- if bb then
- local ht, dp = bb[4], -bb[2]
- if ht == 0 or ht < 0 then
- -- not set
- else
- d.height = ht
- end
- if dp == 0 or dp < 0 then
- -- not set
- else
- d.depth = dp
- end
- end
- end
- end
- end
-end
-
-local function somecopy(old) -- fast one
- if old then
- local new = { }
- if type(old) == "table" then
- for k, v in next, old do
- if k == "glyphs" then
- -- skip
- elseif type(v) == "table" then
- new[k] = somecopy(v)
- else
- new[k] = v
- end
- end
- else
- for i=1,#mainfields do
- local k = mainfields[i]
- local v = old[k]
- if k == "glyphs" then
- -- skip
- elseif type(v) == "table" then
- new[k] = somecopy(v)
- else
- new[k] = v
- end
- end
- end
- return new
- else
- return { }
- end
-end
-
--- not setting hasitalics and class (when nil) during table construction can save some mem
-
-actions["prepare glyphs"] = function(data,filename,raw)
- local rawglyphs = raw.glyphs
- local rawsubfonts = raw.subfonts
- local rawcidinfo = raw.cidinfo
- local criterium = constructors.privateoffset
- local private = criterium
- local resources = data.resources
- local metadata = data.metadata
- local properties = data.properties
- local descriptions = data.descriptions
- local unicodes = resources.unicodes -- name to unicode
- local indices = resources.indices -- index to unicode
- local duplicates = resources.duplicates
- local variants = resources.variants
-
- if rawsubfonts then
-
- metadata.subfonts = includesubfonts and { }
- properties.cidinfo = rawcidinfo
-
- if rawcidinfo.registry then
- local cidmap = fonts.cid.getmap(rawcidinfo)
- if cidmap then
- rawcidinfo.usedname = cidmap.usedname
- local nofnames, nofunicodes = 0, 0
- local cidunicodes, cidnames = cidmap.unicodes, cidmap.names
- for cidindex=1,#rawsubfonts do
- local subfont = rawsubfonts[cidindex]
- local cidglyphs = subfont.glyphs
- if includesubfonts then
- metadata.subfonts[cidindex] = somecopy(subfont)
- end
- for index=0,subfont.glyphcnt-1 do -- we could take the previous glyphcnt instead of 0
- local glyph = cidglyphs[index]
- if glyph then
- local unicode = glyph.unicode
- local name = glyph.name or cidnames[index]
- if not unicode or unicode == -1 or unicode >= criterium then
- unicode = cidunicodes[index]
- end
- if unicode and descriptions[unicode] then
- report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode)
- unicode = -1
- end
- if not unicode or unicode == -1 or unicode >= criterium then
- if not name then
- name = format("u%06X",private)
- end
- unicode = private
- unicodes[name] = private
- if trace_private then
- report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
- end
- private = private + 1
- nofnames = nofnames + 1
- else
- if not name then
- name = format("u%06X",unicode)
- end
- unicodes[name] = unicode
- nofunicodes = nofunicodes + 1
- end
- indices[index] = unicode -- each index is unique (at least now)
-
- local description = {
- -- width = glyph.width,
- boundingbox = glyph.boundingbox,
- name = glyph.name or name or "unknown", -- uniXXXX
- cidindex = cidindex,
- index = index,
- glyph = glyph,
- }
-
- descriptions[unicode] = description
- else
- -- report_otf("potential problem: glyph %U is used but empty",index)
- end
- end
- end
- if trace_loading then
- report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes, nofnames, nofunicodes+nofnames)
- end
- elseif trace_loading then
- report_otf("unable to remap cid font, missing cid file for %a",filename)
- end
- elseif trace_loading then
- report_otf("font %a has no glyphs",filename)
- end
-
- else
-
- for index=0,raw.glyphcnt-1 do -- not raw.glyphmax-1 (as that will crash)
- local glyph = rawglyphs[index]
- if glyph then
- local unicode = glyph.unicode
- local name = glyph.name
- if not unicode or unicode == -1 or unicode >= criterium then
- unicode = private
- unicodes[name] = private
- if trace_private then
- report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
- end
- private = private + 1
- else
- unicodes[name] = unicode
- end
- indices[index] = unicode
- if not name then
- name = format("u%06X",unicode)
- end
- descriptions[unicode] = {
- -- width = glyph.width,
- boundingbox = glyph.boundingbox,
- name = name,
- index = index,
- glyph = glyph,
- }
- local altuni = glyph.altuni
- if altuni then
- local d
- for i=1,#altuni do
- local a = altuni[i]
- local u = a.unicode
- local v = a.variant
- if v then
- -- tricky: no addition to d? needs checking but in practice such dups are either very simple
-                        -- shapes or e.g. cjk with not that many features
- local vv = variants[v]
- if vv then
- vv[u] = unicode
- else -- xits-math has some:
- vv = { [u] = unicode }
- variants[v] = vv
- end
- elseif d then
- d[#d+1] = u
- else
- d = { u }
- end
- end
- if d then
- duplicates[unicode] = d
- end
- end
- else
- report_otf("potential problem: glyph %U is used but empty",index)
- end
- end
-
- end
-
- resources.private = private
-
-end
-
--- the next one is still messy but will get better when we have
--- flattened map/enc tables in the font loader
-
-actions["check encoding"] = function(data,filename,raw)
- local descriptions = data.descriptions
- local resources = data.resources
- local properties = data.properties
- local unicodes = resources.unicodes -- name to unicode
- local indices = resources.indices -- index to unicodes
- local duplicates = resources.duplicates
-
- -- begin of messy (not needed when cidmap)
-
- local mapdata = raw.map or { }
- local unicodetoindex = mapdata and mapdata.map or { }
- -- local encname = lower(data.enc_name or raw.enc_name or mapdata.enc_name or "")
- local encname = lower(data.enc_name or mapdata.enc_name or "")
- local criterium = 0xFFFF -- for instance cambria has a lot of mess up there
-
- -- end of messy
-
- if find(encname,"unicode") then -- unicodebmp, unicodefull, ...
- if trace_loading then
- report_otf("checking embedded unicode map %a",encname)
- end
- for unicode, index in next, unicodetoindex do -- altuni already covers this
- if unicode <= criterium and not descriptions[unicode] then
- local parent = indices[index] -- why nil?
- if not parent then
- report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
- else
- local parentdescription = descriptions[parent]
- if parentdescription then
- local altuni = parentdescription.altuni
- if not altuni then
- altuni = { { unicode = parent } }
- parentdescription.altuni = altuni
- duplicates[parent] = { unicode }
- else
- local done = false
- for i=1,#altuni do
- if altuni[i].unicode == parent then
- done = true
- break
- end
- end
- if not done then
- -- let's assume simple cjk reuse
- altuni[#altuni+1] = { unicode = parent }
- table.insert(duplicates[parent],unicode)
- end
- end
- if trace_loading then
- report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
- end
- else
-                        report_otf("weird, unicode %U points to %U with index %H",unicode,parent,index)
- end
- end
- end
- end
- elseif properties.cidinfo then
- report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname)
- else
- report_otf("warning: non unicode map %a, only using glyph unicode data",encname or "whatever")
- end
-
- if mapdata then
- mapdata.map = { } -- clear some memory
- end
-end
-
--- for the moment we assume that a font with lookups will not use
--- altuni so we stick to kerns only
-
-actions["add duplicates"] = function(data,filename,raw)
- local descriptions = data.descriptions
- local resources = data.resources
- local properties = data.properties
- local unicodes = resources.unicodes -- name to unicode
- local indices = resources.indices -- index to unicodes
- local duplicates = resources.duplicates
-
- for unicode, d in next, duplicates do
- for i=1,#d do
- local u = d[i]
- if not descriptions[u] then
- local description = descriptions[unicode]
- local duplicate = table.copy(description) -- else packing problem
- duplicate.comment = format("copy of U+%05X", unicode)
- descriptions[u] = duplicate
- local n = 0
- for _, description in next, descriptions do
-                    local kerns = description.kerns
-                    if kerns then
- for _, k in next, kerns do
- local ku = k[unicode]
- if ku then
- k[u] = ku
- n = n + 1
- end
- end
- end
- -- todo: lookups etc
- end
- if trace_loading then
- report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
- end
- end
- end
- end
-end
-
--- class : nil base mark ligature component (maybe we don't need it in description)
--- boundingbox: split into ht/dp takes more memory (larger tables and less sharing)
-
-actions["analyze glyphs"] = function(data,filename,raw) -- maybe integrate this in the previous
- local descriptions = data.descriptions
- local resources = data.resources
- local metadata = data.metadata
- local properties = data.properties
- local hasitalics = false
- local widths = { }
- local marks = { } -- always present (saves checking)
- for unicode, description in next, descriptions do
- local glyph = description.glyph
- local italic = glyph.italic_correction
- if not italic then
- -- skip
- elseif italic == 0 then
- -- skip
- else
- description.italic = italic
- hasitalics = true
- end
- local width = glyph.width
- widths[width] = (widths[width] or 0) + 1
- local class = glyph.class
- if class then
- if class == "mark" then
- marks[unicode] = true
- end
- description.class = class
- end
- end
- -- flag italic
- properties.hasitalics = hasitalics
- -- flag marks
- resources.marks = marks
- -- share most common width for cjk fonts
- local wd, most = 0, 1
- for k,v in next, widths do
- if v > most then
- wd, most = k, v
- end
- end
- if most > 1000 then -- maybe 500
- if trace_loading then
- report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most)
- end
- for unicode, description in next, descriptions do
- if description.width == wd then
- -- description.width = nil
- else
- description.width = description.glyph.width
- end
- end
- resources.defaultwidth = wd
- else
- for unicode, description in next, descriptions do
- description.width = description.glyph.width
- end
- end
-end
-
-actions["reorganize mark classes"] = function(data,filename,raw)
- local mark_classes = raw.mark_classes
- if mark_classes then
- local resources = data.resources
- local unicodes = resources.unicodes
- local markclasses = { }
- resources.markclasses = markclasses -- reversed
- for name, class in next, mark_classes do
- local t = { }
- for s in gmatch(class,"[^ ]+") do
- t[unicodes[s]] = true
- end
- markclasses[name] = t
- end
- end
-end
-
-actions["reorganize features"] = function(data,filename,raw) -- combine with other
- local features = { }
- data.resources.features = features
- for k, what in next, otf.glists do
- local dw = raw[what]
- if dw then
- local f = { }
- features[what] = f
- for i=1,#dw do
-                local d = dw[i]
- local dfeatures = d.features
- if dfeatures then
- for i=1,#dfeatures do
- local df = dfeatures[i]
- local tag = strip(lower(df.tag))
- local ft = f[tag]
- if not ft then
- ft = { }
- f[tag] = ft
- end
- local dscripts = df.scripts
- for i=1,#dscripts do
- local d = dscripts[i]
- local languages = d.langs
- local script = strip(lower(d.script))
- local fts = ft[script] if not fts then fts = {} ft[script] = fts end
- for i=1,#languages do
- fts[strip(lower(languages[i]))] = true
- end
- end
- end
- end
- end
- end
- end
-end
-
-actions["reorganize anchor classes"] = function(data,filename,raw)
- local resources = data.resources
- local anchor_to_lookup = { }
- local lookup_to_anchor = { }
- resources.anchor_to_lookup = anchor_to_lookup
- resources.lookup_to_anchor = lookup_to_anchor
- local classes = raw.anchor_classes -- anchor classes not in final table
- if classes then
- for c=1,#classes do
- local class = classes[c]
- local anchor = class.name
- local lookups = class.lookup
- if type(lookups) ~= "table" then
- lookups = { lookups }
- end
- local a = anchor_to_lookup[anchor]
- if not a then
- a = { }
- anchor_to_lookup[anchor] = a
- end
- for l=1,#lookups do
- local lookup = lookups[l]
- local l = lookup_to_anchor[lookup]
- if l then
- l[anchor] = true
- else
- l = { [anchor] = true }
- lookup_to_anchor[lookup] = l
- end
- a[lookup] = true
- end
- end
- end
-end
-
-actions["prepare tounicode"] = function(data,filename,raw)
- fonts.mappings.addtounicode(data,filename)
-end
-
-local g_directions = {
- gsub_contextchain = 1,
- gpos_contextchain = 1,
- -- gsub_context = 1,
- -- gpos_context = 1,
- gsub_reversecontextchain = -1,
- gpos_reversecontextchain = -1,
-}
-
--- Research by Khaled Hosny has demonstrated that the font loader merges
--- regular and AAT features and that these can interfere (especially because
--- we dropped checking for valid features elsewhere). So we just check for
--- the special flag and drop the feature if such a tag is found.
-
-local function supported(features)
- for i=1,#features do
- if features[i].ismac then
- return false
- end
- end
- return true
-end
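-
--- For instance (made-up data), a feature list such as
--- { { tag = "liga", ismac = true, scripts = { } } } makes supported return
--- false, so the whole lookup is skipped in "reorganize subtables" below.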
-
-actions["reorganize subtables"] = function(data,filename,raw)
- local resources = data.resources
- local sequences = { }
- local lookups = { }
- local chainedfeatures = { }
- resources.sequences = sequences
- resources.lookups = lookups
- for _, what in next, otf.glists do
- local dw = raw[what]
- if dw then
- for k=1,#dw do
- local gk = dw[k]
- local features = gk.features
--- if features and supported(features) then
- if not features or supported(features) then -- not always features !
- local typ = gk.type
- local chain = g_directions[typ] or 0
- local subtables = gk.subtables
- if subtables then
- local t = { }
- for s=1,#subtables do
- t[s] = subtables[s].name
- end
- subtables = t
- end
- local flags, markclass = gk.flags, nil
- if flags then
- local t = { -- forcing false packs nicer
- (flags.ignorecombiningmarks and "mark") or false,
- (flags.ignoreligatures and "ligature") or false,
- (flags.ignorebaseglyphs and "base") or false,
- flags.r2l or false,
- }
- markclass = flags.mark_class
- if markclass then
- markclass = resources.markclasses[markclass]
- end
- flags = t
- end
- --
- local name = gk.name
- --
- if not name then
- -- in fact an error
- report_otf("skipping weird lookup number %s",k)
- elseif features then
- -- scripts, tag, ismac
- local f = { }
- for i=1,#features do
- local df = features[i]
- local tag = strip(lower(df.tag))
- local ft = f[tag] if not ft then ft = {} f[tag] = ft end
- local dscripts = df.scripts
- for i=1,#dscripts do
- local d = dscripts[i]
- local languages = d.langs
- local script = strip(lower(d.script))
- local fts = ft[script] if not fts then fts = {} ft[script] = fts end
- for i=1,#languages do
- fts[strip(lower(languages[i]))] = true
- end
- end
- end
- sequences[#sequences+1] = {
- type = typ,
- chain = chain,
- flags = flags,
- name = name,
- subtables = subtables,
- markclass = markclass,
- features = f,
- }
- else
- lookups[name] = {
- type = typ,
- chain = chain,
- flags = flags,
- subtables = subtables,
- markclass = markclass,
- }
- end
- end
- end
- end
- end
-end
-
--- test this:
---
--- for _, what in next, otf.glists do
--- raw[what] = nil
--- end
-
-actions["prepare lookups"] = function(data,filename,raw)
- local lookups = raw.lookups
- if lookups then
- data.lookups = lookups
- end
-end
-
--- The reverse handler does a bit of redundant splitting but it's seldom
--- seen, so we don't bother too much. We could store the replacement
--- in the current list (value instead of true) but it makes other code
--- uglier. Maybe some day.
-
-local function t_uncover(splitter,cache,covers)
- local result = { }
- for n=1,#covers do
- local cover = covers[n]
- local uncovered = cache[cover]
- if not uncovered then
- uncovered = lpegmatch(splitter,cover)
- cache[cover] = uncovered
- end
- result[n] = uncovered
- end
- return result
-end
-
-local function s_uncover(splitter,cache,cover)
- if cover == "" then
- return nil
- else
- local uncovered = cache[cover]
- if not uncovered then
- uncovered = lpegmatch(splitter,cover)
--- for i=1,#uncovered do
--- uncovered[i] = { [uncovered[i]] = true }
--- end
- cache[cover] = uncovered
- end
- return { uncovered }
- end
-end
-
-local function t_hashed(t,cache)
- if t then
- local ht = { }
- for i=1,#t do
- local ti = t[i]
- local tih = cache[ti]
- if not tih then
- tih = { }
- for i=1,#ti do
- tih[ti[i]] = true
- end
- cache[ti] = tih
- end
- ht[i] = tih
- end
- return ht
- else
- return nil
- end
-end
-
--- local s_hashed = t_hashed
-
-local function s_hashed(t,cache)
- if t then
- local ht = { }
- local tf = t[1]
- for i=1,#tf do
- ht[i] = { [tf[i]] = true }
- end
- return ht
- else
- return nil
- end
-end
-
-local function r_uncover(splitter,cache,cover,replacements)
- if cover == "" then
- return nil
- else
- -- we always have current as { } even in the case of one
- local uncovered = cover[1]
- local replaced = cache[replacements]
- if not replaced then
- replaced = lpegmatch(splitter,replacements)
- cache[replacements] = replaced
- end
- local nu, nr = #uncovered, #replaced
- local r = { }
- if nu == nr then
- for i=1,nu do
- r[uncovered[i]] = replaced[i]
- end
- end
- return r
- end
-end
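-
--- A sketch of what these helpers produce (made-up glyph names a, b, x, y):
--- t_uncover turns covers like { "a b", "c" } into arrays of unicodes,
--- t_hashed and s_hashed turn those into per-position sets like { [a] = true },
--- and r_uncover pairs a current cover "a b" with replacements "x y" into the
--- mapping { [a] = x, [b] = y } used for reverse chained substitutions.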
-
-actions["reorganize lookups"] = function(data,filename,raw) -- we could check for "" and n == 0
- -- we prefer the before lookups in a normal order
- if data.lookups then
- local splitter = data.helpers.tounicodetable
- local t_u_cache = { }
- local s_u_cache = t_u_cache -- string keys
- local t_h_cache = { }
- local s_h_cache = t_h_cache -- table keys (so we could use one cache)
- local r_u_cache = { } -- maybe shared
- for _, lookup in next, data.lookups do
- local rules = lookup.rules
- if rules then
- local format = lookup.format
- if format == "class" then
- local before_class = lookup.before_class
- if before_class then
- before_class = t_uncover(splitter,t_u_cache,reversed(before_class))
- end
- local current_class = lookup.current_class
- if current_class then
- current_class = t_uncover(splitter,t_u_cache,current_class)
- end
- local after_class = lookup.after_class
- if after_class then
- after_class = t_uncover(splitter,t_u_cache,after_class)
- end
- for i=1,#rules do
- local rule = rules[i]
- local class = rule.class
- local before = class.before
- if before then
- for i=1,#before do
- before[i] = before_class[before[i]] or { }
- end
- rule.before = t_hashed(before,t_h_cache)
- end
- local current = class.current
- local lookups = rule.lookups
- if current then
- for i=1,#current do
- current[i] = current_class[current[i]] or { }
- -- let's not be sparse
- if lookups and not lookups[i] then
- lookups[i] = "" -- (was: false) e.g. we can have two lookups and one replacement
- end
- -- end of fix
- end
- rule.current = t_hashed(current,t_h_cache)
- end
- local after = class.after
- if after then
- for i=1,#after do
- after[i] = after_class[after[i]] or { }
- end
- rule.after = t_hashed(after,t_h_cache)
- end
- rule.class = nil
- end
- lookup.before_class = nil
- lookup.current_class = nil
- lookup.after_class = nil
- lookup.format = "coverage"
- elseif format == "coverage" then
- for i=1,#rules do
- local rule = rules[i]
- local coverage = rule.coverage
- if coverage then
- local before = coverage.before
- if before then
- before = t_uncover(splitter,t_u_cache,reversed(before))
- rule.before = t_hashed(before,t_h_cache)
- end
- local current = coverage.current
- if current then
- current = t_uncover(splitter,t_u_cache,current)
- -- let's not be sparse
- local lookups = rule.lookups
- if lookups then
- for i=1,#current do
- if not lookups[i] then
- lookups[i] = "" -- fix sparse array
- end
- end
- end
- --
- rule.current = t_hashed(current,t_h_cache)
- end
- local after = coverage.after
- if after then
- after = t_uncover(splitter,t_u_cache,after)
- rule.after = t_hashed(after,t_h_cache)
- end
- rule.coverage = nil
- end
- end
- elseif format == "reversecoverage" then -- special case, single substitution only
- for i=1,#rules do
- local rule = rules[i]
- local reversecoverage = rule.reversecoverage
- if reversecoverage then
- local before = reversecoverage.before
- if before then
- before = t_uncover(splitter,t_u_cache,reversed(before))
- rule.before = t_hashed(before,t_h_cache)
- end
- local current = reversecoverage.current
- if current then
- current = t_uncover(splitter,t_u_cache,current)
- rule.current = t_hashed(current,t_h_cache)
- end
- local after = reversecoverage.after
- if after then
- after = t_uncover(splitter,t_u_cache,after)
- rule.after = t_hashed(after,t_h_cache)
- end
- local replacements = reversecoverage.replacements
- if replacements then
- rule.replacements = r_uncover(splitter,r_u_cache,current,replacements)
- end
- rule.reversecoverage = nil
- end
- end
- elseif format == "glyphs" then
-                    -- I could store these more efficiently (as now we use nested tables for before,
-                    -- after and current) but this feature happens so seldom that I don't bother
-                    -- about it right now.
- for i=1,#rules do
- local rule = rules[i]
- local glyphs = rule.glyphs
- if glyphs then
- local fore = glyphs.fore
- if fore and fore ~= "" then
- fore = s_uncover(splitter,s_u_cache,fore)
- rule.before = s_hashed(fore,s_h_cache)
- end
- local back = glyphs.back
- if back then
- back = s_uncover(splitter,s_u_cache,back)
- rule.after = s_hashed(back,s_h_cache)
- end
- local names = glyphs.names
- if names then
- names = s_uncover(splitter,s_u_cache,names)
- rule.current = s_hashed(names,s_h_cache)
- end
- rule.glyphs = nil
- end
- end
- end
- end
- end
- end
-end
-
-local function check_variants(unicode,the_variants,splitter,unicodes)
- local variants = the_variants.variants
- if variants then -- use splitter
- local glyphs = lpegmatch(splitter,variants)
- local done = { [unicode] = true }
- local n = 0
- for i=1,#glyphs do
- local g = glyphs[i]
- if done[g] then
- report_otf("skipping cyclic reference %U in math variant %U",g,unicode)
- else
- if n == 0 then
- n = 1
- variants = { g }
- else
- n = n + 1
- variants[n] = g
- end
- done[g] = true
- end
- end
- if n == 0 then
- variants = nil
- end
- end
- local parts = the_variants.parts
- if parts then
- local p = #parts
- if p > 0 then
- for i=1,p do
- local pi = parts[i]
- pi.glyph = unicodes[pi.component] or 0
- pi.component = nil
- end
- else
- parts = nil
- end
- end
- local italic_correction = the_variants.italic_correction
- if italic_correction and italic_correction == 0 then
- italic_correction = nil
- end
- return variants, parts, italic_correction
-end
-
-actions["analyze math"] = function(data,filename,raw)
- if raw.math then
- data.metadata.math = raw.math
- local unicodes = data.resources.unicodes
- local splitter = data.helpers.tounicodetable
- for unicode, description in next, data.descriptions do
- local glyph = description.glyph
- local mathkerns = glyph.mathkern -- singular
- local horiz_variants = glyph.horiz_variants
- local vert_variants = glyph.vert_variants
- local top_accent = glyph.top_accent
- if mathkerns or horiz_variants or vert_variants or top_accent then
- local math = { }
- if top_accent then
- math.top_accent = top_accent
- end
- if mathkerns then
- for k, v in next, mathkerns do
- if not next(v) then
- mathkerns[k] = nil
- else
- for k, v in next, v do
- if v == 0 then
- k[v] = nil -- height / kern can be zero
- end
- end
- end
- end
- math.kerns = mathkerns
- end
- if horiz_variants then
- math.horiz_variants, math.horiz_parts, math.horiz_italic_correction = check_variants(unicode,horiz_variants,splitter,unicodes)
- end
- if vert_variants then
- math.vert_variants, math.vert_parts, math.vert_italic_correction = check_variants(unicode,vert_variants,splitter,unicodes)
- end
- local italic_correction = description.italic
- if italic_correction and italic_correction ~= 0 then
- math.italic_correction = italic_correction
- end
- description.math = math
- end
- end
- end
-end
-
-actions["reorganize glyph kerns"] = function(data,filename,raw)
- local descriptions = data.descriptions
- local resources = data.resources
- local unicodes = resources.unicodes
- for unicode, description in next, descriptions do
- local kerns = description.glyph.kerns
- if kerns then
- local newkerns = { }
- for k, kern in next, kerns do
- local name = kern.char
- local offset = kern.off
- local lookup = kern.lookup
- if name and offset and lookup then
- local unicode = unicodes[name]
- if unicode then
- if type(lookup) == "table" then
- for l=1,#lookup do
- local lookup = lookup[l]
- local lookupkerns = newkerns[lookup]
- if lookupkerns then
- lookupkerns[unicode] = offset
- else
- newkerns[lookup] = { [unicode] = offset }
- end
- end
- else
- local lookupkerns = newkerns[lookup]
- if lookupkerns then
- lookupkerns[unicode] = offset
- else
- newkerns[lookup] = { [unicode] = offset }
- end
- end
- elseif trace_loading then
- report_otf("problems with unicode %a of kern %a of glyph %U",name,k,unicode)
- end
- end
- end
- description.kerns = newkerns
- end
- end
-end
-
-actions["merge kern classes"] = function(data,filename,raw)
- local gposlist = raw.gpos
- if gposlist then
- local descriptions = data.descriptions
- local resources = data.resources
- local unicodes = resources.unicodes
- local splitter = data.helpers.tounicodetable
- for gp=1,#gposlist do
- local gpos = gposlist[gp]
- local subtables = gpos.subtables
- if subtables then
- for s=1,#subtables do
- local subtable = subtables[s]
- local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes
- if kernclass then -- the next one is quite slow
- local split = { } -- saves time
- for k=1,#kernclass do
- local kcl = kernclass[k]
- local firsts = kcl.firsts
- local seconds = kcl.seconds
- local offsets = kcl.offsets
- local lookups = kcl.lookup -- singular
- if type(lookups) ~= "table" then
- lookups = { lookups }
- end
- -- if offsets[1] == nil then
- -- offsets[1] = ""
- -- end
- -- we can check the max in the loop
- -- local maxseconds = getn(seconds)
- for n, s in next, firsts do
- split[s] = split[s] or lpegmatch(splitter,s)
- end
- local maxseconds = 0
- for n, s in next, seconds do
- if n > maxseconds then
- maxseconds = n
- end
- split[s] = split[s] or lpegmatch(splitter,s)
- end
- for l=1,#lookups do
- local lookup = lookups[l]
- for fk=1,#firsts do -- maxfirsts ?
- local fv = firsts[fk]
- local splt = split[fv]
- if splt then
- local extrakerns = { }
- local baseoffset = (fk-1) * maxseconds
- for sk=2,maxseconds do -- will become 1 based in future luatex
- local sv = seconds[sk]
- -- for sk, sv in next, seconds do
- local splt = split[sv]
- if splt then -- redundant test
- local offset = offsets[baseoffset + sk]
- if offset then
- for i=1,#splt do
- extrakerns[splt[i]] = offset
- end
- end
- end
- end
- for i=1,#splt do
- local first_unicode = splt[i]
- local description = descriptions[first_unicode]
- if description then
- local kerns = description.kerns
- if not kerns then
- kerns = { } -- unicode indexed !
- description.kerns = kerns
- end
- local lookupkerns = kerns[lookup]
- if not lookupkerns then
- lookupkerns = { }
- kerns[lookup] = lookupkerns
- end
- for second_unicode, kern in next, extrakerns do
- lookupkerns[second_unicode] = kern
- end
- elseif trace_loading then
- report_otf("no glyph data for %U", first_unicode)
- end
- end
- end
- end
- end
- end
- subtable.kernclass = { }
- end
- end
- end
- end
- end
-end
-
-actions["check glyphs"] = function(data,filename,raw)
- for unicode, description in next, data.descriptions do
- description.glyph = nil
- end
-end
-
--- future versions will remove _
-
-actions["check metadata"] = function(data,filename,raw)
- local metadata = data.metadata
- for _, k in next, mainfields do
- if valid_fields[k] then
- local v = raw[k]
- if not metadata[k] then
- metadata[k] = v
- end
- end
- end
- -- metadata.pfminfo = raw.pfminfo -- not already done?
- local ttftables = metadata.ttf_tables
- if ttftables then
- for i=1,#ttftables do
- ttftables[i].data = "deleted"
- end
- end
-end
-
-actions["cleanup tables"] = function(data,filename,raw)
- data.resources.indices = nil -- not needed
- data.helpers = nil
-end
-
--- kern: ttf has a table with kerns
---
--- Weird, as maxfirst and maxseconds can have holes: firsts seem to be indexed, but
--- seconds can start at 2. This needs to be fixed, as getn as well as # are sort of
--- unpredictable; alternatively we could force a [1] if not set (maybe I will do that
--- anyway).
-
--- we can share { } as it is never set
-
---- ligatures have an extra specification.char entry that we don't use
-
-actions["reorganize glyph lookups"] = function(data,filename,raw)
- local resources = data.resources
- local unicodes = resources.unicodes
- local descriptions = data.descriptions
- local splitter = data.helpers.tounicodelist
-
- local lookuptypes = resources.lookuptypes
-
- for unicode, description in next, descriptions do
- local lookups = description.glyph.lookups
- if lookups then
- for tag, lookuplist in next, lookups do
- for l=1,#lookuplist do
- local lookup = lookuplist[l]
- local specification = lookup.specification
- local lookuptype = lookup.type
- local lt = lookuptypes[tag]
- if not lt then
- lookuptypes[tag] = lookuptype
- elseif lt ~= lookuptype then
- report_otf("conflicting lookuptypes, %a points to %a and %a",tag,lt,lookuptype)
- end
- if lookuptype == "ligature" then
- lookuplist[l] = { lpegmatch(splitter,specification.components) }
- elseif lookuptype == "alternate" then
- lookuplist[l] = { lpegmatch(splitter,specification.components) }
- elseif lookuptype == "substitution" then
- lookuplist[l] = unicodes[specification.variant]
- elseif lookuptype == "multiple" then
- lookuplist[l] = { lpegmatch(splitter,specification.components) }
- elseif lookuptype == "position" then
- lookuplist[l] = {
- specification.x or 0,
- specification.y or 0,
- specification.h or 0,
- specification.v or 0
- }
- elseif lookuptype == "pair" then
- local one = specification.offsets[1]
- local two = specification.offsets[2]
- local paired = unicodes[specification.paired]
- if one then
- if two then
- lookuplist[l] = { paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0 } }
- else
- lookuplist[l] = { paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 } }
- end
- else
- if two then
- lookuplist[l] = { paired, { }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0} } -- maybe nil instead of { }
- else
- lookuplist[l] = { paired }
- end
- end
- end
- end
- end
- local slookups, mlookups
- for tag, lookuplist in next, lookups do
- if #lookuplist == 1 then
- if slookups then
- slookups[tag] = lookuplist[1]
- else
- slookups = { [tag] = lookuplist[1] }
- end
- else
- if mlookups then
- mlookups[tag] = lookuplist
- else
- mlookups = { [tag] = lookuplist }
- end
- end
- end
- if slookups then
- description.slookups = slookups
- end
- if mlookups then
- description.mlookups = mlookups
- end
- end
- end
-
-end
-
-actions["reorganize glyph anchors"] = function(data,filename,raw) -- when we replace inplace we safe entries
- local descriptions = data.descriptions
- for unicode, description in next, descriptions do
- local anchors = description.glyph.anchors
- if anchors then
- for class, data in next, anchors do
- if class == "baselig" then
- for tag, specification in next, data do
- for i=1,#specification do
- local si = specification[i]
- specification[i] = { si.x or 0, si.y or 0 }
- end
- end
- else
- for tag, specification in next, data do
- data[tag] = { specification.x or 0, specification.y or 0 }
- end
- end
- end
- description.anchors = anchors
- end
- end
-end
-
--- modes: node, base, none
-
-function otf.setfeatures(tfmdata,features)
- local okay = constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf)
- if okay then
- return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf)
- else
- return { } -- will become false
- end
-end
-
--- the first version made a top/mid/not extensible table, now we just
--- pass on the variants data and deal with it in the tfm scaler (there
--- is no longer an extensible table anyway)
---
--- we cannot share descriptions as virtual fonts might extend them (ok,
--- we could use a cache with a hash)
---
--- we already assign an empty table to characters as we can add for
--- instance protruding info and loop over characters; one is not supposed
--- to change descriptions and if one does so one should make a copy!
-
-local function copytotfm(data,cache_id)
- if data then
- local metadata = data.metadata
- local resources = data.resources
- local properties = derivetable(data.properties)
- local descriptions = derivetable(data.descriptions)
- local goodies = derivetable(data.goodies)
- local characters = { }
- local parameters = { }
- local mathparameters = { }
- --
- local pfminfo = metadata.pfminfo or { }
- local resources = data.resources
- local unicodes = resources.unicodes
- -- local mode = data.mode or "base"
- local spaceunits = 500
- local spacer = "space"
- local designsize = metadata.designsize or metadata.design_size or 100
- local mathspecs = metadata.math
- --
- if designsize == 0 then
- designsize = 100
- end
- if mathspecs then
- for name, value in next, mathspecs do
- mathparameters[name] = value
- end
- end
- for unicode, _ in next, data.descriptions do -- use parent table
- characters[unicode] = { }
- end
- if mathspecs then
- -- we could move this to the scaler but not that much is saved
- -- and this is cleaner
- for unicode, character in next, characters do
- local d = descriptions[unicode]
- local m = d.math
- if m then
- -- watch out: luatex uses horiz_variants for the parts
- local variants = m.horiz_variants
- local parts = m.horiz_parts
- -- local done = { [unicode] = true }
- if variants then
- local c = character
- for i=1,#variants do
- local un = variants[i]
- -- if done[un] then
- -- -- report_otf("skipping cyclic reference %U in math variant %U",un,unicode)
- -- else
- c.next = un
- c = characters[un]
- -- done[un] = true
- -- end
- end -- c is now last in chain
- c.horiz_variants = parts
- elseif parts then
- character.horiz_variants = parts
- end
- local variants = m.vert_variants
- local parts = m.vert_parts
- -- local done = { [unicode] = true }
- if variants then
- local c = character
- for i=1,#variants do
- local un = variants[i]
- -- if done[un] then
- -- -- report_otf("skipping cyclic reference %U in math variant %U",un,unicode)
- -- else
- c.next = un
- c = characters[un]
- -- done[un] = true
- -- end
- end -- c is now last in chain
- c.vert_variants = parts
- elseif parts then
- character.vert_variants = parts
- end
- local italic_correction = m.vert_italic_correction
- if italic_correction then
- character.vert_italic_correction = italic_correction -- was c.
- end
- local top_accent = m.top_accent
- if top_accent then
- character.top_accent = top_accent
- end
- local kerns = m.kerns
- if kerns then
- character.mathkerns = kerns
- end
- end
- end
- end
- -- end math
- local monospaced = metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion == "Monospaced")
- local charwidth = pfminfo.avgwidth -- or unset
- local italicangle = metadata.italicangle
- local charxheight = pfminfo.os2_xheight and pfminfo.os2_xheight > 0 and pfminfo.os2_xheight
- properties.monospaced = monospaced
- parameters.italicangle = italicangle
- parameters.charwidth = charwidth
- parameters.charxheight = charxheight
- --
- local space = 0x0020 -- unicodes['space'], unicodes['emdash']
- local emdash = 0x2014 -- unicodes['space'], unicodes['emdash']
- if monospaced then
- if descriptions[space] then
- spaceunits, spacer = descriptions[space].width, "space"
- end
- if not spaceunits and descriptions[emdash] then
- spaceunits, spacer = descriptions[emdash].width, "emdash"
- end
- if not spaceunits and charwidth then
- spaceunits, spacer = charwidth, "charwidth"
- end
- else
- if descriptions[space] then
- spaceunits, spacer = descriptions[space].width, "space"
- end
- if not spaceunits and descriptions[emdash] then
- spaceunits, spacer = descriptions[emdash].width/2, "emdash/2"
- end
- if not spaceunits and charwidth then
- spaceunits, spacer = charwidth, "charwidth"
- end
- end
- spaceunits = tonumber(spaceunits) or 500 -- brrr
- -- we need a runtime lookup because of running from cdrom or zip, brrr (shouldn't we use the basename then?)
- local filename = constructors.checkedfilename(resources)
- local fontname = metadata.fontname
- local fullname = metadata.fullname or fontname
- local units = metadata.units_per_em or 1000
- --
- if units == 0 then -- catch bugs in fonts
- units = 1000
- metadata.units_per_em = 1000
- end
- --
- parameters.slant = 0
- parameters.space = spaceunits -- 3.333 (cmr10)
- parameters.space_stretch = units/2 -- 500 -- 1.666 (cmr10)
- parameters.space_shrink = 1*units/3 -- 333 -- 1.111 (cmr10)
- parameters.x_height = 2*units/5 -- 400
- parameters.quad = units -- 1000
- if spaceunits < 2*units/5 then
- -- todo: warning
- end
- if italicangle then
- parameters.italicangle = italicangle
- parameters.italicfactor = math.cos(math.rad(90+italicangle))
- parameters.slant = - math.round(math.tan(italicangle*math.pi/180))
- end
- if monospaced then
- parameters.space_stretch = 0
- parameters.space_shrink = 0
- elseif syncspace then --
- parameters.space_stretch = spaceunits/2
- parameters.space_shrink = spaceunits/3
- end
- parameters.extra_space = parameters.space_shrink -- 1.111 (cmr10)
- if charxheight then
- parameters.x_height = charxheight
- else
- local x = 0x78 -- unicodes['x']
- if x then
- local x = descriptions[x]
- if x then
- parameters.x_height = x.height
- end
- end
- end
- --
- parameters.designsize = (designsize/10)*65536
- parameters.ascender = abs(metadata.ascent or 0)
- parameters.descender = abs(metadata.descent or 0)
- parameters.units = units
- --
- properties.space = spacer
- properties.encodingbytes = 2
- properties.format = data.format or fonts.formats[filename] or "opentype"
- properties.noglyphnames = true
- properties.filename = filename
- properties.fontname = fontname
- properties.fullname = fullname
- properties.psname = fontname or fullname
- properties.name = filename or fullname
- --
- -- properties.name = specification.name
- -- properties.sub = specification.sub
- return {
- characters = characters,
- descriptions = descriptions,
- parameters = parameters,
- mathparameters = mathparameters,
- resources = resources,
- properties = properties,
- goodies = goodies,
- }
- end
-end
-
-local function otftotfm(specification)
- local cache_id = specification.hash
- local tfmdata = containers.read(constructors.cache,cache_id)
- if not tfmdata then
- local name = specification.name
- local sub = specification.sub
- local filename = specification.filename
- local format = specification.format
- local features = specification.features.normal
- local rawdata = otf.load(filename,format,sub,features and features.featurefile)
- if rawdata and next(rawdata) then
- rawdata.lookuphash = { }
- tfmdata = copytotfm(rawdata,cache_id)
- if tfmdata and next(tfmdata) then
- -- at this moment no characters are assigned yet, only empty slots
- local features = constructors.checkedfeatures("otf",features)
- local shared = tfmdata.shared
- if not shared then
- shared = { }
- tfmdata.shared = shared
- end
- shared.rawdata = rawdata
- -- shared.features = features -- default
- shared.dynamics = { }
- -- shared.processes = { }
- tfmdata.changed = { }
- shared.features = features
- shared.processes = otf.setfeatures(tfmdata,features)
- end
- end
- containers.write(constructors.cache,cache_id,tfmdata)
- end
- return tfmdata
-end
-
-local function read_from_otf(specification)
- local tfmdata = otftotfm(specification)
- if tfmdata then
- -- this late ? .. needs checking
- tfmdata.properties.name = specification.name
- tfmdata.properties.sub = specification.sub
- --
- tfmdata = constructors.scale(tfmdata,specification)
- local allfeatures = tfmdata.shared.features or specification.features.normal
- constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf)
- constructors.setname(tfmdata,specification) -- only otf?
- fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification)
- end
- return tfmdata
-end
-
-local function checkmathsize(tfmdata,mathsize)
- local mathdata = tfmdata.shared.rawdata.metadata.math
- local mathsize = tonumber(mathsize)
- if mathdata then -- we cannot use mathparameters as luatex will complain
- local parameters = tfmdata.parameters
- parameters.scriptpercentage = mathdata.ScriptPercentScaleDown
- parameters.scriptscriptpercentage = mathdata.ScriptScriptPercentScaleDown
- parameters.mathsize = mathsize
- end
-end
-
-registerotffeature {
- name = "mathsize",
- description = "apply mathsize specified in the font",
- initializers = {
- base = checkmathsize,
- node = checkmathsize,
- }
-}
-
--- helpers
-
-function otf.collectlookups(rawdata,kind,script,language)
- local sequences = rawdata.resources.sequences
- if sequences then
- local featuremap, featurelist = { }, { }
- for s=1,#sequences do
- local sequence = sequences[s]
- local features = sequence.features
- features = features and features[kind]
- features = features and (features[script] or features[default] or features[wildcard])
- features = features and (features[language] or features[default] or features[wildcard])
- if features then
- local subtables = sequence.subtables
- if subtables then
- for s=1,#subtables do
- local ss = subtables[s]
- if not featuremap[s] then
- featuremap[ss] = true
- featurelist[#featurelist+1] = ss
- end
- end
- end
- end
- end
- if #featurelist > 0 then
- return featuremap, featurelist
- end
- end
- return nil, nil
-end
-
--- readers
-
-local function check_otf(forced,specification,suffix,what)
- local name = specification.name
- if forced then
- name = file.addsuffix(name,suffix,true)
- end
- local fullname = findbinfile(name,suffix) or ""
- if fullname == "" then
- fullname = fonts.names.getfilename(name,suffix) or ""
- end
- if fullname ~= "" then
- specification.filename = fullname
- specification.format = what
- return read_from_otf(specification)
- end
-end
-
-local function opentypereader(specification,suffix,what)
- local forced = specification.forced or ""
- if forced == "otf" then
- return check_otf(true,specification,forced,"opentype")
- elseif forced == "ttf" or forced == "ttc" or forced == "dfont" then
- return check_otf(true,specification,forced,"truetype")
- else
- return check_otf(false,specification,suffix,what)
- end
-end
-
-readers.opentype = opentypereader
-
-local formats = fonts.formats
-
-formats.otf = "opentype"
-formats.ttf = "truetype"
-formats.ttc = "truetype"
-formats.dfont = "truetype"
-
-function readers.otf (specification) return opentypereader(specification,"otf",formats.otf ) end
-function readers.ttf (specification) return opentypereader(specification,"ttf",formats.ttf ) end
-function readers.ttc (specification) return opentypereader(specification,"ttf",formats.ttc ) end
-function readers.dfont(specification) return opentypereader(specification,"ttf",formats.dfont) end
-
--- this will be overloaded
-
-function otf.scriptandlanguage(tfmdata,attr)
- local properties = tfmdata.properties
- return properties.script or "dflt", properties.language or "dflt"
-end
+if not modules then modules = { } end modules ['font-otf'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- langs -> languages etc.
+-- anchor_classes vs kernclasses
+-- modification/creationtime in subfont is set at runtime, thus pointless
+-- to_table -> totable
+-- ascent descent
+
+-- more checking against low level calls of functions
+
+local utfbyte = utf.byte
+local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
+local type, next, tonumber, tostring = type, next, tonumber, tostring
+local abs = math.abs
+local getn = table.getn
+local lpegmatch = lpeg.match
+local reversed, concat, remove = table.reversed, table.concat, table.remove
+local ioflush = io.flush
+local fastcopy, tohash, derivetable = table.fastcopy, table.tohash, table.derive
+local formatters = string.formatters
+
+local allocate = utilities.storage.allocate
+local registertracker = trackers.register
+local registerdirective = directives.register
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+local elapsedtime = statistics.elapsedtime
+local findbinfile = resolvers.findbinfile
+
+local trace_private = false registertracker("otf.private", function(v) trace_private = v end)
+local trace_loading = false registertracker("otf.loading", function(v) trace_loading = v end)
+local trace_features = false registertracker("otf.features", function(v) trace_features = v end)
+local trace_dynamics = false registertracker("otf.dynamics", function(v) trace_dynamics = v end)
+local trace_sequences = false registertracker("otf.sequences", function(v) trace_sequences = v end)
+local trace_markwidth = false registertracker("otf.markwidth", function(v) trace_markwidth = v end)
+local trace_defining = false registertracker("fonts.defining", function(v) trace_defining = v end)
+
+local report_otf = logs.reporter("fonts","otf loading")
+
+local fonts = fonts
+local otf = fonts.handlers.otf
+
+otf.glists = { "gsub", "gpos" }
+
+otf.version = 2.743 -- beware: also sync font-mis.lua
+otf.cache = containers.define("fonts", "otf", otf.version, true)
+
+local fontdata = fonts.hashes.identifiers
+local chardata = characters and characters.data -- not used
+
+local otffeatures = fonts.constructors.newfeatures("otf")
+local registerotffeature = otffeatures.register
+
+local enhancers = allocate()
+otf.enhancers = enhancers
+local patches = { }
+enhancers.patches = patches
+
+local definers = fonts.definers
+local readers = fonts.readers
+local constructors = fonts.constructors
+
+local forceload = false
+local cleanup = 0 -- mk: 0=885M 1=765M 2=735M (regular run 730M)
+local usemetatables = false -- .4 slower on mk but 30 M less mem so we might change the default -- will be directive
+local packdata = true
+local syncspace = true
+local forcenotdef = false
+local includesubfonts = false
+
+local wildcard = "*"
+local default = "dflt"
+
+local fontloaderfields = fontloader.fields
+local mainfields = nil
+local glyphfields = nil -- not used yet
+
+registerdirective("fonts.otf.loader.cleanup", function(v) cleanup = tonumber(v) or (v and 1) or 0 end)
+registerdirective("fonts.otf.loader.force", function(v) forceload = v end)
+registerdirective("fonts.otf.loader.usemetatables", function(v) usemetatables = v end)
+registerdirective("fonts.otf.loader.pack", function(v) packdata = v end)
+registerdirective("fonts.otf.loader.syncspace", function(v) syncspace = v end)
+registerdirective("fonts.otf.loader.forcenotdef", function(v) forcenotdef = v end)
+
+local function load_featurefile(raw,featurefile)
+ if featurefile and featurefile ~= "" then
+ if trace_loading then
+ report_otf("using featurefile %a", featurefile)
+ end
+ fontloader.apply_featurefile(raw, featurefile)
+ end
+end
+
+local function showfeatureorder(rawdata,filename)
+ local sequences = rawdata.resources.sequences
+ if sequences and #sequences > 0 then
+ if trace_loading then
+ report_otf("font %a has %s sequences",filename,#sequences)
+ report_otf(" ")
+ end
+ for nos=1,#sequences do
+ local sequence = sequences[nos]
+ local typ = sequence.type or "no-type"
+ local name = sequence.name or "no-name"
+ local subtables = sequence.subtables or { "no-subtables" }
+ local features = sequence.features
+ if trace_loading then
+ report_otf("%3i %-15s %-20s [% t]",nos,name,typ,subtables)
+ end
+ if features then
+ for feature, scripts in next, features do
+ local tt = { }
+ if type(scripts) == "table" then
+ for script, languages in next, scripts do
+ local ttt = { }
+ for language, _ in next, languages do
+ ttt[#ttt+1] = language
+ end
+ tt[#tt+1] = formatters["[%s: % t]"](script,ttt)
+ end
+ if trace_loading then
+ report_otf(" %s: % t",feature,tt)
+ end
+ else
+ if trace_loading then
+ report_otf(" %s: %S",feature,scripts)
+ end
+ end
+ end
+ end
+ end
+ if trace_loading then
+ report_otf("\n")
+ end
+ elseif trace_loading then
+ report_otf("font %a has no sequences",filename)
+ end
+end
+
+--[[ldx--
+
+We start with a lot of tables and related functions.
+--ldx]]--
+
+local actions = allocate()
+local before = allocate()
+local after = allocate()
+
+patches.before = before
+patches.after = after
+
+local function enhance(name,data,filename,raw)
+ local enhancer = actions[name]
+ if enhancer then
+ if trace_loading then
+ report_otf("apply enhancement %a to file %a",name,filename)
+ ioflush()
+ end
+ enhancer(data,filename,raw)
+ else
+ -- no message as we can have private ones
+ end
+end
+
+function enhancers.apply(data,filename,raw)
+ local basename = file.basename(lower(filename))
+ if trace_loading then
+ report_otf("%s enhancing file %a","start",filename)
+ end
+ ioflush() -- we want instant messages
+ for e=1,#ordered_enhancers do
+ local enhancer = ordered_enhancers[e]
+ local b = before[enhancer]
+ if b then
+ for pattern, action in next, b do
+ if find(basename,pattern) then
+ action(data,filename,raw)
+ end
+ end
+ end
+ enhance(enhancer,data,filename,raw)
+ local a = after[enhancer]
+ if a then
+ for pattern, action in next, a do
+ if find(basename,pattern) then
+ action(data,filename,raw)
+ end
+ end
+ end
+ ioflush() -- we want instant messages
+ end
+ if trace_loading then
+ report_otf("%s enhancing file %a","stop",filename)
+ end
+ ioflush() -- we want instant messages
+end
+
+-- patches.register("before","migrate metadata","cambria",function() end)
+
+function patches.register(what,where,pattern,action)
+ local pw = patches[what]
+ if pw then
+ local ww = pw[where]
+ if ww then
+ ww[pattern] = action
+ else
+ pw[where] = { [pattern] = action}
+ end
+ end
+end
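+
+-- An illustrative sketch only (the font name and the tweak below are made up): a before-patch
+-- registered like the commented call above runs for every font whose lowercased basename
+-- matches the given pattern, right before the named enhancer. Here "check metadata" is one
+-- of the enhancers defined further down.
+--
+-- patches.register("before","check metadata","somebrokenfont",function(data,filename,raw)
+--     patches.report("patching metadata of %s",filename)
+--     raw.fullname = raw.fullname or raw.fontname -- hypothetical repair
+-- end)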
+
+function patches.report(fmt,...)
+ if trace_loading then
+ report_otf("patching: %s",formatters[fmt](...))
+ end
+end
+
+function enhancers.register(what,action) -- only already registered can be overloaded
+ actions[what] = action
+end
+
+function otf.load(filename,format,sub,featurefile)
+ local base = file.basename(file.removesuffix(filename))
+ local name = file.removesuffix(base)
+ local attr = lfs.attributes(filename)
+ local size = attr and attr.size or 0
+ local time = attr and attr.modification or 0
+ if featurefile then
+ name = name .. "@" .. file.removesuffix(file.basename(featurefile))
+ end
+ if sub == "" then
+ sub = false
+ end
+ local hash = name
+ if sub then
+ hash = hash .. "-" .. sub
+ end
+ hash = containers.cleanname(hash)
+ local featurefiles
+ if featurefile then
+ featurefiles = { }
+ for s in gmatch(featurefile,"[^,]+") do
+ local name = resolvers.findfile(file.addsuffix(s,'fea'),'fea') or ""
+ if name == "" then
+ report_otf("loading error, no featurefile %a",s)
+ else
+ local attr = lfs.attributes(name)
+ featurefiles[#featurefiles+1] = {
+ name = name,
+ size = attr and attr.size or 0,
+ time = attr and attr.modification or 0,
+ }
+ end
+ end
+ if #featurefiles == 0 then
+ featurefiles = nil
+ end
+ end
+ local data = containers.read(otf.cache,hash)
+ local reload = not data or data.size ~= size or data.time ~= time
+ if forceload then
+ report_otf("forced reload of %a due to hard coded flag",filename)
+ reload = true
+ end
+ if not reload then
+ local featuredata = data.featuredata
+ if featurefiles then
+ if not featuredata or #featuredata ~= #featurefiles then
+ reload = true
+ else
+ for i=1,#featurefiles do
+ local fi, fd = featurefiles[i], featuredata[i]
+ if fi.name ~= fd.name or fi.size ~= fd.size or fi.time ~= fd.time then
+ reload = true
+ break
+ end
+ end
+ end
+ elseif featuredata then
+ reload = true
+ end
+ if reload then
+ report_otf("loading: forced reload due to changed featurefile specification %a",featurefile)
+ end
+ end
+ if reload then
+ report_otf("loading %a, hash %a",filename,hash)
+ local fontdata, messages
+ if sub then
+ fontdata, messages = fontloader.open(filename,sub)
+ else
+ fontdata, messages = fontloader.open(filename)
+ end
+ if fontdata then
+ mainfields = mainfields or (fontloaderfields and fontloaderfields(fontdata))
+ end
+ if trace_loading and messages and #messages > 0 then
+ if type(messages) == "string" then
+ report_otf("warning: %s",messages)
+ else
+ for m=1,#messages do
+ report_otf("warning: %S",messages[m])
+ end
+ end
+ else
+ report_otf("loading done")
+ end
+ if fontdata then
+ if featurefiles then
+ for i=1,#featurefiles do
+ load_featurefile(fontdata,featurefiles[i].name)
+ end
+ end
+ local unicodes = {
+ -- names to unicodes
+ }
+ local splitter = lpeg.splitter(" ",unicodes)
+ data = {
+ size = size,
+ time = time,
+ format = format,
+ featuredata = featurefiles,
+ resources = {
+ filename = resolvers.unresolve(filename), -- no shortcut
+ version = otf.version,
+ creator = "context mkiv",
+ unicodes = unicodes,
+ indices = {
+ -- index to unicodes
+ },
+ duplicates = {
+ -- alternative unicodes
+ },
+ variants = {
+ -- alternative unicodes (variants)
+ },
+ lookuptypes = {
+ },
+ },
+ metadata = {
+ -- raw metadata, not to be used
+ },
+ properties = {
+ -- normalized metadata
+ },
+ descriptions = {
+ },
+ goodies = {
+ },
+ helpers = {
+ tounicodelist = splitter,
+ tounicodetable = lpeg.Ct(splitter),
+ },
+ }
+ starttiming(data)
+ report_otf("file size: %s", size)
+ enhancers.apply(data,filename,fontdata)
+ local packtime = { }
+ if packdata then
+ if cleanup > 0 then
+ collectgarbage("collect")
+ end
+ starttiming(packtime)
+ enhance("pack",data,filename,nil)
+ stoptiming(packtime)
+ end
+ report_otf("saving %a in cache",filename)
+ data = containers.write(otf.cache, hash, data)
+ if cleanup > 1 then
+ collectgarbage("collect")
+ end
+ stoptiming(data)
+ if elapsedtime then -- not in generic
+ report_otf("preprocessing and caching time %s, packtime %s",
+ elapsedtime(data),packdata and elapsedtime(packtime) or 0)
+ end
+ fontloader.close(fontdata) -- free memory
+ if cleanup > 3 then
+ collectgarbage("collect")
+ end
+        data = containers.read(otf.cache, hash) -- this frees the old table and loads the sparse one
+ if cleanup > 2 then
+ collectgarbage("collect")
+ end
+ else
+ data = nil
+ report_otf("loading failed due to read error")
+ end
+ end
+ if data then
+ if trace_defining then
+ report_otf("loading from cache using hash %a",hash)
+ end
+ enhance("unpack",data,filename,nil,false)
+ enhance("add dimensions",data,filename,nil,false)
+ if trace_sequences then
+ showfeatureorder(data,filename)
+ end
+ end
+ return data
+end
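+
+-- A small illustration of the caching key (file and feature names made up): loading
+-- "Whatever-Regular.otf" with featurefile "extra.fea" and subfont 1 gives the name
+-- "Whatever-Regular@extra" and the hash "Whatever-Regular@extra-1", which
+-- containers.cleanname then normalizes; a changed size, time or featurefile
+-- specification triggers a reload, as does the force directive.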
+
+local mt = {
+ __index = function(t,k) -- maybe set it
+ if k == "height" then
+ local ht = t.boundingbox[4]
+ return ht < 0 and 0 or ht
+ elseif k == "depth" then
+ local dp = -t.boundingbox[2]
+ return dp < 0 and 0 or dp
+ elseif k == "width" then
+ return 0
+ elseif k == "name" then -- or maybe uni*
+ return forcenotdef and ".notdef"
+ end
+ end
+}
+
+actions["prepare tables"] = function(data,filename,raw)
+ data.properties.hasitalics = false
+end
+
+actions["add dimensions"] = function(data,filename)
+ -- todo: forget about the width if it's the defaultwidth (saves mem)
+ -- we could also build the marks hash here (instead of storing it)
+ if data then
+ local descriptions = data.descriptions
+ local resources = data.resources
+ local defaultwidth = resources.defaultwidth or 0
+ local defaultheight = resources.defaultheight or 0
+ local defaultdepth = resources.defaultdepth or 0
+ local basename = trace_markwidth and file.basename(filename)
+ if usemetatables then
+ for _, d in next, descriptions do
+ local wd = d.width
+ if not wd then
+ d.width = defaultwidth
+ elseif trace_markwidth and wd ~= 0 and d.class == "mark" then
+ report_otf("mark %a with width %b found in %a",d.name or "",wd,basename)
+ -- d.width = -wd
+ end
+ setmetatable(d,mt)
+ end
+ else
+ for _, d in next, descriptions do
+ local bb, wd = d.boundingbox, d.width
+ if not wd then
+ d.width = defaultwidth
+ elseif trace_markwidth and wd ~= 0 and d.class == "mark" then
+ report_otf("mark %a with width %b found in %a",d.name or "",wd,basename)
+ -- d.width = -wd
+ end
+ -- if forcenotdef and not d.name then
+ -- d.name = ".notdef"
+ -- end
+ if bb then
+ local ht, dp = bb[4], -bb[2]
+ if ht == 0 or ht < 0 then
+ -- not set
+ else
+ d.height = ht
+ end
+ if dp == 0 or dp < 0 then
+ -- not set
+ else
+ d.depth = dp
+ end
+ end
+ end
+ end
+ end
+end
+
+local function somecopy(old) -- fast one
+ if old then
+ local new = { }
+ if type(old) == "table" then
+ for k, v in next, old do
+ if k == "glyphs" then
+ -- skip
+ elseif type(v) == "table" then
+ new[k] = somecopy(v)
+ else
+ new[k] = v
+ end
+ end
+ else
+ for i=1,#mainfields do
+ local k = mainfields[i]
+ local v = old[k]
+ if k == "glyphs" then
+ -- skip
+ elseif type(v) == "table" then
+ new[k] = somecopy(v)
+ else
+ new[k] = v
+ end
+ end
+ end
+ return new
+ else
+ return { }
+ end
+end
+
+-- not setting hasitalics and class (when nil) during table construction can save some mem
+
+actions["prepare glyphs"] = function(data,filename,raw)
+ local rawglyphs = raw.glyphs
+ local rawsubfonts = raw.subfonts
+ local rawcidinfo = raw.cidinfo
+ local criterium = constructors.privateoffset
+ local private = criterium
+ local resources = data.resources
+ local metadata = data.metadata
+ local properties = data.properties
+ local descriptions = data.descriptions
+ local unicodes = resources.unicodes -- name to unicode
+ local indices = resources.indices -- index to unicode
+ local duplicates = resources.duplicates
+ local variants = resources.variants
+
+ if rawsubfonts then
+
+ metadata.subfonts = includesubfonts and { }
+ properties.cidinfo = rawcidinfo
+
+ if rawcidinfo.registry then
+ local cidmap = fonts.cid.getmap(rawcidinfo)
+ if cidmap then
+ rawcidinfo.usedname = cidmap.usedname
+ local nofnames, nofunicodes = 0, 0
+ local cidunicodes, cidnames = cidmap.unicodes, cidmap.names
+ for cidindex=1,#rawsubfonts do
+ local subfont = rawsubfonts[cidindex]
+ local cidglyphs = subfont.glyphs
+ if includesubfonts then
+ metadata.subfonts[cidindex] = somecopy(subfont)
+ end
+ for index=0,subfont.glyphcnt-1 do -- we could take the previous glyphcnt instead of 0
+ local glyph = cidglyphs[index]
+ if glyph then
+ local unicode = glyph.unicode
+ local name = glyph.name or cidnames[index]
+ if not unicode or unicode == -1 or unicode >= criterium then
+ unicode = cidunicodes[index]
+ end
+ if unicode and descriptions[unicode] then
+ report_otf("preventing glyph %a at index %H to overload unicode %U",name or "noname",index,unicode)
+ unicode = -1
+ end
+ if not unicode or unicode == -1 or unicode >= criterium then
+ if not name then
+ name = format("u%06X",private)
+ end
+ unicode = private
+ unicodes[name] = private
+ if trace_private then
+ report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
+ end
+ private = private + 1
+ nofnames = nofnames + 1
+ else
+ if not name then
+ name = format("u%06X",unicode)
+ end
+ unicodes[name] = unicode
+ nofunicodes = nofunicodes + 1
+ end
+ indices[index] = unicode -- each index is unique (at least now)
+
+ local description = {
+ -- width = glyph.width,
+ boundingbox = glyph.boundingbox,
+ name = glyph.name or name or "unknown", -- uniXXXX
+ cidindex = cidindex,
+ index = index,
+ glyph = glyph,
+ }
+
+ descriptions[unicode] = description
+ else
+ -- report_otf("potential problem: glyph %U is used but empty",index)
+ end
+ end
+ end
+ if trace_loading then
+ report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes, nofnames, nofunicodes+nofnames)
+ end
+ elseif trace_loading then
+ report_otf("unable to remap cid font, missing cid file for %a",filename)
+ end
+ elseif trace_loading then
+ report_otf("font %a has no glyphs",filename)
+ end
+
+ else
+
+ for index=0,raw.glyphcnt-1 do -- not raw.glyphmax-1 (as that will crash)
+ local glyph = rawglyphs[index]
+ if glyph then
+ local unicode = glyph.unicode
+ local name = glyph.name
+ if not unicode or unicode == -1 or unicode >= criterium then
+ unicode = private
+ unicodes[name] = private
+ if trace_private then
+ report_otf("glyph %a at index %H is moved to private unicode slot %U",name,index,private)
+ end
+ private = private + 1
+ else
+ unicodes[name] = unicode
+ end
+ indices[index] = unicode
+ if not name then
+ name = format("u%06X",unicode)
+ end
+ descriptions[unicode] = {
+ -- width = glyph.width,
+ boundingbox = glyph.boundingbox,
+ name = name,
+ index = index,
+ glyph = glyph,
+ }
+ local altuni = glyph.altuni
+ if altuni then
+ local d
+ for i=1,#altuni do
+ local a = altuni[i]
+ local u = a.unicode
+ local v = a.variant
+ if v then
+ -- tricky: no addition to d? needs checking but in practice such dups are either very simple
+                        -- shapes or e.g. cjk with not that many features
+ local vv = variants[v]
+ if vv then
+ vv[u] = unicode
+ else -- xits-math has some:
+ vv = { [u] = unicode }
+ variants[v] = vv
+ end
+ elseif d then
+ d[#d+1] = u
+ else
+ d = { u }
+ end
+ end
+ if d then
+ duplicates[unicode] = d
+ end
+ end
+ else
+ report_otf("potential problem: glyph %U is used but empty",index)
+ end
+ end
+
+ end
+
+ resources.private = private
+
+end
+
+-- the next one is still messy but will get better when we have
+-- flattened map/enc tables in the font loader
+
+actions["check encoding"] = function(data,filename,raw)
+ local descriptions = data.descriptions
+ local resources = data.resources
+ local properties = data.properties
+ local unicodes = resources.unicodes -- name to unicode
+ local indices = resources.indices -- index to unicodes
+ local duplicates = resources.duplicates
+
+ -- begin of messy (not needed when cidmap)
+
+ local mapdata = raw.map or { }
+ local unicodetoindex = mapdata and mapdata.map or { }
+ -- local encname = lower(data.enc_name or raw.enc_name or mapdata.enc_name or "")
+ local encname = lower(data.enc_name or mapdata.enc_name or "")
+ local criterium = 0xFFFF -- for instance cambria has a lot of mess up there
+
+ -- end of messy
+
+ if find(encname,"unicode") then -- unicodebmp, unicodefull, ...
+ if trace_loading then
+ report_otf("checking embedded unicode map %a",encname)
+ end
+ for unicode, index in next, unicodetoindex do -- altuni already covers this
+ if unicode <= criterium and not descriptions[unicode] then
+ local parent = indices[index] -- why nil?
+ if not parent then
+ report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
+ else
+ local parentdescription = descriptions[parent]
+ if parentdescription then
+ local altuni = parentdescription.altuni
+ if not altuni then
+ altuni = { { unicode = parent } }
+ parentdescription.altuni = altuni
+ duplicates[parent] = { unicode }
+ else
+ local done = false
+ for i=1,#altuni do
+ if altuni[i].unicode == parent then
+ done = true
+ break
+ end
+ end
+ if not done then
+ -- let's assume simple cjk reuse
+ altuni[#altuni+1] = { unicode = parent }
+ table.insert(duplicates[parent],unicode)
+ end
+ end
+ if trace_loading then
+ report_otf("weird, unicode %U points to nowhere with index %H",unicode,index)
+ end
+ else
+ report_otf("weird, unicode %U points to %U with index %H",unicode,index)
+ end
+ end
+ end
+ end
+ elseif properties.cidinfo then
+ report_otf("warning: no unicode map, used cidmap %a",properties.cidinfo.usedname)
+ else
+ report_otf("warning: non unicode map %a, only using glyph unicode data",encname or "whatever")
+ end
+
+ if mapdata then
+ mapdata.map = { } -- clear some memory
+ end
+end
+
+-- for the moment we assume that a font with lookups will not use
+-- altuni so we stick to kerns only
+
+actions["add duplicates"] = function(data,filename,raw)
+ local descriptions = data.descriptions
+ local resources = data.resources
+ local properties = data.properties
+ local unicodes = resources.unicodes -- name to unicode
+ local indices = resources.indices -- index to unicodes
+ local duplicates = resources.duplicates
+
+ for unicode, d in next, duplicates do
+ for i=1,#d do
+ local u = d[i]
+ if not descriptions[u] then
+ local description = descriptions[unicode]
+ local duplicate = table.copy(description) -- else packing problem
+ duplicate.comment = format("copy of U+%05X", unicode)
+ descriptions[u] = duplicate
+ local n = 0
+ for _, description in next, descriptions do
+                    local kerns = description.kerns
+                    if kerns then
+ for _, k in next, kerns do
+ local ku = k[unicode]
+ if ku then
+ k[u] = ku
+ n = n + 1
+ end
+ end
+ end
+ -- todo: lookups etc
+ end
+ if trace_loading then
+ report_otf("duplicating %U to %U with index %H (%s kerns)",unicode,u,description.index,n)
+ end
+ end
+ end
+ end
+end
+
+-- class : nil base mark ligature component (maybe we don't need it in description)
+-- boundingbox: split into ht/dp takes more memory (larger tables and less sharing)
+
+actions["analyze glyphs"] = function(data,filename,raw) -- maybe integrate this in the previous
+ local descriptions = data.descriptions
+ local resources = data.resources
+ local metadata = data.metadata
+ local properties = data.properties
+ local hasitalics = false
+ local widths = { }
+ local marks = { } -- always present (saves checking)
+ for unicode, description in next, descriptions do
+ local glyph = description.glyph
+ local italic = glyph.italic_correction
+ if not italic then
+ -- skip
+ elseif italic == 0 then
+ -- skip
+ else
+ description.italic = italic
+ hasitalics = true
+ end
+ local width = glyph.width
+ widths[width] = (widths[width] or 0) + 1
+ local class = glyph.class
+ if class then
+ if class == "mark" then
+ marks[unicode] = true
+ end
+ description.class = class
+ end
+ end
+ -- flag italic
+ properties.hasitalics = hasitalics
+ -- flag marks
+ resources.marks = marks
+ -- share most common width for cjk fonts
+ local wd, most = 0, 1
+ for k,v in next, widths do
+ if v > most then
+ wd, most = k, v
+ end
+ end
+ if most > 1000 then -- maybe 500
+ if trace_loading then
+ report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most)
+ end
+ for unicode, description in next, descriptions do
+ if description.width == wd then
+ -- description.width = nil
+ else
+ description.width = description.glyph.width
+ end
+ end
+ resources.defaultwidth = wd
+ else
+ for unicode, description in next, descriptions do
+ description.width = description.glyph.width
+ end
+ end
+end
+
+actions["reorganize mark classes"] = function(data,filename,raw)
+ local mark_classes = raw.mark_classes
+ if mark_classes then
+ local resources = data.resources
+ local unicodes = resources.unicodes
+ local markclasses = { }
+ resources.markclasses = markclasses -- reversed
+ for name, class in next, mark_classes do
+ local t = { }
+ for s in gmatch(class,"[^ ]+") do
+ t[unicodes[s]] = true
+ end
+ markclasses[name] = t
+ end
+ end
+end
+
+actions["reorganize features"] = function(data,filename,raw) -- combine with other
+ local features = { }
+ data.resources.features = features
+ for k, what in next, otf.glists do
+ local dw = raw[what]
+ if dw then
+ local f = { }
+ features[what] = f
+ for i=1,#dw do
+                local d = dw[i]
+ local dfeatures = d.features
+ if dfeatures then
+ for i=1,#dfeatures do
+ local df = dfeatures[i]
+ local tag = strip(lower(df.tag))
+ local ft = f[tag]
+ if not ft then
+ ft = { }
+ f[tag] = ft
+ end
+ local dscripts = df.scripts
+ for i=1,#dscripts do
+ local d = dscripts[i]
+ local languages = d.langs
+ local script = strip(lower(d.script))
+ local fts = ft[script] if not fts then fts = {} ft[script] = fts end
+ for i=1,#languages do
+ fts[strip(lower(languages[i]))] = true
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
+actions["reorganize anchor classes"] = function(data,filename,raw)
+ local resources = data.resources
+ local anchor_to_lookup = { }
+ local lookup_to_anchor = { }
+ resources.anchor_to_lookup = anchor_to_lookup
+ resources.lookup_to_anchor = lookup_to_anchor
+ local classes = raw.anchor_classes -- anchor classes not in final table
+ if classes then
+ for c=1,#classes do
+ local class = classes[c]
+ local anchor = class.name
+ local lookups = class.lookup
+ if type(lookups) ~= "table" then
+ lookups = { lookups }
+ end
+ local a = anchor_to_lookup[anchor]
+ if not a then
+ a = { }
+ anchor_to_lookup[anchor] = a
+ end
+ for l=1,#lookups do
+ local lookup = lookups[l]
+ local l = lookup_to_anchor[lookup]
+ if l then
+ l[anchor] = true
+ else
+ l = { [anchor] = true }
+ lookup_to_anchor[lookup] = l
+ end
+ a[lookup] = true
+ end
+ end
+ end
+end
+
+actions["prepare tounicode"] = function(data,filename,raw)
+ fonts.mappings.addtounicode(data,filename)
+end
+
+local g_directions = {
+ gsub_contextchain = 1,
+ gpos_contextchain = 1,
+ -- gsub_context = 1,
+ -- gpos_context = 1,
+ gsub_reversecontextchain = -1,
+ gpos_reversecontextchain = -1,
+}
+
+-- Research by Khaled Hosny has demonstrated that the font loader merges
+-- regular and AAT features and that these can interfere (especially because
+-- we dropped checking for valid features elsewhere). So we just check for
+-- the special flag and drop the feature if such a tag is found.
+
+local function supported(features)
+ for i=1,#features do
+ if features[i].ismac then
+ return false
+ end
+ end
+ return true
+end
+
+actions["reorganize subtables"] = function(data,filename,raw)
+ local resources = data.resources
+ local sequences = { }
+ local lookups = { }
+ local chainedfeatures = { }
+ resources.sequences = sequences
+ resources.lookups = lookups
+ for _, what in next, otf.glists do
+ local dw = raw[what]
+ if dw then
+ for k=1,#dw do
+ local gk = dw[k]
+ local features = gk.features
+-- if features and supported(features) then
+ if not features or supported(features) then -- not always features !
+ local typ = gk.type
+ local chain = g_directions[typ] or 0
+ local subtables = gk.subtables
+ if subtables then
+ local t = { }
+ for s=1,#subtables do
+ t[s] = subtables[s].name
+ end
+ subtables = t
+ end
+ local flags, markclass = gk.flags, nil
+ if flags then
+ local t = { -- forcing false packs nicer
+ (flags.ignorecombiningmarks and "mark") or false,
+ (flags.ignoreligatures and "ligature") or false,
+ (flags.ignorebaseglyphs and "base") or false,
+ flags.r2l or false,
+ }
+ markclass = flags.mark_class
+ if markclass then
+ markclass = resources.markclasses[markclass]
+ end
+ flags = t
+ end
+ --
+ local name = gk.name
+ --
+ if not name then
+ -- in fact an error
+ report_otf("skipping weird lookup number %s",k)
+ elseif features then
+ -- scripts, tag, ismac
+ local f = { }
+ for i=1,#features do
+ local df = features[i]
+ local tag = strip(lower(df.tag))
+ local ft = f[tag] if not ft then ft = {} f[tag] = ft end
+ local dscripts = df.scripts
+ for i=1,#dscripts do
+ local d = dscripts[i]
+ local languages = d.langs
+ local script = strip(lower(d.script))
+ local fts = ft[script] if not fts then fts = {} ft[script] = fts end
+ for i=1,#languages do
+ fts[strip(lower(languages[i]))] = true
+ end
+ end
+ end
+ sequences[#sequences+1] = {
+ type = typ,
+ chain = chain,
+ flags = flags,
+ name = name,
+ subtables = subtables,
+ markclass = markclass,
+ features = f,
+ }
+ else
+ lookups[name] = {
+ type = typ,
+ chain = chain,
+ flags = flags,
+ subtables = subtables,
+ markclass = markclass,
+ }
+ end
+ end
+ end
+ end
+ end
+end
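+
+-- For reference, a sketch of what one resulting sequence entry can look like (the tag,
+-- script and lookup names are made up):
+--
+-- {
+--     type      = "gsub_ligature",
+--     chain     = 0,
+--     flags     = { false, false, false, false },
+--     name      = "some_ligature_lookup",
+--     subtables = { "some_ligature_lookup_subtable" },
+--     features  = { liga = { latn = { dflt = true } } },
+-- }
+--
+-- while a lookup that carries no features ends up in resources.lookups under its name.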
+
+-- test this:
+--
+-- for _, what in next, otf.glists do
+-- raw[what] = nil
+-- end
+
+actions["prepare lookups"] = function(data,filename,raw)
+ local lookups = raw.lookups
+ if lookups then
+ data.lookups = lookups
+ end
+end
+
+-- The reverse handler does a bit of redundant splitting, but it's seldom
+-- used so we don't bother too much. We could store the replacement
+-- in the current list (value instead of true) but that makes other code
+-- uglier. Maybe some day.
+
+local function t_uncover(splitter,cache,covers)
+ local result = { }
+ for n=1,#covers do
+ local cover = covers[n]
+ local uncovered = cache[cover]
+ if not uncovered then
+ uncovered = lpegmatch(splitter,cover)
+ cache[cover] = uncovered
+ end
+ result[n] = uncovered
+ end
+ return result
+end
+
+local function s_uncover(splitter,cache,cover)
+ if cover == "" then
+ return nil
+ else
+ local uncovered = cache[cover]
+ if not uncovered then
+ uncovered = lpegmatch(splitter,cover)
+-- for i=1,#uncovered do
+-- uncovered[i] = { [uncovered[i]] = true }
+-- end
+ cache[cover] = uncovered
+ end
+ return { uncovered }
+ end
+end
+
+local function t_hashed(t,cache)
+ if t then
+ local ht = { }
+ for i=1,#t do
+ local ti = t[i]
+ local tih = cache[ti]
+ if not tih then
+ tih = { }
+ for i=1,#ti do
+ tih[ti[i]] = true
+ end
+ cache[ti] = tih
+ end
+ ht[i] = tih
+ end
+ return ht
+ else
+ return nil
+ end
+end
+
+-- local s_hashed = t_hashed
+
+local function s_hashed(t,cache)
+ if t then
+ local ht = { }
+ local tf = t[1]
+ for i=1,#tf do
+ ht[i] = { [tf[i]] = true }
+ end
+ return ht
+ else
+ return nil
+ end
+end
+
+local function r_uncover(splitter,cache,cover,replacements)
+ if cover == "" then
+ return nil
+ else
+ -- we always have current as { } even in the case of one
+ local uncovered = cover[1]
+ local replaced = cache[replacements]
+ if not replaced then
+ replaced = lpegmatch(splitter,replacements)
+ cache[replacements] = replaced
+ end
+ local nu, nr = #uncovered, #replaced
+ local r = { }
+ if nu == nr then
+ for i=1,nu do
+ r[uncovered[i]] = replaced[i]
+ end
+ end
+ return r
+ end
+end
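+
+-- A rough data-shape sketch (glyph names and unicode values are made up): the helpers above
+-- turn the loader's space separated cover strings into unicode sets, e.g.
+--
+-- local u = t_uncover(splitter,cache,{ "a b", "c" }) -- { { 0x61, 0x62 }, { 0x63 } }
+-- local h = t_hashed(u,cache)                        -- { { [0x61]=true, [0x62]=true }, { [0x63]=true } }
+--
+-- and for a reversecoverage rule r_uncover pairs the (already uncovered) current cover one
+-- to one with the replacement string, giving something like { [0x61]=0xE001, [0x62]=0xE002 }.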
+
+actions["reorganize lookups"] = function(data,filename,raw) -- we could check for "" and n == 0
+ -- we prefer the before lookups in a normal order
+ if data.lookups then
+ local splitter = data.helpers.tounicodetable
+ local t_u_cache = { }
+ local s_u_cache = t_u_cache -- string keys
+ local t_h_cache = { }
+ local s_h_cache = t_h_cache -- table keys (so we could use one cache)
+ local r_u_cache = { } -- maybe shared
+ for _, lookup in next, data.lookups do
+ local rules = lookup.rules
+ if rules then
+ local format = lookup.format
+ if format == "class" then
+ local before_class = lookup.before_class
+ if before_class then
+ before_class = t_uncover(splitter,t_u_cache,reversed(before_class))
+ end
+ local current_class = lookup.current_class
+ if current_class then
+ current_class = t_uncover(splitter,t_u_cache,current_class)
+ end
+ local after_class = lookup.after_class
+ if after_class then
+ after_class = t_uncover(splitter,t_u_cache,after_class)
+ end
+ for i=1,#rules do
+ local rule = rules[i]
+ local class = rule.class
+ local before = class.before
+ if before then
+ for i=1,#before do
+ before[i] = before_class[before[i]] or { }
+ end
+ rule.before = t_hashed(before,t_h_cache)
+ end
+ local current = class.current
+ local lookups = rule.lookups
+ if current then
+ for i=1,#current do
+ current[i] = current_class[current[i]] or { }
+ -- let's not be sparse
+ if lookups and not lookups[i] then
+ lookups[i] = "" -- (was: false) e.g. we can have two lookups and one replacement
+ end
+ -- end of fix
+ end
+ rule.current = t_hashed(current,t_h_cache)
+ end
+ local after = class.after
+ if after then
+ for i=1,#after do
+ after[i] = after_class[after[i]] or { }
+ end
+ rule.after = t_hashed(after,t_h_cache)
+ end
+ rule.class = nil
+ end
+ lookup.before_class = nil
+ lookup.current_class = nil
+ lookup.after_class = nil
+ lookup.format = "coverage"
+ elseif format == "coverage" then
+ for i=1,#rules do
+ local rule = rules[i]
+ local coverage = rule.coverage
+ if coverage then
+ local before = coverage.before
+ if before then
+ before = t_uncover(splitter,t_u_cache,reversed(before))
+ rule.before = t_hashed(before,t_h_cache)
+ end
+ local current = coverage.current
+ if current then
+ current = t_uncover(splitter,t_u_cache,current)
+ -- let's not be sparse
+ local lookups = rule.lookups
+ if lookups then
+ for i=1,#current do
+ if not lookups[i] then
+ lookups[i] = "" -- fix sparse array
+ end
+ end
+ end
+ --
+ rule.current = t_hashed(current,t_h_cache)
+ end
+ local after = coverage.after
+ if after then
+ after = t_uncover(splitter,t_u_cache,after)
+ rule.after = t_hashed(after,t_h_cache)
+ end
+ rule.coverage = nil
+ end
+ end
+ elseif format == "reversecoverage" then -- special case, single substitution only
+ for i=1,#rules do
+ local rule = rules[i]
+ local reversecoverage = rule.reversecoverage
+ if reversecoverage then
+ local before = reversecoverage.before
+ if before then
+ before = t_uncover(splitter,t_u_cache,reversed(before))
+ rule.before = t_hashed(before,t_h_cache)
+ end
+ local current = reversecoverage.current
+ if current then
+ current = t_uncover(splitter,t_u_cache,current)
+ rule.current = t_hashed(current,t_h_cache)
+ end
+ local after = reversecoverage.after
+ if after then
+ after = t_uncover(splitter,t_u_cache,after)
+ rule.after = t_hashed(after,t_h_cache)
+ end
+ local replacements = reversecoverage.replacements
+ if replacements then
+ rule.replacements = r_uncover(splitter,r_u_cache,current,replacements)
+ end
+ rule.reversecoverage = nil
+ end
+ end
+ elseif format == "glyphs" then
+                    -- I could store these more efficiently (as we now use nested tables for before,
+                    -- after and current) but this feature happens so seldom that I don't bother
+                    -- about it right now.
+ for i=1,#rules do
+ local rule = rules[i]
+ local glyphs = rule.glyphs
+ if glyphs then
+ local fore = glyphs.fore
+ if fore and fore ~= "" then
+ fore = s_uncover(splitter,s_u_cache,fore)
+ rule.before = s_hashed(fore,s_h_cache)
+ end
+ local back = glyphs.back
+ if back then
+ back = s_uncover(splitter,s_u_cache,back)
+ rule.after = s_hashed(back,s_h_cache)
+ end
+ local names = glyphs.names
+ if names then
+ names = s_uncover(splitter,s_u_cache,names)
+ rule.current = s_hashed(names,s_h_cache)
+ end
+ rule.glyphs = nil
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
+local function check_variants(unicode,the_variants,splitter,unicodes)
+ local variants = the_variants.variants
+ if variants then -- use splitter
+ local glyphs = lpegmatch(splitter,variants)
+ local done = { [unicode] = true }
+ local n = 0
+ for i=1,#glyphs do
+ local g = glyphs[i]
+ if done[g] then
+ report_otf("skipping cyclic reference %U in math variant %U",g,unicode)
+ else
+ if n == 0 then
+ n = 1
+ variants = { g }
+ else
+ n = n + 1
+ variants[n] = g
+ end
+ done[g] = true
+ end
+ end
+ if n == 0 then
+ variants = nil
+ end
+ end
+ local parts = the_variants.parts
+ if parts then
+ local p = #parts
+ if p > 0 then
+ for i=1,p do
+ local pi = parts[i]
+ pi.glyph = unicodes[pi.component] or 0
+ pi.component = nil
+ end
+ else
+ parts = nil
+ end
+ end
+ local italic_correction = the_variants.italic_correction
+ if italic_correction and italic_correction == 0 then
+ italic_correction = nil
+ end
+ return variants, parts, italic_correction
+end
+
+actions["analyze math"] = function(data,filename,raw)
+ if raw.math then
+ data.metadata.math = raw.math
+ local unicodes = data.resources.unicodes
+ local splitter = data.helpers.tounicodetable
+ for unicode, description in next, data.descriptions do
+ local glyph = description.glyph
+ local mathkerns = glyph.mathkern -- singular
+ local horiz_variants = glyph.horiz_variants
+ local vert_variants = glyph.vert_variants
+ local top_accent = glyph.top_accent
+ if mathkerns or horiz_variants or vert_variants or top_accent then
+ local math = { }
+ if top_accent then
+ math.top_accent = top_accent
+ end
+ if mathkerns then
+ for k, v in next, mathkerns do
+ if not next(v) then
+ mathkerns[k] = nil
+ else
+ for k, v in next, v do
+ if v == 0 then
+ k[v] = nil -- height / kern can be zero
+ end
+ end
+ end
+ end
+ math.kerns = mathkerns
+ end
+ if horiz_variants then
+ math.horiz_variants, math.horiz_parts, math.horiz_italic_correction = check_variants(unicode,horiz_variants,splitter,unicodes)
+ end
+ if vert_variants then
+ math.vert_variants, math.vert_parts, math.vert_italic_correction = check_variants(unicode,vert_variants,splitter,unicodes)
+ end
+ local italic_correction = description.italic
+ if italic_correction and italic_correction ~= 0 then
+ math.italic_correction = italic_correction
+ end
+ description.math = math
+ end
+ end
+ end
+end
+
+actions["reorganize glyph kerns"] = function(data,filename,raw)
+ local descriptions = data.descriptions
+ local resources = data.resources
+ local unicodes = resources.unicodes
+ for unicode, description in next, descriptions do
+ local kerns = description.glyph.kerns
+ if kerns then
+ local newkerns = { }
+ for k, kern in next, kerns do
+ local name = kern.char
+ local offset = kern.off
+ local lookup = kern.lookup
+ if name and offset and lookup then
+ local unicode = unicodes[name]
+ if unicode then
+ if type(lookup) == "table" then
+ for l=1,#lookup do
+ local lookup = lookup[l]
+ local lookupkerns = newkerns[lookup]
+ if lookupkerns then
+ lookupkerns[unicode] = offset
+ else
+ newkerns[lookup] = { [unicode] = offset }
+ end
+ end
+ else
+ local lookupkerns = newkerns[lookup]
+ if lookupkerns then
+ lookupkerns[unicode] = offset
+ else
+ newkerns[lookup] = { [unicode] = offset }
+ end
+ end
+ elseif trace_loading then
+ report_otf("problems with unicode %a of kern %a of glyph %U",name,k,unicode)
+ end
+ end
+ end
+ description.kerns = newkerns
+ end
+ end
+end
+
+actions["merge kern classes"] = function(data,filename,raw)
+ local gposlist = raw.gpos
+ if gposlist then
+ local descriptions = data.descriptions
+ local resources = data.resources
+ local unicodes = resources.unicodes
+ local splitter = data.helpers.tounicodetable
+ for gp=1,#gposlist do
+ local gpos = gposlist[gp]
+ local subtables = gpos.subtables
+ if subtables then
+ for s=1,#subtables do
+ local subtable = subtables[s]
+ local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes
+ if kernclass then -- the next one is quite slow
+ local split = { } -- saves time
+ for k=1,#kernclass do
+ local kcl = kernclass[k]
+ local firsts = kcl.firsts
+ local seconds = kcl.seconds
+ local offsets = kcl.offsets
+ local lookups = kcl.lookup -- singular
+ if type(lookups) ~= "table" then
+ lookups = { lookups }
+ end
+ -- if offsets[1] == nil then
+ -- offsets[1] = ""
+ -- end
+ -- we can check the max in the loop
+ -- local maxseconds = getn(seconds)
+ for n, s in next, firsts do
+ split[s] = split[s] or lpegmatch(splitter,s)
+ end
+ local maxseconds = 0
+ for n, s in next, seconds do
+ if n > maxseconds then
+ maxseconds = n
+ end
+ split[s] = split[s] or lpegmatch(splitter,s)
+ end
+ for l=1,#lookups do
+ local lookup = lookups[l]
+ for fk=1,#firsts do -- maxfirsts ?
+ local fv = firsts[fk]
+ local splt = split[fv]
+ if splt then
+ local extrakerns = { }
+ local baseoffset = (fk-1) * maxseconds
+ for sk=2,maxseconds do -- will become 1 based in future luatex
+ local sv = seconds[sk]
+ -- for sk, sv in next, seconds do
+ local splt = split[sv]
+ if splt then -- redundant test
+ local offset = offsets[baseoffset + sk]
+ if offset then
+ for i=1,#splt do
+ extrakerns[splt[i]] = offset
+ end
+ end
+ end
+ end
+ for i=1,#splt do
+ local first_unicode = splt[i]
+ local description = descriptions[first_unicode]
+ if description then
+ local kerns = description.kerns
+ if not kerns then
+ kerns = { } -- unicode indexed !
+ description.kerns = kerns
+ end
+ local lookupkerns = kerns[lookup]
+ if not lookupkerns then
+ lookupkerns = { }
+ kerns[lookup] = lookupkerns
+ end
+ for second_unicode, kern in next, extrakerns do
+ lookupkerns[second_unicode] = kern
+ end
+ elseif trace_loading then
+ report_otf("no glyph data for %U", first_unicode)
+ end
+ end
+ end
+ end
+ end
+ end
+ subtable.kernclass = { }
+ end
+ end
+ end
+ end
+ end
+end
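+
+-- the offsets table of a kern class is a flattened matrix: the kern between first
+-- class fk and second class sk sits at offsets[(fk-1)*maxseconds+sk], which is what
+-- the baseoffset computation above relies on; for example (made-up numbers):
+--
+--   maxseconds = 3, fk = 2, sk = 2  =>  offsets[(2-1)*3+2] = offsets[5]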
+
+actions["check glyphs"] = function(data,filename,raw)
+ for unicode, description in next, data.descriptions do
+ description.glyph = nil
+ end
+end
+
+-- future versions will remove _
+
+actions["check metadata"] = function(data,filename,raw)
+ local metadata = data.metadata
+ for _, k in next, mainfields do
+ if valid_fields[k] then
+ local v = raw[k]
+ if not metadata[k] then
+ metadata[k] = v
+ end
+ end
+ end
+ -- metadata.pfminfo = raw.pfminfo -- not already done?
+ local ttftables = metadata.ttf_tables
+ if ttftables then
+ for i=1,#ttftables do
+ ttftables[i].data = "deleted"
+ end
+ end
+end
+
+actions["cleanup tables"] = function(data,filename,raw)
+ data.resources.indices = nil -- not needed
+ data.helpers = nil
+end
+
+-- kern: ttf has a table with kerns
+--
+-- Weird, as maxfirst and maxseconds can have holes: firsts seems to be indexed, but
+-- seconds can start at 2 .. this needs to be fixed as getn as well as # are sort of
+-- unpredictable; alternatively we could force a [1] if not set (maybe I will do that
+-- anyway).
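+--
+-- a minimal illustration (not part of the loader) of why # is unreliable here:
+--
+--   local seconds = { [2] = "a b", [3] = "c d" }  -- a class list with a hole at [1]
+--   print(#seconds)                               -- both 0 and 3 are valid answers
+--
+-- hence the explicit maxseconds bookkeeping in "merge kern classes" above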
+
+-- we can share { } as it is never set
+
+--- ligatures have an extra specification.char entry that we don't use
+
+actions["reorganize glyph lookups"] = function(data,filename,raw)
+ local resources = data.resources
+ local unicodes = resources.unicodes
+ local descriptions = data.descriptions
+ local splitter = data.helpers.tounicodelist
+
+ local lookuptypes = resources.lookuptypes
+
+ for unicode, description in next, descriptions do
+ local lookups = description.glyph.lookups
+ if lookups then
+ for tag, lookuplist in next, lookups do
+ for l=1,#lookuplist do
+ local lookup = lookuplist[l]
+ local specification = lookup.specification
+ local lookuptype = lookup.type
+ local lt = lookuptypes[tag]
+ if not lt then
+ lookuptypes[tag] = lookuptype
+ elseif lt ~= lookuptype then
+ report_otf("conflicting lookuptypes, %a points to %a and %a",tag,lt,lookuptype)
+ end
+ if lookuptype == "ligature" then
+ lookuplist[l] = { lpegmatch(splitter,specification.components) }
+ elseif lookuptype == "alternate" then
+ lookuplist[l] = { lpegmatch(splitter,specification.components) }
+ elseif lookuptype == "substitution" then
+ lookuplist[l] = unicodes[specification.variant]
+ elseif lookuptype == "multiple" then
+ lookuplist[l] = { lpegmatch(splitter,specification.components) }
+ elseif lookuptype == "position" then
+ lookuplist[l] = {
+ specification.x or 0,
+ specification.y or 0,
+ specification.h or 0,
+ specification.v or 0
+ }
+ elseif lookuptype == "pair" then
+ local one = specification.offsets[1]
+ local two = specification.offsets[2]
+ local paired = unicodes[specification.paired]
+ if one then
+ if two then
+ lookuplist[l] = { paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0 } }
+ else
+ lookuplist[l] = { paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 } }
+ end
+ else
+ if two then
+ lookuplist[l] = { paired, { }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0} } -- maybe nil instead of { }
+ else
+ lookuplist[l] = { paired }
+ end
+ end
+ end
+ end
+ end
+ local slookups, mlookups
+ for tag, lookuplist in next, lookups do
+ if #lookuplist == 1 then
+ if slookups then
+ slookups[tag] = lookuplist[1]
+ else
+ slookups = { [tag] = lookuplist[1] }
+ end
+ else
+ if mlookups then
+ mlookups[tag] = lookuplist
+ else
+ mlookups = { [tag] = lookuplist }
+ end
+ end
+ end
+ if slookups then
+ description.slookups = slookups
+ end
+ if mlookups then
+ description.mlookups = mlookups
+ end
+ end
+ end
+
+end
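+
+-- after this step a glyph carries its lookups in two buckets: slookups for tags with
+-- a single entry and mlookups for tags with several; a sketch with hypothetical tags:
+--
+--   description.slookups = { ss_l_1 = 0x0041 }                  -- single substitution
+--   description.mlookups = { ls_l_2 = { { 0x0066, 0x0069 } } }  -- ligature component lists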
+
+actions["reorganize glyph anchors"] = function(data,filename,raw) -- when we replace in place we save entries
+ local descriptions = data.descriptions
+ for unicode, description in next, descriptions do
+ local anchors = description.glyph.anchors
+ if anchors then
+ for class, data in next, anchors do
+ if class == "baselig" then
+ for tag, specification in next, data do
+ for i=1,#specification do
+ local si = specification[i]
+ specification[i] = { si.x or 0, si.y or 0 }
+ end
+ end
+ else
+ for tag, specification in next, data do
+ data[tag] = { specification.x or 0, specification.y or 0 }
+ end
+ end
+ end
+ description.anchors = anchors
+ end
+ end
+end
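+
+-- anchors are reduced to plain coordinate pairs here; a sketch with hypothetical
+-- anchor names:
+--
+--   description.anchors = { basechar = { ["Anchor-1"] = { 120, 540 } },
+--                           baselig  = { ["Anchor-2"] = { { 80, 0 }, { 240, 0 } } } }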
+
+-- modes: node, base, none
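+--
+-- (node stands for runtime processing of node lists, base for mapping a subset of
+-- features onto the traditional tfm ligature/kern tables at definition time, and
+-- none for no feature processing at all; the mode is selected with the "mode"
+-- feature that font-oti.lua below hooks into the initializers)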
+
+function otf.setfeatures(tfmdata,features)
+ local okay = constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf)
+ if okay then
+ return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf)
+ else
+ return { } -- will become false
+ end
+end
+
+-- the first version made a top/mid/not extensible table, now we just
+-- pass on the variants data and deal with it in the tfm scaler (there
+-- is no longer an extensible table anyway)
+--
+-- we cannot share descriptions as virtual fonts might extend them (ok,
+-- we could use a cache with a hash)
+--
+-- we already assign an empty table to characters as we can add for
+-- instance protruding info and loop over characters; one is not supposed
+-- to change descriptions and if one does so one should make a copy!
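+--
+-- schematically (sketch only) the scaler then sees chains built from the variants,
+-- with the last character in a chain carrying the parts:
+--
+--   characters[base].next  = size1
+--   characters[size1].next = size2
+--   characters[size2].vert_variants = { ... }   -- the vert_parts of the base glyph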
+
+local function copytotfm(data,cache_id)
+ if data then
+ local metadata = data.metadata
+ local resources = data.resources
+ local properties = derivetable(data.properties)
+ local descriptions = derivetable(data.descriptions)
+ local goodies = derivetable(data.goodies)
+ local characters = { }
+ local parameters = { }
+ local mathparameters = { }
+ --
+ local pfminfo = metadata.pfminfo or { }
+ local resources = data.resources
+ local unicodes = resources.unicodes
+ -- local mode = data.mode or "base"
+ local spaceunits = 500
+ local spacer = "space"
+ local designsize = metadata.designsize or metadata.design_size or 100
+ local mathspecs = metadata.math
+ --
+ if designsize == 0 then
+ designsize = 100
+ end
+ if mathspecs then
+ for name, value in next, mathspecs do
+ mathparameters[name] = value
+ end
+ end
+ for unicode, _ in next, data.descriptions do -- use parent table
+ characters[unicode] = { }
+ end
+ if mathspecs then
+ -- we could move this to the scaler but not that much is saved
+ -- and this is cleaner
+ for unicode, character in next, characters do
+ local d = descriptions[unicode]
+ local m = d.math
+ if m then
+ -- watch out: luatex uses horiz_variants for the parts
+ local variants = m.horiz_variants
+ local parts = m.horiz_parts
+ -- local done = { [unicode] = true }
+ if variants then
+ local c = character
+ for i=1,#variants do
+ local un = variants[i]
+ -- if done[un] then
+ -- -- report_otf("skipping cyclic reference %U in math variant %U",un,unicode)
+ -- else
+ c.next = un
+ c = characters[un]
+ -- done[un] = true
+ -- end
+ end -- c is now last in chain
+ c.horiz_variants = parts
+ elseif parts then
+ character.horiz_variants = parts
+ end
+ local variants = m.vert_variants
+ local parts = m.vert_parts
+ -- local done = { [unicode] = true }
+ if variants then
+ local c = character
+ for i=1,#variants do
+ local un = variants[i]
+ -- if done[un] then
+ -- -- report_otf("skipping cyclic reference %U in math variant %U",un,unicode)
+ -- else
+ c.next = un
+ c = characters[un]
+ -- done[un] = true
+ -- end
+ end -- c is now last in chain
+ c.vert_variants = parts
+ elseif parts then
+ character.vert_variants = parts
+ end
+ local italic_correction = m.vert_italic_correction
+ if italic_correction then
+ character.vert_italic_correction = italic_correction -- was c.
+ end
+ local top_accent = m.top_accent
+ if top_accent then
+ character.top_accent = top_accent
+ end
+ local kerns = m.kerns
+ if kerns then
+ character.mathkerns = kerns
+ end
+ end
+ end
+ end
+ -- end math
+ local monospaced = metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion == "Monospaced")
+ local charwidth = pfminfo.avgwidth -- or unset
+ local italicangle = metadata.italicangle
+ local charxheight = pfminfo.os2_xheight and pfminfo.os2_xheight > 0 and pfminfo.os2_xheight
+ properties.monospaced = monospaced
+ parameters.italicangle = italicangle
+ parameters.charwidth = charwidth
+ parameters.charxheight = charxheight
+ --
+ local space = 0x0020 -- unicodes['space'], unicodes['emdash']
+ local emdash = 0x2014 -- unicodes['space'], unicodes['emdash']
+ if monospaced then
+ if descriptions[space] then
+ spaceunits, spacer = descriptions[space].width, "space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits, spacer = descriptions[emdash].width, "emdash"
+ end
+ if not spaceunits and charwidth then
+ spaceunits, spacer = charwidth, "charwidth"
+ end
+ else
+ if descriptions[space] then
+ spaceunits, spacer = descriptions[space].width, "space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits, spacer = descriptions[emdash].width/2, "emdash/2"
+ end
+ if not spaceunits and charwidth then
+ spaceunits, spacer = charwidth, "charwidth"
+ end
+ end
+ spaceunits = tonumber(spaceunits) or 500 -- brrr
+ -- we need a runtime lookup because of running from cdrom or zip, brrr (shouldn't we use the basename then?)
+ local filename = constructors.checkedfilename(resources)
+ local fontname = metadata.fontname
+ local fullname = metadata.fullname or fontname
+ local units = metadata.units_per_em or 1000
+ --
+ if units == 0 then -- catch bugs in fonts
+ units = 1000
+ metadata.units_per_em = 1000
+ end
+ --
+ parameters.slant = 0
+ parameters.space = spaceunits -- 3.333 (cmr10)
+ parameters.space_stretch = units/2 -- 500 -- 1.666 (cmr10)
+ parameters.space_shrink = 1*units/3 -- 333 -- 1.111 (cmr10)
+ parameters.x_height = 2*units/5 -- 400
+ parameters.quad = units -- 1000
+ if spaceunits < 2*units/5 then
+ -- todo: warning
+ end
+ if italicangle then
+ parameters.italicangle = italicangle
+ parameters.italicfactor = math.cos(math.rad(90+italicangle))
+ parameters.slant = - math.round(math.tan(italicangle*math.pi/180))
+ end
+ if monospaced then
+ parameters.space_stretch = 0
+ parameters.space_shrink = 0
+ elseif syncspace then --
+ parameters.space_stretch = spaceunits/2
+ parameters.space_shrink = spaceunits/3
+ end
+ parameters.extra_space = parameters.space_shrink -- 1.111 (cmr10)
+ if charxheight then
+ parameters.x_height = charxheight
+ else
+ local x = 0x78 -- unicodes['x']
+ if x then
+ local x = descriptions[x]
+ if x then
+ parameters.x_height = x.height
+ end
+ end
+ end
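+ -- for a non-monospaced font with the usual units == 1000, spaceunits == 500 and
+ -- syncspace set, the above yields space = 500, space_stretch = 250,
+ -- space_shrink = 500/3, quad = 1000 and x_height = 400 unless charxheight or the
+ -- height of the 'x' glyph overrides it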
+ --
+ parameters.designsize = (designsize/10)*65536
+ parameters.ascender = abs(metadata.ascent or 0)
+ parameters.descender = abs(metadata.descent or 0)
+ parameters.units = units
+ --
+ properties.space = spacer
+ properties.encodingbytes = 2
+ properties.format = data.format or fonts.formats[filename] or "opentype"
+ properties.noglyphnames = true
+ properties.filename = filename
+ properties.fontname = fontname
+ properties.fullname = fullname
+ properties.psname = fontname or fullname
+ properties.name = filename or fullname
+ --
+ -- properties.name = specification.name
+ -- properties.sub = specification.sub
+ return {
+ characters = characters,
+ descriptions = descriptions,
+ parameters = parameters,
+ mathparameters = mathparameters,
+ resources = resources,
+ properties = properties,
+ goodies = goodies,
+ }
+ end
+end
+
+local function otftotfm(specification)
+ local cache_id = specification.hash
+ local tfmdata = containers.read(constructors.cache,cache_id)
+ if not tfmdata then
+ local name = specification.name
+ local sub = specification.sub
+ local filename = specification.filename
+ local format = specification.format
+ local features = specification.features.normal
+ local rawdata = otf.load(filename,format,sub,features and features.featurefile)
+ if rawdata and next(rawdata) then
+ rawdata.lookuphash = { }
+ tfmdata = copytotfm(rawdata,cache_id)
+ if tfmdata and next(tfmdata) then
+ -- at this moment no characters are assigned yet, only empty slots
+ local features = constructors.checkedfeatures("otf",features)
+ local shared = tfmdata.shared
+ if not shared then
+ shared = { }
+ tfmdata.shared = shared
+ end
+ shared.rawdata = rawdata
+ -- shared.features = features -- default
+ shared.dynamics = { }
+ -- shared.processes = { }
+ tfmdata.changed = { }
+ shared.features = features
+ shared.processes = otf.setfeatures(tfmdata,features)
+ end
+ end
+ containers.write(constructors.cache,cache_id,tfmdata)
+ end
+ return tfmdata
+end
+
+local function read_from_otf(specification)
+ local tfmdata = otftotfm(specification)
+ if tfmdata then
+ -- this late ? .. needs checking
+ tfmdata.properties.name = specification.name
+ tfmdata.properties.sub = specification.sub
+ --
+ tfmdata = constructors.scale(tfmdata,specification)
+ local allfeatures = tfmdata.shared.features or specification.features.normal
+ constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf)
+ constructors.setname(tfmdata,specification) -- only otf?
+ fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification)
+ end
+ return tfmdata
+end
+
+local function checkmathsize(tfmdata,mathsize)
+ local mathdata = tfmdata.shared.rawdata.metadata.math
+ local mathsize = tonumber(mathsize)
+ if mathdata then -- we cannot use mathparameters as luatex will complain
+ local parameters = tfmdata.parameters
+ parameters.scriptpercentage = mathdata.ScriptPercentScaleDown
+ parameters.scriptscriptpercentage = mathdata.ScriptScriptPercentScaleDown
+ parameters.mathsize = mathsize
+ end
+end
+
+registerotffeature {
+ name = "mathsize",
+ description = "apply mathsize specified in the font",
+ initializers = {
+ base = checkmathsize,
+ node = checkmathsize,
+ }
+}
+
+-- helpers
+
+function otf.collectlookups(rawdata,kind,script,language)
+ local sequences = rawdata.resources.sequences
+ if sequences then
+ local featuremap, featurelist = { }, { }
+ for s=1,#sequences do
+ local sequence = sequences[s]
+ local features = sequence.features
+ features = features and features[kind]
+ features = features and (features[script] or features[default] or features[wildcard])
+ features = features and (features[language] or features[default] or features[wildcard])
+ if features then
+ local subtables = sequence.subtables
+ if subtables then
+ for s=1,#subtables do
+ local ss = subtables[s]
+ if not featuremap[ss] then -- key on the subtable name so duplicates are skipped
+ featuremap[ss] = true
+ featurelist[#featurelist+1] = ss
+ end
+ end
+ end
+ end
+ end
+ if #featurelist > 0 then
+ return featuremap, featurelist
+ end
+ end
+ return nil, nil
+end
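+
+-- collectlookups returns a set plus an ordered list of the subtable names that
+-- implement the given feature for the current script and language; a typical use
+-- (see otf.getalternate in font-oth.lua further down) looks roughly like:
+--
+--   local validlookups, lookuplist = otf.collectlookups(rawdata,"salt",script,language)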
+
+-- readers
+
+local function check_otf(forced,specification,suffix,what)
+ local name = specification.name
+ if forced then
+ name = file.addsuffix(name,suffix,true)
+ end
+ local fullname = findbinfile(name,suffix) or ""
+ if fullname == "" then
+ fullname = fonts.names.getfilename(name,suffix) or ""
+ end
+ if fullname ~= "" then
+ specification.filename = fullname
+ specification.format = what
+ return read_from_otf(specification)
+ end
+end
+
+local function opentypereader(specification,suffix,what)
+ local forced = specification.forced or ""
+ if forced == "otf" then
+ return check_otf(true,specification,forced,"opentype")
+ elseif forced == "ttf" or forced == "ttc" or forced == "dfont" then
+ return check_otf(true,specification,forced,"truetype")
+ else
+ return check_otf(false,specification,suffix,what)
+ end
+end
+
+readers.opentype = opentypereader
+
+local formats = fonts.formats
+
+formats.otf = "opentype"
+formats.ttf = "truetype"
+formats.ttc = "truetype"
+formats.dfont = "truetype"
+
+function readers.otf (specification) return opentypereader(specification,"otf",formats.otf ) end
+function readers.ttf (specification) return opentypereader(specification,"ttf",formats.ttf ) end
+function readers.ttc (specification) return opentypereader(specification,"ttf",formats.ttc ) end
+function readers.dfont(specification) return opentypereader(specification,"ttf",formats.dfont) end
+
+-- this will be overloaded
+
+function otf.scriptandlanguage(tfmdata,attr)
+ local properties = tfmdata.properties
+ return properties.script or "dflt", properties.language or "dflt"
+end
diff --git a/tex/context/base/font-oth.lua b/tex/context/base/font-oth.lua
index 59dca31d9..5e2e567da 100644
--- a/tex/context/base/font-oth.lua
+++ b/tex/context/base/font-oth.lua
@@ -1,51 +1,51 @@
-if not modules then modules = { } end modules ['font-oth'] = {
- version = 1.001,
- comment = "companion to font-oth.lua (helpers)",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local fonts = fonts
-local otf = fonts.handlers.otf
-
--- todo: use nodemode data is available
-
-function otf.getalternate(tfmdata,k,kind,value) -- just initialize nodemode and use that (larger mem print)
- if value then
- local description = tfmdata.descriptions[k]
- if description then
- local slookups = description.slookups -- we assume only slookups (we can always extend)
- if slookups then
- local shared = tfmdata.shared
- local rawdata = shared and shared.rawdata
- if rawdata then
- local lookuptypes = rawdata.resources.lookuptypes
- if lookuptypes then
- local properties = tfmdata.properties
- -- we could cache these
- local validlookups, lookuplist = otf.collectlookups(rawdata,kind,properties.script,properties.language)
- if validlookups then
- local choice = tonumber(value) or 1 -- no random here (yet)
- for l=1,#lookuplist do
- local lookup = lookuplist[l]
- local found = slookups[lookup]
- if found then
- local lookuptype = lookuptypes[lookup]
- if lookuptype == "substitution" then
- return found
- elseif lookuptype == "alternate" then
- return found[choice] or found[#found]
- else
- -- ignore
- end
- end
- end
- end
- end
- end
- end
- end
- end
- return k
-end
+if not modules then modules = { } end modules ['font-oth'] = {
+ version = 1.001,
+ comment = "companion to font-oth.lua (helpers)",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local fonts = fonts
+local otf = fonts.handlers.otf
+
+-- todo: use nodemode data if available
+
+function otf.getalternate(tfmdata,k,kind,value) -- just initialize nodemode and use that (at the cost of a larger memory footprint)
+ if value then
+ local description = tfmdata.descriptions[k]
+ if description then
+ local slookups = description.slookups -- we assume only slookups (we can always extend)
+ if slookups then
+ local shared = tfmdata.shared
+ local rawdata = shared and shared.rawdata
+ if rawdata then
+ local lookuptypes = rawdata.resources.lookuptypes
+ if lookuptypes then
+ local properties = tfmdata.properties
+ -- we could cache these
+ local validlookups, lookuplist = otf.collectlookups(rawdata,kind,properties.script,properties.language)
+ if validlookups then
+ local choice = tonumber(value) or 1 -- no random here (yet)
+ for l=1,#lookuplist do
+ local lookup = lookuplist[l]
+ local found = slookups[lookup]
+ if found then
+ local lookuptype = lookuptypes[lookup]
+ if lookuptype == "substitution" then
+ return found
+ elseif lookuptype == "alternate" then
+ return found[choice] or found[#found]
+ else
+ -- ignore
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ return k
+end
diff --git a/tex/context/base/font-oti.lua b/tex/context/base/font-oti.lua
index 06c2a42fa..e33b57a6f 100644
--- a/tex/context/base/font-oti.lua
+++ b/tex/context/base/font-oti.lua
@@ -1,91 +1,91 @@
-if not modules then modules = { } end modules ['font-oti'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local lower = string.lower
-
-local fonts = fonts
-local constructors = fonts.constructors
-
-local otf = constructors.newhandler("otf")
-local otffeatures = constructors.newfeatures("otf")
-local otftables = otf.tables
-local registerotffeature = otffeatures.register
-
-local allocate = utilities.storage.allocate
-
-registerotffeature {
- name = "features",
- description = "initialization of feature handler",
- default = true,
-}
-
--- these are later hooked into node and base initializaters
-
-local function setmode(tfmdata,value)
- if value then
- tfmdata.properties.mode = lower(value)
- end
-end
-
-local function setlanguage(tfmdata,value)
- if value then
- local cleanvalue = lower(value)
- local languages = otftables and otftables.languages
- local properties = tfmdata.properties
- if not languages then
- properties.language = cleanvalue
- elseif languages[value] then
- properties.language = cleanvalue
- else
- properties.language = "dflt"
- end
- end
-end
-
-local function setscript(tfmdata,value)
- if value then
- local cleanvalue = lower(value)
- local scripts = otftables and otftables.scripts
- local properties = tfmdata.properties
- if not scripts then
- properties.script = cleanvalue
- elseif scripts[value] then
- properties.script = cleanvalue
- else
- properties.script = "dflt"
- end
- end
-end
-
-registerotffeature {
- name = "mode",
- description = "mode",
- initializers = {
- base = setmode,
- node = setmode,
- }
-}
-
-registerotffeature {
- name = "language",
- description = "language",
- initializers = {
- base = setlanguage,
- node = setlanguage,
- }
-}
-
-registerotffeature {
- name = "script",
- description = "script",
- initializers = {
- base = setscript,
- node = setscript,
- }
-}
-
+if not modules then modules = { } end modules ['font-oti'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local lower = string.lower
+
+local fonts = fonts
+local constructors = fonts.constructors
+
+local otf = constructors.newhandler("otf")
+local otffeatures = constructors.newfeatures("otf")
+local otftables = otf.tables
+local registerotffeature = otffeatures.register
+
+local allocate = utilities.storage.allocate
+
+registerotffeature {
+ name = "features",
+ description = "initialization of feature handler",
+ default = true,
+}
+
+-- these are later hooked into node and base initializers
+
+local function setmode(tfmdata,value)
+ if value then
+ tfmdata.properties.mode = lower(value)
+ end
+end
+
+local function setlanguage(tfmdata,value)
+ if value then
+ local cleanvalue = lower(value)
+ local languages = otftables and otftables.languages
+ local properties = tfmdata.properties
+ if not languages then
+ properties.language = cleanvalue
+ elseif languages[value] then
+ properties.language = cleanvalue
+ else
+ properties.language = "dflt"
+ end
+ end
+end
+
+local function setscript(tfmdata,value)
+ if value then
+ local cleanvalue = lower(value)
+ local scripts = otftables and otftables.scripts
+ local properties = tfmdata.properties
+ if not scripts then
+ properties.script = cleanvalue
+ elseif scripts[value] then
+ properties.script = cleanvalue
+ else
+ properties.script = "dflt"
+ end
+ end
+end
+
+registerotffeature {
+ name = "mode",
+ description = "mode",
+ initializers = {
+ base = setmode,
+ node = setmode,
+ }
+}
+
+registerotffeature {
+ name = "language",
+ description = "language",
+ initializers = {
+ base = setlanguage,
+ node = setlanguage,
+ }
+}
+
+registerotffeature {
+ name = "script",
+ description = "script",
+ initializers = {
+ base = setscript,
+ node = setscript,
+ }
+}
+
diff --git a/tex/context/base/font-otp.lua b/tex/context/base/font-otp.lua
index 217bb7535..f0c2edd86 100644
--- a/tex/context/base/font-otp.lua
+++ b/tex/context/base/font-otp.lua
@@ -1,877 +1,877 @@
-if not modules then modules = { } end modules ['font-otp'] = {
- version = 1.001,
- comment = "companion to font-otf.lua (packing)",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- todo: pack math (but not that much to share)
---
--- pitfall 5.2: hashed tables can suddenly become indexed with nil slots
-
-local next, type = next, type
-local sort, concat = table.sort, table.concat
-local sortedhash = table.sortedhash
-
-local trace_packing = false trackers.register("otf.packing", function(v) trace_packing = v end)
-local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
-
-local report_otf = logs.reporter("fonts","otf loading")
-
--- also used in other scripts so we need to check some tables:
-
-fonts = fonts or { }
-
-local handlers = fonts.handlers or { }
-fonts.handlers = handlers
-
-local otf = handlers.otf or { }
-handlers.otf = otf
-
-local enhancers = otf.enhancers or { }
-otf.enhancers = enhancers
-
-local glists = otf.glists or { "gsub", "gpos" }
-otf.glists = glists
-
-local criterium = 1
-local threshold = 0
-
-local function tabstr_normal(t)
- local s = { }
- local n = 0
- for k, v in next, t do
- n = n + 1
- if type(v) == "table" then
- s[n] = k .. ">" .. tabstr_normal(v)
- elseif v == true then
- s[n] = k .. "+" -- "=true"
- elseif v then
- s[n] = k .. "=" .. v
- else
- s[n] = k .. "-" -- "=false"
- end
- end
- if n == 0 then
- return ""
- elseif n == 1 then
- return s[1]
- else
- sort(s) -- costly but needed (occasional wrong hit otherwise)
- return concat(s,",")
- end
-end
-
-local function tabstr_flat(t)
- local s = { }
- local n = 0
- for k, v in next, t do
- n = n + 1
- s[n] = k .. "=" .. v
- end
- if n == 0 then
- return ""
- elseif n == 1 then
- return s[1]
- else
- sort(s) -- costly but needed (occasional wrong hit otherwise)
- return concat(s,",")
- end
-end
-
-local function tabstr_mixed(t) -- indexed
- local s = { }
- local n = #t
- if n == 0 then
- return ""
- elseif n == 1 then
- local k = t[1]
- if k == true then
- return "++" -- we need to distinguish from "true"
- elseif k == false then
- return "--" -- we need to distinguish from "false"
- else
- return tostring(k) -- number or string
- end
- else
- for i=1,n do
- local k = t[i]
- if k == true then
- s[i] = "++" -- we need to distinguish from "true"
- elseif k == false then
- s[i] = "--" -- we need to distinguish from "false"
- else
- s[i] = k -- number or string
- end
- end
- return concat(s,",")
- end
-end
-
-local function tabstr_boolean(t)
- local s = { }
- local n = 0
- for k, v in next, t do
- n = n + 1
- if v then
- s[n] = k .. "+"
- else
- s[n] = k .. "-"
- end
- end
- if n == 0 then
- return ""
- elseif n == 1 then
- return s[1]
- else
- sort(s) -- costly but needed (occasional wrong hit otherwise)
- return concat(s,",")
- end
-end
-
--- tabstr_boolean_x = tabstr_boolean
-
--- tabstr_boolean = function(t)
--- local a = tabstr_normal(t)
--- local b = tabstr_boolean_x(t)
--- print(a)
--- print(b)
--- return b
--- end
-
-local function packdata(data)
- if data then
- -- stripdata(data)
- local h, t, c = { }, { }, { }
- local hh, tt, cc = { }, { }, { }
- local nt, ntt = 0, 0
- local function pack_normal(v)
- local tag = tabstr_normal(v)
- local ht = h[tag]
- if ht then
- c[ht] = c[ht] + 1
- return ht
- else
- nt = nt + 1
- t[nt] = v
- h[tag] = nt
- c[nt] = 1
- return nt
- end
- end
- local function pack_flat(v)
- local tag = tabstr_flat(v)
- local ht = h[tag]
- if ht then
- c[ht] = c[ht] + 1
- return ht
- else
- nt = nt + 1
- t[nt] = v
- h[tag] = nt
- c[nt] = 1
- return nt
- end
- end
- local function pack_boolean(v)
- local tag = tabstr_boolean(v)
- local ht = h[tag]
- if ht then
- c[ht] = c[ht] + 1
- return ht
- else
- nt = nt + 1
- t[nt] = v
- h[tag] = nt
- c[nt] = 1
- return nt
- end
- end
- local function pack_indexed(v)
- local tag = concat(v," ")
- local ht = h[tag]
- if ht then
- c[ht] = c[ht] + 1
- return ht
- else
- nt = nt + 1
- t[nt] = v
- h[tag] = nt
- c[nt] = 1
- return nt
- end
- end
- local function pack_mixed(v)
- local tag = tabstr_mixed(v)
- local ht = h[tag]
- if ht then
- c[ht] = c[ht] + 1
- return ht
- else
- nt = nt + 1
- t[nt] = v
- h[tag] = nt
- c[nt] = 1
- return nt
- end
- end
- local function pack_final(v)
- -- v == number
- if c[v] <= criterium then
- return t[v]
- else
- -- compact hash
- local hv = hh[v]
- if hv then
- return hv
- else
- ntt = ntt + 1
- tt[ntt] = t[v]
- hh[v] = ntt
- cc[ntt] = c[v]
- return ntt
- end
- end
- end
- local function success(stage,pass)
- if nt == 0 then
- if trace_loading or trace_packing then
- report_otf("pack quality: nothing to pack")
- end
- return false
- elseif nt >= threshold then
- local one, two, rest = 0, 0, 0
- if pass == 1 then
- for k,v in next, c do
- if v == 1 then
- one = one + 1
- elseif v == 2 then
- two = two + 1
- else
- rest = rest + 1
- end
- end
- else
- for k,v in next, cc do
- if v > 20 then
- rest = rest + 1
- elseif v > 10 then
- two = two + 1
- else
- one = one + 1
- end
- end
- data.tables = tt
- end
- if trace_loading or trace_packing then
- report_otf("pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)", stage, pass, one+two+rest, one, two, rest, criterium)
- end
- return true
- else
- if trace_loading or trace_packing then
- report_otf("pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)", stage, pass, nt, threshold)
- end
- return false
- end
- end
- local function packers(pass)
- if pass == 1 then
- return pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed
- else
- return pack_final, pack_final, pack_final, pack_final, pack_final
- end
- end
- local resources = data.resources
- local lookuptypes = resources.lookuptypes
- for pass=1,2 do
- if trace_packing then
- report_otf("start packing: stage 1, pass %s",pass)
- end
- local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass)
- for unicode, description in next, data.descriptions do
- local boundingbox = description.boundingbox
- if boundingbox then
- description.boundingbox = pack_indexed(boundingbox)
- end
- local slookups = description.slookups
- if slookups then
- for tag, slookup in next, slookups do
- local what = lookuptypes[tag]
- if what == "pair" then
- local t = slookup[2] if t then slookup[2] = pack_indexed(t) end
- local t = slookup[3] if t then slookup[3] = pack_indexed(t) end
- elseif what ~= "substitution" then
- slookups[tag] = pack_indexed(slookup) -- true is new
- end
- end
- end
- local mlookups = description.mlookups
- if mlookups then
- for tag, mlookup in next, mlookups do
- local what = lookuptypes[tag]
- if what == "pair" then
- for i=1,#mlookup do
- local lookup = mlookup[i]
- local t = lookup[2] if t then lookup[2] = pack_indexed(t) end
- local t = lookup[3] if t then lookup[3] = pack_indexed(t) end
- end
- elseif what ~= "substitution" then
- for i=1,#mlookup do
- mlookup[i] = pack_indexed(mlookup[i]) -- true is new
- end
- end
- end
- end
- local kerns = description.kerns
- if kerns then
- for tag, kern in next, kerns do
- kerns[tag] = pack_flat(kern)
- end
- end
- local math = description.math
- if math then
- local kerns = math.kerns
- if kerns then
- for tag, kern in next, kerns do
- kerns[tag] = pack_normal(kern)
- end
- end
- end
- local anchors = description.anchors
- if anchors then
- for what, anchor in next, anchors do
- if what == "baselig" then
- for _, a in next, anchor do
- for k=1,#a do
- a[k] = pack_indexed(a[k])
- end
- end
- else
- for k, v in next, anchor do
- anchor[k] = pack_indexed(v)
- end
- end
- end
- end
- local altuni = description.altuni
- if altuni then
- for i=1,#altuni do
- altuni[i] = pack_flat(altuni[i])
- end
- end
- end
- local lookups = data.lookups
- if lookups then
- for _, lookup in next, lookups do
- local rules = lookup.rules
- if rules then
- for i=1,#rules do
- local rule = rules[i]
- local r = rule.before if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end
- local r = rule.after if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end
- local r = rule.current if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end
- local r = rule.replacements if r then rule.replacements = pack_flat (r) end -- can have holes
- local r = rule.lookups if r then rule.lookups = pack_indexed(r) end -- can have ""
- -- local r = rule.lookups if r then rule.lookups = pack_flat(r) end -- can have holes (already taken care of some cases)
- end
- end
- end
- end
- local anchor_to_lookup = resources.anchor_to_lookup
- if anchor_to_lookup then
- for anchor, lookup in next, anchor_to_lookup do
- anchor_to_lookup[anchor] = pack_normal(lookup)
- end
- end
- local lookup_to_anchor = resources.lookup_to_anchor
- if lookup_to_anchor then
- for lookup, anchor in next, lookup_to_anchor do
- lookup_to_anchor[lookup] = pack_normal(anchor)
- end
- end
- local sequences = resources.sequences
- if sequences then
- for feature, sequence in next, sequences do
- local flags = sequence.flags
- if flags then
- sequence.flags = pack_normal(flags)
- end
- local subtables = sequence.subtables
- if subtables then
- sequence.subtables = pack_normal(subtables)
- end
- local features = sequence.features
- if features then
- for script, feature in next, features do
- features[script] = pack_normal(feature)
- end
- end
- end
- end
- local lookups = resources.lookups
- if lookups then
- for name, lookup in next, lookups do
- local flags = lookup.flags
- if flags then
- lookup.flags = pack_normal(flags)
- end
- local subtables = lookup.subtables
- if subtables then
- lookup.subtables = pack_normal(subtables)
- end
- end
- end
- local features = resources.features
- if features then
- for _, what in next, glists do
- local list = features[what]
- if list then
- for feature, spec in next, list do
- list[feature] = pack_normal(spec)
- end
- end
- end
- end
- if not success(1,pass) then
- return
- end
- end
- if nt > 0 then
- for pass=1,2 do
- if trace_packing then
- report_otf("start packing: stage 2, pass %s",pass)
- end
- local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass)
- for unicode, description in next, data.descriptions do
- local kerns = description.kerns
- if kerns then
- description.kerns = pack_normal(kerns)
- end
- local math = description.math
- if math then
- local kerns = math.kerns
- if kerns then
- math.kerns = pack_normal(kerns)
- end
- end
- local anchors = description.anchors
- if anchors then
- description.anchors = pack_normal(anchors)
- end
- local mlookups = description.mlookups
- if mlookups then
- for tag, mlookup in next, mlookups do
- mlookups[tag] = pack_normal(mlookup)
- end
- end
- local altuni = description.altuni
- if altuni then
- description.altuni = pack_normal(altuni)
- end
- end
- local lookups = data.lookups
- if lookups then
- for _, lookup in next, lookups do
- local rules = lookup.rules
- if rules then
- for i=1,#rules do -- was next loop
- local rule = rules[i]
- local r = rule.before if r then rule.before = pack_normal(r) end
- local r = rule.after if r then rule.after = pack_normal(r) end
- local r = rule.current if r then rule.current = pack_normal(r) end
- end
- end
- end
- end
- local sequences = resources.sequences
- if sequences then
- for feature, sequence in next, sequences do
- sequence.features = pack_normal(sequence.features)
- end
- end
- if not success(2,pass) then
- -- return
- end
- end
-
- for pass=1,2 do
- local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass)
- for unicode, description in next, data.descriptions do
- local slookups = description.slookups
- if slookups then
- description.slookups = pack_normal(slookups)
- end
- local mlookups = description.mlookups
- if mlookups then
- description.mlookups = pack_normal(mlookups)
- end
- end
- end
-
- end
- end
-end
-
-local unpacked_mt = {
- __index =
- function(t,k)
- t[k] = false
- return k -- next time true
- end
-}
-
-local function unpackdata(data)
- if data then
- local tables = data.tables
- if tables then
- local resources = data.resources
- local lookuptypes = resources.lookuptypes
- local unpacked = { }
- setmetatable(unpacked,unpacked_mt)
- for unicode, description in next, data.descriptions do
- local tv = tables[description.boundingbox]
- if tv then
- description.boundingbox = tv
- end
- local slookups = description.slookups
- if slookups then
- local tv = tables[slookups]
- if tv then
- description.slookups = tv
- slookups = unpacked[tv]
- end
- if slookups then
- for tag, lookup in next, slookups do
- local what = lookuptypes[tag]
- if what == "pair" then
- local tv = tables[lookup[2]]
- if tv then
- lookup[2] = tv
- end
- local tv = tables[lookup[3]]
- if tv then
- lookup[3] = tv
- end
- elseif what ~= "substitution" then
- local tv = tables[lookup]
- if tv then
- slookups[tag] = tv
- end
- end
- end
- end
- end
- local mlookups = description.mlookups
- if mlookups then
- local tv = tables[mlookups]
- if tv then
- description.mlookups = tv
- mlookups = unpacked[tv]
- end
- if mlookups then
- for tag, list in next, mlookups do
- local tv = tables[list]
- if tv then
- mlookups[tag] = tv
- list = unpacked[tv]
- end
- if list then
- local what = lookuptypes[tag]
- if what == "pair" then
- for i=1,#list do
- local lookup = list[i]
- local tv = tables[lookup[2]]
- if tv then
- lookup[2] = tv
- end
- local tv = tables[lookup[3]]
- if tv then
- lookup[3] = tv
- end
- end
- elseif what ~= "substitution" then
- for i=1,#list do
- local tv = tables[list[i]]
- if tv then
- list[i] = tv
- end
- end
- end
- end
- end
- end
- end
- local kerns = description.kerns
- if kerns then
- local tm = tables[kerns]
- if tm then
- description.kerns = tm
- kerns = unpacked[tm]
- end
- if kerns then
- for k, kern in next, kerns do
- local tv = tables[kern]
- if tv then
- kerns[k] = tv
- end
- end
- end
- end
- local math = description.math
- if math then
- local kerns = math.kerns
- if kerns then
- local tm = tables[kerns]
- if tm then
- math.kerns = tm
- kerns = unpacked[tm]
- end
- if kerns then
- for k, kern in next, kerns do
- local tv = tables[kern]
- if tv then
- kerns[k] = tv
- end
- end
- end
- end
- end
- local anchors = description.anchors
- if anchors then
- local ta = tables[anchors]
- if ta then
- description.anchors = ta
- anchors = unpacked[ta]
- end
- if anchors then
- for tag, anchor in next, anchors do
- if tag == "baselig" then
- for _, list in next, anchor do
- for i=1,#list do
- local tv = tables[list[i]]
- if tv then
- list[i] = tv
- end
- end
- end
- else
- for a, data in next, anchor do
- local tv = tables[data]
- if tv then
- anchor[a] = tv
- end
- end
- end
- end
- end
- end
- local altuni = description.altuni
- if altuni then
- local altuni = tables[altuni]
- if altuni then
- description.altuni = altuni
- for i=1,#altuni do
- local tv = tables[altuni[i]]
- if tv then
- altuni[i] = tv
- end
- end
- end
- end
- end
- local lookups = data.lookups
- if lookups then
- for _, lookup in next, lookups do
- local rules = lookup.rules
- if rules then
- for i=1,#rules do -- was next loop
- local rule = rules[i]
- local before = rule.before
- if before then
- local tv = tables[before]
- if tv then
- rule.before = tv
- before = unpacked[tv]
- end
- if before then
- for i=1,#before do
- local tv = tables[before[i]]
- if tv then
- before[i] = tv
- end
- end
- end
- end
- local after = rule.after
- if after then
- local tv = tables[after]
- if tv then
- rule.after = tv
- after = unpacked[tv]
- end
- if after then
- for i=1,#after do
- local tv = tables[after[i]]
- if tv then
- after[i] = tv
- end
- end
- end
- end
- local current = rule.current
- if current then
- local tv = tables[current]
- if tv then
- rule.current = tv
- current = unpacked[tv]
- end
- if current then
- for i=1,#current do
- local tv = tables[current[i]]
- if tv then
- current[i] = tv
- end
- end
- end
- end
- local replacements = rule.replacements
- if replacements then
- local tv = tables[replacements]
- if tv then
- rule.replacements = tv
- end
- end
- local fore = rule.fore
- if fore then
- local tv = tables[fore]
- if tv then
- rule.fore = tv
- end
- end
- local back = rule.back
- if back then
- local tv = tables[back]
- if tv then
- rule.back = tv
- end
- end
- local names = rule.names
- if names then
- local tv = tables[names]
- if tv then
- rule.names = tv
- end
- end
- local lookups = rule.lookups
- if lookups then
- local tv = tables[lookups]
- if tv then
- rule.lookups = tv
- end
- end
- end
- end
- end
- end
- local anchor_to_lookup = resources.anchor_to_lookup
- if anchor_to_lookup then
- for anchor, lookup in next, anchor_to_lookup do
- local tv = tables[lookup]
- if tv then
- anchor_to_lookup[anchor] = tv
- end
- end
- end
- local lookup_to_anchor = resources.lookup_to_anchor
- if lookup_to_anchor then
- for lookup, anchor in next, lookup_to_anchor do
- local tv = tables[anchor]
- if tv then
- lookup_to_anchor[lookup] = tv
- end
- end
- end
- local ls = resources.sequences
- if ls then
- for _, feature in next, ls do
- local flags = feature.flags
- if flags then
- local tv = tables[flags]
- if tv then
- feature.flags = tv
- end
- end
- local subtables = feature.subtables
- if subtables then
- local tv = tables[subtables]
- if tv then
- feature.subtables = tv
- end
- end
- local features = feature.features
- if features then
- local tv = tables[features]
- if tv then
- feature.features = tv
- features = unpacked[tv]
- end
- if features then
- for script, data in next, features do
- local tv = tables[data]
- if tv then
- features[script] = tv
- end
- end
- end
- end
- end
- end
- local lookups = resources.lookups
- if lookups then
- for _, lookup in next, lookups do
- local flags = lookup.flags
- if flags then
- local tv = tables[flags]
- if tv then
- lookup.flags = tv
- end
- end
- local subtables = lookup.subtables
- if subtables then
- local tv = tables[subtables]
- if tv then
- lookup.subtables = tv
- end
- end
- end
- end
- local features = resources.features
- if features then
- for _, what in next, glists do
- local feature = features[what]
- if feature then
- for tag, spec in next, feature do
- local tv = tables[spec]
- if tv then
- feature[tag] = tv
- end
- end
- end
- end
- end
- data.tables = nil
- end
- end
-end
-
-if otf.enhancers.register then
-
- otf.enhancers.register( "pack", packdata)
- otf.enhancers.register("unpack",unpackdata)
-
--- todo: directive
-
-end
-
-otf.enhancers.unpack = unpackdata -- used elsewhere
+if not modules then modules = { } end modules ['font-otp'] = {
+ version = 1.001,
+ comment = "companion to font-otf.lua (packing)",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- todo: pack math (but not that much to share)
+--
+-- pitfall 5.2: hashed tables can suddenly become indexed with nil slots
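+--
+-- the basic idea (sketch only): recurring subtables are hashed, stored once in
+-- data.tables and replaced by their index, so
+--
+--   description.boundingbox = { 0, -10, 500, 700 }
+--
+-- becomes something like
+--
+--   description.boundingbox = 123    -- with data.tables[123] = { 0, -10, 500, 700 }
+--
+-- and unpackdata later resolves such indices back into the shared tables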
+
+local next, type = next, type
+local sort, concat = table.sort, table.concat
+local sortedhash = table.sortedhash
+
+local trace_packing = false trackers.register("otf.packing", function(v) trace_packing = v end)
+local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
+
+local report_otf = logs.reporter("fonts","otf loading")
+
+-- also used in other scripts so we need to check some tables:
+
+fonts = fonts or { }
+
+local handlers = fonts.handlers or { }
+fonts.handlers = handlers
+
+local otf = handlers.otf or { }
+handlers.otf = otf
+
+local enhancers = otf.enhancers or { }
+otf.enhancers = enhancers
+
+local glists = otf.glists or { "gsub", "gpos" }
+otf.glists = glists
+
+local criterium = 1
+local threshold = 0
+
+local function tabstr_normal(t)
+ local s = { }
+ local n = 0
+ for k, v in next, t do
+ n = n + 1
+ if type(v) == "table" then
+ s[n] = k .. ">" .. tabstr_normal(v)
+ elseif v == true then
+ s[n] = k .. "+" -- "=true"
+ elseif v then
+ s[n] = k .. "=" .. v
+ else
+ s[n] = k .. "-" -- "=false"
+ end
+ end
+ if n == 0 then
+ return ""
+ elseif n == 1 then
+ return s[1]
+ else
+ sort(s) -- costly but needed (occasional wrong hit otherwise)
+ return concat(s,",")
+ end
+end
+
+local function tabstr_flat(t)
+ local s = { }
+ local n = 0
+ for k, v in next, t do
+ n = n + 1
+ s[n] = k .. "=" .. v
+ end
+ if n == 0 then
+ return ""
+ elseif n == 1 then
+ return s[1]
+ else
+ sort(s) -- costly but needed (occasional wrong hit otherwise)
+ return concat(s,",")
+ end
+end
+
+local function tabstr_mixed(t) -- indexed
+ local s = { }
+ local n = #t
+ if n == 0 then
+ return ""
+ elseif n == 1 then
+ local k = t[1]
+ if k == true then
+ return "++" -- we need to distinguish from "true"
+ elseif k == false then
+ return "--" -- we need to distinguish from "false"
+ else
+ return tostring(k) -- number or string
+ end
+ else
+ for i=1,n do
+ local k = t[i]
+ if k == true then
+ s[i] = "++" -- we need to distinguish from "true"
+ elseif k == false then
+ s[i] = "--" -- we need to distinguish from "false"
+ else
+ s[i] = k -- number or string
+ end
+ end
+ return concat(s,",")
+ end
+end
+
+local function tabstr_boolean(t)
+ local s = { }
+ local n = 0
+ for k, v in next, t do
+ n = n + 1
+ if v then
+ s[n] = k .. "+"
+ else
+ s[n] = k .. "-"
+ end
+ end
+ if n == 0 then
+ return ""
+ elseif n == 1 then
+ return s[1]
+ else
+ sort(s) -- costly but needed (occasional wrong hit otherwise)
+ return concat(s,",")
+ end
+end
+
+-- tabstr_boolean_x = tabstr_boolean
+
+-- tabstr_boolean = function(t)
+-- local a = tabstr_normal(t)
+-- local b = tabstr_boolean_x(t)
+-- print(a)
+-- print(b)
+-- return b
+-- end
+
+local function packdata(data)
+ if data then
+ -- stripdata(data)
+ local h, t, c = { }, { }, { }
+ local hh, tt, cc = { }, { }, { }
+ local nt, ntt = 0, 0
+ local function pack_normal(v)
+ local tag = tabstr_normal(v)
+ local ht = h[tag]
+ if ht then
+ c[ht] = c[ht] + 1
+ return ht
+ else
+ nt = nt + 1
+ t[nt] = v
+ h[tag] = nt
+ c[nt] = 1
+ return nt
+ end
+ end
+ local function pack_flat(v)
+ local tag = tabstr_flat(v)
+ local ht = h[tag]
+ if ht then
+ c[ht] = c[ht] + 1
+ return ht
+ else
+ nt = nt + 1
+ t[nt] = v
+ h[tag] = nt
+ c[nt] = 1
+ return nt
+ end
+ end
+ local function pack_boolean(v)
+ local tag = tabstr_boolean(v)
+ local ht = h[tag]
+ if ht then
+ c[ht] = c[ht] + 1
+ return ht
+ else
+ nt = nt + 1
+ t[nt] = v
+ h[tag] = nt
+ c[nt] = 1
+ return nt
+ end
+ end
+ local function pack_indexed(v)
+ local tag = concat(v," ")
+ local ht = h[tag]
+ if ht then
+ c[ht] = c[ht] + 1
+ return ht
+ else
+ nt = nt + 1
+ t[nt] = v
+ h[tag] = nt
+ c[nt] = 1
+ return nt
+ end
+ end
+ local function pack_mixed(v)
+ local tag = tabstr_mixed(v)
+ local ht = h[tag]
+ if ht then
+ c[ht] = c[ht] + 1
+ return ht
+ else
+ nt = nt + 1
+ t[nt] = v
+ h[tag] = nt
+ c[nt] = 1
+ return nt
+ end
+ end
+ local function pack_final(v)
+ -- v == number
+ if c[v] <= criterium then
+ return t[v]
+ else
+ -- compact hash
+ local hv = hh[v]
+ if hv then
+ return hv
+ else
+ ntt = ntt + 1
+ tt[ntt] = t[v]
+ hh[v] = ntt
+ cc[ntt] = c[v]
+ return ntt
+ end
+ end
+ end
+ local function success(stage,pass)
+ if nt == 0 then
+ if trace_loading or trace_packing then
+ report_otf("pack quality: nothing to pack")
+ end
+ return false
+ elseif nt >= threshold then
+ local one, two, rest = 0, 0, 0
+ if pass == 1 then
+ for k,v in next, c do
+ if v == 1 then
+ one = one + 1
+ elseif v == 2 then
+ two = two + 1
+ else
+ rest = rest + 1
+ end
+ end
+ else
+ for k,v in next, cc do
+ if v > 20 then
+ rest = rest + 1
+ elseif v > 10 then
+ two = two + 1
+ else
+ one = one + 1
+ end
+ end
+ data.tables = tt
+ end
+ if trace_loading or trace_packing then
+ report_otf("pack quality: stage %s, pass %s, %s packed, 1-10:%s, 11-20:%s, rest:%s (criterium: %s)", stage, pass, one+two+rest, one, two, rest, criterium)
+ end
+ return true
+ else
+ if trace_loading or trace_packing then
+ report_otf("pack quality: stage %s, pass %s, %s packed, aborting pack (threshold: %s)", stage, pass, nt, threshold)
+ end
+ return false
+ end
+ end
+ local function packers(pass)
+ if pass == 1 then
+ return pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed
+ else
+ return pack_final, pack_final, pack_final, pack_final, pack_final
+ end
+ end
+ local resources = data.resources
+ local lookuptypes = resources.lookuptypes
+ for pass=1,2 do
+ if trace_packing then
+ report_otf("start packing: stage 1, pass %s",pass)
+ end
+ local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass)
+ for unicode, description in next, data.descriptions do
+ local boundingbox = description.boundingbox
+ if boundingbox then
+ description.boundingbox = pack_indexed(boundingbox)
+ end
+ local slookups = description.slookups
+ if slookups then
+ for tag, slookup in next, slookups do
+ local what = lookuptypes[tag]
+ if what == "pair" then
+ local t = slookup[2] if t then slookup[2] = pack_indexed(t) end
+ local t = slookup[3] if t then slookup[3] = pack_indexed(t) end
+ elseif what ~= "substitution" then
+ slookups[tag] = pack_indexed(slookup) -- true is new
+ end
+ end
+ end
+ local mlookups = description.mlookups
+ if mlookups then
+ for tag, mlookup in next, mlookups do
+ local what = lookuptypes[tag]
+ if what == "pair" then
+ for i=1,#mlookup do
+ local lookup = mlookup[i]
+ local t = lookup[2] if t then lookup[2] = pack_indexed(t) end
+ local t = lookup[3] if t then lookup[3] = pack_indexed(t) end
+ end
+ elseif what ~= "substitution" then
+ for i=1,#mlookup do
+ mlookup[i] = pack_indexed(mlookup[i]) -- true is new
+ end
+ end
+ end
+ end
+ local kerns = description.kerns
+ if kerns then
+ for tag, kern in next, kerns do
+ kerns[tag] = pack_flat(kern)
+ end
+ end
+ local math = description.math
+ if math then
+ local kerns = math.kerns
+ if kerns then
+ for tag, kern in next, kerns do
+ kerns[tag] = pack_normal(kern)
+ end
+ end
+ end
+ local anchors = description.anchors
+ if anchors then
+ for what, anchor in next, anchors do
+ if what == "baselig" then
+ for _, a in next, anchor do
+ for k=1,#a do
+ a[k] = pack_indexed(a[k])
+ end
+ end
+ else
+ for k, v in next, anchor do
+ anchor[k] = pack_indexed(v)
+ end
+ end
+ end
+ end
+ local altuni = description.altuni
+ if altuni then
+ for i=1,#altuni do
+ altuni[i] = pack_flat(altuni[i])
+ end
+ end
+ end
+ local lookups = data.lookups
+ if lookups then
+ for _, lookup in next, lookups do
+ local rules = lookup.rules
+ if rules then
+ for i=1,#rules do
+ local rule = rules[i]
+ local r = rule.before if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end
+ local r = rule.after if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end
+ local r = rule.current if r then for i=1,#r do r[i] = pack_boolean(r[i]) end end
+ local r = rule.replacements if r then rule.replacements = pack_flat (r) end -- can have holes
+ local r = rule.lookups if r then rule.lookups = pack_indexed(r) end -- can have ""
+ -- local r = rule.lookups if r then rule.lookups = pack_flat(r) end -- can have holes (already taken care of some cases)
+ end
+ end
+ end
+ end
+ local anchor_to_lookup = resources.anchor_to_lookup
+ if anchor_to_lookup then
+ for anchor, lookup in next, anchor_to_lookup do
+ anchor_to_lookup[anchor] = pack_normal(lookup)
+ end
+ end
+ local lookup_to_anchor = resources.lookup_to_anchor
+ if lookup_to_anchor then
+ for lookup, anchor in next, lookup_to_anchor do
+ lookup_to_anchor[lookup] = pack_normal(anchor)
+ end
+ end
+ local sequences = resources.sequences
+ if sequences then
+ for feature, sequence in next, sequences do
+ local flags = sequence.flags
+ if flags then
+ sequence.flags = pack_normal(flags)
+ end
+ local subtables = sequence.subtables
+ if subtables then
+ sequence.subtables = pack_normal(subtables)
+ end
+ local features = sequence.features
+ if features then
+ for script, feature in next, features do
+ features[script] = pack_normal(feature)
+ end
+ end
+ end
+ end
+ local lookups = resources.lookups
+ if lookups then
+ for name, lookup in next, lookups do
+ local flags = lookup.flags
+ if flags then
+ lookup.flags = pack_normal(flags)
+ end
+ local subtables = lookup.subtables
+ if subtables then
+ lookup.subtables = pack_normal(subtables)
+ end
+ end
+ end
+ local features = resources.features
+ if features then
+ for _, what in next, glists do
+ local list = features[what]
+ if list then
+ for feature, spec in next, list do
+ list[feature] = pack_normal(spec)
+ end
+ end
+ end
+ end
+ if not success(1,pass) then
+ return
+ end
+ end
+ if nt > 0 then
+ for pass=1,2 do
+ if trace_packing then
+ report_otf("start packing: stage 2, pass %s",pass)
+ end
+ local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass)
+ for unicode, description in next, data.descriptions do
+ local kerns = description.kerns
+ if kerns then
+ description.kerns = pack_normal(kerns)
+ end
+ local math = description.math
+ if math then
+ local kerns = math.kerns
+ if kerns then
+ math.kerns = pack_normal(kerns)
+ end
+ end
+ local anchors = description.anchors
+ if anchors then
+ description.anchors = pack_normal(anchors)
+ end
+ local mlookups = description.mlookups
+ if mlookups then
+ for tag, mlookup in next, mlookups do
+ mlookups[tag] = pack_normal(mlookup)
+ end
+ end
+ local altuni = description.altuni
+ if altuni then
+ description.altuni = pack_normal(altuni)
+ end
+ end
+ local lookups = data.lookups
+ if lookups then
+ for _, lookup in next, lookups do
+ local rules = lookup.rules
+ if rules then
+ for i=1,#rules do -- was next loop
+ local rule = rules[i]
+ local r = rule.before if r then rule.before = pack_normal(r) end
+ local r = rule.after if r then rule.after = pack_normal(r) end
+ local r = rule.current if r then rule.current = pack_normal(r) end
+ end
+ end
+ end
+ end
+ local sequences = resources.sequences
+ if sequences then
+ for feature, sequence in next, sequences do
+ sequence.features = pack_normal(sequence.features)
+ end
+ end
+ if not success(2,pass) then
+ -- return
+ end
+ end
+
+ for pass=1,2 do
+ local pack_normal, pack_indexed, pack_flat, pack_boolean, pack_mixed = packers(pass)
+ for unicode, description in next, data.descriptions do
+ local slookups = description.slookups
+ if slookups then
+ description.slookups = pack_normal(slookups)
+ end
+ local mlookups = description.mlookups
+ if mlookups then
+ description.mlookups = pack_normal(mlookups)
+ end
+ end
+ end
+
+ end
+ end
+end
+
+local unpacked_mt = {
+ __index =
+ function(t,k)
+ t[k] = false
+ return k -- first lookup yields the shared table itself, later lookups yield false (already unpacked)
+ end
+}
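+
+-- usage sketch: "unpacked" memoizes which shared tables have already been expanded;
+-- the first lookup returns the table itself (so it gets processed once), any later
+-- lookup returns false (so it is skipped):
+--
+--   local shared = tables[some_index]       -- some_index: a packed reference
+--   if unpacked[shared] then ... end        -- true-ish only the first time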
+
+local function unpackdata(data)
+ if data then
+ local tables = data.tables
+ if tables then
+ local resources = data.resources
+ local lookuptypes = resources.lookuptypes
+ local unpacked = { }
+ setmetatable(unpacked,unpacked_mt)
+ for unicode, description in next, data.descriptions do
+ local tv = tables[description.boundingbox]
+ if tv then
+ description.boundingbox = tv
+ end
+ local slookups = description.slookups
+ if slookups then
+ local tv = tables[slookups]
+ if tv then
+ description.slookups = tv
+ slookups = unpacked[tv]
+ end
+ if slookups then
+ for tag, lookup in next, slookups do
+ local what = lookuptypes[tag]
+ if what == "pair" then
+ local tv = tables[lookup[2]]
+ if tv then
+ lookup[2] = tv
+ end
+ local tv = tables[lookup[3]]
+ if tv then
+ lookup[3] = tv
+ end
+ elseif what ~= "substitution" then
+ local tv = tables[lookup]
+ if tv then
+ slookups[tag] = tv
+ end
+ end
+ end
+ end
+ end
+ local mlookups = description.mlookups
+ if mlookups then
+ local tv = tables[mlookups]
+ if tv then
+ description.mlookups = tv
+ mlookups = unpacked[tv]
+ end
+ if mlookups then
+ for tag, list in next, mlookups do
+ local tv = tables[list]
+ if tv then
+ mlookups[tag] = tv
+ list = unpacked[tv]
+ end
+ if list then
+ local what = lookuptypes[tag]
+ if what == "pair" then
+ for i=1,#list do
+ local lookup = list[i]
+ local tv = tables[lookup[2]]
+ if tv then
+ lookup[2] = tv
+ end
+ local tv = tables[lookup[3]]
+ if tv then
+ lookup[3] = tv
+ end
+ end
+ elseif what ~= "substitution" then
+ for i=1,#list do
+ local tv = tables[list[i]]
+ if tv then
+ list[i] = tv
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ local kerns = description.kerns
+ if kerns then
+ local tm = tables[kerns]
+ if tm then
+ description.kerns = tm
+ kerns = unpacked[tm]
+ end
+ if kerns then
+ for k, kern in next, kerns do
+ local tv = tables[kern]
+ if tv then
+ kerns[k] = tv
+ end
+ end
+ end
+ end
+ local math = description.math
+ if math then
+ local kerns = math.kerns
+ if kerns then
+ local tm = tables[kerns]
+ if tm then
+ math.kerns = tm
+ kerns = unpacked[tm]
+ end
+ if kerns then
+ for k, kern in next, kerns do
+ local tv = tables[kern]
+ if tv then
+ kerns[k] = tv
+ end
+ end
+ end
+ end
+ end
+ local anchors = description.anchors
+ if anchors then
+ local ta = tables[anchors]
+ if ta then
+ description.anchors = ta
+ anchors = unpacked[ta]
+ end
+ if anchors then
+ for tag, anchor in next, anchors do
+ if tag == "baselig" then
+ for _, list in next, anchor do
+ for i=1,#list do
+ local tv = tables[list[i]]
+ if tv then
+ list[i] = tv
+ end
+ end
+ end
+ else
+ for a, data in next, anchor do
+ local tv = tables[data]
+ if tv then
+ anchor[a] = tv
+ end
+ end
+ end
+ end
+ end
+ end
+ local altuni = description.altuni
+ if altuni then
+ local altuni = tables[altuni]
+ if altuni then
+ description.altuni = altuni
+ for i=1,#altuni do
+ local tv = tables[altuni[i]]
+ if tv then
+ altuni[i] = tv
+ end
+ end
+ end
+ end
+ end
+ local lookups = data.lookups
+ if lookups then
+ for _, lookup in next, lookups do
+ local rules = lookup.rules
+ if rules then
+ for i=1,#rules do -- was next loop
+ local rule = rules[i]
+ local before = rule.before
+ if before then
+ local tv = tables[before]
+ if tv then
+ rule.before = tv
+ before = unpacked[tv]
+ end
+ if before then
+ for i=1,#before do
+ local tv = tables[before[i]]
+ if tv then
+ before[i] = tv
+ end
+ end
+ end
+ end
+ local after = rule.after
+ if after then
+ local tv = tables[after]
+ if tv then
+ rule.after = tv
+ after = unpacked[tv]
+ end
+ if after then
+ for i=1,#after do
+ local tv = tables[after[i]]
+ if tv then
+ after[i] = tv
+ end
+ end
+ end
+ end
+ local current = rule.current
+ if current then
+ local tv = tables[current]
+ if tv then
+ rule.current = tv
+ current = unpacked[tv]
+ end
+ if current then
+ for i=1,#current do
+ local tv = tables[current[i]]
+ if tv then
+ current[i] = tv
+ end
+ end
+ end
+ end
+ local replacements = rule.replacements
+ if replacements then
+ local tv = tables[replacements]
+ if tv then
+ rule.replacements = tv
+ end
+ end
+ local fore = rule.fore
+ if fore then
+ local tv = tables[fore]
+ if tv then
+ rule.fore = tv
+ end
+ end
+ local back = rule.back
+ if back then
+ local tv = tables[back]
+ if tv then
+ rule.back = tv
+ end
+ end
+ local names = rule.names
+ if names then
+ local tv = tables[names]
+ if tv then
+ rule.names = tv
+ end
+ end
+ local lookups = rule.lookups
+ if lookups then
+ local tv = tables[lookups]
+ if tv then
+ rule.lookups = tv
+ end
+ end
+ end
+ end
+ end
+ end
+ local anchor_to_lookup = resources.anchor_to_lookup
+ if anchor_to_lookup then
+ for anchor, lookup in next, anchor_to_lookup do
+ local tv = tables[lookup]
+ if tv then
+ anchor_to_lookup[anchor] = tv
+ end
+ end
+ end
+ local lookup_to_anchor = resources.lookup_to_anchor
+ if lookup_to_anchor then
+ for lookup, anchor in next, lookup_to_anchor do
+ local tv = tables[anchor]
+ if tv then
+ lookup_to_anchor[lookup] = tv
+ end
+ end
+ end
+ local ls = resources.sequences
+ if ls then
+ for _, feature in next, ls do
+ local flags = feature.flags
+ if flags then
+ local tv = tables[flags]
+ if tv then
+ feature.flags = tv
+ end
+ end
+ local subtables = feature.subtables
+ if subtables then
+ local tv = tables[subtables]
+ if tv then
+ feature.subtables = tv
+ end
+ end
+ local features = feature.features
+ if features then
+ local tv = tables[features]
+ if tv then
+ feature.features = tv
+ features = unpacked[tv]
+ end
+ if features then
+ for script, data in next, features do
+ local tv = tables[data]
+ if tv then
+ features[script] = tv
+ end
+ end
+ end
+ end
+ end
+ end
+ local lookups = resources.lookups
+ if lookups then
+ for _, lookup in next, lookups do
+ local flags = lookup.flags
+ if flags then
+ local tv = tables[flags]
+ if tv then
+ lookup.flags = tv
+ end
+ end
+ local subtables = lookup.subtables
+ if subtables then
+ local tv = tables[subtables]
+ if tv then
+ lookup.subtables = tv
+ end
+ end
+ end
+ end
+ local features = resources.features
+ if features then
+ for _, what in next, glists do
+ local feature = features[what]
+ if feature then
+ for tag, spec in next, feature do
+ local tv = tables[spec]
+ if tv then
+ feature[tag] = tv
+ end
+ end
+ end
+ end
+ end
+ data.tables = nil
+ end
+ end
+end
+
+if otf.enhancers.register then
+
+ otf.enhancers.register( "pack", packdata)
+ otf.enhancers.register("unpack",unpackdata)
+
+-- todo: directive
+
+end
+
+otf.enhancers.unpack = unpackdata -- used elsewhere
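+
+-- A minimal usage sketch (assuming "data" is a raw font table as produced by
+-- the otf loader): packing replaces shared subtables by indices into the
+-- data.tables pool, and unpacking restores them and removes data.tables again.
+--
+-- packdata(data)
+-- unpackdata(data)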
diff --git a/tex/context/base/font-ott.lua b/tex/context/base/font-ott.lua
index e3aacd0d1..3b171c4a4 100644
--- a/tex/context/base/font-ott.lua
+++ b/tex/context/base/font-ott.lua
@@ -1,1113 +1,1113 @@
-if not modules then modules = { } end modules ['font-ott'] = {
- version = 1.001,
- comment = "companion to font-otf.lua (tables)",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
- -- dataonly = true,
-}
-
-local type, next, tonumber, tostring, rawget, rawset = type, next, tonumber, tostring, rawget, rawset
-local gsub, lower, format, match = string.gsub, string.lower, string.format, string.match
-local is_boolean = string.is_boolean
-
-local setmetatableindex = table.setmetatableindex
-local setmetatablenewindex = table.setmetatablenewindex
-local allocate = utilities.storage.allocate
-
-local fonts = fonts
-local otf = fonts.handlers.otf
-local otffeatures = otf.features
-local registerotffeature = otffeatures.register
-
-local tables = otf.tables or { }
-otf.tables = tables
-
-local statistics = otf.statistics or { }
-otf.statistics = statistics
-
-local scripts = allocate {
- ['arab'] = 'arabic',
- ['armn'] = 'armenian',
- ['bali'] = 'balinese',
- ['beng'] = 'bengali',
- ['bopo'] = 'bopomofo',
- ['brai'] = 'braille',
- ['bugi'] = 'buginese',
- ['buhd'] = 'buhid',
- ['byzm'] = 'byzantine music',
- ['cans'] = 'canadian syllabics',
- ['cher'] = 'cherokee',
- ['copt'] = 'coptic',
- ['cprt'] = 'cypriot syllabary',
- ['cyrl'] = 'cyrillic',
- ['deva'] = 'devanagari',
- ['dsrt'] = 'deseret',
- ['ethi'] = 'ethiopic',
- ['geor'] = 'georgian',
- ['glag'] = 'glagolitic',
- ['goth'] = 'gothic',
- ['grek'] = 'greek',
- ['gujr'] = 'gujarati',
- ['guru'] = 'gurmukhi',
- ['hang'] = 'hangul',
- ['hani'] = 'cjk ideographic',
- ['hano'] = 'hanunoo',
- ['hebr'] = 'hebrew',
- ['ital'] = 'old italic',
- ['jamo'] = 'hangul jamo',
- ['java'] = 'javanese',
- ['kana'] = 'hiragana and katakana',
- ['khar'] = 'kharosthi',
- ['khmr'] = 'khmer',
- ['knda'] = 'kannada',
- ['lao' ] = 'lao',
- ['latn'] = 'latin',
- ['limb'] = 'limbu',
- ['linb'] = 'linear b',
- ['math'] = 'mathematical alphanumeric symbols',
- ['mlym'] = 'malayalam',
- ['mong'] = 'mongolian',
- ['musc'] = 'musical symbols',
- ['mymr'] = 'myanmar',
- ['nko' ] = "n'ko",
- ['ogam'] = 'ogham',
- ['orya'] = 'oriya',
- ['osma'] = 'osmanya',
- ['phag'] = 'phags-pa',
- ['phnx'] = 'phoenician',
- ['runr'] = 'runic',
- ['shaw'] = 'shavian',
- ['sinh'] = 'sinhala',
- ['sylo'] = 'syloti nagri',
- ['syrc'] = 'syriac',
- ['tagb'] = 'tagbanwa',
- ['tale'] = 'tai le',
- ['talu'] = 'tai lu',
- ['taml'] = 'tamil',
- ['telu'] = 'telugu',
- ['tfng'] = 'tifinagh',
- ['tglg'] = 'tagalog',
- ['thaa'] = 'thaana',
- ['thai'] = 'thai',
- ['tibt'] = 'tibetan',
- ['ugar'] = 'ugaritic cuneiform',
- ['xpeo'] = 'old persian cuneiform',
- ['xsux'] = 'sumero-akkadian cuneiform',
- ['yi' ] = 'yi',
-}
-
-local languages = allocate {
- ['aba'] = 'abaza',
- ['abk'] = 'abkhazian',
- ['ady'] = 'adyghe',
- ['afk'] = 'afrikaans',
- ['afr'] = 'afar',
- ['agw'] = 'agaw',
- ['als'] = 'alsatian',
- ['alt'] = 'altai',
- ['amh'] = 'amharic',
- ['ara'] = 'arabic',
- ['ari'] = 'aari',
- ['ark'] = 'arakanese',
- ['asm'] = 'assamese',
- ['ath'] = 'athapaskan',
- ['avr'] = 'avar',
- ['awa'] = 'awadhi',
- ['aym'] = 'aymara',
- ['aze'] = 'azeri',
- ['bad'] = 'badaga',
- ['bag'] = 'baghelkhandi',
- ['bal'] = 'balkar',
- ['bau'] = 'baule',
- ['bbr'] = 'berber',
- ['bch'] = 'bench',
- ['bcr'] = 'bible cree',
- ['bel'] = 'belarussian',
- ['bem'] = 'bemba',
- ['ben'] = 'bengali',
- ['bgr'] = 'bulgarian',
- ['bhi'] = 'bhili',
- ['bho'] = 'bhojpuri',
- ['bik'] = 'bikol',
- ['bil'] = 'bilen',
- ['bkf'] = 'blackfoot',
- ['bli'] = 'balochi',
- ['bln'] = 'balante',
- ['blt'] = 'balti',
- ['bmb'] = 'bambara',
- ['bml'] = 'bamileke',
- ['bos'] = 'bosnian',
- ['bre'] = 'breton',
- ['brh'] = 'brahui',
- ['bri'] = 'braj bhasha',
- ['brm'] = 'burmese',
- ['bsh'] = 'bashkir',
- ['bti'] = 'beti',
- ['cat'] = 'catalan',
- ['ceb'] = 'cebuano',
- ['che'] = 'chechen',
- ['chg'] = 'chaha gurage',
- ['chh'] = 'chattisgarhi',
- ['chi'] = 'chichewa',
- ['chk'] = 'chukchi',
- ['chp'] = 'chipewyan',
- ['chr'] = 'cherokee',
- ['chu'] = 'chuvash',
- ['cmr'] = 'comorian',
- ['cop'] = 'coptic',
- ['cos'] = 'corsican',
- ['cre'] = 'cree',
- ['crr'] = 'carrier',
- ['crt'] = 'crimean tatar',
- ['csl'] = 'church slavonic',
- ['csy'] = 'czech',
- ['dan'] = 'danish',
- ['dar'] = 'dargwa',
- ['dcr'] = 'woods cree',
- ['deu'] = 'german',
- ['dgr'] = 'dogri',
- ['div'] = 'divehi',
- ['djr'] = 'djerma',
- ['dng'] = 'dangme',
- ['dnk'] = 'dinka',
- ['dri'] = 'dari',
- ['dun'] = 'dungan',
- ['dzn'] = 'dzongkha',
- ['ebi'] = 'ebira',
- ['ecr'] = 'eastern cree',
- ['edo'] = 'edo',
- ['efi'] = 'efik',
- ['ell'] = 'greek',
- ['eng'] = 'english',
- ['erz'] = 'erzya',
- ['esp'] = 'spanish',
- ['eti'] = 'estonian',
- ['euq'] = 'basque',
- ['evk'] = 'evenki',
- ['evn'] = 'even',
- ['ewe'] = 'ewe',
- ['fan'] = 'french antillean',
- ['far'] = 'farsi',
- ['fin'] = 'finnish',
- ['fji'] = 'fijian',
- ['fle'] = 'flemish',
- ['fne'] = 'forest nenets',
- ['fon'] = 'fon',
- ['fos'] = 'faroese',
- ['fra'] = 'french',
- ['fri'] = 'frisian',
- ['frl'] = 'friulian',
- ['fta'] = 'futa',
- ['ful'] = 'fulani',
- ['gad'] = 'ga',
- ['gae'] = 'gaelic',
- ['gag'] = 'gagauz',
- ['gal'] = 'galician',
- ['gar'] = 'garshuni',
- ['gaw'] = 'garhwali',
- ['gez'] = "ge'ez",
- ['gil'] = 'gilyak',
- ['gmz'] = 'gumuz',
- ['gon'] = 'gondi',
- ['grn'] = 'greenlandic',
- ['gro'] = 'garo',
- ['gua'] = 'guarani',
- ['guj'] = 'gujarati',
- ['hai'] = 'haitian',
- ['hal'] = 'halam',
- ['har'] = 'harauti',
- ['hau'] = 'hausa',
- ['haw'] = 'hawaiin',
- ['hbn'] = 'hammer-banna',
- ['hil'] = 'hiligaynon',
- ['hin'] = 'hindi',
- ['hma'] = 'high mari',
- ['hnd'] = 'hindko',
- ['ho'] = 'ho',
- ['hri'] = 'harari',
- ['hrv'] = 'croatian',
- ['hun'] = 'hungarian',
- ['hye'] = 'armenian',
- ['ibo'] = 'igbo',
- ['ijo'] = 'ijo',
- ['ilo'] = 'ilokano',
- ['ind'] = 'indonesian',
- ['ing'] = 'ingush',
- ['inu'] = 'inuktitut',
- ['iri'] = 'irish',
- ['irt'] = 'irish traditional',
- ['isl'] = 'icelandic',
- ['ism'] = 'inari sami',
- ['ita'] = 'italian',
- ['iwr'] = 'hebrew',
- ['jan'] = 'japanese',
- ['jav'] = 'javanese',
- ['jii'] = 'yiddish',
- ['jud'] = 'judezmo',
- ['jul'] = 'jula',
- ['kab'] = 'kabardian',
- ['kac'] = 'kachchi',
- ['kal'] = 'kalenjin',
- ['kan'] = 'kannada',
- ['kar'] = 'karachay',
- ['kat'] = 'georgian',
- ['kaz'] = 'kazakh',
- ['keb'] = 'kebena',
- ['kge'] = 'khutsuri georgian',
- ['kha'] = 'khakass',
- ['khk'] = 'khanty-kazim',
- ['khm'] = 'khmer',
- ['khs'] = 'khanty-shurishkar',
- ['khv'] = 'khanty-vakhi',
- ['khw'] = 'khowar',
- ['kik'] = 'kikuyu',
- ['kir'] = 'kirghiz',
- ['kis'] = 'kisii',
- ['kkn'] = 'kokni',
- ['klm'] = 'kalmyk',
- ['kmb'] = 'kamba',
- ['kmn'] = 'kumaoni',
- ['kmo'] = 'komo',
- ['kms'] = 'komso',
- ['knr'] = 'kanuri',
- ['kod'] = 'kodagu',
- ['koh'] = 'korean old hangul',
- ['kok'] = 'konkani',
- ['kon'] = 'kikongo',
- ['kop'] = 'komi-permyak',
- ['kor'] = 'korean',
- ['koz'] = 'komi-zyrian',
- ['kpl'] = 'kpelle',
- ['kri'] = 'krio',
- ['krk'] = 'karakalpak',
- ['krl'] = 'karelian',
- ['krm'] = 'karaim',
- ['krn'] = 'karen',
- ['krt'] = 'koorete',
- ['ksh'] = 'kashmiri',
- ['ksi'] = 'khasi',
- ['ksm'] = 'kildin sami',
- ['kui'] = 'kui',
- ['kul'] = 'kulvi',
- ['kum'] = 'kumyk',
- ['kur'] = 'kurdish',
- ['kuu'] = 'kurukh',
- ['kuy'] = 'kuy',
- ['kyk'] = 'koryak',
- ['lad'] = 'ladin',
- ['lah'] = 'lahuli',
- ['lak'] = 'lak',
- ['lam'] = 'lambani',
- ['lao'] = 'lao',
- ['lat'] = 'latin',
- ['laz'] = 'laz',
- ['lcr'] = 'l-cree',
- ['ldk'] = 'ladakhi',
- ['lez'] = 'lezgi',
- ['lin'] = 'lingala',
- ['lma'] = 'low mari',
- ['lmb'] = 'limbu',
- ['lmw'] = 'lomwe',
- ['lsb'] = 'lower sorbian',
- ['lsm'] = 'lule sami',
- ['lth'] = 'lithuanian',
- ['ltz'] = 'luxembourgish',
- ['lub'] = 'luba',
- ['lug'] = 'luganda',
- ['luh'] = 'luhya',
- ['luo'] = 'luo',
- ['lvi'] = 'latvian',
- ['maj'] = 'majang',
- ['mak'] = 'makua',
- ['mal'] = 'malayalam traditional',
- ['man'] = 'mansi',
- ['map'] = 'mapudungun',
- ['mar'] = 'marathi',
- ['maw'] = 'marwari',
- ['mbn'] = 'mbundu',
- ['mch'] = 'manchu',
- ['mcr'] = 'moose cree',
- ['mde'] = 'mende',
- ['men'] = "me'en",
- ['miz'] = 'mizo',
- ['mkd'] = 'macedonian',
- ['mle'] = 'male',
- ['mlg'] = 'malagasy',
- ['mln'] = 'malinke',
- ['mlr'] = 'malayalam reformed',
- ['mly'] = 'malay',
- ['mnd'] = 'mandinka',
- ['mng'] = 'mongolian',
- ['mni'] = 'manipuri',
- ['mnk'] = 'maninka',
- ['mnx'] = 'manx gaelic',
- ['moh'] = 'mohawk',
- ['mok'] = 'moksha',
- ['mol'] = 'moldavian',
- ['mon'] = 'mon',
- ['mor'] = 'moroccan',
- ['mri'] = 'maori',
- ['mth'] = 'maithili',
- ['mts'] = 'maltese',
- ['mun'] = 'mundari',
- ['nag'] = 'naga-assamese',
- ['nan'] = 'nanai',
- ['nas'] = 'naskapi',
- ['ncr'] = 'n-cree',
- ['ndb'] = 'ndebele',
- ['ndg'] = 'ndonga',
- ['nep'] = 'nepali',
- ['new'] = 'newari',
- ['ngr'] = 'nagari',
- ['nhc'] = 'norway house cree',
- ['nis'] = 'nisi',
- ['niu'] = 'niuean',
- ['nkl'] = 'nkole',
- ['nko'] = "n'ko",
- ['nld'] = 'dutch',
- ['nog'] = 'nogai',
- ['nor'] = 'norwegian',
- ['nsm'] = 'northern sami',
- ['nta'] = 'northern tai',
- ['nto'] = 'esperanto',
- ['nyn'] = 'nynorsk',
- ['oci'] = 'occitan',
- ['ocr'] = 'oji-cree',
- ['ojb'] = 'ojibway',
- ['ori'] = 'oriya',
- ['oro'] = 'oromo',
- ['oss'] = 'ossetian',
- ['paa'] = 'palestinian aramaic',
- ['pal'] = 'pali',
- ['pan'] = 'punjabi',
- ['pap'] = 'palpa',
- ['pas'] = 'pashto',
- ['pgr'] = 'polytonic greek',
- ['pil'] = 'pilipino',
- ['plg'] = 'palaung',
- ['plk'] = 'polish',
- ['pro'] = 'provencal',
- ['ptg'] = 'portuguese',
- ['qin'] = 'chin',
- ['raj'] = 'rajasthani',
- ['rbu'] = 'russian buriat',
- ['rcr'] = 'r-cree',
- ['ria'] = 'riang',
- ['rms'] = 'rhaeto-romanic',
- ['rom'] = 'romanian',
- ['roy'] = 'romany',
- ['rsy'] = 'rusyn',
- ['rua'] = 'ruanda',
- ['rus'] = 'russian',
- ['sad'] = 'sadri',
- ['san'] = 'sanskrit',
- ['sat'] = 'santali',
- ['say'] = 'sayisi',
- ['sek'] = 'sekota',
- ['sel'] = 'selkup',
- ['sgo'] = 'sango',
- ['shn'] = 'shan',
- ['sib'] = 'sibe',
- ['sid'] = 'sidamo',
- ['sig'] = 'silte gurage',
- ['sks'] = 'skolt sami',
- ['sky'] = 'slovak',
- ['sla'] = 'slavey',
- ['slv'] = 'slovenian',
- ['sml'] = 'somali',
- ['smo'] = 'samoan',
- ['sna'] = 'sena',
- ['snd'] = 'sindhi',
- ['snh'] = 'sinhalese',
- ['snk'] = 'soninke',
- ['sog'] = 'sodo gurage',
- ['sot'] = 'sotho',
- ['sqi'] = 'albanian',
- ['srb'] = 'serbian',
- ['srk'] = 'saraiki',
- ['srr'] = 'serer',
- ['ssl'] = 'south slavey',
- ['ssm'] = 'southern sami',
- ['sur'] = 'suri',
- ['sva'] = 'svan',
- ['sve'] = 'swedish',
- ['swa'] = 'swadaya aramaic',
- ['swk'] = 'swahili',
- ['swz'] = 'swazi',
- ['sxt'] = 'sutu',
- ['syr'] = 'syriac',
- ['tab'] = 'tabasaran',
- ['taj'] = 'tajiki',
- ['tam'] = 'tamil',
- ['tat'] = 'tatar',
- ['tcr'] = 'th-cree',
- ['tel'] = 'telugu',
- ['tgn'] = 'tongan',
- ['tgr'] = 'tigre',
- ['tgy'] = 'tigrinya',
- ['tha'] = 'thai',
- ['tht'] = 'tahitian',
- ['tib'] = 'tibetan',
- ['tkm'] = 'turkmen',
- ['tmn'] = 'temne',
- ['tna'] = 'tswana',
- ['tne'] = 'tundra nenets',
- ['tng'] = 'tonga',
- ['tod'] = 'todo',
- ['trk'] = 'turkish',
- ['tsg'] = 'tsonga',
- ['tua'] = 'turoyo aramaic',
- ['tul'] = 'tulu',
- ['tuv'] = 'tuvin',
- ['twi'] = 'twi',
- ['udm'] = 'udmurt',
- ['ukr'] = 'ukrainian',
- ['urd'] = 'urdu',
- ['usb'] = 'upper sorbian',
- ['uyg'] = 'uyghur',
- ['uzb'] = 'uzbek',
- ['ven'] = 'venda',
- ['vit'] = 'vietnamese',
- ['wa' ] = 'wa',
- ['wag'] = 'wagdi',
- ['wcr'] = 'west-cree',
- ['wel'] = 'welsh',
- ['wlf'] = 'wolof',
- ['xbd'] = 'tai lue',
- ['xhs'] = 'xhosa',
- ['yak'] = 'yakut',
- ['yba'] = 'yoruba',
- ['ycr'] = 'y-cree',
- ['yic'] = 'yi classic',
- ['yim'] = 'yi modern',
- ['zhh'] = 'chinese hong kong',
- ['zhp'] = 'chinese phonetic',
- ['zhs'] = 'chinese simplified',
- ['zht'] = 'chinese traditional',
- ['znd'] = 'zande',
- ['zul'] = 'zulu'
-}
-
-local features = allocate {
- ['aalt'] = 'access all alternates',
- ['abvf'] = 'above-base forms',
- ['abvm'] = 'above-base mark positioning',
- ['abvs'] = 'above-base substitutions',
- ['afrc'] = 'alternative fractions',
- ['akhn'] = 'akhands',
- ['blwf'] = 'below-base forms',
- ['blwm'] = 'below-base mark positioning',
- ['blws'] = 'below-base substitutions',
- ['c2pc'] = 'petite capitals from capitals',
- ['c2sc'] = 'small capitals from capitals',
- ['calt'] = 'contextual alternates',
- ['case'] = 'case-sensitive forms',
- ['ccmp'] = 'glyph composition/decomposition',
- ['cjct'] = 'conjunct forms',
- ['clig'] = 'contextual ligatures',
- ['cpsp'] = 'capital spacing',
- ['cswh'] = 'contextual swash',
- ['curs'] = 'cursive positioning',
- ['dflt'] = 'default processing',
- ['dist'] = 'distances',
- ['dlig'] = 'discretionary ligatures',
- ['dnom'] = 'denominators',
- ['dtls'] = 'dotless forms', -- math
- ['expt'] = 'expert forms',
- ['falt'] = 'final glyph alternates',
- ['fin2'] = 'terminal forms #2',
- ['fin3'] = 'terminal forms #3',
- ['fina'] = 'terminal forms',
- ['flac'] = 'flattened accents over capitals', -- math
- ['frac'] = 'fractions',
- ['fwid'] = 'full width',
- ['half'] = 'half forms',
- ['haln'] = 'halant forms',
- ['halt'] = 'alternate half width',
- ['hist'] = 'historical forms',
- ['hkna'] = 'horizontal kana alternates',
- ['hlig'] = 'historical ligatures',
- ['hngl'] = 'hangul',
- ['hojo'] = 'hojo kanji forms',
- ['hwid'] = 'half width',
- ['init'] = 'initial forms',
- ['isol'] = 'isolated forms',
- ['ital'] = 'italics',
- ['jalt'] = 'justification alternatives',
- ['jp04'] = 'jis2004 forms',
- ['jp78'] = 'jis78 forms',
- ['jp83'] = 'jis83 forms',
- ['jp90'] = 'jis90 forms',
- ['kern'] = 'kerning',
- ['lfbd'] = 'left bounds',
- ['liga'] = 'standard ligatures',
- ['ljmo'] = 'leading jamo forms',
- ['lnum'] = 'lining figures',
- ['locl'] = 'localized forms',
- ['mark'] = 'mark positioning',
- ['med2'] = 'medial forms #2',
- ['medi'] = 'medial forms',
- ['mgrk'] = 'mathematical greek',
- ['mkmk'] = 'mark to mark positioning',
- ['mset'] = 'mark positioning via substitution',
- ['nalt'] = 'alternate annotation forms',
- ['nlck'] = 'nlc kanji forms',
- ['nukt'] = 'nukta forms',
- ['numr'] = 'numerators',
- ['onum'] = 'old style figures',
- ['opbd'] = 'optical bounds',
- ['ordn'] = 'ordinals',
- ['ornm'] = 'ornaments',
- ['palt'] = 'proportional alternate width',
- ['pcap'] = 'petite capitals',
- ['pnum'] = 'proportional figures',
- ['pref'] = 'pre-base forms',
- ['pres'] = 'pre-base substitutions',
- ['pstf'] = 'post-base forms',
- ['psts'] = 'post-base substitutions',
- ['pwid'] = 'proportional widths',
- ['qwid'] = 'quarter widths',
- ['rand'] = 'randomize',
- ['rkrf'] = 'rakar forms',
- ['rlig'] = 'required ligatures',
- ['rphf'] = 'reph form',
- ['rtbd'] = 'right bounds',
- ['rtla'] = 'right-to-left alternates',
- ['rtlm'] = 'right to left math', -- math
- ['ruby'] = 'ruby notation forms',
- ['salt'] = 'stylistic alternates',
- ['sinf'] = 'scientific inferiors',
- ['size'] = 'optical size',
- ['smcp'] = 'small capitals',
- ['smpl'] = 'simplified forms',
- -- ['ss01'] = 'stylistic set 1',
- -- ['ss02'] = 'stylistic set 2',
- -- ['ss03'] = 'stylistic set 3',
- -- ['ss04'] = 'stylistic set 4',
- -- ['ss05'] = 'stylistic set 5',
- -- ['ss06'] = 'stylistic set 6',
- -- ['ss07'] = 'stylistic set 7',
- -- ['ss08'] = 'stylistic set 8',
- -- ['ss09'] = 'stylistic set 9',
- -- ['ss10'] = 'stylistic set 10',
- -- ['ss11'] = 'stylistic set 11',
- -- ['ss12'] = 'stylistic set 12',
- -- ['ss13'] = 'stylistic set 13',
- -- ['ss14'] = 'stylistic set 14',
- -- ['ss15'] = 'stylistic set 15',
- -- ['ss16'] = 'stylistic set 16',
- -- ['ss17'] = 'stylistic set 17',
- -- ['ss18'] = 'stylistic set 18',
- -- ['ss19'] = 'stylistic set 19',
- -- ['ss20'] = 'stylistic set 20',
- ['ssty'] = 'script style', -- math
- ['subs'] = 'subscript',
- ['sups'] = 'superscript',
- ['swsh'] = 'swash',
- ['titl'] = 'titling',
- ['tjmo'] = 'trailing jamo forms',
- ['tnam'] = 'traditional name forms',
- ['tnum'] = 'tabular figures',
- ['trad'] = 'traditional forms',
- ['twid'] = 'third widths',
- ['unic'] = 'unicase',
- ['valt'] = 'alternate vertical metrics',
- ['vatu'] = 'vattu variants',
- ['vert'] = 'vertical writing',
- ['vhal'] = 'alternate vertical half metrics',
- ['vjmo'] = 'vowel jamo forms',
- ['vkna'] = 'vertical kana alternates',
- ['vkrn'] = 'vertical kerning',
- ['vpal'] = 'proportional alternate vertical metrics',
- ['vrt2'] = 'vertical rotation',
- ['zero'] = 'slashed zero',
-
- ['trep'] = 'traditional tex replacements',
- ['tlig'] = 'traditional tex ligatures',
-
- ['ss..'] = 'stylistic set ..',
- ['cv..'] = 'character variant ..',
- ['js..'] = 'justification ..',
-
- ["dv.."] = "devanagari ..",
-}
-
-local baselines = allocate {
- ['hang'] = 'hanging baseline',
- ['icfb'] = 'ideographic character face bottom edge baseline',
- ['icft'] = 'ideographic character face tope edige baseline',
- ['ideo'] = 'ideographic em-box bottom edge baseline',
- ['idtp'] = 'ideographic em-box top edge baseline',
- ['math'] = 'mathmatical centered baseline',
- ['romn'] = 'roman baseline'
-}
-
-tables.scripts = scripts
-tables.languages = languages
-tables.features = features
-tables.baselines = baselines
-
-local acceptscripts = true directives.register("otf.acceptscripts", function(v) acceptscripts = v end)
-local acceptlanguages = true directives.register("otf.acceptlanguages", function(v) acceptlanguages = v end)
-
-local report_checks = logs.reporter("fonts","checks")
-
--- hm, we overload the metatables
-
-if otffeatures.features then
- for k, v in next, otffeatures.features do
- features[k] = v
- end
- otffeatures.features = features
-end
-
-local function swapped(h)
- local r = { }
- for k, v in next, h do
- r[gsub(v,"[^a-z0-9]","")] = k -- is already lower
- end
- return r
-end
-
-local verbosescripts = allocate(swapped(scripts ))
-local verboselanguages = allocate(swapped(languages))
-local verbosefeatures = allocate(swapped(features ))
-local verbosebaselines = allocate(swapped(baselines))
-
--- lets forget about trailing spaces
-
-local function resolve(t,k)
- if k then
- k = gsub(lower(k),"[^a-z0-9]","")
- local v = rawget(t,k)
- if v then
- return v
- end
- end
-end
-
-setmetatableindex(verbosescripts, resolve)
-setmetatableindex(verboselanguages, resolve)
-setmetatableindex(verbosefeatures, resolve)
-setmetatableindex(verbosebaselines, resolve)
-
--- We could optimize the next lookups by using an extra metatable and storing
--- already found values but in practice there are not that many lookups so
--- it's never a bottleneck.
-
-setmetatableindex(scripts, function(t,k)
- if k then
- k = lower(k)
- if k == "dflt" then
- return k
- end
- local v = rawget(t,k)
- if v then
- return v
- end
- k = gsub(k," ","")
- v = rawget(t,v)
- if v then
- return v
- elseif acceptscripts then
- report_checks("registering extra script %a",k)
- rawset(t,k,k)
- return k
- end
- end
- return "dflt"
-end)
-
-setmetatableindex(languages, function(t,k)
- if k then
- k = lower(k)
- if k == "dflt" then
- return k
- end
- local v = rawget(t,k)
- if v then
- return v
- end
- k = gsub(k," ","")
- v = rawget(t,v)
- if v then
- return v
- elseif acceptlanguages then
- report_checks("registering extra language %a",k)
- rawset(t,k,k)
- return k
- end
- end
- return "dflt"
-end)
-
-setmetatablenewindex(languages, "ignore")
-setmetatablenewindex(baselines, "ignore")
-setmetatablenewindex(baselines, "ignore")
-
-local function resolve(t,k)
- if k then
- k = lower(k)
- local v = rawget(t,k)
- if v then
- return v
- end
- k = gsub(k," ","")
- local v = rawget(t,k)
- if v then
- return v
- end
- local tag, dd = match(k,"(..)(%d+)")
- if tag and dd then
- local v = rawget(t,tag)
- if v then
- return v -- return format(v,tonumber(dd)) -- old way
- else
- local v = rawget(t,tag.."..") -- nicer in overview
- if v then
- return (gsub(v,"%.%.",tonumber(dd))) -- new way
- end
- end
- end
- end
- return k -- "dflt"
-end
-
-setmetatableindex(features, resolve)
-
-local function assign(t,k,v)
- if k and v then
- v = lower(v)
- rawset(t,k,v) -- rawset ?
- -- rawset(features,gsub(v,"[^a-z0-9]",""),k) -- why ? old code
- end
-end
-
-setmetatablenewindex(features, assign)
-
-local checkers = {
- rand = function(v)
- return v == true and "random" or v
- end
-}
-
--- Keep this:
---
--- function otf.features.normalize(features)
--- if features then
--- local h = { }
--- for k, v in next, features do
--- k = lower(k)
--- if k == "language" then
--- v = gsub(lower(v),"[^a-z0-9]","")
--- h.language = rawget(verboselanguages,v) or (languages[v] and v) or "dflt" -- auto adds
--- elseif k == "script" then
--- v = gsub(lower(v),"[^a-z0-9]","")
--- h.script = rawget(verbosescripts,v) or (scripts[v] and v) or "dflt" -- auto adds
--- else
--- if type(v) == "string" then
--- local b = is_boolean(v)
--- if type(b) == "nil" then
--- v = tonumber(v) or lower(v)
--- else
--- v = b
--- end
--- end
--- if not rawget(features,k) then
--- k = rawget(verbosefeatures,k) or k
--- end
--- local c = checkers[k]
--- h[k] = c and c(v) or v
--- end
--- end
--- return h
--- end
--- end
-
--- inspect(fonts.handlers.otf.statistics.usedfeatures)
-
-if not storage then
- return
-end
-
-local usedfeatures = statistics.usedfeatures or { }
-statistics.usedfeatures = usedfeatures
-
-table.setmetatableindex(usedfeatures, function(t,k) if k then local v = { } t[k] = v return v end end) -- table.autotable
-
-storage.register("fonts/otf/usedfeatures", usedfeatures, "fonts.handlers.otf.statistics.usedfeatures" )
-
-function otf.features.normalize(features)
- if features then
- local h = { }
- for key, value in next, features do
- local k = lower(key)
- if k == "language" then
- local v = gsub(lower(value),"[^a-z0-9]","")
- h.language = rawget(verboselanguages,v) or (languages[v] and v) or "dflt" -- auto adds
- elseif k == "script" then
- local v = gsub(lower(value),"[^a-z0-9]","")
- h.script = rawget(verbosescripts,v) or (scripts[v] and v) or "dflt" -- auto adds
- else
- local uk = usedfeatures[key]
- local uv = uk[value]
- if uv then
- -- report_checks("feature value %a first seen at %a",value,key)
- else
- if type(value) == "string" then
- local b = is_boolean(value)
- if type(b) == "nil" then
- uv = tonumber(value) or lower(value)
- else
- uv = b
- end
- else
- uv = v
- end
- if not rawget(features,k) then
- k = rawget(verbosefeatures,k) or k
- end
- local c = checkers[k]
- if c then
- uv = c(uv) or vc
- end
- uk[value] = uv
- end
- h[k] = uv
- end
- end
- return h
- end
-end
-
---~ table.print(otf.features.normalize({ language = "dutch", liga = "yes", ss99 = true, aalt = 3, abcd = "yes" } ))
-
--- When I feel the need ...
-
---~ tables.aat = {
---~ [ 0] = {
---~ name = "allTypographicFeaturesType",
---~ [ 0] = "allTypeFeaturesOnSelector",
---~ [ 1] = "allTypeFeaturesOffSelector",
---~ },
---~ [ 1] = {
---~ name = "ligaturesType",
---~ [0 ] = "requiredLigaturesOnSelector",
---~ [1 ] = "requiredLigaturesOffSelector",
---~ [2 ] = "commonLigaturesOnSelector",
---~ [3 ] = "commonLigaturesOffSelector",
---~ [4 ] = "rareLigaturesOnSelector",
---~ [5 ] = "rareLigaturesOffSelector",
---~ [6 ] = "logosOnSelector ",
---~ [7 ] = "logosOffSelector ",
---~ [8 ] = "rebusPicturesOnSelector",
---~ [9 ] = "rebusPicturesOffSelector",
---~ [10] = "diphthongLigaturesOnSelector",
---~ [11] = "diphthongLigaturesOffSelector",
---~ [12] = "squaredLigaturesOnSelector",
---~ [13] = "squaredLigaturesOffSelector",
---~ [14] = "abbrevSquaredLigaturesOnSelector",
---~ [15] = "abbrevSquaredLigaturesOffSelector",
---~ },
---~ [ 2] = {
---~ name = "cursiveConnectionType",
---~ [ 0] = "unconnectedSelector",
---~ [ 1] = "partiallyConnectedSelector",
---~ [ 2] = "cursiveSelector ",
---~ },
---~ [ 3] = {
---~ name = "letterCaseType",
---~ [ 0] = "upperAndLowerCaseSelector",
---~ [ 1] = "allCapsSelector ",
---~ [ 2] = "allLowerCaseSelector",
---~ [ 3] = "smallCapsSelector ",
---~ [ 4] = "initialCapsSelector",
---~ [ 5] = "initialCapsAndSmallCapsSelector",
---~ },
---~ [ 4] = {
---~ name = "verticalSubstitutionType",
---~ [ 0] = "substituteVerticalFormsOnSelector",
---~ [ 1] = "substituteVerticalFormsOffSelector",
---~ },
---~ [ 5] = {
---~ name = "linguisticRearrangementType",
---~ [ 0] = "linguisticRearrangementOnSelector",
---~ [ 1] = "linguisticRearrangementOffSelector",
---~ },
---~ [ 6] = {
---~ name = "numberSpacingType",
---~ [ 0] = "monospacedNumbersSelector",
---~ [ 1] = "proportionalNumbersSelector",
---~ },
---~ [ 7] = {
---~ name = "appleReserved1Type",
---~ },
---~ [ 8] = {
---~ name = "smartSwashType",
---~ [ 0] = "wordInitialSwashesOnSelector",
---~ [ 1] = "wordInitialSwashesOffSelector",
---~ [ 2] = "wordFinalSwashesOnSelector",
---~ [ 3] = "wordFinalSwashesOffSelector",
---~ [ 4] = "lineInitialSwashesOnSelector",
---~ [ 5] = "lineInitialSwashesOffSelector",
---~ [ 6] = "lineFinalSwashesOnSelector",
---~ [ 7] = "lineFinalSwashesOffSelector",
---~ [ 8] = "nonFinalSwashesOnSelector",
---~ [ 9] = "nonFinalSwashesOffSelector",
---~ },
---~ [ 9] = {
---~ name = "diacriticsType",
---~ [ 0] = "showDiacriticsSelector",
---~ [ 1] = "hideDiacriticsSelector",
---~ [ 2] = "decomposeDiacriticsSelector",
---~ },
---~ [10] = {
---~ name = "verticalPositionType",
---~ [ 0] = "normalPositionSelector",
---~ [ 1] = "superiorsSelector ",
---~ [ 2] = "inferiorsSelector ",
---~ [ 3] = "ordinalsSelector ",
---~ },
---~ [11] = {
---~ name = "fractionsType",
---~ [ 0] = "noFractionsSelector",
---~ [ 1] = "verticalFractionsSelector",
---~ [ 2] = "diagonalFractionsSelector",
---~ },
---~ [12] = {
---~ name = "appleReserved2Type",
---~ },
---~ [13] = {
---~ name = "overlappingCharactersType",
---~ [ 0] = "preventOverlapOnSelector",
---~ [ 1] = "preventOverlapOffSelector",
---~ },
---~ [14] = {
---~ name = "typographicExtrasType",
---~ [0 ] = "hyphensToEmDashOnSelector",
---~ [1 ] = "hyphensToEmDashOffSelector",
---~ [2 ] = "hyphenToEnDashOnSelector",
---~ [3 ] = "hyphenToEnDashOffSelector",
---~ [4 ] = "unslashedZeroOnSelector",
---~ [5 ] = "unslashedZeroOffSelector",
---~ [6 ] = "formInterrobangOnSelector",
---~ [7 ] = "formInterrobangOffSelector",
---~ [8 ] = "smartQuotesOnSelector",
---~ [9 ] = "smartQuotesOffSelector",
---~ [10] = "periodsToEllipsisOnSelector",
---~ [11] = "periodsToEllipsisOffSelector",
---~ },
---~ [15] = {
---~ name = "mathematicalExtrasType",
---~ [ 0] = "hyphenToMinusOnSelector",
---~ [ 1] = "hyphenToMinusOffSelector",
---~ [ 2] = "asteriskToMultiplyOnSelector",
---~ [ 3] = "asteriskToMultiplyOffSelector",
---~ [ 4] = "slashToDivideOnSelector",
---~ [ 5] = "slashToDivideOffSelector",
---~ [ 6] = "inequalityLigaturesOnSelector",
---~ [ 7] = "inequalityLigaturesOffSelector",
---~ [ 8] = "exponentsOnSelector",
---~ [ 9] = "exponentsOffSelector",
---~ },
---~ [16] = {
---~ name = "ornamentSetsType",
---~ [ 0] = "noOrnamentsSelector",
---~ [ 1] = "dingbatsSelector ",
---~ [ 2] = "piCharactersSelector",
---~ [ 3] = "fleuronsSelector ",
---~ [ 4] = "decorativeBordersSelector",
---~ [ 5] = "internationalSymbolsSelector",
---~ [ 6] = "mathSymbolsSelector",
---~ },
---~ [17] = {
---~ name = "characterAlternativesType",
---~ [ 0] = "noAlternatesSelector",
---~ },
---~ [18] = {
---~ name = "designComplexityType",
---~ [ 0] = "designLevel1Selector",
---~ [ 1] = "designLevel2Selector",
---~ [ 2] = "designLevel3Selector",
---~ [ 3] = "designLevel4Selector",
---~ [ 4] = "designLevel5Selector",
---~ },
---~ [19] = {
---~ name = "styleOptionsType",
---~ [ 0] = "noStyleOptionsSelector",
---~ [ 1] = "displayTextSelector",
---~ [ 2] = "engravedTextSelector",
---~ [ 3] = "illuminatedCapsSelector",
---~ [ 4] = "titlingCapsSelector",
---~ [ 5] = "tallCapsSelector ",
---~ },
---~ [20] = {
---~ name = "characterShapeType",
---~ [0 ] = "traditionalCharactersSelector",
---~ [1 ] = "simplifiedCharactersSelector",
---~ [2 ] = "jis1978CharactersSelector",
---~ [3 ] = "jis1983CharactersSelector",
---~ [4 ] = "jis1990CharactersSelector",
---~ [5 ] = "traditionalAltOneSelector",
---~ [6 ] = "traditionalAltTwoSelector",
---~ [7 ] = "traditionalAltThreeSelector",
---~ [8 ] = "traditionalAltFourSelector",
---~ [9 ] = "traditionalAltFiveSelector",
---~ [10] = "expertCharactersSelector",
---~ },
---~ [21] = {
---~ name = "numberCaseType",
---~ [ 0] = "lowerCaseNumbersSelector",
---~ [ 1] = "upperCaseNumbersSelector",
---~ },
---~ [22] = {
---~ name = "textSpacingType",
---~ [ 0] = "proportionalTextSelector",
---~ [ 1] = "monospacedTextSelector",
---~ [ 2] = "halfWidthTextSelector",
---~ [ 3] = "normallySpacedTextSelector",
---~ },
---~ [23] = {
---~ name = "transliterationType",
---~ [ 0] = "noTransliterationSelector",
---~ [ 1] = "hanjaToHangulSelector",
---~ [ 2] = "hiraganaToKatakanaSelector",
---~ [ 3] = "katakanaToHiraganaSelector",
---~ [ 4] = "kanaToRomanizationSelector",
---~ [ 5] = "romanizationToHiraganaSelector",
---~ [ 6] = "romanizationToKatakanaSelector",
---~ [ 7] = "hanjaToHangulAltOneSelector",
---~ [ 8] = "hanjaToHangulAltTwoSelector",
---~ [ 9] = "hanjaToHangulAltThreeSelector",
---~ },
---~ [24] = {
---~ name = "annotationType",
---~ [ 0] = "noAnnotationSelector",
---~ [ 1] = "boxAnnotationSelector",
---~ [ 2] = "roundedBoxAnnotationSelector",
---~ [ 3] = "circleAnnotationSelector",
---~ [ 4] = "invertedCircleAnnotationSelector",
---~ [ 5] = "parenthesisAnnotationSelector",
---~ [ 6] = "periodAnnotationSelector",
---~ [ 7] = "romanNumeralAnnotationSelector",
---~ [ 8] = "diamondAnnotationSelector",
---~ },
---~ [25] = {
---~ name = "kanaSpacingType",
---~ [ 0] = "fullWidthKanaSelector",
---~ [ 1] = "proportionalKanaSelector",
---~ },
---~ [26] = {
---~ name = "ideographicSpacingType",
---~ [ 0] = "fullWidthIdeographsSelector",
---~ [ 1] = "proportionalIdeographsSelector",
---~ },
---~ [103] = {
---~ name = "cjkRomanSpacingType",
---~ [ 0] = "halfWidthCJKRomanSelector",
---~ [ 1] = "proportionalCJKRomanSelector",
---~ [ 2] = "defaultCJKRomanSelector",
---~ [ 3] = "fullWidthCJKRomanSelector",
---~ },
---~ }
+if not modules then modules = { } end modules ['font-ott'] = {
+ version = 1.001,
+ comment = "companion to font-otf.lua (tables)",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+ -- dataonly = true,
+}
+
+local type, next, tonumber, tostring, rawget, rawset = type, next, tonumber, tostring, rawget, rawset
+local gsub, lower, format, match = string.gsub, string.lower, string.format, string.match
+local is_boolean = string.is_boolean
+
+local setmetatableindex = table.setmetatableindex
+local setmetatablenewindex = table.setmetatablenewindex
+local allocate = utilities.storage.allocate
+
+local fonts = fonts
+local otf = fonts.handlers.otf
+local otffeatures = otf.features
+local registerotffeature = otffeatures.register
+
+local tables = otf.tables or { }
+otf.tables = tables
+
+local statistics = otf.statistics or { }
+otf.statistics = statistics
+
+local scripts = allocate {
+ ['arab'] = 'arabic',
+ ['armn'] = 'armenian',
+ ['bali'] = 'balinese',
+ ['beng'] = 'bengali',
+ ['bopo'] = 'bopomofo',
+ ['brai'] = 'braille',
+ ['bugi'] = 'buginese',
+ ['buhd'] = 'buhid',
+ ['byzm'] = 'byzantine music',
+ ['cans'] = 'canadian syllabics',
+ ['cher'] = 'cherokee',
+ ['copt'] = 'coptic',
+ ['cprt'] = 'cypriot syllabary',
+ ['cyrl'] = 'cyrillic',
+ ['deva'] = 'devanagari',
+ ['dsrt'] = 'deseret',
+ ['ethi'] = 'ethiopic',
+ ['geor'] = 'georgian',
+ ['glag'] = 'glagolitic',
+ ['goth'] = 'gothic',
+ ['grek'] = 'greek',
+ ['gujr'] = 'gujarati',
+ ['guru'] = 'gurmukhi',
+ ['hang'] = 'hangul',
+ ['hani'] = 'cjk ideographic',
+ ['hano'] = 'hanunoo',
+ ['hebr'] = 'hebrew',
+ ['ital'] = 'old italic',
+ ['jamo'] = 'hangul jamo',
+ ['java'] = 'javanese',
+ ['kana'] = 'hiragana and katakana',
+ ['khar'] = 'kharosthi',
+ ['khmr'] = 'khmer',
+ ['knda'] = 'kannada',
+ ['lao' ] = 'lao',
+ ['latn'] = 'latin',
+ ['limb'] = 'limbu',
+ ['linb'] = 'linear b',
+ ['math'] = 'mathematical alphanumeric symbols',
+ ['mlym'] = 'malayalam',
+ ['mong'] = 'mongolian',
+ ['musc'] = 'musical symbols',
+ ['mymr'] = 'myanmar',
+ ['nko' ] = "n'ko",
+ ['ogam'] = 'ogham',
+ ['orya'] = 'oriya',
+ ['osma'] = 'osmanya',
+ ['phag'] = 'phags-pa',
+ ['phnx'] = 'phoenician',
+ ['runr'] = 'runic',
+ ['shaw'] = 'shavian',
+ ['sinh'] = 'sinhala',
+ ['sylo'] = 'syloti nagri',
+ ['syrc'] = 'syriac',
+ ['tagb'] = 'tagbanwa',
+ ['tale'] = 'tai le',
+ ['talu'] = 'tai lu',
+ ['taml'] = 'tamil',
+ ['telu'] = 'telugu',
+ ['tfng'] = 'tifinagh',
+ ['tglg'] = 'tagalog',
+ ['thaa'] = 'thaana',
+ ['thai'] = 'thai',
+ ['tibt'] = 'tibetan',
+ ['ugar'] = 'ugaritic cuneiform',
+ ['xpeo'] = 'old persian cuneiform',
+ ['xsux'] = 'sumero-akkadian cuneiform',
+ ['yi' ] = 'yi',
+}
+
+local languages = allocate {
+ ['aba'] = 'abaza',
+ ['abk'] = 'abkhazian',
+ ['ady'] = 'adyghe',
+ ['afk'] = 'afrikaans',
+ ['afr'] = 'afar',
+ ['agw'] = 'agaw',
+ ['als'] = 'alsatian',
+ ['alt'] = 'altai',
+ ['amh'] = 'amharic',
+ ['ara'] = 'arabic',
+ ['ari'] = 'aari',
+ ['ark'] = 'arakanese',
+ ['asm'] = 'assamese',
+ ['ath'] = 'athapaskan',
+ ['avr'] = 'avar',
+ ['awa'] = 'awadhi',
+ ['aym'] = 'aymara',
+ ['aze'] = 'azeri',
+ ['bad'] = 'badaga',
+ ['bag'] = 'baghelkhandi',
+ ['bal'] = 'balkar',
+ ['bau'] = 'baule',
+ ['bbr'] = 'berber',
+ ['bch'] = 'bench',
+ ['bcr'] = 'bible cree',
+ ['bel'] = 'belarussian',
+ ['bem'] = 'bemba',
+ ['ben'] = 'bengali',
+ ['bgr'] = 'bulgarian',
+ ['bhi'] = 'bhili',
+ ['bho'] = 'bhojpuri',
+ ['bik'] = 'bikol',
+ ['bil'] = 'bilen',
+ ['bkf'] = 'blackfoot',
+ ['bli'] = 'balochi',
+ ['bln'] = 'balante',
+ ['blt'] = 'balti',
+ ['bmb'] = 'bambara',
+ ['bml'] = 'bamileke',
+ ['bos'] = 'bosnian',
+ ['bre'] = 'breton',
+ ['brh'] = 'brahui',
+ ['bri'] = 'braj bhasha',
+ ['brm'] = 'burmese',
+ ['bsh'] = 'bashkir',
+ ['bti'] = 'beti',
+ ['cat'] = 'catalan',
+ ['ceb'] = 'cebuano',
+ ['che'] = 'chechen',
+ ['chg'] = 'chaha gurage',
+ ['chh'] = 'chattisgarhi',
+ ['chi'] = 'chichewa',
+ ['chk'] = 'chukchi',
+ ['chp'] = 'chipewyan',
+ ['chr'] = 'cherokee',
+ ['chu'] = 'chuvash',
+ ['cmr'] = 'comorian',
+ ['cop'] = 'coptic',
+ ['cos'] = 'corsican',
+ ['cre'] = 'cree',
+ ['crr'] = 'carrier',
+ ['crt'] = 'crimean tatar',
+ ['csl'] = 'church slavonic',
+ ['csy'] = 'czech',
+ ['dan'] = 'danish',
+ ['dar'] = 'dargwa',
+ ['dcr'] = 'woods cree',
+ ['deu'] = 'german',
+ ['dgr'] = 'dogri',
+ ['div'] = 'divehi',
+ ['djr'] = 'djerma',
+ ['dng'] = 'dangme',
+ ['dnk'] = 'dinka',
+ ['dri'] = 'dari',
+ ['dun'] = 'dungan',
+ ['dzn'] = 'dzongkha',
+ ['ebi'] = 'ebira',
+ ['ecr'] = 'eastern cree',
+ ['edo'] = 'edo',
+ ['efi'] = 'efik',
+ ['ell'] = 'greek',
+ ['eng'] = 'english',
+ ['erz'] = 'erzya',
+ ['esp'] = 'spanish',
+ ['eti'] = 'estonian',
+ ['euq'] = 'basque',
+ ['evk'] = 'evenki',
+ ['evn'] = 'even',
+ ['ewe'] = 'ewe',
+ ['fan'] = 'french antillean',
+ ['far'] = 'farsi',
+ ['fin'] = 'finnish',
+ ['fji'] = 'fijian',
+ ['fle'] = 'flemish',
+ ['fne'] = 'forest nenets',
+ ['fon'] = 'fon',
+ ['fos'] = 'faroese',
+ ['fra'] = 'french',
+ ['fri'] = 'frisian',
+ ['frl'] = 'friulian',
+ ['fta'] = 'futa',
+ ['ful'] = 'fulani',
+ ['gad'] = 'ga',
+ ['gae'] = 'gaelic',
+ ['gag'] = 'gagauz',
+ ['gal'] = 'galician',
+ ['gar'] = 'garshuni',
+ ['gaw'] = 'garhwali',
+ ['gez'] = "ge'ez",
+ ['gil'] = 'gilyak',
+ ['gmz'] = 'gumuz',
+ ['gon'] = 'gondi',
+ ['grn'] = 'greenlandic',
+ ['gro'] = 'garo',
+ ['gua'] = 'guarani',
+ ['guj'] = 'gujarati',
+ ['hai'] = 'haitian',
+ ['hal'] = 'halam',
+ ['har'] = 'harauti',
+ ['hau'] = 'hausa',
+ ['haw'] = 'hawaiian',
+ ['hbn'] = 'hammer-banna',
+ ['hil'] = 'hiligaynon',
+ ['hin'] = 'hindi',
+ ['hma'] = 'high mari',
+ ['hnd'] = 'hindko',
+ ['ho'] = 'ho',
+ ['hri'] = 'harari',
+ ['hrv'] = 'croatian',
+ ['hun'] = 'hungarian',
+ ['hye'] = 'armenian',
+ ['ibo'] = 'igbo',
+ ['ijo'] = 'ijo',
+ ['ilo'] = 'ilokano',
+ ['ind'] = 'indonesian',
+ ['ing'] = 'ingush',
+ ['inu'] = 'inuktitut',
+ ['iri'] = 'irish',
+ ['irt'] = 'irish traditional',
+ ['isl'] = 'icelandic',
+ ['ism'] = 'inari sami',
+ ['ita'] = 'italian',
+ ['iwr'] = 'hebrew',
+ ['jan'] = 'japanese',
+ ['jav'] = 'javanese',
+ ['jii'] = 'yiddish',
+ ['jud'] = 'judezmo',
+ ['jul'] = 'jula',
+ ['kab'] = 'kabardian',
+ ['kac'] = 'kachchi',
+ ['kal'] = 'kalenjin',
+ ['kan'] = 'kannada',
+ ['kar'] = 'karachay',
+ ['kat'] = 'georgian',
+ ['kaz'] = 'kazakh',
+ ['keb'] = 'kebena',
+ ['kge'] = 'khutsuri georgian',
+ ['kha'] = 'khakass',
+ ['khk'] = 'khanty-kazim',
+ ['khm'] = 'khmer',
+ ['khs'] = 'khanty-shurishkar',
+ ['khv'] = 'khanty-vakhi',
+ ['khw'] = 'khowar',
+ ['kik'] = 'kikuyu',
+ ['kir'] = 'kirghiz',
+ ['kis'] = 'kisii',
+ ['kkn'] = 'kokni',
+ ['klm'] = 'kalmyk',
+ ['kmb'] = 'kamba',
+ ['kmn'] = 'kumaoni',
+ ['kmo'] = 'komo',
+ ['kms'] = 'komso',
+ ['knr'] = 'kanuri',
+ ['kod'] = 'kodagu',
+ ['koh'] = 'korean old hangul',
+ ['kok'] = 'konkani',
+ ['kon'] = 'kikongo',
+ ['kop'] = 'komi-permyak',
+ ['kor'] = 'korean',
+ ['koz'] = 'komi-zyrian',
+ ['kpl'] = 'kpelle',
+ ['kri'] = 'krio',
+ ['krk'] = 'karakalpak',
+ ['krl'] = 'karelian',
+ ['krm'] = 'karaim',
+ ['krn'] = 'karen',
+ ['krt'] = 'koorete',
+ ['ksh'] = 'kashmiri',
+ ['ksi'] = 'khasi',
+ ['ksm'] = 'kildin sami',
+ ['kui'] = 'kui',
+ ['kul'] = 'kulvi',
+ ['kum'] = 'kumyk',
+ ['kur'] = 'kurdish',
+ ['kuu'] = 'kurukh',
+ ['kuy'] = 'kuy',
+ ['kyk'] = 'koryak',
+ ['lad'] = 'ladin',
+ ['lah'] = 'lahuli',
+ ['lak'] = 'lak',
+ ['lam'] = 'lambani',
+ ['lao'] = 'lao',
+ ['lat'] = 'latin',
+ ['laz'] = 'laz',
+ ['lcr'] = 'l-cree',
+ ['ldk'] = 'ladakhi',
+ ['lez'] = 'lezgi',
+ ['lin'] = 'lingala',
+ ['lma'] = 'low mari',
+ ['lmb'] = 'limbu',
+ ['lmw'] = 'lomwe',
+ ['lsb'] = 'lower sorbian',
+ ['lsm'] = 'lule sami',
+ ['lth'] = 'lithuanian',
+ ['ltz'] = 'luxembourgish',
+ ['lub'] = 'luba',
+ ['lug'] = 'luganda',
+ ['luh'] = 'luhya',
+ ['luo'] = 'luo',
+ ['lvi'] = 'latvian',
+ ['maj'] = 'majang',
+ ['mak'] = 'makua',
+ ['mal'] = 'malayalam traditional',
+ ['man'] = 'mansi',
+ ['map'] = 'mapudungun',
+ ['mar'] = 'marathi',
+ ['maw'] = 'marwari',
+ ['mbn'] = 'mbundu',
+ ['mch'] = 'manchu',
+ ['mcr'] = 'moose cree',
+ ['mde'] = 'mende',
+ ['men'] = "me'en",
+ ['miz'] = 'mizo',
+ ['mkd'] = 'macedonian',
+ ['mle'] = 'male',
+ ['mlg'] = 'malagasy',
+ ['mln'] = 'malinke',
+ ['mlr'] = 'malayalam reformed',
+ ['mly'] = 'malay',
+ ['mnd'] = 'mandinka',
+ ['mng'] = 'mongolian',
+ ['mni'] = 'manipuri',
+ ['mnk'] = 'maninka',
+ ['mnx'] = 'manx gaelic',
+ ['moh'] = 'mohawk',
+ ['mok'] = 'moksha',
+ ['mol'] = 'moldavian',
+ ['mon'] = 'mon',
+ ['mor'] = 'moroccan',
+ ['mri'] = 'maori',
+ ['mth'] = 'maithili',
+ ['mts'] = 'maltese',
+ ['mun'] = 'mundari',
+ ['nag'] = 'naga-assamese',
+ ['nan'] = 'nanai',
+ ['nas'] = 'naskapi',
+ ['ncr'] = 'n-cree',
+ ['ndb'] = 'ndebele',
+ ['ndg'] = 'ndonga',
+ ['nep'] = 'nepali',
+ ['new'] = 'newari',
+ ['ngr'] = 'nagari',
+ ['nhc'] = 'norway house cree',
+ ['nis'] = 'nisi',
+ ['niu'] = 'niuean',
+ ['nkl'] = 'nkole',
+ ['nko'] = "n'ko",
+ ['nld'] = 'dutch',
+ ['nog'] = 'nogai',
+ ['nor'] = 'norwegian',
+ ['nsm'] = 'northern sami',
+ ['nta'] = 'northern tai',
+ ['nto'] = 'esperanto',
+ ['nyn'] = 'nynorsk',
+ ['oci'] = 'occitan',
+ ['ocr'] = 'oji-cree',
+ ['ojb'] = 'ojibway',
+ ['ori'] = 'oriya',
+ ['oro'] = 'oromo',
+ ['oss'] = 'ossetian',
+ ['paa'] = 'palestinian aramaic',
+ ['pal'] = 'pali',
+ ['pan'] = 'punjabi',
+ ['pap'] = 'palpa',
+ ['pas'] = 'pashto',
+ ['pgr'] = 'polytonic greek',
+ ['pil'] = 'pilipino',
+ ['plg'] = 'palaung',
+ ['plk'] = 'polish',
+ ['pro'] = 'provencal',
+ ['ptg'] = 'portuguese',
+ ['qin'] = 'chin',
+ ['raj'] = 'rajasthani',
+ ['rbu'] = 'russian buriat',
+ ['rcr'] = 'r-cree',
+ ['ria'] = 'riang',
+ ['rms'] = 'rhaeto-romanic',
+ ['rom'] = 'romanian',
+ ['roy'] = 'romany',
+ ['rsy'] = 'rusyn',
+ ['rua'] = 'ruanda',
+ ['rus'] = 'russian',
+ ['sad'] = 'sadri',
+ ['san'] = 'sanskrit',
+ ['sat'] = 'santali',
+ ['say'] = 'sayisi',
+ ['sek'] = 'sekota',
+ ['sel'] = 'selkup',
+ ['sgo'] = 'sango',
+ ['shn'] = 'shan',
+ ['sib'] = 'sibe',
+ ['sid'] = 'sidamo',
+ ['sig'] = 'silte gurage',
+ ['sks'] = 'skolt sami',
+ ['sky'] = 'slovak',
+ ['sla'] = 'slavey',
+ ['slv'] = 'slovenian',
+ ['sml'] = 'somali',
+ ['smo'] = 'samoan',
+ ['sna'] = 'sena',
+ ['snd'] = 'sindhi',
+ ['snh'] = 'sinhalese',
+ ['snk'] = 'soninke',
+ ['sog'] = 'sodo gurage',
+ ['sot'] = 'sotho',
+ ['sqi'] = 'albanian',
+ ['srb'] = 'serbian',
+ ['srk'] = 'saraiki',
+ ['srr'] = 'serer',
+ ['ssl'] = 'south slavey',
+ ['ssm'] = 'southern sami',
+ ['sur'] = 'suri',
+ ['sva'] = 'svan',
+ ['sve'] = 'swedish',
+ ['swa'] = 'swadaya aramaic',
+ ['swk'] = 'swahili',
+ ['swz'] = 'swazi',
+ ['sxt'] = 'sutu',
+ ['syr'] = 'syriac',
+ ['tab'] = 'tabasaran',
+ ['taj'] = 'tajiki',
+ ['tam'] = 'tamil',
+ ['tat'] = 'tatar',
+ ['tcr'] = 'th-cree',
+ ['tel'] = 'telugu',
+ ['tgn'] = 'tongan',
+ ['tgr'] = 'tigre',
+ ['tgy'] = 'tigrinya',
+ ['tha'] = 'thai',
+ ['tht'] = 'tahitian',
+ ['tib'] = 'tibetan',
+ ['tkm'] = 'turkmen',
+ ['tmn'] = 'temne',
+ ['tna'] = 'tswana',
+ ['tne'] = 'tundra nenets',
+ ['tng'] = 'tonga',
+ ['tod'] = 'todo',
+ ['trk'] = 'turkish',
+ ['tsg'] = 'tsonga',
+ ['tua'] = 'turoyo aramaic',
+ ['tul'] = 'tulu',
+ ['tuv'] = 'tuvin',
+ ['twi'] = 'twi',
+ ['udm'] = 'udmurt',
+ ['ukr'] = 'ukrainian',
+ ['urd'] = 'urdu',
+ ['usb'] = 'upper sorbian',
+ ['uyg'] = 'uyghur',
+ ['uzb'] = 'uzbek',
+ ['ven'] = 'venda',
+ ['vit'] = 'vietnamese',
+ ['wa' ] = 'wa',
+ ['wag'] = 'wagdi',
+ ['wcr'] = 'west-cree',
+ ['wel'] = 'welsh',
+ ['wlf'] = 'wolof',
+ ['xbd'] = 'tai lue',
+ ['xhs'] = 'xhosa',
+ ['yak'] = 'yakut',
+ ['yba'] = 'yoruba',
+ ['ycr'] = 'y-cree',
+ ['yic'] = 'yi classic',
+ ['yim'] = 'yi modern',
+ ['zhh'] = 'chinese hong kong',
+ ['zhp'] = 'chinese phonetic',
+ ['zhs'] = 'chinese simplified',
+ ['zht'] = 'chinese traditional',
+ ['znd'] = 'zande',
+ ['zul'] = 'zulu'
+}
+
+local features = allocate {
+ ['aalt'] = 'access all alternates',
+ ['abvf'] = 'above-base forms',
+ ['abvm'] = 'above-base mark positioning',
+ ['abvs'] = 'above-base substitutions',
+ ['afrc'] = 'alternative fractions',
+ ['akhn'] = 'akhands',
+ ['blwf'] = 'below-base forms',
+ ['blwm'] = 'below-base mark positioning',
+ ['blws'] = 'below-base substitutions',
+ ['c2pc'] = 'petite capitals from capitals',
+ ['c2sc'] = 'small capitals from capitals',
+ ['calt'] = 'contextual alternates',
+ ['case'] = 'case-sensitive forms',
+ ['ccmp'] = 'glyph composition/decomposition',
+ ['cjct'] = 'conjunct forms',
+ ['clig'] = 'contextual ligatures',
+ ['cpsp'] = 'capital spacing',
+ ['cswh'] = 'contextual swash',
+ ['curs'] = 'cursive positioning',
+ ['dflt'] = 'default processing',
+ ['dist'] = 'distances',
+ ['dlig'] = 'discretionary ligatures',
+ ['dnom'] = 'denominators',
+ ['dtls'] = 'dotless forms', -- math
+ ['expt'] = 'expert forms',
+ ['falt'] = 'final glyph alternates',
+ ['fin2'] = 'terminal forms #2',
+ ['fin3'] = 'terminal forms #3',
+ ['fina'] = 'terminal forms',
+ ['flac'] = 'flattened accents over capitals', -- math
+ ['frac'] = 'fractions',
+ ['fwid'] = 'full width',
+ ['half'] = 'half forms',
+ ['haln'] = 'halant forms',
+ ['halt'] = 'alternate half width',
+ ['hist'] = 'historical forms',
+ ['hkna'] = 'horizontal kana alternates',
+ ['hlig'] = 'historical ligatures',
+ ['hngl'] = 'hangul',
+ ['hojo'] = 'hojo kanji forms',
+ ['hwid'] = 'half width',
+ ['init'] = 'initial forms',
+ ['isol'] = 'isolated forms',
+ ['ital'] = 'italics',
+ ['jalt'] = 'justification alternatives',
+ ['jp04'] = 'jis2004 forms',
+ ['jp78'] = 'jis78 forms',
+ ['jp83'] = 'jis83 forms',
+ ['jp90'] = 'jis90 forms',
+ ['kern'] = 'kerning',
+ ['lfbd'] = 'left bounds',
+ ['liga'] = 'standard ligatures',
+ ['ljmo'] = 'leading jamo forms',
+ ['lnum'] = 'lining figures',
+ ['locl'] = 'localized forms',
+ ['mark'] = 'mark positioning',
+ ['med2'] = 'medial forms #2',
+ ['medi'] = 'medial forms',
+ ['mgrk'] = 'mathematical greek',
+ ['mkmk'] = 'mark to mark positioning',
+ ['mset'] = 'mark positioning via substitution',
+ ['nalt'] = 'alternate annotation forms',
+ ['nlck'] = 'nlc kanji forms',
+ ['nukt'] = 'nukta forms',
+ ['numr'] = 'numerators',
+ ['onum'] = 'old style figures',
+ ['opbd'] = 'optical bounds',
+ ['ordn'] = 'ordinals',
+ ['ornm'] = 'ornaments',
+ ['palt'] = 'proportional alternate width',
+ ['pcap'] = 'petite capitals',
+ ['pnum'] = 'proportional figures',
+ ['pref'] = 'pre-base forms',
+ ['pres'] = 'pre-base substitutions',
+ ['pstf'] = 'post-base forms',
+ ['psts'] = 'post-base substitutions',
+ ['pwid'] = 'proportional widths',
+ ['qwid'] = 'quarter widths',
+ ['rand'] = 'randomize',
+ ['rkrf'] = 'rakar forms',
+ ['rlig'] = 'required ligatures',
+ ['rphf'] = 'reph form',
+ ['rtbd'] = 'right bounds',
+ ['rtla'] = 'right-to-left alternates',
+ ['rtlm'] = 'right to left math', -- math
+ ['ruby'] = 'ruby notation forms',
+ ['salt'] = 'stylistic alternates',
+ ['sinf'] = 'scientific inferiors',
+ ['size'] = 'optical size',
+ ['smcp'] = 'small capitals',
+ ['smpl'] = 'simplified forms',
+ -- ['ss01'] = 'stylistic set 1',
+ -- ['ss02'] = 'stylistic set 2',
+ -- ['ss03'] = 'stylistic set 3',
+ -- ['ss04'] = 'stylistic set 4',
+ -- ['ss05'] = 'stylistic set 5',
+ -- ['ss06'] = 'stylistic set 6',
+ -- ['ss07'] = 'stylistic set 7',
+ -- ['ss08'] = 'stylistic set 8',
+ -- ['ss09'] = 'stylistic set 9',
+ -- ['ss10'] = 'stylistic set 10',
+ -- ['ss11'] = 'stylistic set 11',
+ -- ['ss12'] = 'stylistic set 12',
+ -- ['ss13'] = 'stylistic set 13',
+ -- ['ss14'] = 'stylistic set 14',
+ -- ['ss15'] = 'stylistic set 15',
+ -- ['ss16'] = 'stylistic set 16',
+ -- ['ss17'] = 'stylistic set 17',
+ -- ['ss18'] = 'stylistic set 18',
+ -- ['ss19'] = 'stylistic set 19',
+ -- ['ss20'] = 'stylistic set 20',
+ ['ssty'] = 'script style', -- math
+ ['subs'] = 'subscript',
+ ['sups'] = 'superscript',
+ ['swsh'] = 'swash',
+ ['titl'] = 'titling',
+ ['tjmo'] = 'trailing jamo forms',
+ ['tnam'] = 'traditional name forms',
+ ['tnum'] = 'tabular figures',
+ ['trad'] = 'traditional forms',
+ ['twid'] = 'third widths',
+ ['unic'] = 'unicase',
+ ['valt'] = 'alternate vertical metrics',
+ ['vatu'] = 'vattu variants',
+ ['vert'] = 'vertical writing',
+ ['vhal'] = 'alternate vertical half metrics',
+ ['vjmo'] = 'vowel jamo forms',
+ ['vkna'] = 'vertical kana alternates',
+ ['vkrn'] = 'vertical kerning',
+ ['vpal'] = 'proportional alternate vertical metrics',
+ ['vrt2'] = 'vertical rotation',
+ ['zero'] = 'slashed zero',
+
+ ['trep'] = 'traditional tex replacements',
+ ['tlig'] = 'traditional tex ligatures',
+
+ ['ss..'] = 'stylistic set ..',
+ ['cv..'] = 'character variant ..',
+ ['js..'] = 'justification ..',
+
+ ["dv.."] = "devanagari ..",
+}
+
+local baselines = allocate {
+ ['hang'] = 'hanging baseline',
+ ['icfb'] = 'ideographic character face bottom edge baseline',
+ ['icft'] = 'ideographic character face top edge baseline',
+ ['ideo'] = 'ideographic em-box bottom edge baseline',
+ ['idtp'] = 'ideographic em-box top edge baseline',
+ ['math'] = 'mathematical centered baseline',
+ ['romn'] = 'roman baseline'
+}
+
+tables.scripts = scripts
+tables.languages = languages
+tables.features = features
+tables.baselines = baselines
+
+local acceptscripts = true directives.register("otf.acceptscripts", function(v) acceptscripts = v end)
+local acceptlanguages = true directives.register("otf.acceptlanguages", function(v) acceptlanguages = v end)
+
+local report_checks = logs.reporter("fonts","checks")
+
+-- hm, we overload the metatables
+
+if otffeatures.features then
+ for k, v in next, otffeatures.features do
+ features[k] = v
+ end
+ otffeatures.features = features
+end
+
+local function swapped(h)
+ local r = { }
+ for k, v in next, h do
+ r[gsub(v,"[^a-z0-9]","")] = k -- is already lower
+ end
+ return r
+end
+
+local verbosescripts = allocate(swapped(scripts ))
+local verboselanguages = allocate(swapped(languages))
+local verbosefeatures = allocate(swapped(features ))
+local verbosebaselines = allocate(swapped(baselines))
+
+-- lets forget about trailing spaces
+
+local function resolve(t,k)
+ if k then
+ k = gsub(lower(k),"[^a-z0-9]","")
+ local v = rawget(t,k)
+ if v then
+ return v
+ end
+ end
+end
+
+setmetatableindex(verbosescripts, resolve)
+setmetatableindex(verboselanguages, resolve)
+setmetatableindex(verbosefeatures, resolve)
+setmetatableindex(verbosebaselines, resolve)
+
+-- We could optimize the next lookups by using an extra metatable and storing
+-- already found values but in practice there are not that many lookups so
+-- it's never a bottleneck.
+
+setmetatableindex(scripts, function(t,k)
+ if k then
+ k = lower(k)
+ if k == "dflt" then
+ return k
+ end
+ local v = rawget(t,k)
+ if v then
+ return v
+ end
+ k = gsub(k," ","")
+ v = rawget(t,k) -- retry the lookup with spaces stripped
+ if v then
+ return v
+ elseif acceptscripts then
+ report_checks("registering extra script %a",k)
+ rawset(t,k,k)
+ return k
+ end
+ end
+ return "dflt"
+end)
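+
+-- For example, scripts["Latn"] resolves to "latin"; an unknown script tag is
+-- registered on the fly (and returned as-is) unless the otf.acceptscripts
+-- directive has been disabled.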
+
+setmetatableindex(languages, function(t,k)
+ if k then
+ k = lower(k)
+ if k == "dflt" then
+ return k
+ end
+ local v = rawget(t,k)
+ if v then
+ return v
+ end
+ k = gsub(k," ","")
+ v = rawget(t,k) -- retry the lookup with spaces stripped
+ if v then
+ return v
+ elseif acceptlanguages then
+ report_checks("registering extra language %a",k)
+ rawset(t,k,k)
+ return k
+ end
+ end
+ return "dflt"
+end)
+
+setmetatablenewindex(languages, "ignore")
+setmetatablenewindex(baselines, "ignore")
+setmetatablenewindex(scripts, "ignore")
+
+local function resolve(t,k)
+ if k then
+ k = lower(k)
+ local v = rawget(t,k)
+ if v then
+ return v
+ end
+ k = gsub(k," ","")
+ local v = rawget(t,k)
+ if v then
+ return v
+ end
+ local tag, dd = match(k,"(..)(%d+)")
+ if tag and dd then
+ local v = rawget(t,tag)
+ if v then
+ return v -- return format(v,tonumber(dd)) -- old way
+ else
+ local v = rawget(t,tag.."..") -- nicer in overview
+ if v then
+ return (gsub(v,"%.%.",tonumber(dd))) -- new way
+ end
+ end
+ end
+ end
+ return k -- "dflt"
+end
+
+setmetatableindex(features, resolve)
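+
+-- So, for instance, features["ss03"] resolves to "stylistic set 3" via the
+-- generic "ss.." entry, while a completely unknown tag is returned as-is.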
+
+local function assign(t,k,v)
+ if k and v then
+ v = lower(v)
+ rawset(t,k,v) -- rawset ?
+ -- rawset(features,gsub(v,"[^a-z0-9]",""),k) -- why ? old code
+ end
+end
+
+setmetatablenewindex(features, assign)
+
+local checkers = {
+ rand = function(v)
+ return v == true and "random" or v
+ end
+}
+
+-- Keep this:
+--
+-- function otf.features.normalize(features)
+-- if features then
+-- local h = { }
+-- for k, v in next, features do
+-- k = lower(k)
+-- if k == "language" then
+-- v = gsub(lower(v),"[^a-z0-9]","")
+-- h.language = rawget(verboselanguages,v) or (languages[v] and v) or "dflt" -- auto adds
+-- elseif k == "script" then
+-- v = gsub(lower(v),"[^a-z0-9]","")
+-- h.script = rawget(verbosescripts,v) or (scripts[v] and v) or "dflt" -- auto adds
+-- else
+-- if type(v) == "string" then
+-- local b = is_boolean(v)
+-- if type(b) == "nil" then
+-- v = tonumber(v) or lower(v)
+-- else
+-- v = b
+-- end
+-- end
+-- if not rawget(features,k) then
+-- k = rawget(verbosefeatures,k) or k
+-- end
+-- local c = checkers[k]
+-- h[k] = c and c(v) or v
+-- end
+-- end
+-- return h
+-- end
+-- end
+
+-- inspect(fonts.handlers.otf.statistics.usedfeatures)
+
+if not storage then
+ return
+end
+
+local usedfeatures = statistics.usedfeatures or { }
+statistics.usedfeatures = usedfeatures
+
+table.setmetatableindex(usedfeatures, function(t,k) if k then local v = { } t[k] = v return v end end) -- table.autotable
+
+storage.register("fonts/otf/usedfeatures", usedfeatures, "fonts.handlers.otf.statistics.usedfeatures" )
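+
+-- So usedfeatures acts as a cache: the autotable index hands every feature
+-- key its own subtable and normalize below stores the cleaned-up value per
+-- raw value, so the same key/value pair is only analyzed once. Sketch:
+--
+--~ local uk = usedfeatures["liga"] -- subtable is created on first access
+--~ print(uk["yes"]) -- nil the first time, true once normalize has seen liga=yes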
+
+function otf.features.normalize(features)
+ if features then
+ local h = { }
+ for key, value in next, features do
+ local k = lower(key)
+ if k == "language" then
+ local v = gsub(lower(value),"[^a-z0-9]","")
+ h.language = rawget(verboselanguages,v) or (languages[v] and v) or "dflt" -- auto adds
+ elseif k == "script" then
+ local v = gsub(lower(value),"[^a-z0-9]","")
+ h.script = rawget(verbosescripts,v) or (scripts[v] and v) or "dflt" -- auto adds
+ else
+ local uk = usedfeatures[key]
+ local uv = uk[value]
+ if uv then
+ -- report_checks("feature value %a first seen at %a",value,key)
+ else
+ if type(value) == "string" then
+ local b = is_boolean(value)
+ if type(b) == "nil" then
+ uv = tonumber(value) or lower(value)
+ else
+ uv = b
+ end
+ else
+ uv = value
+ end
+ if not rawget(features,k) then
+ k = rawget(verbosefeatures,k) or k
+ end
+ local c = checkers[k]
+ if c then
+ uv = c(uv) or uv
+ end
+ uk[value] = uv
+ end
+ h[k] = uv
+ end
+ end
+ return h
+ end
+end
+
+--~ table.print(otf.features.normalize({ language = "dutch", liga = "yes", ss99 = true, aalt = 3, abcd = "yes" } ))
+
+-- When I feel the need ...
+
+--~ tables.aat = {
+--~ [ 0] = {
+--~ name = "allTypographicFeaturesType",
+--~ [ 0] = "allTypeFeaturesOnSelector",
+--~ [ 1] = "allTypeFeaturesOffSelector",
+--~ },
+--~ [ 1] = {
+--~ name = "ligaturesType",
+--~ [0 ] = "requiredLigaturesOnSelector",
+--~ [1 ] = "requiredLigaturesOffSelector",
+--~ [2 ] = "commonLigaturesOnSelector",
+--~ [3 ] = "commonLigaturesOffSelector",
+--~ [4 ] = "rareLigaturesOnSelector",
+--~ [5 ] = "rareLigaturesOffSelector",
+--~ [6 ] = "logosOnSelector ",
+--~ [7 ] = "logosOffSelector ",
+--~ [8 ] = "rebusPicturesOnSelector",
+--~ [9 ] = "rebusPicturesOffSelector",
+--~ [10] = "diphthongLigaturesOnSelector",
+--~ [11] = "diphthongLigaturesOffSelector",
+--~ [12] = "squaredLigaturesOnSelector",
+--~ [13] = "squaredLigaturesOffSelector",
+--~ [14] = "abbrevSquaredLigaturesOnSelector",
+--~ [15] = "abbrevSquaredLigaturesOffSelector",
+--~ },
+--~ [ 2] = {
+--~ name = "cursiveConnectionType",
+--~ [ 0] = "unconnectedSelector",
+--~ [ 1] = "partiallyConnectedSelector",
+--~ [ 2] = "cursiveSelector ",
+--~ },
+--~ [ 3] = {
+--~ name = "letterCaseType",
+--~ [ 0] = "upperAndLowerCaseSelector",
+--~ [ 1] = "allCapsSelector ",
+--~ [ 2] = "allLowerCaseSelector",
+--~ [ 3] = "smallCapsSelector ",
+--~ [ 4] = "initialCapsSelector",
+--~ [ 5] = "initialCapsAndSmallCapsSelector",
+--~ },
+--~ [ 4] = {
+--~ name = "verticalSubstitutionType",
+--~ [ 0] = "substituteVerticalFormsOnSelector",
+--~ [ 1] = "substituteVerticalFormsOffSelector",
+--~ },
+--~ [ 5] = {
+--~ name = "linguisticRearrangementType",
+--~ [ 0] = "linguisticRearrangementOnSelector",
+--~ [ 1] = "linguisticRearrangementOffSelector",
+--~ },
+--~ [ 6] = {
+--~ name = "numberSpacingType",
+--~ [ 0] = "monospacedNumbersSelector",
+--~ [ 1] = "proportionalNumbersSelector",
+--~ },
+--~ [ 7] = {
+--~ name = "appleReserved1Type",
+--~ },
+--~ [ 8] = {
+--~ name = "smartSwashType",
+--~ [ 0] = "wordInitialSwashesOnSelector",
+--~ [ 1] = "wordInitialSwashesOffSelector",
+--~ [ 2] = "wordFinalSwashesOnSelector",
+--~ [ 3] = "wordFinalSwashesOffSelector",
+--~ [ 4] = "lineInitialSwashesOnSelector",
+--~ [ 5] = "lineInitialSwashesOffSelector",
+--~ [ 6] = "lineFinalSwashesOnSelector",
+--~ [ 7] = "lineFinalSwashesOffSelector",
+--~ [ 8] = "nonFinalSwashesOnSelector",
+--~ [ 9] = "nonFinalSwashesOffSelector",
+--~ },
+--~ [ 9] = {
+--~ name = "diacriticsType",
+--~ [ 0] = "showDiacriticsSelector",
+--~ [ 1] = "hideDiacriticsSelector",
+--~ [ 2] = "decomposeDiacriticsSelector",
+--~ },
+--~ [10] = {
+--~ name = "verticalPositionType",
+--~ [ 0] = "normalPositionSelector",
+--~ [ 1] = "superiorsSelector ",
+--~ [ 2] = "inferiorsSelector ",
+--~ [ 3] = "ordinalsSelector ",
+--~ },
+--~ [11] = {
+--~ name = "fractionsType",
+--~ [ 0] = "noFractionsSelector",
+--~ [ 1] = "verticalFractionsSelector",
+--~ [ 2] = "diagonalFractionsSelector",
+--~ },
+--~ [12] = {
+--~ name = "appleReserved2Type",
+--~ },
+--~ [13] = {
+--~ name = "overlappingCharactersType",
+--~ [ 0] = "preventOverlapOnSelector",
+--~ [ 1] = "preventOverlapOffSelector",
+--~ },
+--~ [14] = {
+--~ name = "typographicExtrasType",
+--~ [0 ] = "hyphensToEmDashOnSelector",
+--~ [1 ] = "hyphensToEmDashOffSelector",
+--~ [2 ] = "hyphenToEnDashOnSelector",
+--~ [3 ] = "hyphenToEnDashOffSelector",
+--~ [4 ] = "unslashedZeroOnSelector",
+--~ [5 ] = "unslashedZeroOffSelector",
+--~ [6 ] = "formInterrobangOnSelector",
+--~ [7 ] = "formInterrobangOffSelector",
+--~ [8 ] = "smartQuotesOnSelector",
+--~ [9 ] = "smartQuotesOffSelector",
+--~ [10] = "periodsToEllipsisOnSelector",
+--~ [11] = "periodsToEllipsisOffSelector",
+--~ },
+--~ [15] = {
+--~ name = "mathematicalExtrasType",
+--~ [ 0] = "hyphenToMinusOnSelector",
+--~ [ 1] = "hyphenToMinusOffSelector",
+--~ [ 2] = "asteriskToMultiplyOnSelector",
+--~ [ 3] = "asteriskToMultiplyOffSelector",
+--~ [ 4] = "slashToDivideOnSelector",
+--~ [ 5] = "slashToDivideOffSelector",
+--~ [ 6] = "inequalityLigaturesOnSelector",
+--~ [ 7] = "inequalityLigaturesOffSelector",
+--~ [ 8] = "exponentsOnSelector",
+--~ [ 9] = "exponentsOffSelector",
+--~ },
+--~ [16] = {
+--~ name = "ornamentSetsType",
+--~ [ 0] = "noOrnamentsSelector",
+--~ [ 1] = "dingbatsSelector ",
+--~ [ 2] = "piCharactersSelector",
+--~ [ 3] = "fleuronsSelector ",
+--~ [ 4] = "decorativeBordersSelector",
+--~ [ 5] = "internationalSymbolsSelector",
+--~ [ 6] = "mathSymbolsSelector",
+--~ },
+--~ [17] = {
+--~ name = "characterAlternativesType",
+--~ [ 0] = "noAlternatesSelector",
+--~ },
+--~ [18] = {
+--~ name = "designComplexityType",
+--~ [ 0] = "designLevel1Selector",
+--~ [ 1] = "designLevel2Selector",
+--~ [ 2] = "designLevel3Selector",
+--~ [ 3] = "designLevel4Selector",
+--~ [ 4] = "designLevel5Selector",
+--~ },
+--~ [19] = {
+--~ name = "styleOptionsType",
+--~ [ 0] = "noStyleOptionsSelector",
+--~ [ 1] = "displayTextSelector",
+--~ [ 2] = "engravedTextSelector",
+--~ [ 3] = "illuminatedCapsSelector",
+--~ [ 4] = "titlingCapsSelector",
+--~ [ 5] = "tallCapsSelector ",
+--~ },
+--~ [20] = {
+--~ name = "characterShapeType",
+--~ [0 ] = "traditionalCharactersSelector",
+--~ [1 ] = "simplifiedCharactersSelector",
+--~ [2 ] = "jis1978CharactersSelector",
+--~ [3 ] = "jis1983CharactersSelector",
+--~ [4 ] = "jis1990CharactersSelector",
+--~ [5 ] = "traditionalAltOneSelector",
+--~ [6 ] = "traditionalAltTwoSelector",
+--~ [7 ] = "traditionalAltThreeSelector",
+--~ [8 ] = "traditionalAltFourSelector",
+--~ [9 ] = "traditionalAltFiveSelector",
+--~ [10] = "expertCharactersSelector",
+--~ },
+--~ [21] = {
+--~ name = "numberCaseType",
+--~ [ 0] = "lowerCaseNumbersSelector",
+--~ [ 1] = "upperCaseNumbersSelector",
+--~ },
+--~ [22] = {
+--~ name = "textSpacingType",
+--~ [ 0] = "proportionalTextSelector",
+--~ [ 1] = "monospacedTextSelector",
+--~ [ 2] = "halfWidthTextSelector",
+--~ [ 3] = "normallySpacedTextSelector",
+--~ },
+--~ [23] = {
+--~ name = "transliterationType",
+--~ [ 0] = "noTransliterationSelector",
+--~ [ 1] = "hanjaToHangulSelector",
+--~ [ 2] = "hiraganaToKatakanaSelector",
+--~ [ 3] = "katakanaToHiraganaSelector",
+--~ [ 4] = "kanaToRomanizationSelector",
+--~ [ 5] = "romanizationToHiraganaSelector",
+--~ [ 6] = "romanizationToKatakanaSelector",
+--~ [ 7] = "hanjaToHangulAltOneSelector",
+--~ [ 8] = "hanjaToHangulAltTwoSelector",
+--~ [ 9] = "hanjaToHangulAltThreeSelector",
+--~ },
+--~ [24] = {
+--~ name = "annotationType",
+--~ [ 0] = "noAnnotationSelector",
+--~ [ 1] = "boxAnnotationSelector",
+--~ [ 2] = "roundedBoxAnnotationSelector",
+--~ [ 3] = "circleAnnotationSelector",
+--~ [ 4] = "invertedCircleAnnotationSelector",
+--~ [ 5] = "parenthesisAnnotationSelector",
+--~ [ 6] = "periodAnnotationSelector",
+--~ [ 7] = "romanNumeralAnnotationSelector",
+--~ [ 8] = "diamondAnnotationSelector",
+--~ },
+--~ [25] = {
+--~ name = "kanaSpacingType",
+--~ [ 0] = "fullWidthKanaSelector",
+--~ [ 1] = "proportionalKanaSelector",
+--~ },
+--~ [26] = {
+--~ name = "ideographicSpacingType",
+--~ [ 0] = "fullWidthIdeographsSelector",
+--~ [ 1] = "proportionalIdeographsSelector",
+--~ },
+--~ [103] = {
+--~ name = "cjkRomanSpacingType",
+--~ [ 0] = "halfWidthCJKRomanSelector",
+--~ [ 1] = "proportionalCJKRomanSelector",
+--~ [ 2] = "defaultCJKRomanSelector",
+--~ [ 3] = "fullWidthCJKRomanSelector",
+--~ },
+--~ }
diff --git a/tex/context/base/font-sol.lua b/tex/context/base/font-sol.lua
index db2dd24c2..b37ab8869 100644
--- a/tex/context/base/font-sol.lua
+++ b/tex/context/base/font-sol.lua
@@ -1,884 +1,884 @@
-if not modules then modules = { } end modules ['font-sol'] = { -- this was: node-spl
- version = 1.001,
- comment = "companion to font-sol.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This module is dedicated to the oriental tex project and for
--- the moment is too experimental to be publicly supported.
---
--- We could cache solutions: say that we store the featureset and
--- all 'words' -> replacement ... so we create a large solution
--- database (per font)
---
--- This module can be optimized by using a dedicated dynamics handler
--- but I'll only do that when the rest of the code is stable.
---
--- Todo: bind setups to paragraph.
-
-local gmatch, concat, format, remove = string.gmatch, table.concat, string.format, table.remove
-local next, tostring, tonumber = next, tostring, tonumber
-local insert, remove = table.insert, table.remove
-local utfchar = utf.char
-local random = math.random
-
-local utilities, logs, statistics, fonts, trackers = utilities, logs, statistics, fonts, trackers
-local interfaces, commands, attributes = interfaces, commands, attributes
-local nodes, node, tex = nodes, node, tex
-
-local trace_split = false trackers.register("builders.paragraphs.solutions.splitters.splitter", function(v) trace_split = v end)
-local trace_optimize = false trackers.register("builders.paragraphs.solutions.splitters.optimizer", function(v) trace_optimize = v end)
-local trace_colors = false trackers.register("builders.paragraphs.solutions.splitters.colors", function(v) trace_colors = v end)
-local trace_goodies = false trackers.register("fonts.goodies", function(v) trace_goodies = v end)
-
-local report_solutions = logs.reporter("fonts","solutions")
-local report_splitters = logs.reporter("fonts","splitters")
-local report_optimizers = logs.reporter("fonts","optimizers")
-
-local variables = interfaces.variables
-
-local v_normal = variables.normal
-local v_reverse = variables.reverse
-local v_preroll = variables.preroll
-local v_random = variables.random
-local v_split = variables.split
-
-local settings_to_array = utilities.parsers.settings_to_array
-local settings_to_hash = utilities.parsers.settings_to_hash
-
-local find_node_tail = node.tail or node.slide
-local free_node = node.free
-local free_nodelist = node.flush_list
-local copy_nodelist = node.copy_list
-local traverse_nodes = node.traverse
-local traverse_ids = node.traverse_id
-local protect_glyphs = nodes.handlers.protectglyphs or node.protect_glyphs
-local hpack_nodes = node.hpack
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local repack_hlist = nodes.repackhlist
-local nodes_to_utf = nodes.listtoutf
-
-local setnodecolor = nodes.tracers.colors.set
-
-local nodecodes = nodes.nodecodes
-local whatsitcodes = nodes.whatsitcodes
-local kerncodes = nodes.kerncodes
-
-local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
-local kern_code = nodecodes.kern
-local hlist_code = nodecodes.hlist
-local whatsit_code = nodecodes.whatsit
-
-local fontkern_code = kerncodes.fontkern
-
-local localpar_code = whatsitcodes.localpar
-local dir_code = whatsitcodes.dir
-local userdefined_code = whatsitcodes.userdefined
-
-local nodepool = nodes.pool
-local tasks = nodes.tasks
-local usernodeids = nodepool.userids
-
-local new_textdir = nodepool.textdir
-local new_usernumber = nodepool.usernumber
-local new_glue = nodepool.glue
-local new_leftskip = nodepool.leftskip
-
-local starttiming = statistics.starttiming
-local stoptiming = statistics.stoptiming
-local process_characters = nodes.handlers.characters
-local inject_kerns = nodes.injections.handler
-
-local fonthashes = fonts.hashes
-local fontdata = fonthashes.identifiers
-local setfontdynamics = fonthashes.setdynamics
-local fontprocesses = fonthashes.processes
-
-local texsetattribute = tex.setattribute
-local unsetvalue = attributes.unsetvalue
-
-local parbuilders = builders.paragraphs
-parbuilders.solutions = parbuilders.solutions or { }
-local parsolutions = parbuilders.solutions
-parsolutions.splitters = parsolutions.splitters or { }
-local splitters = parsolutions.splitters
-
-local solutions = { } -- attribute sets
-local registered = { } -- backmapping
-splitters.registered = registered
-
-local a_split = attributes.private('splitter')
-
-local preroll = true
-local criterium = 0
-local randomseed = nil
-local optimize = nil -- set later
-local variant = v_normal
-local splitwords = true
-
-local cache = { }
-local variants = { }
-local max_less = 0
-local max_more = 0
-
-local stack = { }
-
-local dummy = {
- attribute = unsetvalue,
- randomseed = 0,
- criterium = 0,
- preroll = false,
- optimize = nil,
- splitwords = false,
- variant = v_normal,
-}
-
-local function checksettings(r,settings)
- local s = r.settings
- local method = settings_to_hash(settings.method or "")
- local optimize, preroll, splitwords
- for k, v in next, method do
- if k == v_preroll then
- preroll = true
- elseif k == v_split then
- splitwords = true
- elseif variants[k] then
- variant = k
- optimize = variants[k] -- last one wins
- end
- end
- r.randomseed = tonumber(settings.randomseed) or s.randomseed or r.randomseed or 0
- r.criterium = tonumber(settings.criterium ) or s.criterium or r.criterium or 0
- r.preroll = preroll or false
- r.splitwords = splitwords or false
- r.optimize = optimize or s.optimize or r.optimize or variants[v_normal]
-end
-
-local function pushsplitter(name,settings)
- local r = name and registered[name]
- if r then
- if settings then
- checksettings(r,settings)
- end
- else
- r = dummy
- end
- insert(stack,r)
- -- brr
- randomseed = r.randomseed or 0
- criterium = r.criterium or 0
- preroll = r.preroll or false
- optimize = r.optimize or nil
- splitwords = r.splitwords or nil
- --
- texsetattribute(a_split,r.attribute)
- return #stack
-end
-
-local function popsplitter()
- remove(stack)
- local n = #stack
- local r = stack[n] or dummy
- --
- randomseed = r.randomseed or 0
- criterium = r.criterium or 0
- preroll = r.preroll or false
- optimize = r.optimize or nil
- --
- texsetattribute(a_split,r.attribute)
- return n
-end
-
-local contextsetups = fonts.specifiers.contextsetups
-
-local function convert(featuresets,name,list)
- if list then
- local numbers = { }
- local nofnumbers = 0
- for i=1,#list do
- local feature = list[i]
- local fs = featuresets[feature]
- local fn = fs and fs.number
- if not fn then
- -- fall back on global features
- fs = contextsetups[feature]
- fn = fs and fs.number
- end
- if fn then
- nofnumbers = nofnumbers + 1
- numbers[nofnumbers] = fn
- if trace_goodies or trace_optimize then
- report_solutions("solution %a of %a uses feature %a with number %s",i,name,feature,fn)
- end
- else
- report_solutions("solution %a of %a has an invalid feature reference %a",i,name,feature)
- end
- end
- return nofnumbers > 0 and numbers
- end
-end
-
-local function initialize(goodies)
- local solutions = goodies.solutions
- if solutions then
- local featuresets = goodies.featuresets
- local goodiesname = goodies.name
- if trace_goodies or trace_optimize then
- report_solutions("checking solutions in %a",goodiesname)
- end
- for name, set in next, solutions do
- set.less = convert(featuresets,name,set.less)
- set.more = convert(featuresets,name,set.more)
- end
- end
-end
-
-fonts.goodies.register("solutions",initialize)
-
-function splitters.define(name,settings)
- local goodies = settings.goodies
- local solution = settings.solution
- local less = settings.less
- local more = settings.more
- local less_set, more_set
- local l = less and settings_to_array(less)
- local m = more and settings_to_array(more)
- if goodies then
- goodies = fonts.goodies.load(goodies) -- also in tfmdata
- if goodies then
- local featuresets = goodies.featuresets
- local solution = solution and goodies.solutions[solution]
- if l and #l > 0 then
- less_set = convert(featuresets,name,less) -- take from settings
- else
- less_set = solution and solution.less -- take from goodies
- end
- if m and #m > 0 then
- more_set = convert(featuresets,name,more) -- take from settings
- else
- more_set = solution and solution.more -- take from goodies
- end
- end
- else
- if l then
- local n = #less_set
- for i=1,#l do
- local ss = contextsetups[l[i]]
- if ss then
- n = n + 1
- less_set[n] = ss.number
- end
- end
- end
- if m then
- local n = #more_set
- for i=1,#m do
- local ss = contextsetups[m[i]]
- if ss then
- n = n + 1
- more_set[n] = ss.number
- end
- end
- end
- end
- if trace_optimize then
- report_solutions("defining solutions %a, less %a, more %a",name,concat(less_set or {}," "),concat(more_set or {}," "))
- end
- local nofsolutions = #solutions + 1
- local t = {
- solution = solution,
- less = less_set or { },
- more = more_set or { },
- settings = settings, -- for tracing
- attribute = nofsolutions,
- }
- solutions[nofsolutions] = t
- registered[name] = t
- return nofsolutions
-end
-
-local nofwords, noftries, nofadapted, nofkept, nofparagraphs = 0, 0, 0, 0, 0
-
-local splitter_one = usernodeids["splitters.one"]
-local splitter_two = usernodeids["splitters.two"]
-
-local a_word = attributes.private('word')
-local a_fontkern = attributes.private('fontkern')
-
-local encapsulate = false
-
-directives.register("builders.paragraphs.solutions.splitters.encapsulate", function(v)
- encapsulate = v
-end)
-
-function splitters.split(head)
- -- quite fast
- local current, done, rlmode, start, stop, attribute = head, false, false, nil, nil, 0
- cache, max_less, max_more = { }, 0, 0
- local function flush() -- we can move this
- local font = start.font
- local last = stop.next
- local list = last and copy_nodelist(start,last) or copy_nodelist(start)
- local n = #cache + 1
- if encapsulate then
- local user_one = new_usernumber(splitter_one,n)
- local user_two = new_usernumber(splitter_two,n)
- head, start = insert_node_before(head,start,user_one)
- insert_node_after(head,stop,user_two)
- else
- local current = start
- while true do
- current[a_word] = n
- if current == stop then
- break
- else
- current = current.next
- end
- end
- end
- if rlmode == "TRT" or rlmode == "+TRT" then
- local dirnode = new_textdir("+TRT")
- list.prev = dirnode
- dirnode.next = list
- list = dirnode
- end
- local c = {
- original = list,
- attribute = attribute,
- direction = rlmode,
- font = font
- }
- if trace_split then
- report_splitters("cached %4i: font %a, attribute %a, direction %a, word %a",
- n, font, attribute, nodes_to_utf(list,true), rlmode and "r2l" or "l2r")
- end
- cache[n] = c
- local solution = solutions[attribute]
- local l, m = #solution.less, #solution.more
- if l > max_less then max_less = l end
- if m > max_more then max_more = m end
- start, stop, done = nil, nil, true
- end
- while current do -- also nextid
- local next = current.next
- local id = current.id
- if id == glyph_code then
- if current.subtype < 256 then
- local a = current[a_split]
- if not a then
- start, stop = nil, nil
- elseif not start then
- start, stop, attribute = current, current, a
- elseif a ~= attribute then
- start, stop = nil, nil
- else
- stop = current
- end
- end
- elseif id == disc_code then
- if splitwords then
- if start then
- flush()
- end
- elseif start and next and next.id == glyph_code and next.subtype < 256 then
- -- beware: we can cross future lines
- stop = next
- else
- start, stop = nil, nil
- end
- elseif id == whatsit_code then
- if start then
- flush()
- end
- local subtype = current.subtype
- if subtype == dir_code or subtype == localpar_code then
- rlmode = current.dir
- end
- else
- if start then
- flush()
- end
- end
- current = next
- end
- if start then
- flush()
- end
- nofparagraphs = nofparagraphs + 1
- nofwords = nofwords + #cache
- return head, done
-end
-
-local function collect_words(list) -- can be made faster for attributes
- local words, w, word = { }, 0, nil
- if encapsulate then
- for current in traverse_ids(whatsit_code,list) do
- if current.subtype == userdefined_code then -- hm
- local user_id = current.user_id
- if user_id == splitter_one then
- word = { current.value, current, current }
- w = w + 1
- words[w] = word
- elseif user_id == splitter_two then
- if word then
- word[3] = current
- else
- -- something is wrong
- end
- end
- end
- end
- else
- local current, first, last, index = list, nil, nil, nil
- while current do
- -- todo: disc and kern
- local id = current.id
- if id == glyph_code or id == disc_code then
- local a = current[a_word]
- if a then
- if a == index then
- -- same word
- last = current
- elseif index then
- w = w + 1
- words[w] = { index, first, last }
- first = current
- last = current
- index = a
- elseif first then
- last = current
- index = a
- else
- first = current
- last = current
- index = a
- end
- elseif index then
- if first then
- w = w + 1
- words[w] = { index, first, last }
- end
- index = nil
- first = nil
- elseif trace_split then
- if id == disc_code then
- report_splitters("skipped: disc node")
- else
- report_splitters("skipped: %C",current.char)
- end
- end
- elseif id == kern_code and (current.subtype == fontkern_code or current[a_fontkern]) then
- if first then
- last = current
- else
- first = current
- last = current
- end
- elseif index then
- w = w + 1
- words[w] = { index, first, last }
- index = nil
- first = nil
- if id == disc_node then
- if trace_split then
- report_splitters("skipped: disc node")
- end
- end
- end
- current = current.next
- end
- if index then
- w = w + 1
- words[w] = { index, first, last }
- end
- if trace_split then
- for i=1,#words do
- local w = words[i]
- local n, f, l = w[1], w[2], w[3]
- local c = cache[n]
- if c then
- report_splitters("found %4i: word %a, cached %a",n,nodes_to_utf(f,true,true,l),nodes_to_utf(c.original,true))
- else
- report_splitters("found %4i: word %a, not in cache",n,nodes_to_utf(f,true,true,l))
- end
- end
- end
- end
- return words, list -- check for empty (elsewhere)
-end
-
--- we could avoid a hpack but hpack is not that slow
-
-local function doit(word,list,best,width,badness,line,set,listdir)
- local changed = 0
- local n = word[1]
- local found = cache[n]
- if found then
- local h, t
- if encapsulate then
- h = word[2].next -- head of current word
- t = word[3].prev -- tail of current word
- else
- h = word[2]
- t = word[3]
- end
- if splitwords then
- -- there are no lines crossed in a word
- else
- local ok = false
- local c = h
- while c do
- if c == t then
- ok = true
- break
- else
- c = c.next
- end
- end
- if not ok then
- report_solutions("skipping hyphenated word (for now)")
- -- todo: mark in words as skipped, saves a bit runtime
- return false, changed
- end
- end
- local original, attribute, direction = found.original, found.attribute, found.direction
- local solution = solutions[attribute]
- local features = solution and solution[set]
- if features then
- local featurenumber = features[best] -- not ok probably
- if featurenumber then
- noftries = noftries + 1
- local first = copy_nodelist(original)
- if not trace_colors then
- for n in traverse_nodes(first) do -- maybe fast force so no attr needed
- n[0] = featurenumber -- this forces dynamics
- end
- elseif set == "less" then
- for n in traverse_nodes(first) do
- setnodecolor(n,"font:isol") -- yellow
- n[0] = featurenumber
- end
- else
- for n in traverse_nodes(first) do
- setnodecolor(n,"font:medi") -- green
- n[0] = featurenumber
- end
- end
- local font = found.font
- local setdynamics = setfontdynamics[font]
- if setdynamics then
- local processes = setdynamics(font,featurenumber)
- for i=1,#processes do -- often more than 1
- first = processes[i](first,font,featurenumber)
- end
- else
- report_solutions("fatal error, no dynamics for font %a",font)
- end
- first = inject_kerns(first)
- if first.id == whatsit_code then
- local temp = first
- first = first.next
- free_node(temp)
- end
- local last = find_node_tail(first)
- -- replace [u]h->t by [u]first->last
- local prev = h.prev
- local next = t.next
- prev.next = first
- first.prev = prev
- if next then
- last.next = next
- next.prev = last
- end
- -- check new pack
- local temp, b = repack_hlist(list,width,'exactly',listdir)
- if b > badness then
- if trace_optimize then
- report_optimizers("line %a, badness before %a, after %a, criterium %a, verdict %a",line,badness,b,criterium,"quit")
- end
- -- remove last insert
- prev.next = h
- h.prev = prev
- if next then
- t.next = next
- next.prev = t
- else
- t.next = nil
- end
- last.next = nil
- free_nodelist(first)
- else
- if trace_optimize then
- report_optimizers("line %a, badness before: %a, after %a, criterium %a, verdict %a",line,badness,b,criterium,"continue")
- end
- -- free old h->t
- t.next = nil
- free_nodelist(h) -- somhow fails
- if not encapsulate then
- word[2] = first
- word[3] = last
- end
- changed, badness = changed + 1, b
- end
- if b <= criterium then
- return true, changed
- end
- end
- end
- end
- return false, changed
-end
-
--- We repeat some code but adding yet another layer of indirectness is not
--- making things better.
-
-variants[v_normal] = function(words,list,best,width,badness,line,set,listdir)
- local changed = 0
- for i=1,#words do
- local done, c = doit(words[i],list,best,width,badness,line,set,listdir)
- changed = changed + c
- if done then
- break
- end
- end
- if changed > 0 then
- nofadapted = nofadapted + 1
- -- todo: get rid of pack when ok because we already have packed and we only need the last b
- local list, b = repack_hlist(list,width,'exactly',listdir)
- return list, true, changed, b -- badness
- else
- nofkept = nofkept + 1
- return list, false, 0, badness
- end
-end
-
-variants[v_reverse] = function(words,list,best,width,badness,line,set,listdir)
- local changed = 0
- for i=#words,1,-1 do
- local done, c = doit(words[i],list,best,width,badness,line,set,listdir)
- changed = changed + c
- if done then
- break
- end
- end
- if changed > 0 then
- nofadapted = nofadapted + 1
- -- todo: get rid of pack when ok because we already have packed and we only need the last b
- local list, b = repack_hlist(list,width,'exactly',listdir)
- return list, true, changed, b -- badness
- else
- nofkept = nofkept + 1
- return list, false, 0, badness
- end
-end
-
-variants[v_random] = function(words,list,best,width,badness,line,set,listdir)
- local changed = 0
- while #words > 0 do
- local done, c = doit(remove(words,random(1,#words)),list,best,width,badness,line,set,listdir)
- changed = changed + c
- if done then
- break
- end
- end
- if changed > 0 then
- nofadapted = nofadapted + 1
- -- todo: get rid of pack when ok because we already have packed and we only need the last b
- local list, b = repack_hlist(list,width,'exactly',listdir)
- return list, true, changed, b -- badness
- else
- nofkept = nofkept + 1
- return list, false, 0, badness
- end
-end
-
-local function show_quality(current,what,line)
- local set = current.glue_set
- local sign = current.glue_sign
- local order = current.glue_order
- local amount = set * ((sign == 2 and -1) or 1)
- report_optimizers("line %a, category %a, amount %a, set %a, sign %a, how %a, order %a",line,what,amount,set,sign,how,order)
-end
-
-function splitters.optimize(head)
- if not optimize then
- report_optimizers("no optimizer set")
- return
- end
- local nc = #cache
- if nc == 0 then
- return
- end
- starttiming(splitters)
- local listdir = nil -- todo ! ! !
- if randomseed then
- math.setrandomseedi(randomseed)
- randomseed = nil
- end
- local line = 0
- local tex_hbadness, tex_hfuzz = tex.hbadness, tex.hfuzz
- tex.hbadness, tex.hfuzz = 10000, number.maxdimen
- if trace_optimize then
- report_optimizers("preroll %a, variant %a, criterium %a, cache size %a",preroll,variant,criterium,nc)
- end
- for current in traverse_ids(hlist_code,head) do
- -- report_splitters("before: [%s] => %s",current.dir,nodes.tosequence(current.list,nil))
- line = line + 1
- local sign, dir, list, width = current.glue_sign, current.dir, current.list, current.width
- if not encapsulate and list.id == glyph_code then
- -- nasty .. we always assume a prev being there .. future luatex will always have a leftskip set
- -- current.list, list = insert_node_before(list,list,new_glue(0))
- current.list, list = insert_node_before(list,list,new_leftskip(0))
- end
- local temp, badness = repack_hlist(list,width,'exactly',dir) -- it would be nice if the badness was stored in the node
- if badness > 0 then
- if sign == 0 then
- if trace_optimize then
- report_optimizers("line %a, badness %a, outcome %a, verdict %a",line,badness,"okay","okay")
- end
- else
- local set, max
- if sign == 1 then
- if trace_optimize then
- report_optimizers("line %a, badness %a, outcome %a, verdict %a",line,badness,"underfull","trying more")
- end
- set, max = "more", max_more
- else
- if trace_optimize then
- report_optimizers("line %a, badness %a, outcome %a, verdict %a",line,badness,"overfull","trying less")
- end
- set, max = "less", max_less
- end
- -- we can keep the best variants
- local lastbest, lastbadness = nil, badness
- if preroll then
- local bb, base
- for i=1,max do
- if base then
- free_nodelist(base)
- end
- base = copy_nodelist(list)
- local words = collect_words(base) -- beware: words is adapted
- for j=i,max do
- local temp, done, changes, b = optimize(words,base,j,width,badness,line,set,dir)
- base = temp
- if trace_optimize then
- report_optimizers("line %a, alternative %a.%a, changes %a, badness %a",line,i,j,changes,b)
- end
- bb = b
- if b <= criterium then
- break
- end
- -- if done then
- -- break
- -- end
- end
- if bb and bb > criterium then -- needs checking
- if not lastbest then
- lastbest, lastbadness = i, bb
- elseif bb > lastbadness then
- lastbest, lastbadness = i, bb
- end
- else
- break
- end
- end
- free_nodelist(base)
- end
- local words = collect_words(list)
- for best=lastbest or 1,max do
- local temp, done, changes, b = optimize(words,list,best,width,badness,line,set,dir)
- current.list = temp
- if trace_optimize then
- report_optimizers("line %a, alternative %a, changes %a, badness %a",line,best,changes,b)
- end
- if done then
- if b <= criterium then -- was == 0
- protect_glyphs(list)
- break
- end
- end
- end
- end
- else
- if trace_optimize then
- report_optimizers("line %a, verdict %a",line,"not bad enough")
- end
- end
- -- we pack inside the outer hpack and that way keep the original wd/ht/dp as bonus
- current.list = hpack_nodes(current.list,width,'exactly',listdir)
- -- report_splitters("after: [%s] => %s",temp.dir,nodes.tosequence(temp.list,nil))
- end
- for i=1,nc do
- local ci = cache[i]
- free_nodelist(ci.original)
- end
- cache = { }
- tex.hbadness, tex.hfuzz = tex_hbadness, tex_hfuzz
- stoptiming(splitters)
-end
-
-statistics.register("optimizer statistics", function()
- if nofwords > 0 then
- local elapsed = statistics.elapsedtime(splitters)
- local average = noftries/elapsed
- return format("%s words identified in %s paragraphs, %s words retried, %s lines tried, %0.3f seconds used, %s adapted, %0.1f lines per second",
- nofwords,nofparagraphs,noftries,nofadapted+nofkept,elapsed,nofadapted,average)
- end
-end)
-
--- we could use a stack
-
-local enableaction = tasks.enableaction
-local disableaction = tasks.disableaction
-
-local function enable()
- enableaction("processors", "builders.paragraphs.solutions.splitters.split")
- enableaction("finalizers", "builders.paragraphs.solutions.splitters.optimize")
-end
-
-local function disable()
- disableaction("processors", "builders.paragraphs.solutions.splitters.split")
- disableaction("finalizers", "builders.paragraphs.solutions.splitters.optimize")
-end
-
-function splitters.start(name,settings)
- if pushsplitter(name,settings) == 1 then
- enable()
- end
-end
-
-function splitters.stop()
- if popsplitter() == 0 then
- disable()
- end
-end
-
-function splitters.set(name,settings)
- if #stack > 0 then
- stack = { }
- else
- enable()
- end
- pushsplitter(name,settings) -- sets attribute etc
-end
-
-function splitters.reset()
- if #stack > 0 then
- stack = { }
- popsplitter() -- resets attribute etc
- disable()
- end
-end
-
--- interface
-
-commands.definefontsolution = splitters.define
-commands.startfontsolution = splitters.start
-commands.stopfontsolution = splitters.stop
-commands.setfontsolution = splitters.set
-commands.resetfontsolution = splitters.reset
+if not modules then modules = { } end modules ['font-sol'] = { -- this was: node-spl
+ version = 1.001,
+ comment = "companion to font-sol.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This module is dedicated to the oriental tex project and for
+-- the moment is too experimental to be publicly supported.
+--
+-- We could cache solutions: say that we store the featureset and
+-- all 'words' -> replacement ... so we create a large solution
+-- database (per font)
+--
+-- This module can be optimized by using a dedicated dynamics handler
+-- but I'll only do that when the rest of the code is stable.
+--
+-- Todo: bind setups to paragraph.
+
+local gmatch, concat, format, remove = string.gmatch, table.concat, string.format, table.remove
+local next, tostring, tonumber = next, tostring, tonumber
+local insert, remove = table.insert, table.remove
+local utfchar = utf.char
+local random = math.random
+
+local utilities, logs, statistics, fonts, trackers = utilities, logs, statistics, fonts, trackers
+local interfaces, commands, attributes = interfaces, commands, attributes
+local nodes, node, tex = nodes, node, tex
+
+local trace_split = false trackers.register("builders.paragraphs.solutions.splitters.splitter", function(v) trace_split = v end)
+local trace_optimize = false trackers.register("builders.paragraphs.solutions.splitters.optimizer", function(v) trace_optimize = v end)
+local trace_colors = false trackers.register("builders.paragraphs.solutions.splitters.colors", function(v) trace_colors = v end)
+local trace_goodies = false trackers.register("fonts.goodies", function(v) trace_goodies = v end)
+
+local report_solutions = logs.reporter("fonts","solutions")
+local report_splitters = logs.reporter("fonts","splitters")
+local report_optimizers = logs.reporter("fonts","optimizers")
+
+local variables = interfaces.variables
+
+local v_normal = variables.normal
+local v_reverse = variables.reverse
+local v_preroll = variables.preroll
+local v_random = variables.random
+local v_split = variables.split
+
+local settings_to_array = utilities.parsers.settings_to_array
+local settings_to_hash = utilities.parsers.settings_to_hash
+
+local find_node_tail = node.tail or node.slide
+local free_node = node.free
+local free_nodelist = node.flush_list
+local copy_nodelist = node.copy_list
+local traverse_nodes = node.traverse
+local traverse_ids = node.traverse_id
+local protect_glyphs = nodes.handlers.protectglyphs or node.protect_glyphs
+local hpack_nodes = node.hpack
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local repack_hlist = nodes.repackhlist
+local nodes_to_utf = nodes.listtoutf
+
+local setnodecolor = nodes.tracers.colors.set
+
+local nodecodes = nodes.nodecodes
+local whatsitcodes = nodes.whatsitcodes
+local kerncodes = nodes.kerncodes
+
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local kern_code = nodecodes.kern
+local hlist_code = nodecodes.hlist
+local whatsit_code = nodecodes.whatsit
+
+local fontkern_code = kerncodes.fontkern
+
+local localpar_code = whatsitcodes.localpar
+local dir_code = whatsitcodes.dir
+local userdefined_code = whatsitcodes.userdefined
+
+local nodepool = nodes.pool
+local tasks = nodes.tasks
+local usernodeids = nodepool.userids
+
+local new_textdir = nodepool.textdir
+local new_usernumber = nodepool.usernumber
+local new_glue = nodepool.glue
+local new_leftskip = nodepool.leftskip
+
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+local process_characters = nodes.handlers.characters
+local inject_kerns = nodes.injections.handler
+
+local fonthashes = fonts.hashes
+local fontdata = fonthashes.identifiers
+local setfontdynamics = fonthashes.setdynamics
+local fontprocesses = fonthashes.processes
+
+local texsetattribute = tex.setattribute
+local unsetvalue = attributes.unsetvalue
+
+local parbuilders = builders.paragraphs
+parbuilders.solutions = parbuilders.solutions or { }
+local parsolutions = parbuilders.solutions
+parsolutions.splitters = parsolutions.splitters or { }
+local splitters = parsolutions.splitters
+
+local solutions = { } -- attribute sets
+local registered = { } -- backmapping
+splitters.registered = registered
+
+local a_split = attributes.private('splitter')
+
+local preroll = true
+local criterium = 0
+local randomseed = nil
+local optimize = nil -- set later
+local variant = v_normal
+local splitwords = true
+
+local cache = { }
+local variants = { }
+local max_less = 0
+local max_more = 0
+
+local stack = { }
+
+local dummy = {
+ attribute = unsetvalue,
+ randomseed = 0,
+ criterium = 0,
+ preroll = false,
+ optimize = nil,
+ splitwords = false,
+ variant = v_normal,
+}
+
+local function checksettings(r,settings)
+ local s = r.settings
+ local method = settings_to_hash(settings.method or "")
+ local optimize, preroll, splitwords
+ for k, v in next, method do
+ if k == v_preroll then
+ preroll = true
+ elseif k == v_split then
+ splitwords = true
+ elseif variants[k] then
+ variant = k
+ optimize = variants[k] -- last one wins
+ end
+ end
+ r.randomseed = tonumber(settings.randomseed) or s.randomseed or r.randomseed or 0
+ r.criterium = tonumber(settings.criterium ) or s.criterium or r.criterium or 0
+ r.preroll = preroll or false
+ r.splitwords = splitwords or false
+ r.optimize = optimize or s.optimize or r.optimize or variants[v_normal]
+end
+
+local function pushsplitter(name,settings)
+ local r = name and registered[name]
+ if r then
+ if settings then
+ checksettings(r,settings)
+ end
+ else
+ r = dummy
+ end
+ insert(stack,r)
+ -- brr
+ randomseed = r.randomseed or 0
+ criterium = r.criterium or 0
+ preroll = r.preroll or false
+ optimize = r.optimize or nil
+ splitwords = r.splitwords or nil
+ --
+ texsetattribute(a_split,r.attribute)
+ return #stack
+end
+
+local function popsplitter()
+ remove(stack)
+ local n = #stack
+ local r = stack[n] or dummy
+ --
+ randomseed = r.randomseed or 0
+ criterium = r.criterium or 0
+ preroll = r.preroll or false
+ optimize = r.optimize or nil
+ --
+ texsetattribute(a_split,r.attribute)
+ return n
+end
+
+local contextsetups = fonts.specifiers.contextsetups
+
+local function convert(featuresets,name,list)
+ if list then
+ local numbers = { }
+ local nofnumbers = 0
+ for i=1,#list do
+ local feature = list[i]
+ local fs = featuresets[feature]
+ local fn = fs and fs.number
+ if not fn then
+ -- fall back on global features
+ fs = contextsetups[feature]
+ fn = fs and fs.number
+ end
+ if fn then
+ nofnumbers = nofnumbers + 1
+ numbers[nofnumbers] = fn
+ if trace_goodies or trace_optimize then
+ report_solutions("solution %a of %a uses feature %a with number %s",i,name,feature,fn)
+ end
+ else
+ report_solutions("solution %a of %a has an invalid feature reference %a",i,name,feature)
+ end
+ end
+ return nofnumbers > 0 and numbers
+ end
+end
+
+local function initialize(goodies)
+ local solutions = goodies.solutions
+ if solutions then
+ local featuresets = goodies.featuresets
+ local goodiesname = goodies.name
+ if trace_goodies or trace_optimize then
+ report_solutions("checking solutions in %a",goodiesname)
+ end
+ for name, set in next, solutions do
+ set.less = convert(featuresets,name,set.less)
+ set.more = convert(featuresets,name,set.more)
+ end
+ end
+end
+
+fonts.goodies.register("solutions",initialize)
+
+function splitters.define(name,settings)
+ local goodies = settings.goodies
+ local solution = settings.solution
+ local less = settings.less
+ local more = settings.more
+ local less_set, more_set
+ local l = less and settings_to_array(less)
+ local m = more and settings_to_array(more)
+ if goodies then
+ goodies = fonts.goodies.load(goodies) -- also in tfmdata
+ if goodies then
+ local featuresets = goodies.featuresets
+ local solution = solution and goodies.solutions[solution]
+ if l and #l > 0 then
+ less_set = convert(featuresets,name,less) -- take from settings
+ else
+ less_set = solution and solution.less -- take from goodies
+ end
+ if m and #m > 0 then
+ more_set = convert(featuresets,name,more) -- take from settings
+ else
+ more_set = solution and solution.more -- take from goodies
+ end
+ end
+ else
+ if l then
+ less_set = { }
+ local n = 0
+ for i=1,#l do
+ local ss = contextsetups[l[i]]
+ if ss then
+ n = n + 1
+ less_set[n] = ss.number
+ end
+ end
+ end
+ if m then
+ more_set = { }
+ local n = 0
+ for i=1,#m do
+ local ss = contextsetups[m[i]]
+ if ss then
+ n = n + 1
+ more_set[n] = ss.number
+ end
+ end
+ end
+ end
+ if trace_optimize then
+ report_solutions("defining solutions %a, less %a, more %a",name,concat(less_set or {}," "),concat(more_set or {}," "))
+ end
+ local nofsolutions = #solutions + 1
+ local t = {
+ solution = solution,
+ less = less_set or { },
+ more = more_set or { },
+ settings = settings, -- for tracing
+ attribute = nofsolutions,
+ }
+ solutions[nofsolutions] = t
+ registered[name] = t
+ return nofsolutions
+end
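+
+-- A rough usage sketch; in practice this is driven from the TeX end via the
+-- commands.* interface at the bottom of this file. The goodie and solution
+-- names are made up, and "preroll"/"random" assume the english interface
+-- strings behind v_preroll and v_random:
+--
+--~ splitters.define("experiment", { goodies = "somefont", solution = "experimental" })
+--~ splitters.start("experiment", { method = "preroll,random", criterium = "1000" })
+--~ -- ... typeset a paragraph ...
+--~ splitters.stop()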
+
+local nofwords, noftries, nofadapted, nofkept, nofparagraphs = 0, 0, 0, 0, 0
+
+local splitter_one = usernodeids["splitters.one"]
+local splitter_two = usernodeids["splitters.two"]
+
+local a_word = attributes.private('word')
+local a_fontkern = attributes.private('fontkern')
+
+local encapsulate = false
+
+directives.register("builders.paragraphs.solutions.splitters.encapsulate", function(v)
+ encapsulate = v
+end)
+
+function splitters.split(head)
+ -- quite fast
+ local current, done, rlmode, start, stop, attribute = head, false, false, nil, nil, 0
+ cache, max_less, max_more = { }, 0, 0
+ local function flush() -- we can move this
+ local font = start.font
+ local last = stop.next
+ local list = last and copy_nodelist(start,last) or copy_nodelist(start)
+ local n = #cache + 1
+ if encapsulate then
+ local user_one = new_usernumber(splitter_one,n)
+ local user_two = new_usernumber(splitter_two,n)
+ head, start = insert_node_before(head,start,user_one)
+ insert_node_after(head,stop,user_two)
+ else
+ local current = start
+ while true do
+ current[a_word] = n
+ if current == stop then
+ break
+ else
+ current = current.next
+ end
+ end
+ end
+ if rlmode == "TRT" or rlmode == "+TRT" then
+ local dirnode = new_textdir("+TRT")
+ list.prev = dirnode
+ dirnode.next = list
+ list = dirnode
+ end
+ local c = {
+ original = list,
+ attribute = attribute,
+ direction = rlmode,
+ font = font
+ }
+ if trace_split then
+ report_splitters("cached %4i: font %a, attribute %a, direction %a, word %a",
+ n, font, attribute, rlmode and "r2l" or "l2r", nodes_to_utf(list,true))
+ end
+ cache[n] = c
+ local solution = solutions[attribute]
+ local l, m = #solution.less, #solution.more
+ if l > max_less then max_less = l end
+ if m > max_more then max_more = m end
+ start, stop, done = nil, nil, true
+ end
+ while current do -- also nextid
+ local next = current.next
+ local id = current.id
+ if id == glyph_code then
+ if current.subtype < 256 then
+ local a = current[a_split]
+ if not a then
+ start, stop = nil, nil
+ elseif not start then
+ start, stop, attribute = current, current, a
+ elseif a ~= attribute then
+ start, stop = nil, nil
+ else
+ stop = current
+ end
+ end
+ elseif id == disc_code then
+ if splitwords then
+ if start then
+ flush()
+ end
+ elseif start and next and next.id == glyph_code and next.subtype < 256 then
+ -- beware: we can cross future lines
+ stop = next
+ else
+ start, stop = nil, nil
+ end
+ elseif id == whatsit_code then
+ if start then
+ flush()
+ end
+ local subtype = current.subtype
+ if subtype == dir_code or subtype == localpar_code then
+ rlmode = current.dir
+ end
+ else
+ if start then
+ flush()
+ end
+ end
+ current = next
+ end
+ if start then
+ flush()
+ end
+ nofparagraphs = nofparagraphs + 1
+ nofwords = nofwords + #cache
+ return head, done
+end
+
+local function collect_words(list) -- can be made faster for attributes
+ local words, w, word = { }, 0, nil
+ if encapsulate then
+ for current in traverse_ids(whatsit_code,list) do
+ if current.subtype == userdefined_code then -- hm
+ local user_id = current.user_id
+ if user_id == splitter_one then
+ word = { current.value, current, current }
+ w = w + 1
+ words[w] = word
+ elseif user_id == splitter_two then
+ if word then
+ word[3] = current
+ else
+ -- something is wrong
+ end
+ end
+ end
+ end
+ else
+ local current, first, last, index = list, nil, nil, nil
+ while current do
+ -- todo: disc and kern
+ local id = current.id
+ if id == glyph_code or id == disc_code then
+ local a = current[a_word]
+ if a then
+ if a == index then
+ -- same word
+ last = current
+ elseif index then
+ w = w + 1
+ words[w] = { index, first, last }
+ first = current
+ last = current
+ index = a
+ elseif first then
+ last = current
+ index = a
+ else
+ first = current
+ last = current
+ index = a
+ end
+ elseif index then
+ if first then
+ w = w + 1
+ words[w] = { index, first, last }
+ end
+ index = nil
+ first = nil
+ elseif trace_split then
+ if id == disc_code then
+ report_splitters("skipped: disc node")
+ else
+ report_splitters("skipped: %C",current.char)
+ end
+ end
+ elseif id == kern_code and (current.subtype == fontkern_code or current[a_fontkern]) then
+ if first then
+ last = current
+ else
+ first = current
+ last = current
+ end
+ elseif index then
+ w = w + 1
+ words[w] = { index, first, last }
+ index = nil
+ first = nil
+ if id == disc_code then
+ if trace_split then
+ report_splitters("skipped: disc node")
+ end
+ end
+ end
+ current = current.next
+ end
+ if index then
+ w = w + 1
+ words[w] = { index, first, last }
+ end
+ if trace_split then
+ for i=1,#words do
+ local w = words[i]
+ local n, f, l = w[1], w[2], w[3]
+ local c = cache[n]
+ if c then
+ report_splitters("found %4i: word %a, cached %a",n,nodes_to_utf(f,true,true,l),nodes_to_utf(c.original,true))
+ else
+ report_splitters("found %4i: word %a, not in cache",n,nodes_to_utf(f,true,true,l))
+ end
+ end
+ end
+ end
+ return words, list -- check for empty (elsewhere)
+end
+
+-- we could avoid a hpack but hpack is not that slow
+
+local function doit(word,list,best,width,badness,line,set,listdir)
+ local changed = 0
+ local n = word[1]
+ local found = cache[n]
+ if found then
+ local h, t
+ if encapsulate then
+ h = word[2].next -- head of current word
+ t = word[3].prev -- tail of current word
+ else
+ h = word[2]
+ t = word[3]
+ end
+ if splitwords then
+ -- there are no lines crossed in a word
+ else
+ local ok = false
+ local c = h
+ while c do
+ if c == t then
+ ok = true
+ break
+ else
+ c = c.next
+ end
+ end
+ if not ok then
+ report_solutions("skipping hyphenated word (for now)")
+ -- todo: mark in words as skipped, saves a bit runtime
+ return false, changed
+ end
+ end
+ local original, attribute, direction = found.original, found.attribute, found.direction
+ local solution = solutions[attribute]
+ local features = solution and solution[set]
+ if features then
+ local featurenumber = features[best] -- not ok probably
+ if featurenumber then
+ noftries = noftries + 1
+ local first = copy_nodelist(original)
+ if not trace_colors then
+ for n in traverse_nodes(first) do -- maybe fast force so no attr needed
+ n[0] = featurenumber -- this forces dynamics
+ end
+ elseif set == "less" then
+ for n in traverse_nodes(first) do
+ setnodecolor(n,"font:isol") -- yellow
+ n[0] = featurenumber
+ end
+ else
+ for n in traverse_nodes(first) do
+ setnodecolor(n,"font:medi") -- green
+ n[0] = featurenumber
+ end
+ end
+ local font = found.font
+ local setdynamics = setfontdynamics[font]
+ if setdynamics then
+ local processes = setdynamics(font,featurenumber)
+ for i=1,#processes do -- often more than 1
+ first = processes[i](first,font,featurenumber)
+ end
+ else
+ report_solutions("fatal error, no dynamics for font %a",font)
+ end
+ first = inject_kerns(first)
+ if first.id == whatsit_code then
+ local temp = first
+ first = first.next
+ free_node(temp)
+ end
+ local last = find_node_tail(first)
+ -- replace [u]h->t by [u]first->last
+ local prev = h.prev
+ local next = t.next
+ prev.next = first
+ first.prev = prev
+ if next then
+ last.next = next
+ next.prev = last
+ end
+ -- check new pack
+ local temp, b = repack_hlist(list,width,'exactly',listdir)
+ if b > badness then
+ if trace_optimize then
+ report_optimizers("line %a, badness before %a, after %a, criterium %a, verdict %a",line,badness,b,criterium,"quit")
+ end
+ -- remove last insert
+ prev.next = h
+ h.prev = prev
+ if next then
+ t.next = next
+ next.prev = t
+ else
+ t.next = nil
+ end
+ last.next = nil
+ free_nodelist(first)
+ else
+ if trace_optimize then
+ report_optimizers("line %a, badness before: %a, after %a, criterium %a, verdict %a",line,badness,b,criterium,"continue")
+ end
+ -- free old h->t
+ t.next = nil
+ free_nodelist(h) -- somehow fails
+ if not encapsulate then
+ word[2] = first
+ word[3] = last
+ end
+ changed, badness = changed + 1, b
+ end
+ if b <= criterium then
+ return true, changed
+ end
+ end
+ end
+ end
+ return false, changed
+end
+
+-- We repeat some code but adding yet another layer of indirectness is not
+-- making things better.
+
+variants[v_normal] = function(words,list,best,width,badness,line,set,listdir)
+ local changed = 0
+ for i=1,#words do
+ local done, c = doit(words[i],list,best,width,badness,line,set,listdir)
+ changed = changed + c
+ if done then
+ break
+ end
+ end
+ if changed > 0 then
+ nofadapted = nofadapted + 1
+ -- todo: get rid of pack when ok because we already have packed and we only need the last b
+ local list, b = repack_hlist(list,width,'exactly',listdir)
+ return list, true, changed, b -- badness
+ else
+ nofkept = nofkept + 1
+ return list, false, 0, badness
+ end
+end
+
+variants[v_reverse] = function(words,list,best,width,badness,line,set,listdir)
+ local changed = 0
+ for i=#words,1,-1 do
+ local done, c = doit(words[i],list,best,width,badness,line,set,listdir)
+ changed = changed + c
+ if done then
+ break
+ end
+ end
+ if changed > 0 then
+ nofadapted = nofadapted + 1
+ -- todo: get rid of pack when ok because we already have packed and we only need the last b
+ local list, b = repack_hlist(list,width,'exactly',listdir)
+ return list, true, changed, b -- badness
+ else
+ nofkept = nofkept + 1
+ return list, false, 0, badness
+ end
+end
+
+variants[v_random] = function(words,list,best,width,badness,line,set,listdir)
+ local changed = 0
+ while #words > 0 do
+ local done, c = doit(remove(words,random(1,#words)),list,best,width,badness,line,set,listdir)
+ changed = changed + c
+ if done then
+ break
+ end
+ end
+ if changed > 0 then
+ nofadapted = nofadapted + 1
+ -- todo: get rid of pack when ok because we already have packed and we only need the last b
+ local list, b = repack_hlist(list,width,'exactly',listdir)
+ return list, true, changed, b -- badness
+ else
+ nofkept = nofkept + 1
+ return list, false, 0, badness
+ end
+end
+
+local function show_quality(current,what,line)
+ local set = current.glue_set
+ local sign = current.glue_sign
+ local order = current.glue_order
+ local amount = set * ((sign == 2 and -1) or 1)
+ report_optimizers("line %a, category %a, amount %a, set %a, sign %a, order %a",line,what,amount,set,sign,order)
+end
+
+function splitters.optimize(head)
+ if not optimize then
+ report_optimizers("no optimizer set")
+ return
+ end
+ local nc = #cache
+ if nc == 0 then
+ return
+ end
+ starttiming(splitters)
+ local listdir = nil -- todo ! ! !
+ if randomseed then
+ math.setrandomseedi(randomseed)
+ randomseed = nil
+ end
+ local line = 0
+ local tex_hbadness, tex_hfuzz = tex.hbadness, tex.hfuzz
+ tex.hbadness, tex.hfuzz = 10000, number.maxdimen
+ if trace_optimize then
+ report_optimizers("preroll %a, variant %a, criterium %a, cache size %a",preroll,variant,criterium,nc)
+ end
+ for current in traverse_ids(hlist_code,head) do
+ -- report_splitters("before: [%s] => %s",current.dir,nodes.tosequence(current.list,nil))
+ line = line + 1
+ local sign, dir, list, width = current.glue_sign, current.dir, current.list, current.width
+ if not encapsulate and list.id == glyph_code then
+ -- nasty .. we always assume a prev being there .. future luatex will always have a leftskip set
+ -- current.list, list = insert_node_before(list,list,new_glue(0))
+ current.list, list = insert_node_before(list,list,new_leftskip(0))
+ end
+ local temp, badness = repack_hlist(list,width,'exactly',dir) -- it would be nice if the badness was stored in the node
+ if badness > 0 then
+ if sign == 0 then
+ if trace_optimize then
+ report_optimizers("line %a, badness %a, outcome %a, verdict %a",line,badness,"okay","okay")
+ end
+ else
+ local set, max
+ if sign == 1 then
+ if trace_optimize then
+ report_optimizers("line %a, badness %a, outcome %a, verdict %a",line,badness,"underfull","trying more")
+ end
+ set, max = "more", max_more
+ else
+ if trace_optimize then
+ report_optimizers("line %a, badness %a, outcome %a, verdict %a",line,badness,"overfull","trying less")
+ end
+ set, max = "less", max_less
+ end
+ -- we can keep the best variants
+ local lastbest, lastbadness = nil, badness
+ if preroll then
+ local bb, base
+ for i=1,max do
+ if base then
+ free_nodelist(base)
+ end
+ base = copy_nodelist(list)
+ local words = collect_words(base) -- beware: words is adapted
+ for j=i,max do
+ local temp, done, changes, b = optimize(words,base,j,width,badness,line,set,dir)
+ base = temp
+ if trace_optimize then
+ report_optimizers("line %a, alternative %a.%a, changes %a, badness %a",line,i,j,changes,b)
+ end
+ bb = b
+ if b <= criterium then
+ break
+ end
+ -- if done then
+ -- break
+ -- end
+ end
+ if bb and bb > criterium then -- needs checking
+ if not lastbest then
+ lastbest, lastbadness = i, bb
+ elseif bb > lastbadness then
+ lastbest, lastbadness = i, bb
+ end
+ else
+ break
+ end
+ end
+ free_nodelist(base)
+ end
+ local words = collect_words(list)
+ for best=lastbest or 1,max do
+ local temp, done, changes, b = optimize(words,list,best,width,badness,line,set,dir)
+ current.list = temp
+ if trace_optimize then
+ report_optimizers("line %a, alternative %a, changes %a, badness %a",line,best,changes,b)
+ end
+ if done then
+ if b <= criterium then -- was == 0
+ protect_glyphs(list)
+ break
+ end
+ end
+ end
+ end
+ else
+ if trace_optimize then
+ report_optimizers("line %a, verdict %a",line,"not bad enough")
+ end
+ end
+ -- we pack inside the outer hpack and that way keep the original wd/ht/dp as bonus
+ current.list = hpack_nodes(current.list,width,'exactly',listdir)
+ -- report_splitters("after: [%s] => %s",temp.dir,nodes.tosequence(temp.list,nil))
+ end
+ for i=1,nc do
+ local ci = cache[i]
+ free_nodelist(ci.original)
+ end
+ cache = { }
+ tex.hbadness, tex.hfuzz = tex_hbadness, tex_hfuzz
+ stoptiming(splitters)
+end
+
+statistics.register("optimizer statistics", function()
+ if nofwords > 0 then
+ local elapsed = statistics.elapsedtime(splitters)
+ local average = noftries/elapsed
+ return format("%s words identified in %s paragraphs, %s words retried, %s lines tried, %0.3f seconds used, %s adapted, %0.1f lines per second",
+ nofwords,nofparagraphs,noftries,nofadapted+nofkept,elapsed,nofadapted,average)
+ end
+end)
+
+-- we could use a stack
+
+local enableaction = tasks.enableaction
+local disableaction = tasks.disableaction
+
+local function enable()
+ enableaction("processors", "builders.paragraphs.solutions.splitters.split")
+ enableaction("finalizers", "builders.paragraphs.solutions.splitters.optimize")
+end
+
+local function disable()
+ disableaction("processors", "builders.paragraphs.solutions.splitters.split")
+ disableaction("finalizers", "builders.paragraphs.solutions.splitters.optimize")
+end
+
+function splitters.start(name,settings)
+ if pushsplitter(name,settings) == 1 then
+ enable()
+ end
+end
+
+function splitters.stop()
+ if popsplitter() == 0 then
+ disable()
+ end
+end
+
+function splitters.set(name,settings)
+ if #stack > 0 then
+ stack = { }
+ else
+ enable()
+ end
+ pushsplitter(name,settings) -- sets attribute etc
+end
+
+function splitters.reset()
+ if #stack > 0 then
+ stack = { }
+ popsplitter() -- resets attribute etc
+ disable()
+ end
+end
+
+-- interface
+
+commands.definefontsolution = splitters.define
+commands.startfontsolution = splitters.start
+commands.stopfontsolution = splitters.stop
+commands.setfontsolution = splitters.set
+commands.resetfontsolution = splitters.reset
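+
+-- Minimal usage sketch from the Lua end (the solution name below is made up
+-- for illustration; normally the mkiv interface drives the commands.* entries
+-- registered above):
+--
+--   splitters.start("demo") -- pushes the settings and, for the first entry
+--                           -- on the stack, enables the split (processors)
+--                           -- and optimize (finalizers) actions
+--   ... paragraph gets typeset ...
+--   splitters.stop()        -- pops; the actions are disabled again once
+--                           -- the stack runs empty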
diff --git a/tex/context/base/font-syn.lua b/tex/context/base/font-syn.lua
index 27176dade..dd6c47a88 100644
--- a/tex/context/base/font-syn.lua
+++ b/tex/context/base/font-syn.lua
@@ -1,1724 +1,1724 @@
-if not modules then modules = { } end modules ['font-syn'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- todo: subs in lookups requests
-
-local next, tonumber, type, tostring = next, tonumber, type, tostring
-local sub, gsub, lower, match, find, lower, upper = string.sub, string.gsub, string.lower, string.match, string.find, string.lower, string.upper
-local find, gmatch = string.find, string.gmatch
-local concat, sort, format = table.concat, table.sort, string.format
-local serialize = table.serialize
-local lpegmatch = lpeg.match
-local unpack = unpack or table.unpack
-local formatters = string.formatters
-
-local allocate = utilities.storage.allocate
-local sparse = utilities.storage.sparse
-
-local removesuffix = file.removesuffix
-local splitbase = file.splitbase
-local splitname = file.splitname
-local basename = file.basename
-local nameonly = file.nameonly
-local pathpart = file.pathpart
-local filejoin = file.join
-local is_qualified_path = file.is_qualified_path
-local exists = io.exists
-
-local findfile = resolvers.findfile
-local cleanpath = resolvers.cleanpath
-local resolveresolved = resolvers.resolve
-
-local trace_names = false trackers.register("fonts.names", function(v) trace_names = v end)
-local trace_warnings = false trackers.register("fonts.warnings", function(v) trace_warnings = v end)
-local trace_specifications = false trackers.register("fonts.specifications", function(v) trace_specifications = v end)
-
-local report_names = logs.reporter("fonts","names")
-
---[[ldx--
-This module implements a name to filename resolver. Names are resolved
-using a table that has keys filtered from the font related files.
---ldx]]--
-
-local P, C, Cc, Cs = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Cs
-
--- what to do with 'thin'
-
-local weights = Cs ( -- not extra
- P("demibold")
- + P("semibold")
- + P("mediumbold")
- + P("ultrabold")
- + P("extrabold")
- + P("ultralight")
- + P("bold")
- + P("demi")
- + P("semi")
- + P("light")
- + P("medium")
- + P("heavy")
- + P("ultra")
- + P("black")
- + P("bol") -- / "bold"
- + P("regular") / "normal"
-)
-
-local normalized_weights = sparse {
- regular = "normal",
-}
-
-local styles = Cs (
- P("reverseoblique") / "reverseitalic"
- + P("regular") / "normal"
- + P("italic")
- + P("oblique") / "italic"
- + P("slanted")
- + P("roman") / "normal"
- + P("ital") / "italic"
- + P("ita") / "italic"
-)
-
-local normalized_styles = sparse {
- reverseoblique = "reverseitalic",
- regular = "normal",
- oblique = "italic",
-}
-
-local widths = Cs(
- P("condensed")
- + P("thin")
- + P("expanded")
- + P("cond") / "condensed"
- + P("normal")
- + P("book") / "normal"
-)
-
-local normalized_widths = sparse()
-
-local variants = Cs( -- fax casual
- P("smallcaps")
- + P("oldstyle")
- + P("caps") / "smallcaps"
-)
-
-local normalized_variants = sparse()
-
-names.knownweights = {
- "black",
- "bold",
- "demi",
- "demibold",
- "extrabold",
- "heavy",
- "light",
- "medium",
- "mediumbold",
- "normal",
- "regular",
- "semi",
- "semibold",
- "ultra",
- "ultrabold",
- "ultralight",
-}
-
-names.knownstyles = {
- "italic",
- "normal",
- "oblique",
- "regular",
- "reverseitalic",
- "reverseoblique",
- "roman",
- "slanted",
-}
-
-names.knownwidths = {
- "book",
- "condensed",
- "expanded",
- "normal",
- "thin",
-}
-
-names.knownvariants = {
- "normal",
- "oldstyle",
- "smallcaps",
-}
-
-local any = P(1)
-
-local analyzed_table
-
-local analyzer = Cs (
- (
- weights / function(s) analyzed_table[1] = s return "" end
- + styles / function(s) analyzed_table[2] = s return "" end
- + widths / function(s) analyzed_table[3] = s return "" end
- + variants / function(s) analyzed_table[4] = s return "" end
- + any
- )^0
-)
-
-local splitter = lpeg.splitat("-")
-
-function names.splitspec(askedname)
- local name, weight, style, width, variant = lpegmatch(splitter,askedname)
- weight = weight and lpegmatch(weights, weight) or weight
- style = style and lpegmatch(styles, style) or style
- width = width and lpegmatch(widths, width) or width
- variant = variant and lpegmatch(variants,variant) or variant
- if trace_names then
- report_names("requested name %a split in name %a, weight %a, style %a, width %a and variant %a",
- askedname,name,weight,style,width,variant)
- end
- if not weight or not weight or not width or not variant then
- weight, style, width, variant = weight or "normal", style or "normal", width or "normal", variant or "normal"
- if trace_names then
- report_names("request %a normalized to '%s-%s-%s-%s-%s'",
- askedname,name,weight,style,width,variant)
- end
- end
- return name or askedname, weight, style, width, variant
-end
-
-local function analyzespec(somename)
- if somename then
- analyzed_table = { }
- local name = lpegmatch(analyzer,somename)
- return name, analyzed_table[1], analyzed_table[2], analyzed_table[3], analyzed_table[4]
- end
-end
-
---[[ldx--
-It would make sense to implement the filters in the related modules,
-but to keep the overview, we define them here.
---ldx]]--
-
-filters.otf = fontloader.info
-filters.ttf = fontloader.info
-filters.ttc = fontloader.info
-filters.dfont = fontloader.info
-
-function fontloader.fullinfo(...) -- check with taco what we get / could get
- local ff = fontloader.open(...)
- if ff then
- local d = ff and fontloader.to_table(ff)
- d.glyphs, d.subfonts, d.gpos, d.gsub, d.lookups = nil, nil, nil, nil, nil
- fontloader.close(ff)
- return d
- else
- return nil, "error in loading font"
- end
-end
-
-filters.otf = fontloader.fullinfo
-
-function filters.afm(name)
- -- we could parse the afm file as well, and then report an error but
- -- it's not worth the trouble
- local pfbname = findfile(removesuffix(name)..".pfb","pfb") or ""
- if pfbname == "" then
- pfbname = findfile(nameonly(name)..".pfb","pfb") or ""
- end
- if pfbname ~= "" then
- local f = io.open(name)
- if f then
- local hash = { }
- for line in f:lines() do
- local key, value = match(line,"^(.+)%s+(.+)%s*$")
- if key and #key > 0 then
- hash[lower(key)] = value
- end
- if find(line,"StartCharMetrics") then
- break
- end
- end
- f:close()
- return hash
- end
- end
- return nil, "no matching pfb file"
-end
-
-function filters.pfb(name)
- return fontloader.info(name)
-end
-
---[[ldx--
-The scanner loops over the filters using the information stored in
-the file databases. Watch how we check not only for the names, but also
-for combination with the weight of a font.
---ldx]]--
-
-filters.list = {
- "otf", "ttf", "ttc", "dfont", "afm",
- -- "ttc", "otf", "ttf", "dfont", "afm",
-}
-
-names.fontconfigfile = "fonts.conf" -- a bit weird format, bonus feature
-names.osfontdirvariable = "OSFONTDIR" -- the official way, in minimals etc
-
-filters.paths = { }
-filters.names = { }
-
-function names.getpaths(trace)
- local hash, result, r = { }, { }, 0
- local function collect(t,where)
- for i=1,#t do
- local v = cleanpath(t[i])
- v = gsub(v,"/+$","") -- not needed any more
- local key = lower(v)
- report_names("%a specifies path %a",where,v)
- if not hash[key] then
- r = r + 1
- result[r] = v
- hash[key] = true
- end
- end
- end
- local path = names.osfontdirvariable or ""
- if path ~= "" then
- collect(resolvers.expandedpathlist(path),path)
- end
- if xml then
- local confname = resolvers.expansion("FONTCONFIG_FILE") or ""
- if confname == "" then
- confname = names.fontconfigfile or ""
- end
- if confname ~= "" then
- -- first look in the tex tree
- local name = findfile(confname,"fontconfig files") or ""
- if name == "" then
- -- after all, fontconfig is a unix thing
- name = filejoin("/etc",confname)
- if not lfs.isfile(name) then
- name = "" -- force quit
- end
- end
- if name ~= "" and lfs.isfile(name) then
- if trace_names then
- report_names("%s fontconfig file %a","loading",name)
- end
- local xmldata = xml.load(name)
- -- begin of untested mess
- xml.include(xmldata,"include","",true,function(incname)
- if not is_qualified_path(incname) then
- local path = pathpart(name) -- main name
- if path ~= "" then
- incname = filejoin(path,incname)
- end
- end
- if lfs.isfile(incname) then
- if trace_names then
- report_names("%s fontconfig file %a","merging included",incname)
- end
- return io.loaddata(incname)
- elseif trace_names then
- report_names("%s fontconfig file: %a","ignoring included",incname)
- end
- end)
- -- end of untested mess
- local fontdirs = xml.collect_texts(xmldata,"dir",true)
- if trace_names then
- report_names("%s dirs found in fontconfig",#fontdirs)
- end
- collect(fontdirs,"fontconfig file")
- end
- end
- end
- function names.getpaths()
- return result
- end
- return result
-end
-
-local function cleanname(name)
- return (gsub(lower(name),"[^%a%d]",""))
-end
-
-local function cleanfilename(fullname,defaultsuffix)
- local path, name, suffix = splitname(fullname)
- name = gsub(lower(name),"[^%a%d]","")
- if suffix and suffix ~= "" then
- return name .. ".".. suffix
- elseif defaultsuffix and defaultsuffix ~= "" then
- return name .. ".".. defaultsuffix
- else
- return name
- end
-end
-
-names.cleanname = cleanname
-names.cleanfilename = cleanfilename
-
-local function check_names(result)
- local names = result.names
- if names then
- for i=1,#names do
- local name = names[i]
- if name.lang == "English (US)" then
- return name.names
- end
- end
- end
-end
-
-local function walk_tree(pathlist,suffix,identify)
- if pathlist then
- for i=1,#pathlist do
- local path = pathlist[i]
- path = cleanpath(path .. "/")
- path = gsub(path,"/+","/")
- local pattern = path .. "**." .. suffix -- ** forces recurse
- report_names("globbing path %a",pattern)
- local t = dir.glob(pattern)
- sort(t,sorter)
- for j=1,#t do
- local completename = t[j]
- identify(completename,basename(completename),suffix,completename)
- end
- end
- end
-end
-
-local function check_name(data,result,filename,modification,suffix,subfont)
- -- shortcuts
- local specifications = data.specifications
- -- prepare
- local names = check_names(result)
- -- fetch
- local familyname = names and names.preffamilyname or result.familyname
- local fullname = names and names.fullname or result.fullname
- local fontname = result.fontname
- local subfamily = names and names.subfamily
- local modifiers = names and names.prefmodifiers
- local weight = names and names.weight or result.weight
- local italicangle = tonumber(result.italicangle)
- local subfont = subfont or nil
- local rawname = fullname or fontname or familyname
- -- normalize
- familyname = familyname and cleanname(familyname)
- fullname = fullname and cleanname(fullname)
- fontname = fontname and cleanname(fontname)
- subfamily = subfamily and cleanname(subfamily)
- modifiers = modifiers and cleanname(modifiers)
- weight = weight and cleanname(weight)
- italicangle = italicangle == 0 and nil
- -- analyze
- local a_name, a_weight, a_style, a_width, a_variant = analyzespec(fullname or fontname or familyname)
- -- check
- local width = a_width
- local variant = a_variant
- local style = modifiers and gsub(modifiers,"[^%a]","")
- if not style and italicangle then
- style = "italic"
- end
- if not variant or variant == "" then
- variant = "normal"
- end
- if not weight or weight == "" then
- weight = a_weight
- end
- if not style or style == "" then
- style = a_style
- end
- if not familyname then
- familyname = a_name
- end
- fontname = fontname or fullname or familyname or basename(filename)
- fullname = fullname or fontname
- familyname = familyname or fontname
- specifications[#specifications + 1] = {
- filename = filename, -- unresolved
- format = lower(suffix),
- subfont = subfont,
- rawname = rawname,
- familyname = familyname,
- fullname = fullname,
- fontname = fontname,
- subfamily = subfamily,
- modifiers = modifiers,
- weight = weight,
- style = style,
- width = width,
- variant = variant,
- minsize = result.design_range_bottom or 0,
- maxsize = result.design_range_top or 0,
- designsize = result.design_size or 0,
- modification = modification or 0,
- }
-end
-
-local function cleanupkeywords()
- local data = names.data
- local specifications = names.data.specifications
- if specifications then
- local weights = { }
- local styles = { }
- local widths = { }
- local variants = { }
- for i=1,#specifications do
- local s = specifications[i]
- -- fix (sofar styles are taken from the name, and widths from the specification)
- local _, b_weight, b_style, b_width, b_variant = analyzespec(s.weight)
- local _, c_weight, c_style, c_width, c_variant = analyzespec(s.style)
- local _, d_weight, d_style, d_width, d_variant = analyzespec(s.width)
- local _, e_weight, e_style, e_width, e_variant = analyzespec(s.variant)
- local _, f_weight, f_style, f_width, f_variant = analyzespec(s.fullname or "")
- local weight = b_weight or c_weight or d_weight or e_weight or f_weight or "normal"
- local style = b_style or c_style or d_style or e_style or f_style or "normal"
- local width = b_width or c_width or d_width or e_width or f_width or "normal"
- local variant = b_variant or c_variant or d_variant or e_variant or f_variant or "normal"
- if not weight or weight == "" then weight = "normal" end
- if not style or style == "" then style = "normal" end
- if not width or width == "" then width = "normal" end
- if not variant or variant == "" then variant = "normal" end
- weights [weight ] = (weights [weight ] or 0) + 1
- styles [style ] = (styles [style ] or 0) + 1
- widths [width ] = (widths [width ] or 0) + 1
- variants[variant] = (variants[variant] or 0) + 1
- if weight ~= s.weight then
- s.fontweight = s.weight
- end
- s.weight, s.style, s.width, s.variant = weight, style, width, variant
- end
- local stats = data.statistics
- stats.used_weights, stats.used_styles, stats.used_widths, stats.used_variants = weights, styles, widths, variants
- end
-end
-
-local function collectstatistics()
- local data = names.data
- local specifications = data.specifications
- if specifications then
- local weights = { }
- local styles = { }
- local widths = { }
- local variants = { }
- for i=1,#specifications do
- local s = specifications[i]
- local weight = s.weight
- local style = s.style
- local width = s.width
- local variant = s.variant
- if weight then weights [weight ] = (weights [weight ] or 0) + 1 end
- if style then styles [style ] = (styles [style ] or 0) + 1 end
- if width then widths [width ] = (widths [width ] or 0) + 1 end
- if variant then variants[variant] = (variants[variant] or 0) + 1 end
- end
- local stats = data.statistics
- stats.weights = weights
- stats.styles = styles
- stats.widths = widths
- stats.variants = variants
- stats.fonts = #specifications
- end
-end
-
-local function collecthashes()
- local data = names.data
- local mappings = data.mappings
- local fallbacks = data.fallbacks
- local specifications = data.specifications
- local nofmappings = 0
- local noffallbacks = 0
- if specifications then
- -- maybe multiple passes
- for index=1,#specifications do
- local s = specifications[index]
- local format, fullname, fontname, familyname, weight, subfamily = s.format, s.fullname, s.fontname, s.familyname, s.weight, s.subfamily
- local mf, ff = mappings[format], fallbacks[format]
- if fullname and not mf[fullname] then
- mf[fullname], nofmappings = index, nofmappings + 1
- end
- if fontname and not mf[fontname] then
- mf[fontname], nofmappings = index, nofmappings + 1
- end
- if familyname and weight and weight ~= sub(familyname,#familyname-#weight+1,#familyname) then
- local madename = familyname .. weight
- if not mf[madename] and not ff[madename] then
- ff[madename], noffallbacks = index, noffallbacks + 1
- end
- end
- if familyname and subfamily and subfamily ~= sub(familyname,#familyname-#subfamily+1,#familyname) then
- local extraname = familyname .. subfamily
- if not mf[extraname] and not ff[extraname] then
- ff[extraname], noffallbacks = index, noffallbacks + 1
- end
- end
- if familyname and not mf[familyname] and not ff[familyname] then
- ff[familyname], noffallbacks = index, noffallbacks + 1
- end
- end
- end
- return nofmappings, noffallbacks
-end
-
-local function collectfamilies()
- local data = names.data
- local specifications = data.specifications
- local families = data.families
- for index=1,#specifications do
- local familyname = specifications[index].familyname
- local family = families[familyname]
- if not family then
- families[familyname] = { index }
- else
- family[#family+1] = index
- end
- end
-end
-
-local function checkduplicate(where) -- fails on "Romantik" but that's a border case anyway
- local data = names.data
- local mapping = data[where]
- local specifications = data.specifications
- local loaded = { }
- if specifications and mapping then
- for _, m in next, mapping do
- for k, v in next, m do
- local s = specifications[v]
- local hash = formatters["%s-%s-%s-%s-%s"](s.familyname,s.weight or "*",s.style or "*",s.width or "*",s.variant or "*")
- local h = loaded[hash]
- if h then
- local ok = true
- local fn = s.filename
- for i=1,#h do
- local hn = s.filename
- if h[i] == fn then
- ok = false
- break
- end
- end
- if ok then
- h[#h+1] = fn
- end
- else
- loaded[hash] = { s.filename }
- end
- end
- end
- end
- local n = 0
- for k, v in table.sortedhash(loaded) do
- local nv = #v
- if nv > 1 then
- if trace_warnings then
- report_names("lookup %a clashes with %a",k,v)
- end
- n = n + nv
- end
- end
- report_names("%a double lookups in %a",n,where)
-end
-
-local function checkduplicates()
- checkduplicate("mappings")
- checkduplicate("fallbacks")
-end
-
-local sorter = function(a,b)
- return a > b -- to be checked
-end
-
-local function sorthashes()
- local data = names.data
- local list = filters.list
- local mappings = data.mappings
- local fallbacks = data.fallbacks
- local sorted_mappings = { }
- local sorted_fallbacks = { }
- data.sorted_mappings = sorted_mappings
- data.sorted_fallbacks = sorted_fallbacks
- for i=1,#list do
- local l = list[i]
- sorted_mappings [l] = table.keys(mappings[l])
- sorted_fallbacks[l] = table.keys(fallbacks[l])
- sort(sorted_mappings [l],sorter)
- sort(sorted_fallbacks[l],sorter)
- end
- data.sorted_families = table.keys(data.families)
- sort(data.sorted_families,sorter)
-end
-
-local function unpackreferences()
- local data = names.data
- local specifications = data.specifications
- if specifications then
- for k, v in next, data.families do
- for i=1,#v do
- v[i] = specifications[v[i]]
- end
- end
- local mappings = data.mappings
- if mappings then
- for _, m in next, mappings do
- for k, v in next, m do
- m[k] = specifications[v]
- end
- end
- end
- local fallbacks = data.fallbacks
- if fallbacks then
- for _, f in next, fallbacks do
- for k, v in next, f do
- f[k] = specifications[v]
- end
- end
- end
- end
-end
-
-local function analyzefiles(olddata)
- if not trace_warnings then
- report_names("warnings are disabled (tracker 'fonts.warnings')")
- end
- local data = names.data
- local done = { }
- local totalnofread = 0
- local totalnofskipped = 0
- local totalnofduplicates = 0
- local nofread = 0
- local nofskipped = 0
- local nofduplicates = 0
- local skip_paths = filters.paths
- local skip_names = filters.names
- local specifications = data.specifications
- local oldindices = olddata and olddata.indices or { }
- local oldspecifications = olddata and olddata.specifications or { }
- local oldrejected = olddata and olddata.rejected or { }
- local treatmentdata = fonts.treatments.data
- local function identify(completename,name,suffix,storedname)
- local pathpart, basepart = splitbase(completename)
- nofread = nofread + 1
- local treatment = treatmentdata[completename] or treatmentdata[basepart]
- if treatment and treatment.ignored then
- if trace_names then
- report_names("%s font %a is ignored, reason %a",suffix,completename,treatment.comment or "unknown")
- end
- nofskipped = nofskipped + 1
- elseif done[name] then
- -- already done (avoid otf afm clash)
- if trace_names then
- report_names("%s font %a already done",suffix,completename)
- end
- nofduplicates = nofduplicates + 1
- nofskipped = nofskipped + 1
- elseif not exists(completename) then
- -- weird error
- if trace_names then
- report_names("%s font %a does not really exist",suffix,completename)
- end
- nofskipped = nofskipped + 1
- elseif not is_qualified_path(completename) and findfile(completename,suffix) == "" then
- -- not locatable by backend anyway
- if trace_names then
- report_names("%s font %a cannot be found by backend",suffix,completename)
- end
- nofskipped = nofskipped + 1
- else
- if #skip_paths > 0 then
- for i=1,#skip_paths do
- if find(pathpart,skip_paths[i]) then
- if trace_names then
- report_names("rejecting path of %s font %a",suffix,completename)
- end
- nofskipped = nofskipped + 1
- return
- end
- end
- end
- if #skip_names > 0 then
- for i=1,#skip_paths do
- if find(basepart,skip_names[i]) then
- done[name] = true
- if trace_names then
- report_names("rejecting name of %s font %a",suffix,completename)
- end
- nofskipped = nofskipped + 1
- return
- end
- end
- end
- if trace_names then
- report_names("identifying %s font %a",suffix,completename)
- end
- local result = nil
- local modification = lfs.attributes(completename,"modification")
- if olddata and modification and modification > 0 then
- local oldindex = oldindices[storedname] -- index into specifications
- if oldindex then
- local oldspecification = oldspecifications[oldindex]
- if oldspecification and oldspecification.filename == storedname then -- double check for out of sync
- local oldmodification = oldspecification.modification
- if oldmodification == modification then
- result = oldspecification
- specifications[#specifications + 1] = result
- else
- end
- else
- end
- elseif oldrejected[storedname] == modification then
- result = false
- end
- end
- if result == nil then
- local result, message = filters[lower(suffix)](completename)
- if result then
- if result[1] then
- for r=1,#result do
- local ok = check_name(data,result[r],storedname,modification,suffix,r-1) -- subfonts start at zero
- -- if not ok then
- -- nofskipped = nofskipped + 1
- -- end
- end
- else
- local ok = check_name(data,result,storedname,modification,suffix)
- -- if not ok then
- -- nofskipped = nofskipped + 1
- -- end
- end
- if trace_warnings and message and message ~= "" then
- report_names("warning when identifying %s font %a, %s",suffix,completename,message)
- end
- elseif trace_warnings then
- nofskipped = nofskipped + 1
- report_names("error when identifying %s font %a, %s",suffix,completename,message or "unknown")
- end
- end
- done[name] = true
- end
- logs.flush() -- a bit overkill for each font, maybe not needed here
- end
- local function traverse(what, method)
- local list = filters.list
- for n=1,#list do
- local suffix = list[n]
- local t = os.gettimeofday() -- use elapser
- nofread, nofskipped, nofduplicates = 0, 0, 0
- suffix = lower(suffix)
- report_names("identifying %s font files with suffix %a",what,suffix)
- method(suffix)
- suffix = upper(suffix)
- report_names("identifying %s font files with suffix %a",what,suffix)
- method(suffix)
- totalnofread, totalnofskipped, totalnofduplicates = totalnofread + nofread, totalnofskipped + nofskipped, totalnofduplicates + nofduplicates
- local elapsed = os.gettimeofday() - t
- report_names("%s %s files identified, %s skipped, %s duplicates, %s hash entries added, runtime %0.3f seconds",nofread,what,nofskipped,nofduplicates,nofread-nofskipped,elapsed)
- end
- logs.flush()
- end
- -- problem .. this will not take care of duplicates
- local function withtree(suffix)
- resolvers.dowithfilesintree(".*%." .. suffix .. "$", function(method,root,path,name)
- if method == "file" or method == "tree" then
- local completename = root .."/" .. path .. "/" .. name
- completename = resolveresolved(completename) -- no shortcut
- identify(completename,name,suffix,name)
- return true
- end
- end, function(blobtype,blobpath,pattern)
- blobpath = resolveresolved(blobpath) -- no shortcut
- report_names("scanning path %a for %s files",blobpath,suffix)
- end, function(blobtype,blobpath,pattern,total,checked,done)
- blobpath = resolveresolved(blobpath) -- no shortcut
- report_names("%s entries found, %s %s files checked, %s okay",total,checked,suffix,done)
- end)
- end
- local function withlsr(suffix) -- all trees
- -- we do this only for a stupid names run, not used for context itself,
- -- using the vars is too clumsy so we just stick to a full scan instead
- local pathlist = resolvers.splitpath(resolvers.showpath("ls-R") or "")
- walk_tree(pathlist,suffix,identify)
- end
- local function withsystem(suffix) -- OSFONTDIR cum suis
- walk_tree(names.getpaths(trace),suffix,identify)
- end
- traverse("tree",withtree) -- TEXTREE only
- if texconfig.kpse_init then
- traverse("lsr", withlsr)
- else
- traverse("system", withsystem)
- end
- data.statistics.readfiles = totalnofread
- data.statistics.skippedfiles = totalnofskipped
- data.statistics.duplicatefiles = totalnofduplicates
-end
-
-local function addfilenames()
- local data = names.data
- local specifications = data.specifications
- local indices = { }
- local files = { }
- for i=1,#specifications do
- local fullname = specifications[i].filename
- files[cleanfilename(fullname)] = fullname
- indices[fullname] = i
- end
- data.files = files
- data.indices = indices
-end
-
-local function rejectclashes() -- just to be sure, so no explicit afm will be found then
- local specifications = names.data.specifications
- local used = { }
- local okay = { }
- local rejected = { } -- only keep modification
- local o = 0
- for i=1,#specifications do
- local s = specifications[i]
- local f = s.fontname
- if f then
- local fnd = used[f]
- local fnm = s.filename
- if fnd then
- if trace_warnings then
- report_names("fontname %a clashes, %a rejected in favor of %a",f,fnm,fnd)
- end
- rejected[f] = s.modification
- else
- used[f] = fnm
- o = o + 1
- okay[o] = s
- end
- else
- o = o + 1
- okay[o] = s
- end
- end
- local d = #specifications - #okay
- if d > 0 then
- report_names("%s files rejected due to clashes",d)
- end
- names.data.specifications = okay
- names.data.rejected = rejected
-end
-
-local function resetdata()
- local mappings = { }
- local fallbacks = { }
- for _, k in next, filters.list do
- mappings [k] = { }
- fallbacks[k] = { }
- end
- names.data = {
- version = names.version,
- mappings = mappings,
- fallbacks = fallbacks,
- specifications = { },
- families = { },
- statistics = { },
- names = { },
- indices = { },
- rejected = { },
- datastate = resolvers.datastate(),
- }
-end
-
-function names.identify(force)
- local starttime = os.gettimeofday() -- use elapser
- resetdata()
- analyzefiles(not force and names.readdata(names.basename))
- rejectclashes()
- collectfamilies()
- collectstatistics()
- cleanupkeywords()
- collecthashes()
- checkduplicates()
- addfilenames()
- -- sorthashes() -- will be resorted when saved
- report_names("total scan time %0.3f seconds",os.gettimeofday()-starttime)
-end
-
-function names.is_permitted(name)
- return containers.is_usable(names.cache, name)
-end
-function names.writedata(name,data)
- containers.write(names.cache,name,data)
-end
-function names.readdata(name)
- return containers.read(names.cache,name)
-end
-
-function names.load(reload,force)
- if not names.loaded then
- if reload then
- if names.is_permitted(names.basename) then
- names.identify(force)
- names.writedata(names.basename,names.data)
- else
- report_names("unable to access database cache")
- end
- names.saved = true
- end
- local data = names.readdata(names.basename)
- names.data = data
- if not names.saved then
- if not data or not next(data) or not data.specifications or not next(data.specifications) then
- names.load(true)
- end
- names.saved = true
- end
- if not data then
- report_names("accessing the data table failed")
- else
- unpackreferences()
- sorthashes()
- end
- names.loaded = true
- end
-end
-
-local function list_them(mapping,sorted,pattern,t,all)
- if mapping[pattern] then
- t[pattern] = mapping[pattern]
- else
- for k=1,#sorted do
- local v = sorted[k]
- if not t[v] and find(v,pattern) then
- t[v] = mapping[v]
- if not all then
- return
- end
- end
- end
- end
-end
-
-function names.list(pattern,reload,all) -- here?
- names.load() -- todo reload
- if names.loaded then
- local t = { }
- local data = names.data
- if data then
- local list = filters.list
- local mappings = data.mappings
- local sorted_mappings = data.sorted_mappings
- local fallbacks = data.fallbacks
- local sorted_fallbacks = data.sorted_fallbacks
- for i=1,#list do
- local format = list[i]
- list_them(mappings[format],sorted_mappings[format],pattern,t,all)
- if next(t) and not all then
- return t
- end
- list_them(fallbacks[format],sorted_fallbacks[format],pattern,t,all)
- if next(t) and not all then
- return t
- end
- end
- end
- return t
- end
-end
-
-local reloaded = false
-
-local function is_reloaded()
- if not reloaded then
- local data = names.data
- if autoreload then
- local c_status = serialize(resolvers.datastate())
- local f_status = serialize(data.datastate)
- if c_status == f_status then
- if trace_names then
- report_names("font database has matching configuration and file hashes")
- end
- return
- else
- report_names("font database has mismatching configuration and file hashes")
- end
- else
- report_names("font database is regenerated (controlled by directive 'fonts.autoreload')")
- end
- names.loaded = false
- reloaded = true
- logs.flush()
- names.load(true)
- end
-end
-
---[[ldx--
-The resolver also checks if the cached names are loaded. Being clever
-here is for testing purposes only (it deals with names prefixed by an
-encoding name).
---ldx]]--
-
-local function fuzzy(mapping,sorted,name,sub)
- local condensed = gsub(name,"[^%a%d]","")
- for k=1,#sorted do
- local v = sorted[k]
- if find(v,condensed) then
- return mapping[v], v
- end
- end
-end
-
--- we could cache a lookup .. maybe some day ... (only when auto loaded!)
-
-local function foundname(name,sub) -- sub is not used currently
- local data = names.data
- local mappings = data.mappings
- local sorted_mappings = data.sorted_mappings
- local fallbacks = data.fallbacks
- local sorted_fallbacks = data.sorted_fallbacks
- local list = filters.list
- -- dilemma: we lookup in the order otf ttf ttc ... afm but now an otf fallback
- -- can come after an afm match ... well, one should provide nice names anyway
- -- and having two lists is not an option
- for i=1,#list do
- local l = list[i]
- local found = mappings[l][name]
- if found then
- if trace_names then
- report_names("resolved via direct name match: %a",name)
- end
- return found
- end
- end
- for i=1,#list do
- local l = list[i]
- local found, fname = fuzzy(mappings[l],sorted_mappings[l],name,sub)
- if found then
- if trace_names then
- report_names("resolved via fuzzy name match: %a onto %a",name,fname)
- end
- return found
- end
- end
- for i=1,#list do
- local l = list[i]
- local found = fallbacks[l][name]
- if found then
- if trace_names then
- report_names("resolved via direct fallback match: %a",name)
- end
- return found
- end
- end
- for i=1,#list do
- local l = list[i]
- local found, fname = fuzzy(sorted_mappings[l],sorted_fallbacks[l],name,sub)
- if found then
- if trace_names then
- report_names("resolved via fuzzy fallback match: %a onto %a",name,fname)
- end
- return found
- end
- end
- if trace_names then
- report_names("font with name %a cannot be found",name)
- end
-end
-
-function names.resolvedspecification(askedname,sub)
- if askedname and askedname ~= "" and names.enabled then
- askedname = cleanname(askedname)
- names.load()
- local found = foundname(askedname,sub)
- if not found and is_reloaded() then
- found = foundname(askedname,sub)
- end
- return found
- end
-end
-
-function names.resolve(askedname,sub)
- local found = names.resolvedspecification(askedname,sub)
- if found then
- return found.filename, found.subfont and found.rawname
- end
-end
-
--- function names.getfilename(askedname,suffix) -- last resort, strip funny chars
--- names.load()
--- local files = names.data.files
--- askedname = files and files[cleanfilename(askedname,suffix)] or ""
--- if askedname == "" then
--- return ""
--- else -- never entered
--- return resolvers.findbinfile(askedname,suffix) or ""
--- end
--- end
-
-function names.getfilename(askedname,suffix) -- last resort, strip funny chars
- names.load()
- local files = names.data.files
- local cleanname = cleanfilename(askedname,suffix)
- local found = files and files[cleanname] or ""
- if found == "" and is_reloaded() then
- files = names.data.files
- found = files and files[cleanname] or ""
- end
- if found and found ~= "" then
- return resolvers.findbinfile(found,suffix) or "" -- we still need to locate it
- end
-end
-
--- specified search
-
-local function s_collect_weight_style_width_variant(found,done,all,weight,style,width,variant,family)
- if family then
- for i=1,#family do
- local f = family[i]
- if f and weight == f.weight and style == f.style and width == f.width and variant == f.variant then
- found[#found+1], done[f] = f, true
- if not all then return end
- end
- end
- end
-end
-local function m_collect_weight_style_width_variant(found,done,all,weight,style,width,variant,families,sorted,strictname)
- for i=1,#sorted do
- local k = sorted[i]
- local family = families[k]
- for i=1,#family do
- local f = family[i]
- if not done[f] and weight == f.weight and style == f.style and width == f.width and variant == f.variant and find(f.fontname,strictname) then
- found[#found+1], done[f] = f, true
- if not all then return end
- end
- end
- end
-end
-
-local function s_collect_weight_style_width(found,done,all,weight,style,width,family)
- if family then
- for i=1,#family do
- local f = family[i]
- if f and weight == f.weight and style == f.style and width == f.width then
- found[#found+1], done[f] = f, true
- if not all then return end
- end
- end
- end
-end
-local function m_collect_weight_style_width(found,done,all,weight,style,width,families,sorted,strictname)
- for i=1,#sorted do
- local k = sorted[i]
- local family = families[k]
- for i=1,#family do
- local f = family[i]
- if not done[f] and weight == f.weight and style == f.style and width == f.width and find(f.fontname,strictname) then
- found[#found+1], done[f] = f, true
- if not all then return end
- end
- end
- end
-end
-
-local function s_collect_weight_style(found,done,all,weight,style,family)
- if family then
- for i=1,#family do local f = family[i]
- if f and weight == f.weight and style == f.style then
- found[#found+1], done[f] = f, true
- if not all then return end
- end
- end
- end
-end
-local function m_collect_weight_style(found,done,all,weight,style,families,sorted,strictname)
- for i=1,#sorted do
- local k = sorted[i]
- local family = families[k]
- for i=1,#family do
- local f = family[i]
- if not done[f] and weight == f.weight and style == f.style and find(f.fontname,strictname) then
- found[#found+1], done[f] = f, true
- if not all then return end
- end
- end
- end
-end
-
-local function s_collect_style_width(found,done,all,style,width,family)
- if family then
- for i=1,#family do local f = family[i]
- if f and style == f.style and width == f.width then
- found[#found+1], done[f] = f, true
- if not all then return end
- end
- end
- end
-end
-local function m_collect_style_width(found,done,all,style,width,families,sorted,strictname)
- for i=1,#sorted do
- local k = sorted[i]
- local family = families[k]
- for i=1,#family do
- local f = family[i]
- if not done[f] and style == f.style and width == f.width and find(f.fontname,strictname) then
- found[#found+1], done[f] = f, true
- if not all then return end
- end
- end
- end
-end
-
-local function s_collect_weight(found,done,all,weight,family)
- if family then
- for i=1,#family do local f = family[i]
- if f and weight == f.weight then
- found[#found+1], done[f] = f, true
- if not all then return end
- end
- end
- end
-end
-local function m_collect_weight(found,done,all,weight,families,sorted,strictname)
- for i=1,#sorted do
- local k = sorted[i]
- local family = families[k]
- for i=1,#family do
- local f = family[i]
- if not done[f] and weight == f.weight and find(f.fontname,strictname) then
- found[#found+1], done[f] = f, true
- if not all then return end
- end
- end
- end
-end
-
-local function s_collect_style(found,done,all,style,family)
- if family then
- for i=1,#family do local f = family[i]
- if f and style == f.style then
- found[#found+1], done[f] = f, true
- if not all then return end
- end
- end
- end
-end
-local function m_collect_style(found,done,all,style,families,sorted,strictname)
- for i=1,#sorted do
- local k = sorted[i]
- local family = families[k]
- for i=1,#family do
- local f = family[i]
- if not done[f] and style == f.style and find(f.fontname,strictname) then
- found[#found+1], done[f] = f, true
- if not all then return end
- end
- end
- end
-end
-
-local function s_collect_width(found,done,all,width,family)
- if family then
- for i=1,#family do local f = family[i]
- if f and width == f.width then
- found[#found+1], done[f] = f, true
- if not all then return end
- end
- end
- end
-end
-local function m_collect_width(found,done,all,width,families,sorted,strictname)
- for i=1,#sorted do
- local k = sorted[i]
- local family = families[k]
- for i=1,#family do
- local f = family[i]
- if not done[f] and width == f.width and find(f.fontname,strictname) then
- found[#found+1], done[f] = f, true
- if not all then return end
- end
- end
- end
-end
-
-local function s_collect(found,done,all,family)
- if family then
- for i=1,#family do local f = family[i]
- if f then
- found[#found+1], done[f] = f, true
- if not all then return end
- end
- end
- end
-end
-local function m_collect(found,done,all,families,sorted,strictname)
- for i=1,#sorted do
- local k = sorted[i]
- local family = families[k]
- for i=1,#family do
- local f = family[i]
- if not done[f] and find(f.fontname,strictname) then
- found[#found+1], done[f] = f, true
- if not all then return end
- end
- end
- end
-end
-
-local function collect(stage,found,done,name,weight,style,width,variant,all)
- local data = names.data
- local families = data.families
- local sorted = data.sorted_families
- local strictname = "^".. name -- to be checked
- local family = families[name]
- if trace_names then
- report_names("resolving name %a, weight %a, style %a, width %a, variant %a",name,weight,style,width,variant)
- end
- if weight and weight ~= "" then
- if style and style ~= "" then
- if width and width ~= "" then
- if variant and variant ~= "" then
- if trace_names then
- report_names("resolving stage %s, name %a, weight %a, style %a, width %a, variant %a",stage,name,weight,style,width,variant)
- end
- s_collect_weight_style_width_variant(found,done,all,weight,style,width,variant,family)
- m_collect_weight_style_width_variant(found,done,all,weight,style,width,variant,families,sorted,strictname)
- else
- if trace_names then
- report_names("resolving stage %s, name %a, weight %a, style %a, width %a",stage,name,weight,style,width)
- end
- s_collect_weight_style_width(found,done,all,weight,style,width,family)
- m_collect_weight_style_width(found,done,all,weight,style,width,families,sorted,strictname)
- end
- else
- if trace_names then
- report_names("resolving stage %s, name %a, weight %a, style %a",stage,name,weight,style)
- end
- s_collect_weight_style(found,done,all,weight,style,family)
- m_collect_weight_style(found,done,all,weight,style,families,sorted,strictname)
- end
- else
- if trace_names then
- report_names("resolving stage %s, name %a, weight %a",stage,name,weight)
- end
- s_collect_weight(found,done,all,weight,family)
- m_collect_weight(found,done,all,weight,families,sorted,strictname)
- end
- elseif style and style ~= "" then
- if width and width ~= "" then
- if trace_names then
- report_names("resolving stage %s, name %a, style %a, width %a",stage,name,style,width)
- end
- s_collect_style_width(found,done,all,style,width,family)
- m_collect_style_width(found,done,all,style,width,families,sorted,strictname)
- else
- if trace_names then
- report_names("resolving stage %s, name %a, style %a",stage,name,style)
- end
- s_collect_style(found,done,all,style,family)
- m_collect_style(found,done,all,style,families,sorted,strictname)
- end
- elseif width and width ~= "" then
- if trace_names then
- report_names("resolving stage %s, name %a, width %a",stage,name,width)
- end
- s_collect_width(found,done,all,width,family)
- m_collect_width(found,done,all,width,families,sorted,strictname)
- else
- if trace_names then
- report_names("resolving stage %s, name %a",stage,name)
- end
- s_collect(found,done,all,family)
- m_collect(found,done,all,families,sorted,strictname)
- end
-end
-
-local function heuristic(name,weight,style,width,variant,all) -- todo: fallbacks
- local found, done = { }, { }
---~ print(name,weight,style,width,variant)
- weight, style, width, variant = weight or "normal", style or "normal", width or "normal", variant or "normal"
- name = cleanname(name)
- collect(1,found,done,name,weight,style,width,variant,all)
- -- still needed ?
- if #found == 0 and variant ~= "normal" then -- not weight
- variant = "normal"
- collect(4,found,done,name,weight,style,width,variant,all)
- end
- if #found == 0 and width ~= "normal" then
- width = "normal"
- collect(2,found,done,name,weight,style,width,variant,all)
- end
- if #found == 0 and weight ~= "normal" then -- not style
- weight = "normal"
- collect(3,found,done,name,weight,style,width,variant,all)
- end
- if #found == 0 and style ~= "normal" then -- not weight
- style = "normal"
- collect(4,found,done,name,weight,style,width,variant,all)
- end
- --
- local nf = #found
- if trace_names then
- if nf then
- local t = { }
- for i=1,nf do
- t[i] = formatters["%a"](found[i].fontname)
- end
- report_names("name %a resolved to %s instances: % t",name,nf,t)
- else
- report_names("name %a unresolved",name)
- end
- end
- if all then
- return nf > 0 and found
- else
- return found[1]
- end
-end
-
-function names.specification(askedname,weight,style,width,variant,reload,all)
- if askedname and askedname ~= "" and names.enabled then
- askedname = cleanname(askedname) -- or cleanname
- names.load(reload)
- local found = heuristic(askedname,weight,style,width,variant,all)
- if not found and is_reloaded() then
- found = heuristic(askedname,weight,style,width,variant,all)
- if not filename then
- found = foundname(askedname) -- old method
- end
- end
- return found
- end
-end
-
-function names.collect(askedname,weight,style,width,variant,reload,all)
- if askedname and askedname ~= "" and names.enabled then
- askedname = cleanname(askedname) -- or cleanname
- names.load(reload)
- local list = heuristic(askedname,weight,style,width,variant,true)
- if not list or #list == 0 and is_reloaded() then
- list = heuristic(askedname,weight,style,width,variant,true)
- end
- return list
- end
-end
-
-function names.collectspec(askedname,reload,all)
- local name, weight, style, width, variant = names.splitspec(askedname)
- return names.collect(name,weight,style,width,variant,reload,all)
-end
-
-function names.resolvespec(askedname,sub) -- redefined later
- local found = names.specification(names.splitspec(askedname))
- if found then
- return found.filename, found.subfont and found.rawname
- end
-end
-
-function names.collectfiles(askedname,reload) -- no all
- if askedname and askedname ~= "" and names.enabled then
- askedname = cleanname(askedname) -- or cleanname
- names.load(reload)
- local list = { }
- local specifications = names.data.specifications
- for i=1,#specifications do
- local s = specifications[i]
- if find(cleanname(basename(s.filename)),askedname) then
- list[#list+1] = s
- end
- end
- return list
- end
-end
-
--- todo:
---
--- blacklisted = {
--- ["cmr10.ttf"] = "completely messed up",
--- }
-
-function names.exists(name)
- local found = false
- local list = filters.list
- for k=1,#list do
- local v = list[k]
- found = (findfile(name,v) or "") ~= ""
- if found then
- return found
- end
- end
- return (findfile(name,"tfm") or "") ~= "" or (names.resolve(name) or "") ~= ""
-end
-
-local lastlookups, lastpattern = { }, ""
-
-function names.lookup(pattern,name,reload) -- todo: find
- if lastpattern ~= pattern then
- names.load(reload)
- local specifications = names.data.specifications
- local families = names.data.families
- local lookups = specifications
- if name then
- lookups = families[name]
- elseif not find(pattern,"=") then
- lookups = families[pattern]
- end
- if trace_names then
- report_names("starting with %s lookups for %a",#lookups,pattern)
- end
- if lookups then
- for key, value in gmatch(pattern,"([^=,]+)=([^=,]+)") do
- local t, n = { }, 0
- if find(value,"*") then
- value = string.topattern(value)
- for i=1,#lookups do
- local s = lookups[i]
- if find(s[key],value) then
- n = n + 1
- t[n] = lookups[i]
- end
- end
- else
- for i=1,#lookups do
- local s = lookups[i]
- if s[key] == value then
- n = n + 1
- t[n] = lookups[i]
- end
- end
- end
- if trace_names then
- report_names("%s matches for key %a with value %a",#t,key,value)
- end
- lookups = t
- end
- end
- lastpattern = pattern
- lastlookups = lookups or { }
- end
- return #lastlookups
-end
-
-function names.getlookupkey(key,n)
- local l = lastlookups[n or 1]
- return (l and l[key]) or ""
-end
-
-function names.noflookups()
- return #lastlookups
-end
-
-function names.getlookups(pattern,name,reload)
- if pattern then
- names.lookup(pattern,name,reload)
- end
- return lastlookups
-end
-
--- The following is new ... watch the overload!
-
-local specifications = allocate()
-names.specifications = specifications
-
--- files = {
--- name = "antykwapoltawskiego",
--- list = {
--- ["AntPoltLtCond-Regular.otf"] = {
--- -- name = "antykwapoltawskiego",
--- style = "regular",
--- weight = "light",
--- width = "condensed",
--- },
--- },
--- }
-
-function names.register(files)
- if files then
- local list, commonname = files.list, files.name
- if list then
- local n, m = 0, 0
- for filename, filespec in next, list do
- local name = lower(filespec.name or commonname)
- if name and name ~= "" then
- local style = normalized_styles [lower(filespec.style or "normal")]
- local width = normalized_widths [lower(filespec.width or "normal")]
- local weight = normalized_weights [lower(filespec.weight or "normal")]
- local variant = normalized_variants[lower(filespec.variant or "normal")]
- local weights = specifications[name ] if not weights then weights = { } specifications[name ] = weights end
- local styles = weights [weight] if not styles then styles = { } weights [weight] = styles end
- local widths = styles [style ] if not widths then widths = { } styles [style ] = widths end
- local variants = widths [width ] if not variants then variants = { } widths [width ] = variants end
- variants[variant] = filename
- n = n + 1
- else
- m = m + 1
- end
- end
- if trace_specifications then
- report_names("%s filenames registered, %s filenames rejected",n,m)
- end
- end
- end
-end
-
-function names.registered(name,weight,style,width,variant)
- local ok = specifications[name]
- ok = ok and (ok[(weight and weight ~= "" and weight ) or "normal"] or ok.normal)
- ok = ok and (ok[(style and style ~= "" and style ) or "normal"] or ok.normal)
- ok = ok and (ok[(width and width ~= "" and width ) or "normal"] or ok.normal)
- ok = ok and (ok[(variant and variant ~= "" and variant) or "normal"] or ok.normal)
- --
- -- todo: same fallbacks as with database
- --
- if ok then
- return {
- filename = ok,
- subname = "",
- -- rawname = nil,
- }
- end
-end
-
-function names.resolvespec(askedname,sub) -- overloads previous definition
- local name, weight, style, width, variant = names.splitspec(askedname)
- if trace_specifications then
- report_names("resolving specification: %a to name=%s, weight=%s, style=%s, width=%s, variant=%s",askedname,name,weight,style,width,variant)
- end
- local found = names.registered(name,weight,style,width,variant)
- if found and found.filename then
- if trace_specifications then
- report_names("resolved by registered names: %a to %s",askedname,found.filename)
- end
- return found.filename, found.subname, found.rawname
- else
- found = names.specification(name,weight,style,width,variant)
- if found and found.filename then
- if trace_specifications then
- report_names("resolved by font database: %a to %s",askedname,found.filename)
- end
- return found.filename, found.subfont and found.rawname
- end
- end
- if trace_specifications then
- report_names("unresolved: %s",askedname)
- end
-end
+if not modules then modules = { } end modules ['font-syn'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- todo: subs in lookups requests
+
+local next, tonumber, type, tostring = next, tonumber, type, tostring
+local sub, gsub, lower, match, find, lower, upper = string.sub, string.gsub, string.lower, string.match, string.find, string.lower, string.upper
+local find, gmatch = string.find, string.gmatch
+local concat, sort, format = table.concat, table.sort, string.format
+local serialize = table.serialize
+local lpegmatch = lpeg.match
+local unpack = unpack or table.unpack
+local formatters = string.formatters
+
+local allocate = utilities.storage.allocate
+local sparse = utilities.storage.sparse
+
+local removesuffix = file.removesuffix
+local splitbase = file.splitbase
+local splitname = file.splitname
+local basename = file.basename
+local nameonly = file.nameonly
+local pathpart = file.pathpart
+local filejoin = file.join
+local is_qualified_path = file.is_qualified_path
+local exists = io.exists
+
+local findfile = resolvers.findfile
+local cleanpath = resolvers.cleanpath
+local resolveresolved = resolvers.resolve
+
+local trace_names = false trackers.register("fonts.names", function(v) trace_names = v end)
+local trace_warnings = false trackers.register("fonts.warnings", function(v) trace_warnings = v end)
+local trace_specifications = false trackers.register("fonts.specifications", function(v) trace_specifications = v end)
+
+local report_names = logs.reporter("fonts","names")
+
+--[[ldx--
+This module implements a name to filename resolver. Names are resolved
+using a table that has keys filtered from the font related files.
+--ldx]]--
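+
+-- Rough usage sketch (the font name is just an example; the entry points are
+-- defined further down in this file):
+--
+--   local filename = names.resolve("somename")                 -- plain name lookup
+--   local filename = names.resolvespec("somename-bold-italic") -- name-weight-style spec
+--
+-- Both load the generated database on demand (names.load) and try
+-- progressively fuzzier matches when there is no direct hit.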
+
+local P, C, Cc, Cs = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Cs
+
+-- what to do with 'thin'
+
+local weights = Cs ( -- not extra
+ P("demibold")
+ + P("semibold")
+ + P("mediumbold")
+ + P("ultrabold")
+ + P("extrabold")
+ + P("ultralight")
+ + P("bold")
+ + P("demi")
+ + P("semi")
+ + P("light")
+ + P("medium")
+ + P("heavy")
+ + P("ultra")
+ + P("black")
+ + P("bol") -- / "bold"
+ + P("regular") / "normal"
+)
+
+local normalized_weights = sparse {
+ regular = "normal",
+}
+
+local styles = Cs (
+ P("reverseoblique") / "reverseitalic"
+ + P("regular") / "normal"
+ + P("italic")
+ + P("oblique") / "italic"
+ + P("slanted")
+ + P("roman") / "normal"
+ + P("ital") / "italic"
+ + P("ita") / "italic"
+)
+
+local normalized_styles = sparse {
+ reverseoblique = "reverseitalic",
+ regular = "normal",
+ oblique = "italic",
+}
+
+local widths = Cs(
+ P("condensed")
+ + P("thin")
+ + P("expanded")
+ + P("cond") / "condensed"
+ + P("normal")
+ + P("book") / "normal"
+)
+
+local normalized_widths = sparse()
+
+local variants = Cs( -- fax casual
+ P("smallcaps")
+ + P("oldstyle")
+ + P("caps") / "smallcaps"
+)
+
+local normalized_variants = sparse()
+
+names.knownweights = {
+ "black",
+ "bold",
+ "demi",
+ "demibold",
+ "extrabold",
+ "heavy",
+ "light",
+ "medium",
+ "mediumbold",
+ "normal",
+ "regular",
+ "semi",
+ "semibold",
+ "ultra",
+ "ultrabold",
+ "ultralight",
+}
+
+names.knownstyles = {
+ "italic",
+ "normal",
+ "oblique",
+ "regular",
+ "reverseitalic",
+ "reverseoblique",
+ "roman",
+ "slanted",
+}
+
+names.knownwidths = {
+ "book",
+ "condensed",
+ "expanded",
+ "normal",
+ "thin",
+}
+
+names.knownvariants = {
+ "normal",
+ "oldstyle",
+ "smallcaps",
+}
+
+local any = P(1)
+
+local analyzed_table
+
+local analyzer = Cs (
+ (
+ weights / function(s) analyzed_table[1] = s return "" end
+ + styles / function(s) analyzed_table[2] = s return "" end
+ + widths / function(s) analyzed_table[3] = s return "" end
+ + variants / function(s) analyzed_table[4] = s return "" end
+ + any
+ )^0
+)
+
+local splitter = lpeg.splitat("-")
+
+function names.splitspec(askedname)
+ local name, weight, style, width, variant = lpegmatch(splitter,askedname)
+ weight = weight and lpegmatch(weights, weight) or weight
+ style = style and lpegmatch(styles, style) or style
+ width = width and lpegmatch(widths, width) or width
+ variant = variant and lpegmatch(variants,variant) or variant
+ if trace_names then
+ report_names("requested name %a split in name %a, weight %a, style %a, width %a and variant %a",
+ askedname,name,weight,style,width,variant)
+ end
+ if not weight or not style or not width or not variant then
+ weight, style, width, variant = weight or "normal", style or "normal", width or "normal", variant or "normal"
+ if trace_names then
+ report_names("request %a normalized to '%s-%s-%s-%s-%s'",
+ askedname,name,weight,style,width,variant)
+ end
+ end
+ return name or askedname, weight, style, width, variant
+end
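+
+-- Worked example (made-up name): "foo-bold-italic" splits at the dashes into
+-- name "foo", weight "bold" and style "italic"; the missing width and variant
+-- default to "normal", so the request normalizes to
+-- 'foo-bold-italic-normal-normal'.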
+
+local function analyzespec(somename)
+ if somename then
+ analyzed_table = { }
+ local name = lpegmatch(analyzer,somename)
+ return name, analyzed_table[1], analyzed_table[2], analyzed_table[3], analyzed_table[4]
+ end
+end
+
+--[[ldx--
+It would make sense to implement the filters in the related modules,
+but to keep the overview, we define them here.
+--ldx]]--
+
+filters.otf = fontloader.info
+filters.ttf = fontloader.info
+filters.ttc = fontloader.info
+filters.dfont = fontloader.info
+
+function fontloader.fullinfo(...) -- check with taco what we get / could get
+ local ff = fontloader.open(...)
+ if ff then
+ local d = ff and fontloader.to_table(ff)
+ d.glyphs, d.subfonts, d.gpos, d.gsub, d.lookups = nil, nil, nil, nil, nil
+ fontloader.close(ff)
+ return d
+ else
+ return nil, "error in loading font"
+ end
+end
+
+filters.otf = fontloader.fullinfo
+
+function filters.afm(name)
+ -- we could parse the afm file as well, and then report an error but
+ -- it's not worth the trouble
+ local pfbname = findfile(removesuffix(name)..".pfb","pfb") or ""
+ if pfbname == "" then
+ pfbname = findfile(nameonly(name)..".pfb","pfb") or ""
+ end
+ if pfbname ~= "" then
+ local f = io.open(name)
+ if f then
+ local hash = { }
+ for line in f:lines() do
+ local key, value = match(line,"^(.+)%s+(.+)%s*$")
+ if key and #key > 0 then
+ hash[lower(key)] = value
+ end
+ if find(line,"StartCharMetrics") then
+ break
+ end
+ end
+ f:close()
+ return hash
+ end
+ end
+ return nil, "no matching pfb file"
+end
+
+function filters.pfb(name)
+ return fontloader.info(name)
+end
+
+--[[ldx--
+The scanner loops over the filters using the information stored in
+the file databases. Watch how we check not only for the names, but also
+for combination with the weight of a font.
+--ldx]]--
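+
+-- Concretely (see collecthashes further on): besides the fullname and fontname
+-- mappings, a fallback entry familyname .. weight is registered, so a family
+-- "foo" with weight "bold" is also reachable under the key "foobold".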
+
+filters.list = {
+ "otf", "ttf", "ttc", "dfont", "afm",
+ -- "ttc", "otf", "ttf", "dfont", "afm",
+}
+
+names.fontconfigfile = "fonts.conf" -- a bit weird format, bonus feature
+names.osfontdirvariable = "OSFONTDIR" -- the official way, in minimals etc
+
+filters.paths = { }
+filters.names = { }
+
+function names.getpaths(trace)
+ local hash, result, r = { }, { }, 0
+ local function collect(t,where)
+ for i=1,#t do
+ local v = cleanpath(t[i])
+ v = gsub(v,"/+$","") -- not needed any more
+ local key = lower(v)
+ report_names("%a specifies path %a",where,v)
+ if not hash[key] then
+ r = r + 1
+ result[r] = v
+ hash[key] = true
+ end
+ end
+ end
+ local path = names.osfontdirvariable or ""
+ if path ~= "" then
+ collect(resolvers.expandedpathlist(path),path)
+ end
+ if xml then
+ local confname = resolvers.expansion("FONTCONFIG_FILE") or ""
+ if confname == "" then
+ confname = names.fontconfigfile or ""
+ end
+ if confname ~= "" then
+ -- first look in the tex tree
+ local name = findfile(confname,"fontconfig files") or ""
+ if name == "" then
+ -- after all, fontconfig is a unix thing
+ name = filejoin("/etc",confname)
+ if not lfs.isfile(name) then
+ name = "" -- force quit
+ end
+ end
+ if name ~= "" and lfs.isfile(name) then
+ if trace_names then
+ report_names("%s fontconfig file %a","loading",name)
+ end
+ local xmldata = xml.load(name)
+ -- begin of untested mess
+ xml.include(xmldata,"include","",true,function(incname)
+ if not is_qualified_path(incname) then
+ local path = pathpart(name) -- main name
+ if path ~= "" then
+ incname = filejoin(path,incname)
+ end
+ end
+ if lfs.isfile(incname) then
+ if trace_names then
+ report_names("%s fontconfig file %a","merging included",incname)
+ end
+ return io.loaddata(incname)
+ elseif trace_names then
+ report_names("%s fontconfig file: %a","ignoring included",incname)
+ end
+ end)
+ -- end of untested mess
+ local fontdirs = xml.collect_texts(xmldata,"dir",true)
+ if trace_names then
+ report_names("%s dirs found in fontconfig",#fontdirs)
+ end
+ collect(fontdirs,"fontconfig file")
+ end
+ end
+ end
+ function names.getpaths()
+ return result
+ end
+ return result
+end
+
+local function cleanname(name)
+ return (gsub(lower(name),"[^%a%d]",""))
+end
+
+local function cleanfilename(fullname,defaultsuffix)
+ local path, name, suffix = splitname(fullname)
+ name = gsub(lower(name),"[^%a%d]","")
+ if suffix and suffix ~= "" then
+ return name .. ".".. suffix
+ elseif defaultsuffix and defaultsuffix ~= "" then
+ return name .. ".".. defaultsuffix
+ else
+ return name
+ end
+end
+
+names.cleanname = cleanname
+names.cleanfilename = cleanfilename
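+
+-- Some examples of the normalization (names are illustrative):
+--
+--   cleanname("Latin Modern Roman 10")       -- "latinmodernroman10"
+--   cleanfilename("LM-Roman10.otf")          -- "lmroman10.otf"
+--   cleanfilename("lmroman10-regular","otf") -- "lmroman10regular.otf"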
+
+local function check_names(result)
+ local names = result.names
+ if names then
+ for i=1,#names do
+ local name = names[i]
+ if name.lang == "English (US)" then
+ return name.names
+ end
+ end
+ end
+end
+
+local function walk_tree(pathlist,suffix,identify)
+ if pathlist then
+ for i=1,#pathlist do
+ local path = pathlist[i]
+ path = cleanpath(path .. "/")
+ path = gsub(path,"/+","/")
+ local pattern = path .. "**." .. suffix -- ** forces recurse
+ report_names("globbing path %a",pattern)
+ local t = dir.glob(pattern)
+ sort(t,sorter)
+ for j=1,#t do
+ local completename = t[j]
+ identify(completename,basename(completename),suffix,completename)
+ end
+ end
+ end
+end
+
+local function check_name(data,result,filename,modification,suffix,subfont)
+ -- shortcuts
+ local specifications = data.specifications
+ -- prepare
+ local names = check_names(result)
+ -- fetch
+ local familyname = names and names.preffamilyname or result.familyname
+ local fullname = names and names.fullname or result.fullname
+ local fontname = result.fontname
+ local subfamily = names and names.subfamily
+ local modifiers = names and names.prefmodifiers
+ local weight = names and names.weight or result.weight
+ local italicangle = tonumber(result.italicangle)
+ local subfont = subfont or nil
+ local rawname = fullname or fontname or familyname
+ -- normalize
+ familyname = familyname and cleanname(familyname)
+ fullname = fullname and cleanname(fullname)
+ fontname = fontname and cleanname(fontname)
+ subfamily = subfamily and cleanname(subfamily)
+ modifiers = modifiers and cleanname(modifiers)
+ weight = weight and cleanname(weight)
+ italicangle = italicangle and italicangle ~= 0 and italicangle or nil -- keep a nonzero angle so the italic check below can kick in
+ -- analyze
+ local a_name, a_weight, a_style, a_width, a_variant = analyzespec(fullname or fontname or familyname)
+ -- check
+ local width = a_width
+ local variant = a_variant
+ local style = modifiers and gsub(modifiers,"[^%a]","")
+ if not style and italicangle then
+ style = "italic"
+ end
+ if not variant or variant == "" then
+ variant = "normal"
+ end
+ if not weight or weight == "" then
+ weight = a_weight
+ end
+ if not style or style == "" then
+ style = a_style
+ end
+ if not familyname then
+ familyname = a_name
+ end
+ fontname = fontname or fullname or familyname or basename(filename)
+ fullname = fullname or fontname
+ familyname = familyname or fontname
+ specifications[#specifications + 1] = {
+ filename = filename, -- unresolved
+ format = lower(suffix),
+ subfont = subfont,
+ rawname = rawname,
+ familyname = familyname,
+ fullname = fullname,
+ fontname = fontname,
+ subfamily = subfamily,
+ modifiers = modifiers,
+ weight = weight,
+ style = style,
+ width = width,
+ variant = variant,
+ minsize = result.design_range_bottom or 0,
+ maxsize = result.design_range_top or 0,
+ designsize = result.design_size or 0,
+ modification = modification or 0,
+ }
+end
+
+local function cleanupkeywords()
+ local data = names.data
+ local specifications = names.data.specifications
+ if specifications then
+ local weights = { }
+ local styles = { }
+ local widths = { }
+ local variants = { }
+ for i=1,#specifications do
+ local s = specifications[i]
+ -- fix (sofar styles are taken from the name, and widths from the specification)
+ local _, b_weight, b_style, b_width, b_variant = analyzespec(s.weight)
+ local _, c_weight, c_style, c_width, c_variant = analyzespec(s.style)
+ local _, d_weight, d_style, d_width, d_variant = analyzespec(s.width)
+ local _, e_weight, e_style, e_width, e_variant = analyzespec(s.variant)
+ local _, f_weight, f_style, f_width, f_variant = analyzespec(s.fullname or "")
+ local weight = b_weight or c_weight or d_weight or e_weight or f_weight or "normal"
+ local style = b_style or c_style or d_style or e_style or f_style or "normal"
+ local width = b_width or c_width or d_width or e_width or f_width or "normal"
+ local variant = b_variant or c_variant or d_variant or e_variant or f_variant or "normal"
+ if not weight or weight == "" then weight = "normal" end
+ if not style or style == "" then style = "normal" end
+ if not width or width == "" then width = "normal" end
+ if not variant or variant == "" then variant = "normal" end
+ weights [weight ] = (weights [weight ] or 0) + 1
+ styles [style ] = (styles [style ] or 0) + 1
+ widths [width ] = (widths [width ] or 0) + 1
+ variants[variant] = (variants[variant] or 0) + 1
+ if weight ~= s.weight then
+ s.fontweight = s.weight
+ end
+ s.weight, s.style, s.width, s.variant = weight, style, width, variant
+ end
+ local stats = data.statistics
+ stats.used_weights, stats.used_styles, stats.used_widths, stats.used_variants = weights, styles, widths, variants
+ end
+end
+
+local function collectstatistics()
+ local data = names.data
+ local specifications = data.specifications
+ if specifications then
+ local weights = { }
+ local styles = { }
+ local widths = { }
+ local variants = { }
+ for i=1,#specifications do
+ local s = specifications[i]
+ local weight = s.weight
+ local style = s.style
+ local width = s.width
+ local variant = s.variant
+ if weight then weights [weight ] = (weights [weight ] or 0) + 1 end
+ if style then styles [style ] = (styles [style ] or 0) + 1 end
+ if width then widths [width ] = (widths [width ] or 0) + 1 end
+ if variant then variants[variant] = (variants[variant] or 0) + 1 end
+ end
+ local stats = data.statistics
+ stats.weights = weights
+ stats.styles = styles
+ stats.widths = widths
+ stats.variants = variants
+ stats.fonts = #specifications
+ end
+end
+
+local function collecthashes()
+ local data = names.data
+ local mappings = data.mappings
+ local fallbacks = data.fallbacks
+ local specifications = data.specifications
+ local nofmappings = 0
+ local noffallbacks = 0
+ if specifications then
+ -- maybe multiple passes
+ for index=1,#specifications do
+ local s = specifications[index]
+ local format, fullname, fontname, familyname, weight, subfamily = s.format, s.fullname, s.fontname, s.familyname, s.weight, s.subfamily
+ local mf, ff = mappings[format], fallbacks[format]
+ if fullname and not mf[fullname] then
+ mf[fullname], nofmappings = index, nofmappings + 1
+ end
+ if fontname and not mf[fontname] then
+ mf[fontname], nofmappings = index, nofmappings + 1
+ end
+ if familyname and weight and weight ~= sub(familyname,#familyname-#weight+1,#familyname) then
+ local madename = familyname .. weight
+ if not mf[madename] and not ff[madename] then
+ ff[madename], noffallbacks = index, noffallbacks + 1
+ end
+ end
+ if familyname and subfamily and subfamily ~= sub(familyname,#familyname-#subfamily+1,#familyname) then
+ local extraname = familyname .. subfamily
+ if not mf[extraname] and not ff[extraname] then
+ ff[extraname], noffallbacks = index, noffallbacks + 1
+ end
+ end
+ if familyname and not mf[familyname] and not ff[familyname] then
+ ff[familyname], noffallbacks = index, noffallbacks + 1
+ end
+ end
+ end
+ return nofmappings, noffallbacks
+end
+
+local function collectfamilies()
+ local data = names.data
+ local specifications = data.specifications
+ local families = data.families
+ for index=1,#specifications do
+ local familyname = specifications[index].familyname
+ local family = families[familyname]
+ if not family then
+ families[familyname] = { index }
+ else
+ family[#family+1] = index
+ end
+ end
+end
+
+local function checkduplicate(where) -- fails on "Romantik" but that's a border case anyway
+ local data = names.data
+ local mapping = data[where]
+ local specifications = data.specifications
+ local loaded = { }
+ if specifications and mapping then
+ for _, m in next, mapping do
+ for k, v in next, m do
+ local s = specifications[v]
+ local hash = formatters["%s-%s-%s-%s-%s"](s.familyname,s.weight or "*",s.style or "*",s.width or "*",s.variant or "*")
+ local h = loaded[hash]
+ if h then
+ local ok = true
+ local fn = s.filename
+ for i=1,#h do
+ local hn = s.filename
+ if h[i] == fn then
+ ok = false
+ break
+ end
+ end
+ if ok then
+ h[#h+1] = fn
+ end
+ else
+ loaded[hash] = { s.filename }
+ end
+ end
+ end
+ end
+ local n = 0
+ for k, v in table.sortedhash(loaded) do
+ local nv = #v
+ if nv > 1 then
+ if trace_warnings then
+ report_names("lookup %a clashes with %a",k,v)
+ end
+ n = n + nv
+ end
+ end
+ report_names("%a double lookups in %a",n,where)
+end
+
+local function checkduplicates()
+ checkduplicate("mappings")
+ checkduplicate("fallbacks")
+end
+
+local sorter = function(a,b)
+ return a > b -- to be checked
+end
+
+local function sorthashes()
+ local data = names.data
+ local list = filters.list
+ local mappings = data.mappings
+ local fallbacks = data.fallbacks
+ local sorted_mappings = { }
+ local sorted_fallbacks = { }
+ data.sorted_mappings = sorted_mappings
+ data.sorted_fallbacks = sorted_fallbacks
+ for i=1,#list do
+ local l = list[i]
+ sorted_mappings [l] = table.keys(mappings[l])
+ sorted_fallbacks[l] = table.keys(fallbacks[l])
+ sort(sorted_mappings [l],sorter)
+ sort(sorted_fallbacks[l],sorter)
+ end
+ data.sorted_families = table.keys(data.families)
+ sort(data.sorted_families,sorter)
+end
+
+local function unpackreferences()
+ local data = names.data
+ local specifications = data.specifications
+ if specifications then
+ for k, v in next, data.families do
+ for i=1,#v do
+ v[i] = specifications[v[i]]
+ end
+ end
+ local mappings = data.mappings
+ if mappings then
+ for _, m in next, mappings do
+ for k, v in next, m do
+ m[k] = specifications[v]
+ end
+ end
+ end
+ local fallbacks = data.fallbacks
+ if fallbacks then
+ for _, f in next, fallbacks do
+ for k, v in next, f do
+ f[k] = specifications[v]
+ end
+ end
+ end
+ end
+end
+
+local function analyzefiles(olddata)
+ if not trace_warnings then
+ report_names("warnings are disabled (tracker 'fonts.warnings')")
+ end
+ local data = names.data
+ local done = { }
+ local totalnofread = 0
+ local totalnofskipped = 0
+ local totalnofduplicates = 0
+ local nofread = 0
+ local nofskipped = 0
+ local nofduplicates = 0
+ local skip_paths = filters.paths
+ local skip_names = filters.names
+ local specifications = data.specifications
+ local oldindices = olddata and olddata.indices or { }
+ local oldspecifications = olddata and olddata.specifications or { }
+ local oldrejected = olddata and olddata.rejected or { }
+ local treatmentdata = fonts.treatments.data
+ local function identify(completename,name,suffix,storedname)
+ local pathpart, basepart = splitbase(completename)
+ nofread = nofread + 1
+ local treatment = treatmentdata[completename] or treatmentdata[basepart]
+ if treatment and treatment.ignored then
+ if trace_names then
+ report_names("%s font %a is ignored, reason %a",suffix,completename,treatment.comment or "unknown")
+ end
+ nofskipped = nofskipped + 1
+ elseif done[name] then
+ -- already done (avoid otf afm clash)
+ if trace_names then
+ report_names("%s font %a already done",suffix,completename)
+ end
+ nofduplicates = nofduplicates + 1
+ nofskipped = nofskipped + 1
+ elseif not exists(completename) then
+ -- weird error
+ if trace_names then
+ report_names("%s font %a does not really exist",suffix,completename)
+ end
+ nofskipped = nofskipped + 1
+ elseif not is_qualified_path(completename) and findfile(completename,suffix) == "" then
+ -- not locatable by backend anyway
+ if trace_names then
+ report_names("%s font %a cannot be found by backend",suffix,completename)
+ end
+ nofskipped = nofskipped + 1
+ else
+ if #skip_paths > 0 then
+ for i=1,#skip_paths do
+ if find(pathpart,skip_paths[i]) then
+ if trace_names then
+ report_names("rejecting path of %s font %a",suffix,completename)
+ end
+ nofskipped = nofskipped + 1
+ return
+ end
+ end
+ end
+ if #skip_names > 0 then
+ for i=1,#skip_names do
+ if find(basepart,skip_names[i]) then
+ done[name] = true
+ if trace_names then
+ report_names("rejecting name of %s font %a",suffix,completename)
+ end
+ nofskipped = nofskipped + 1
+ return
+ end
+ end
+ end
+ if trace_names then
+ report_names("identifying %s font %a",suffix,completename)
+ end
+ local result = nil
+ local modification = lfs.attributes(completename,"modification")
+ if olddata and modification and modification > 0 then
+ local oldindex = oldindices[storedname] -- index into specifications
+ if oldindex then
+ local oldspecification = oldspecifications[oldindex]
+ if oldspecification and oldspecification.filename == storedname then -- double check for out of sync
+ local oldmodification = oldspecification.modification
+ if oldmodification == modification then
+ result = oldspecification
+ specifications[#specifications + 1] = result
+ else
+ end
+ else
+ end
+ elseif oldrejected[storedname] == modification then
+ result = false
+ end
+ end
+ if result == nil then
+ local result, message = filters[lower(suffix)](completename)
+ if result then
+ if result[1] then
+ for r=1,#result do
+ local ok = check_name(data,result[r],storedname,modification,suffix,r-1) -- subfonts start at zero
+ -- if not ok then
+ -- nofskipped = nofskipped + 1
+ -- end
+ end
+ else
+ local ok = check_name(data,result,storedname,modification,suffix)
+ -- if not ok then
+ -- nofskipped = nofskipped + 1
+ -- end
+ end
+ if trace_warnings and message and message ~= "" then
+ report_names("warning when identifying %s font %a, %s",suffix,completename,message)
+ end
+ elseif trace_warnings then
+ nofskipped = nofskipped + 1
+ report_names("error when identifying %s font %a, %s",suffix,completename,message or "unknown")
+ end
+ end
+ done[name] = true
+ end
+ logs.flush() -- a bit overkill for each font, maybe not needed here
+ end
+ local function traverse(what, method)
+ local list = filters.list
+ for n=1,#list do
+ local suffix = list[n]
+ local t = os.gettimeofday() -- use elapser
+ nofread, nofskipped, nofduplicates = 0, 0, 0
+ suffix = lower(suffix)
+ report_names("identifying %s font files with suffix %a",what,suffix)
+ method(suffix)
+ suffix = upper(suffix)
+ report_names("identifying %s font files with suffix %a",what,suffix)
+ method(suffix)
+ totalnofread, totalnofskipped, totalnofduplicates = totalnofread + nofread, totalnofskipped + nofskipped, totalnofduplicates + nofduplicates
+ local elapsed = os.gettimeofday() - t
+ report_names("%s %s files identified, %s skipped, %s duplicates, %s hash entries added, runtime %0.3f seconds",nofread,what,nofskipped,nofduplicates,nofread-nofskipped,elapsed)
+ end
+ logs.flush()
+ end
+ -- problem .. this will not take care of duplicates
+ local function withtree(suffix)
+ resolvers.dowithfilesintree(".*%." .. suffix .. "$", function(method,root,path,name)
+ if method == "file" or method == "tree" then
+ local completename = root .."/" .. path .. "/" .. name
+ completename = resolveresolved(completename) -- no shortcut
+ identify(completename,name,suffix,name)
+ return true
+ end
+ end, function(blobtype,blobpath,pattern)
+ blobpath = resolveresolved(blobpath) -- no shortcut
+ report_names("scanning path %a for %s files",blobpath,suffix)
+ end, function(blobtype,blobpath,pattern,total,checked,done)
+ blobpath = resolveresolved(blobpath) -- no shortcut
+ report_names("%s entries found, %s %s files checked, %s okay",total,checked,suffix,done)
+ end)
+ end
+ local function withlsr(suffix) -- all trees
+ -- we do this only for a stupid names run, not used for context itself,
+ -- using the vars is too clumsy so we just stick to a full scan instead
+ local pathlist = resolvers.splitpath(resolvers.showpath("ls-R") or "")
+ walk_tree(pathlist,suffix,identify)
+ end
+ local function withsystem(suffix) -- OSFONTDIR cum suis
+ walk_tree(names.getpaths(trace),suffix,identify)
+ end
+ traverse("tree",withtree) -- TEXTREE only
+ if texconfig.kpse_init then
+ traverse("lsr", withlsr)
+ else
+ traverse("system", withsystem)
+ end
+ data.statistics.readfiles = totalnofread
+ data.statistics.skippedfiles = totalnofskipped
+ data.statistics.duplicatefiles = totalnofduplicates
+end
+
+local function addfilenames()
+ local data = names.data
+ local specifications = data.specifications
+ local indices = { }
+ local files = { }
+ for i=1,#specifications do
+ local fullname = specifications[i].filename
+ files[cleanfilename(fullname)] = fullname
+ indices[fullname] = i
+ end
+ data.files = files
+ data.indices = indices
+end
+
+local function rejectclashes() -- just to be sure, so no explicit afm will be found then
+ local specifications = names.data.specifications
+ local used = { }
+ local okay = { }
+ local rejected = { } -- only keep modification
+ local o = 0
+ for i=1,#specifications do
+ local s = specifications[i]
+ local f = s.fontname
+ if f then
+ local fnd = used[f]
+ local fnm = s.filename
+ if fnd then
+ if trace_warnings then
+ report_names("fontname %a clashes, %a rejected in favor of %a",f,fnm,fnd)
+ end
+ rejected[f] = s.modification
+ else
+ used[f] = fnm
+ o = o + 1
+ okay[o] = s
+ end
+ else
+ o = o + 1
+ okay[o] = s
+ end
+ end
+ local d = #specifications - #okay
+ if d > 0 then
+ report_names("%s files rejected due to clashes",d)
+ end
+ names.data.specifications = okay
+ names.data.rejected = rejected
+end
+
+local function resetdata()
+ local mappings = { }
+ local fallbacks = { }
+ for _, k in next, filters.list do
+ mappings [k] = { }
+ fallbacks[k] = { }
+ end
+ names.data = {
+ version = names.version,
+ mappings = mappings,
+ fallbacks = fallbacks,
+ specifications = { },
+ families = { },
+ statistics = { },
+ names = { },
+ indices = { },
+ rejected = { },
+ datastate = resolvers.datastate(),
+ }
+end
+
+function names.identify(force)
+ local starttime = os.gettimeofday() -- use elapser
+ resetdata()
+ analyzefiles(not force and names.readdata(names.basename))
+ rejectclashes()
+ collectfamilies()
+ collectstatistics()
+ cleanupkeywords()
+ collecthashes()
+ checkduplicates()
+ addfilenames()
+ -- sorthashes() -- will be resorted when saved
+ report_names("total scan time %0.3f seconds",os.gettimeofday()-starttime)
+end
+
+function names.is_permitted(name)
+ return containers.is_usable(names.cache, name)
+end
+function names.writedata(name,data)
+ containers.write(names.cache,name,data)
+end
+function names.readdata(name)
+ return containers.read(names.cache,name)
+end
+
+function names.load(reload,force)
+ if not names.loaded then
+ if reload then
+ if names.is_permitted(names.basename) then
+ names.identify(force)
+ names.writedata(names.basename,names.data)
+ else
+ report_names("unable to access database cache")
+ end
+ names.saved = true
+ end
+ local data = names.readdata(names.basename)
+ names.data = data
+ if not names.saved then
+ if not data or not next(data) or not data.specifications or not next(data.specifications) then
+ names.load(true)
+ end
+ names.saved = true
+ end
+ if not data then
+ report_names("accessing the data table failed")
+ else
+ unpackreferences()
+ sorthashes()
+ end
+ names.loaded = true
+ end
+end
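+
+-- Typical usage (a sketch; the resolvers below call this themselves when a
+-- name lookup comes in):
+--
+--   fonts.names.load()      -- use the cached database, generate it when absent
+--   fonts.names.load(true)  -- force reidentification, e.g. after installing fonts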
+
+local function list_them(mapping,sorted,pattern,t,all)
+ if mapping[pattern] then
+ t[pattern] = mapping[pattern]
+ else
+ for k=1,#sorted do
+ local v = sorted[k]
+ if not t[v] and find(v,pattern) then
+ t[v] = mapping[v]
+ if not all then
+ return
+ end
+ end
+ end
+ end
+end
+
+function names.list(pattern,reload,all) -- here?
+ names.load() -- todo reload
+ if names.loaded then
+ local t = { }
+ local data = names.data
+ if data then
+ local list = filters.list
+ local mappings = data.mappings
+ local sorted_mappings = data.sorted_mappings
+ local fallbacks = data.fallbacks
+ local sorted_fallbacks = data.sorted_fallbacks
+ for i=1,#list do
+ local format = list[i]
+ list_them(mappings[format],sorted_mappings[format],pattern,t,all)
+ if next(t) and not all then
+ return t
+ end
+ list_them(fallbacks[format],sorted_fallbacks[format],pattern,t,all)
+ if next(t) and not all then
+ return t
+ end
+ end
+ end
+ return t
+ end
+end
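+
+-- A sketch of a list query (pattern and font are illustrative); with all set
+-- every match is returned, keyed by its cleaned name, and each value is the
+-- stored specification table:
+--
+--   local t = fonts.names.list("pagella",false,true)
+--   for cleanedname, spec in next, t do
+--       print(cleanedname, spec.filename)
+--   end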
+
+local reloaded = false
+
+local function is_reloaded()
+ if not reloaded then
+ local data = names.data
+ if autoreload then
+ local c_status = serialize(resolvers.datastate())
+ local f_status = serialize(data.datastate)
+ if c_status == f_status then
+ if trace_names then
+ report_names("font database has matching configuration and file hashes")
+ end
+ return
+ else
+ report_names("font database has mismatching configuration and file hashes")
+ end
+ else
+ report_names("font database is regenerated (controlled by directive 'fonts.autoreload')")
+ end
+ names.loaded = false
+ reloaded = true
+ logs.flush()
+ names.load(true)
+ end
+end
+
+--[[ldx--
+The resolver also checks if the cached names are loaded. Being clever
+here is for testing purposes only (it deals with names prefixed by an
+encoding name).
+--ldx]]--
+
+local function fuzzy(mapping,sorted,name,sub)
+ local condensed = gsub(name,"[^%a%d]","")
+ for k=1,#sorted do
+ local v = sorted[k]
+ if find(v,condensed) then
+ return mapping[v], v
+ end
+ end
+end
+
+-- we could cache a lookup .. maybe some day ... (only when auto loaded!)
+
+local function foundname(name,sub) -- sub is not used currently
+ local data = names.data
+ local mappings = data.mappings
+ local sorted_mappings = data.sorted_mappings
+ local fallbacks = data.fallbacks
+ local sorted_fallbacks = data.sorted_fallbacks
+ local list = filters.list
+ -- dilemma: we lookup in the order otf ttf ttc ... afm but now an otf fallback
+ -- can come after an afm match ... well, one should provide nice names anyway
+ -- and having two lists is not an option
+ for i=1,#list do
+ local l = list[i]
+ local found = mappings[l][name]
+ if found then
+ if trace_names then
+ report_names("resolved via direct name match: %a",name)
+ end
+ return found
+ end
+ end
+ for i=1,#list do
+ local l = list[i]
+ local found, fname = fuzzy(mappings[l],sorted_mappings[l],name,sub)
+ if found then
+ if trace_names then
+ report_names("resolved via fuzzy name match: %a onto %a",name,fname)
+ end
+ return found
+ end
+ end
+ for i=1,#list do
+ local l = list[i]
+ local found = fallbacks[l][name]
+ if found then
+ if trace_names then
+ report_names("resolved via direct fallback match: %a",name)
+ end
+ return found
+ end
+ end
+ for i=1,#list do
+ local l = list[i]
+ local found, fname = fuzzy(fallbacks[l],sorted_fallbacks[l],name,sub)
+ if found then
+ if trace_names then
+ report_names("resolved via fuzzy fallback match: %a onto %a",name,fname)
+ end
+ return found
+ end
+ end
+ if trace_names then
+ report_names("font with name %a cannot be found",name)
+ end
+end
+
+function names.resolvedspecification(askedname,sub)
+ if askedname and askedname ~= "" and names.enabled then
+ askedname = cleanname(askedname)
+ names.load()
+ local found = foundname(askedname,sub)
+ if not found and is_reloaded() then
+ found = foundname(askedname,sub)
+ end
+ return found
+ end
+end
+
+function names.resolve(askedname,sub)
+ local found = names.resolvedspecification(askedname,sub)
+ if found then
+ return found.filename, found.subfont and found.rawname
+ end
+end
+
+-- function names.getfilename(askedname,suffix) -- last resort, strip funny chars
+-- names.load()
+-- local files = names.data.files
+-- askedname = files and files[cleanfilename(askedname,suffix)] or ""
+-- if askedname == "" then
+-- return ""
+-- else -- never entered
+-- return resolvers.findbinfile(askedname,suffix) or ""
+-- end
+-- end
+
+function names.getfilename(askedname,suffix) -- last resort, strip funny chars
+ names.load()
+ local files = names.data.files
+ local cleanname = cleanfilename(askedname,suffix)
+ local found = files and files[cleanname] or ""
+ if found == "" and is_reloaded() then
+ files = names.data.files
+ found = files and files[cleanname] or ""
+ end
+ if found and found ~= "" then
+ return resolvers.findbinfile(found,suffix) or "" -- we still need to locate it
+ end
+end
+
+-- specified search
+
+local function s_collect_weight_style_width_variant(found,done,all,weight,style,width,variant,family)
+ if family then
+ for i=1,#family do
+ local f = family[i]
+ if f and weight == f.weight and style == f.style and width == f.width and variant == f.variant then
+ found[#found+1], done[f] = f, true
+ if not all then return end
+ end
+ end
+ end
+end
+local function m_collect_weight_style_width_variant(found,done,all,weight,style,width,variant,families,sorted,strictname)
+ for i=1,#sorted do
+ local k = sorted[i]
+ local family = families[k]
+ for i=1,#family do
+ local f = family[i]
+ if not done[f] and weight == f.weight and style == f.style and width == f.width and variant == f.variant and find(f.fontname,strictname) then
+ found[#found+1], done[f] = f, true
+ if not all then return end
+ end
+ end
+ end
+end
+
+local function s_collect_weight_style_width(found,done,all,weight,style,width,family)
+ if family then
+ for i=1,#family do
+ local f = family[i]
+ if f and weight == f.weight and style == f.style and width == f.width then
+ found[#found+1], done[f] = f, true
+ if not all then return end
+ end
+ end
+ end
+end
+local function m_collect_weight_style_width(found,done,all,weight,style,width,families,sorted,strictname)
+ for i=1,#sorted do
+ local k = sorted[i]
+ local family = families[k]
+ for i=1,#family do
+ local f = family[i]
+ if not done[f] and weight == f.weight and style == f.style and width == f.width and find(f.fontname,strictname) then
+ found[#found+1], done[f] = f, true
+ if not all then return end
+ end
+ end
+ end
+end
+
+local function s_collect_weight_style(found,done,all,weight,style,family)
+ if family then
+ for i=1,#family do local f = family[i]
+ if f and weight == f.weight and style == f.style then
+ found[#found+1], done[f] = f, true
+ if not all then return end
+ end
+ end
+ end
+end
+local function m_collect_weight_style(found,done,all,weight,style,families,sorted,strictname)
+ for i=1,#sorted do
+ local k = sorted[i]
+ local family = families[k]
+ for i=1,#family do
+ local f = family[i]
+ if not done[f] and weight == f.weight and style == f.style and find(f.fontname,strictname) then
+ found[#found+1], done[f] = f, true
+ if not all then return end
+ end
+ end
+ end
+end
+
+local function s_collect_style_width(found,done,all,style,width,family)
+ if family then
+ for i=1,#family do local f = family[i]
+ if f and style == f.style and width == f.width then
+ found[#found+1], done[f] = f, true
+ if not all then return end
+ end
+ end
+ end
+end
+local function m_collect_style_width(found,done,all,style,width,families,sorted,strictname)
+ for i=1,#sorted do
+ local k = sorted[i]
+ local family = families[k]
+ for i=1,#family do
+ local f = family[i]
+ if not done[f] and style == f.style and width == f.width and find(f.fontname,strictname) then
+ found[#found+1], done[f] = f, true
+ if not all then return end
+ end
+ end
+ end
+end
+
+local function s_collect_weight(found,done,all,weight,family)
+ if family then
+ for i=1,#family do local f = family[i]
+ if f and weight == f.weight then
+ found[#found+1], done[f] = f, true
+ if not all then return end
+ end
+ end
+ end
+end
+local function m_collect_weight(found,done,all,weight,families,sorted,strictname)
+ for i=1,#sorted do
+ local k = sorted[i]
+ local family = families[k]
+ for i=1,#family do
+ local f = family[i]
+ if not done[f] and weight == f.weight and find(f.fontname,strictname) then
+ found[#found+1], done[f] = f, true
+ if not all then return end
+ end
+ end
+ end
+end
+
+local function s_collect_style(found,done,all,style,family)
+ if family then
+ for i=1,#family do local f = family[i]
+ if f and style == f.style then
+ found[#found+1], done[f] = f, true
+ if not all then return end
+ end
+ end
+ end
+end
+local function m_collect_style(found,done,all,style,families,sorted,strictname)
+ for i=1,#sorted do
+ local k = sorted[i]
+ local family = families[k]
+ for i=1,#family do
+ local f = family[i]
+ if not done[f] and style == f.style and find(f.fontname,strictname) then
+ found[#found+1], done[f] = f, true
+ if not all then return end
+ end
+ end
+ end
+end
+
+local function s_collect_width(found,done,all,width,family)
+ if family then
+ for i=1,#family do local f = family[i]
+ if f and width == f.width then
+ found[#found+1], done[f] = f, true
+ if not all then return end
+ end
+ end
+ end
+end
+local function m_collect_width(found,done,all,width,families,sorted,strictname)
+ for i=1,#sorted do
+ local k = sorted[i]
+ local family = families[k]
+ for i=1,#family do
+ local f = family[i]
+ if not done[f] and width == f.width and find(f.fontname,strictname) then
+ found[#found+1], done[f] = f, true
+ if not all then return end
+ end
+ end
+ end
+end
+
+local function s_collect(found,done,all,family)
+ if family then
+ for i=1,#family do local f = family[i]
+ if f then
+ found[#found+1], done[f] = f, true
+ if not all then return end
+ end
+ end
+ end
+end
+local function m_collect(found,done,all,families,sorted,strictname)
+ for i=1,#sorted do
+ local k = sorted[i]
+ local family = families[k]
+ for i=1,#family do
+ local f = family[i]
+ if not done[f] and find(f.fontname,strictname) then
+ found[#found+1], done[f] = f, true
+ if not all then return end
+ end
+ end
+ end
+end
+
+local function collect(stage,found,done,name,weight,style,width,variant,all)
+ local data = names.data
+ local families = data.families
+ local sorted = data.sorted_families
+ local strictname = "^".. name -- to be checked
+ local family = families[name]
+ if trace_names then
+ report_names("resolving name %a, weight %a, style %a, width %a, variant %a",name,weight,style,width,variant)
+ end
+ if weight and weight ~= "" then
+ if style and style ~= "" then
+ if width and width ~= "" then
+ if variant and variant ~= "" then
+ if trace_names then
+ report_names("resolving stage %s, name %a, weight %a, style %a, width %a, variant %a",stage,name,weight,style,width,variant)
+ end
+ s_collect_weight_style_width_variant(found,done,all,weight,style,width,variant,family)
+ m_collect_weight_style_width_variant(found,done,all,weight,style,width,variant,families,sorted,strictname)
+ else
+ if trace_names then
+ report_names("resolving stage %s, name %a, weight %a, style %a, width %a",stage,name,weight,style,width)
+ end
+ s_collect_weight_style_width(found,done,all,weight,style,width,family)
+ m_collect_weight_style_width(found,done,all,weight,style,width,families,sorted,strictname)
+ end
+ else
+ if trace_names then
+ report_names("resolving stage %s, name %a, weight %a, style %a",stage,name,weight,style)
+ end
+ s_collect_weight_style(found,done,all,weight,style,family)
+ m_collect_weight_style(found,done,all,weight,style,families,sorted,strictname)
+ end
+ else
+ if trace_names then
+ report_names("resolving stage %s, name %a, weight %a",stage,name,weight)
+ end
+ s_collect_weight(found,done,all,weight,family)
+ m_collect_weight(found,done,all,weight,families,sorted,strictname)
+ end
+ elseif style and style ~= "" then
+ if width and width ~= "" then
+ if trace_names then
+ report_names("resolving stage %s, name %a, style %a, width %a",stage,name,style,width)
+ end
+ s_collect_style_width(found,done,all,style,width,family)
+ m_collect_style_width(found,done,all,style,width,families,sorted,strictname)
+ else
+ if trace_names then
+ report_names("resolving stage %s, name %a, style %a",stage,name,style)
+ end
+ s_collect_style(found,done,all,style,family)
+ m_collect_style(found,done,all,style,families,sorted,strictname)
+ end
+ elseif width and width ~= "" then
+ if trace_names then
+ report_names("resolving stage %s, name %a, width %a",stage,name,width)
+ end
+ s_collect_width(found,done,all,width,family)
+ m_collect_width(found,done,all,width,families,sorted,strictname)
+ else
+ if trace_names then
+ report_names("resolving stage %s, name %a",stage,name)
+ end
+ s_collect(found,done,all,family)
+ m_collect(found,done,all,families,sorted,strictname)
+ end
+end
+
+local function heuristic(name,weight,style,width,variant,all) -- todo: fallbacks
+ local found, done = { }, { }
+--~ print(name,weight,style,width,variant)
+ weight, style, width, variant = weight or "normal", style or "normal", width or "normal", variant or "normal"
+ name = cleanname(name)
+ collect(1,found,done,name,weight,style,width,variant,all)
+ -- still needed ?
+ if #found == 0 and variant ~= "normal" then -- not weight
+ variant = "normal"
+ collect(4,found,done,name,weight,style,width,variant,all)
+ end
+ if #found == 0 and width ~= "normal" then
+ width = "normal"
+ collect(2,found,done,name,weight,style,width,variant,all)
+ end
+ if #found == 0 and weight ~= "normal" then -- not style
+ weight = "normal"
+ collect(3,found,done,name,weight,style,width,variant,all)
+ end
+ if #found == 0 and style ~= "normal" then -- not weight
+ style = "normal"
+ collect(4,found,done,name,weight,style,width,variant,all)
+ end
+ --
+ local nf = #found
+ if trace_names then
+ if nf > 0 then
+ local t = { }
+ for i=1,nf do
+ t[i] = formatters["%a"](found[i].fontname)
+ end
+ report_names("name %a resolved to %s instances: % t",name,nf,t)
+ else
+ report_names("name %a unresolved",name)
+ end
+ end
+ if all then
+ return nf > 0 and found
+ else
+ return found[1]
+ end
+end
+
+function names.specification(askedname,weight,style,width,variant,reload,all)
+ if askedname and askedname ~= "" and names.enabled then
+ askedname = cleanname(askedname) -- or cleanname
+ names.load(reload)
+ local found = heuristic(askedname,weight,style,width,variant,all)
+ if not found and is_reloaded() then
+ found = heuristic(askedname,weight,style,width,variant,all)
+ if not found then
+ found = foundname(askedname) -- old method
+ end
+ end
+ return found
+ end
+end
+
+function names.collect(askedname,weight,style,width,variant,reload,all)
+ if askedname and askedname ~= "" and names.enabled then
+ askedname = cleanname(askedname) -- or cleanname
+ names.load(reload)
+ local list = heuristic(askedname,weight,style,width,variant,true)
+ if (not list or #list == 0) and is_reloaded() then
+ list = heuristic(askedname,weight,style,width,variant,true)
+ end
+ return list
+ end
+end
+
+function names.collectspec(askedname,reload,all)
+ local name, weight, style, width, variant = names.splitspec(askedname)
+ return names.collect(name,weight,style,width,variant,reload,all)
+end
+
+function names.resolvespec(askedname,sub) -- redefined later
+ local found = names.specification(names.splitspec(askedname))
+ if found then
+ return found.filename, found.subfont and found.rawname
+ end
+end
+
+function names.collectfiles(askedname,reload) -- no all
+ if askedname and askedname ~= "" and names.enabled then
+ askedname = cleanname(askedname) -- or cleanname
+ names.load(reload)
+ local list = { }
+ local specifications = names.data.specifications
+ for i=1,#specifications do
+ local s = specifications[i]
+ if find(cleanname(basename(s.filename)),askedname) then
+ list[#list+1] = s
+ end
+ end
+ return list
+ end
+end
+
+-- todo:
+--
+-- blacklisted = {
+-- ["cmr10.ttf"] = "completely messed up",
+-- }
+
+function names.exists(name)
+ local found = false
+ local list = filters.list
+ for k=1,#list do
+ local v = list[k]
+ found = (findfile(name,v) or "") ~= ""
+ if found then
+ return found
+ end
+ end
+ return (findfile(name,"tfm") or "") ~= "" or (names.resolve(name) or "") ~= ""
+end
+
+local lastlookups, lastpattern = { }, ""
+
+function names.lookup(pattern,name,reload) -- todo: find
+ if lastpattern ~= pattern then
+ names.load(reload)
+ local specifications = names.data.specifications
+ local families = names.data.families
+ local lookups = specifications
+ if name then
+ lookups = families[name]
+ elseif not find(pattern,"=") then
+ lookups = families[pattern]
+ end
+ if trace_names then
+ report_names("starting with %s lookups for %a",#lookups,pattern)
+ end
+ if lookups then
+ for key, value in gmatch(pattern,"([^=,]+)=([^=,]+)") do
+ local t, n = { }, 0
+ if find(value,"*") then
+ value = string.topattern(value)
+ for i=1,#lookups do
+ local s = lookups[i]
+ if find(s[key],value) then
+ n = n + 1
+ t[n] = lookups[i]
+ end
+ end
+ else
+ for i=1,#lookups do
+ local s = lookups[i]
+ if s[key] == value then
+ n = n + 1
+ t[n] = lookups[i]
+ end
+ end
+ end
+ if trace_names then
+ report_names("%s matches for key %a with value %a",#t,key,value)
+ end
+ lookups = t
+ end
+ end
+ lastpattern = pattern
+ lastlookups = lookups or { }
+ end
+ return #lastlookups
+end
+
+function names.getlookupkey(key,n)
+ local l = lastlookups[n or 1]
+ return (l and l[key]) or ""
+end
+
+function names.noflookups()
+ return #lastlookups
+end
+
+function names.getlookups(pattern,name,reload)
+ if pattern then
+ names.lookup(pattern,name,reload)
+ end
+ return lastlookups
+end
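+
+-- A sketch of the pattern based lookup (family and values are illustrative);
+-- the pattern is a comma separated list of key=value pairs matched against
+-- the specification fields, optionally narrowed to one family:
+--
+--   if fonts.names.lookup("weight=bold,style=italic","dejavuserif") > 0 then
+--       local filename = fonts.names.getlookupkey("filename",1)
+--   end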
+
+-- The following is new ... watch the overload!
+
+local specifications = allocate()
+names.specifications = specifications
+
+-- files = {
+-- name = "antykwapoltawskiego",
+-- list = {
+-- ["AntPoltLtCond-Regular.otf"] = {
+-- -- name = "antykwapoltawskiego",
+-- style = "regular",
+-- weight = "light",
+-- width = "condensed",
+-- },
+-- },
+-- }
+
+function names.register(files)
+ if files then
+ local list, commonname = files.list, files.name
+ if list then
+ local n, m = 0, 0
+ for filename, filespec in next, list do
+ local name = lower(filespec.name or commonname)
+ if name and name ~= "" then
+ local style = normalized_styles [lower(filespec.style or "normal")]
+ local width = normalized_widths [lower(filespec.width or "normal")]
+ local weight = normalized_weights [lower(filespec.weight or "normal")]
+ local variant = normalized_variants[lower(filespec.variant or "normal")]
+ local weights = specifications[name ] if not weights then weights = { } specifications[name ] = weights end
+ local styles = weights [weight] if not styles then styles = { } weights [weight] = styles end
+ local widths = styles [style ] if not widths then widths = { } styles [style ] = widths end
+ local variants = widths [width ] if not variants then variants = { } widths [width ] = variants end
+ variants[variant] = filename
+ n = n + 1
+ else
+ m = m + 1
+ end
+ end
+ if trace_specifications then
+ report_names("%s filenames registered, %s filenames rejected",n,m)
+ end
+ end
+ end
+end
+
+function names.registered(name,weight,style,width,variant)
+ local ok = specifications[name]
+ ok = ok and (ok[(weight and weight ~= "" and weight ) or "normal"] or ok.normal)
+ ok = ok and (ok[(style and style ~= "" and style ) or "normal"] or ok.normal)
+ ok = ok and (ok[(width and width ~= "" and width ) or "normal"] or ok.normal)
+ ok = ok and (ok[(variant and variant ~= "" and variant) or "normal"] or ok.normal)
+ --
+ -- todo: same fallbacks as with database
+ --
+ if ok then
+ return {
+ filename = ok,
+ subname = "",
+ -- rawname = nil,
+ }
+ end
+end
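+
+-- Assuming the files table sketched before names.register has been passed to
+-- it, and that the normalization tables map "light", "regular" and "condensed"
+-- onto themselves, a registered lookup goes like this:
+--
+--   local found = names.registered("antykwapoltawskiego","light","regular","condensed")
+--   -- found = { filename = "AntPoltLtCond-Regular.otf", subname = "" }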
+
+function names.resolvespec(askedname,sub) -- overloads previous definition
+ local name, weight, style, width, variant = names.splitspec(askedname)
+ if trace_specifications then
+ report_names("resolving specification: %a to name=%s, weight=%s, style=%s, width=%s, variant=%s",askedname,name,weight,style,width,variant)
+ end
+ local found = names.registered(name,weight,style,width,variant)
+ if found and found.filename then
+ if trace_specifications then
+ report_names("resolved by registered names: %a to %s",askedname,found.filename)
+ end
+ return found.filename, found.subname, found.rawname
+ else
+ found = names.specification(name,weight,style,width,variant)
+ if found and found.filename then
+ if trace_specifications then
+ report_names("resolved by font database: %a to %s",askedname,found.filename)
+ end
+ return found.filename, found.subfont and found.rawname
+ end
+ end
+ if trace_specifications then
+ report_names("unresolved: %s",askedname)
+ end
+end
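+
+-- So a dashed specification resolves first via the registered names and then
+-- via the font database, for instance (the font name is illustrative):
+--
+--   local filename = fonts.names.resolvespec("dejavuserif-bold-italic")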
diff --git a/tex/context/base/font-tfm.lua b/tex/context/base/font-tfm.lua
index 316b947a3..316e11a65 100644
--- a/tex/context/base/font-tfm.lua
+++ b/tex/context/base/font-tfm.lua
@@ -1,152 +1,152 @@
-if not modules then modules = { } end modules ['font-tfm'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local next = next
-local match = string.match
-
-local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
-local trace_features = false trackers.register("tfm.features", function(v) trace_features = v end)
-
-local report_defining = logs.reporter("fonts","defining")
-local report_tfm = logs.reporter("fonts","tfm loading")
-
-local findbinfile = resolvers.findbinfile
-
-local fonts = fonts
-local handlers = fonts.handlers
-local readers = fonts.readers
-local constructors = fonts.constructors
-local encodings = fonts.encodings
-
-local tfm = constructors.newhandler("tfm")
-
-local tfmfeatures = constructors.newfeatures("tfm")
-local registertfmfeature = tfmfeatures.register
-
-constructors.resolvevirtualtoo = false -- will be set in font-ctx.lua
-
-fonts.formats.tfm = "type1" -- we need to have at least a value here
-
---[[ldx--
-The next function encapsulates the standard loader as
-supplied by LuaTeX.
---ldx]]--
-
--- this might change: not scaling and then apply features and do scaling in the
--- usual way with dummy descriptions but on the other hand .. we no longer use
--- tfm so why bother
-
--- ofm directive blocks local path search unless set; btw, in context we
--- don't support ofm files anyway as this format is obsolete
-
-function tfm.setfeatures(tfmdata,features)
- local okay = constructors.initializefeatures("tfm",tfmdata,features,trace_features,report_tfm)
- if okay then
- return constructors.collectprocessors("tfm",tfmdata,features,trace_features,report_tfm)
- else
- return { } -- will become false
- end
-end
-
-local function read_from_tfm(specification)
- local filename = specification.filename
- local size = specification.size
- if trace_defining then
- report_defining("loading tfm file %a at size %s",filename,size)
- end
- local tfmdata = font.read_tfm(filename,size) -- not cached, fast enough
- if tfmdata then
- local features = specification.features and specification.features.normal or { }
- local resources = tfmdata.resources or { }
- local properties = tfmdata.properties or { }
- local parameters = tfmdata.parameters or { }
- local shared = tfmdata.shared or { }
- properties.name = tfmdata.name
- properties.fontname = tfmdata.fontname
- properties.psname = tfmdata.psname
- properties.filename = specification.filename
- parameters.size = size
- shared.rawdata = { }
- shared.features = features
- shared.processes = next(features) and tfm.setfeatures(tfmdata,features) or nil
- --
- tfmdata.properties = properties
- tfmdata.resources = resources
- tfmdata.parameters = parameters
- tfmdata.shared = shared
- --
- parameters.slant = parameters.slant or parameters[1] or 0
- parameters.space = parameters.space or parameters[2] or 0
- parameters.space_stretch = parameters.space_stretch or parameters[3] or 0
- parameters.space_shrink = parameters.space_shrink or parameters[4] or 0
- parameters.x_height = parameters.x_height or parameters[5] or 0
- parameters.quad = parameters.quad or parameters[6] or 0
- parameters.extra_space = parameters.extra_space or parameters[7] or 0
- --
- constructors.enhanceparameters(parameters) -- official copies for us
- --
- if constructors.resolvevirtualtoo then
- fonts.loggers.register(tfmdata,file.suffix(filename),specification) -- strange, why here
- local vfname = findbinfile(specification.name, 'ovf')
- if vfname and vfname ~= "" then
- local vfdata = font.read_vf(vfname,size) -- not cached, fast enough
- if vfdata then
- local chars = tfmdata.characters
- for k,v in next, vfdata.characters do
- chars[k].commands = v.commands
- end
- properties.virtualized = true
- tfmdata.fonts = vfdata.fonts
- end
- end
- end
- --
- local allfeatures = tfmdata.shared.features or specification.features.normal
- constructors.applymanipulators("tfm",tfmdata,allfeatures.normal,trace_features,report_tfm)
- if not features.encoding then
- local encoding, filename = match(properties.filename,"^(.-)%-(.*)$") -- context: encoding-name.*
- if filename and encoding and encodings.known[encoding] then
- features.encoding = encoding
- end
- end
- --
- return tfmdata
- end
-end
-
-local function check_tfm(specification,fullname) -- we could split up like afm/otf
- local foundname = findbinfile(fullname, 'tfm') or ""
- if foundname == "" then
- foundname = findbinfile(fullname, 'ofm') or "" -- not needed in context
- end
- if foundname == "" then
- foundname = fonts.names.getfilename(fullname,"tfm") or ""
- end
- if foundname ~= "" then
- specification.filename = foundname
- specification.format = "ofm"
- return read_from_tfm(specification)
- elseif trace_defining then
- report_defining("loading tfm with name %a fails",specification.name)
- end
-end
-
-readers.check_tfm = check_tfm
-
-function readers.tfm(specification)
- local fullname = specification.filename or ""
- if fullname == "" then
- local forced = specification.forced or ""
- if forced ~= "" then
- fullname = specification.name .. "." .. forced
- else
- fullname = specification.name
- end
- end
- return check_tfm(specification,fullname)
-end
+if not modules then modules = { } end modules ['font-tfm'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local next = next
+local match = string.match
+
+local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
+local trace_features = false trackers.register("tfm.features", function(v) trace_features = v end)
+
+local report_defining = logs.reporter("fonts","defining")
+local report_tfm = logs.reporter("fonts","tfm loading")
+
+local findbinfile = resolvers.findbinfile
+
+local fonts = fonts
+local handlers = fonts.handlers
+local readers = fonts.readers
+local constructors = fonts.constructors
+local encodings = fonts.encodings
+
+local tfm = constructors.newhandler("tfm")
+
+local tfmfeatures = constructors.newfeatures("tfm")
+local registertfmfeature = tfmfeatures.register
+
+constructors.resolvevirtualtoo = false -- will be set in font-ctx.lua
+
+fonts.formats.tfm = "type1" -- we need to have at least a value here
+
+--[[ldx--
+The next function encapsulates the standard loader as
+supplied by LuaTeX.
+--ldx]]--
+
+-- this might change: not scaling and then apply features and do scaling in the
+-- usual way with dummy descriptions but on the other hand .. we no longer use
+-- tfm so why bother
+
+-- ofm directive blocks local path search unless set; btw, in context we
+-- don't support ofm files anyway as this format is obsolete
+
+function tfm.setfeatures(tfmdata,features)
+ local okay = constructors.initializefeatures("tfm",tfmdata,features,trace_features,report_tfm)
+ if okay then
+ return constructors.collectprocessors("tfm",tfmdata,features,trace_features,report_tfm)
+ else
+ return { } -- will become false
+ end
+end
+
+local function read_from_tfm(specification)
+ local filename = specification.filename
+ local size = specification.size
+ if trace_defining then
+ report_defining("loading tfm file %a at size %s",filename,size)
+ end
+ local tfmdata = font.read_tfm(filename,size) -- not cached, fast enough
+ if tfmdata then
+ local features = specification.features and specification.features.normal or { }
+ local resources = tfmdata.resources or { }
+ local properties = tfmdata.properties or { }
+ local parameters = tfmdata.parameters or { }
+ local shared = tfmdata.shared or { }
+ properties.name = tfmdata.name
+ properties.fontname = tfmdata.fontname
+ properties.psname = tfmdata.psname
+ properties.filename = specification.filename
+ parameters.size = size
+ shared.rawdata = { }
+ shared.features = features
+ shared.processes = next(features) and tfm.setfeatures(tfmdata,features) or nil
+ --
+ tfmdata.properties = properties
+ tfmdata.resources = resources
+ tfmdata.parameters = parameters
+ tfmdata.shared = shared
+ --
+ parameters.slant = parameters.slant or parameters[1] or 0
+ parameters.space = parameters.space or parameters[2] or 0
+ parameters.space_stretch = parameters.space_stretch or parameters[3] or 0
+ parameters.space_shrink = parameters.space_shrink or parameters[4] or 0
+ parameters.x_height = parameters.x_height or parameters[5] or 0
+ parameters.quad = parameters.quad or parameters[6] or 0
+ parameters.extra_space = parameters.extra_space or parameters[7] or 0
+ --
+ constructors.enhanceparameters(parameters) -- official copies for us
+ --
+ if constructors.resolvevirtualtoo then
+ fonts.loggers.register(tfmdata,file.suffix(filename),specification) -- strange, why here
+ local vfname = findbinfile(specification.name, 'ovf')
+ if vfname and vfname ~= "" then
+ local vfdata = font.read_vf(vfname,size) -- not cached, fast enough
+ if vfdata then
+ local chars = tfmdata.characters
+ for k,v in next, vfdata.characters do
+ chars[k].commands = v.commands
+ end
+ properties.virtualized = true
+ tfmdata.fonts = vfdata.fonts
+ end
+ end
+ end
+ --
+ local allfeatures = tfmdata.shared.features or specification.features.normal
+ constructors.applymanipulators("tfm",tfmdata,allfeatures.normal,trace_features,report_tfm)
+ if not features.encoding then
+ local encoding, filename = match(properties.filename,"^(.-)%-(.*)$") -- context: encoding-name.*
+ if filename and encoding and encodings.known[encoding] then
+ features.encoding = encoding
+ end
+ end
+ --
+ return tfmdata
+ end
+end
+
+local function check_tfm(specification,fullname) -- we could split up like afm/otf
+ local foundname = findbinfile(fullname, 'tfm') or ""
+ if foundname == "" then
+ foundname = findbinfile(fullname, 'ofm') or "" -- not needed in context
+ end
+ if foundname == "" then
+ foundname = fonts.names.getfilename(fullname,"tfm") or ""
+ end
+ if foundname ~= "" then
+ specification.filename = foundname
+ specification.format = "ofm"
+ return read_from_tfm(specification)
+ elseif trace_defining then
+ report_defining("loading tfm with name %a fails",specification.name)
+ end
+end
+
+readers.check_tfm = check_tfm
+
+function readers.tfm(specification)
+ local fullname = specification.filename or ""
+ if fullname == "" then
+ local forced = specification.forced or ""
+ if forced ~= "" then
+ fullname = specification.name .. "." .. forced
+ else
+ fullname = specification.name
+ end
+ end
+ return check_tfm(specification,fullname)
+end
diff --git a/tex/context/base/font-trt.lua b/tex/context/base/font-trt.lua
index 6fc8028d1..d382e62d7 100644
--- a/tex/context/base/font-trt.lua
+++ b/tex/context/base/font-trt.lua
@@ -1,57 +1,57 @@
-if not modules then modules = { } end modules ['font-trt'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local rawget, dofile, next = rawget, dofile, next
-
---[[ldx--
-We provide a simple treatment mechanism (mostly because I want to demonstrate
-something in a manual). It's one of the few places where an lfg file gets loaded
-outside the goodies manager.
---ldx]]--
-
-local treatments = utilities.storage.allocate()
-fonts.treatments = treatments
-local treatmentdata = { }
-treatments.data = treatmentdata
-treatments.filename = "treatments.lfg"
-
--- function treatments.load(name)
--- local filename = resolvers.findfile(name)
--- if filename and filename ~= "" then
--- local goodies = dofile(filename)
--- if goodies then
--- local treatments = goodies.treatments
--- if treatments then
--- for name, data in next, treatments do
--- treatmentdata[name] = data -- always wins
--- end
--- end
--- end
--- end
--- end
-
-table.setmetatableindex(treatmentdata,function(t,k)
- local files = resolvers.findfiles(treatments.filename)
- if files then
- for i=1,#files do
- local goodies = dofile(files[i])
- if goodies then
- local treatments = goodies.treatments
- if treatments then
- for name, data in next, treatments do
- if not rawget(t,name) then
- t[name] = data
- end
- end
- end
- end
- end
- end
- table.setmetatableindex(treatmentdata,nil)
- return treatmentdata[k]
-end)
+if not modules then modules = { } end modules ['font-trt'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local rawget, dofile, next = rawget, dofile, next
+
+--[[ldx--
+We provide a simple treatment mechanism (mostly because I want to demonstrate
+something in a manual). It's one of the few places where an lfg file gets loaded
+outside the goodies manager.
+--ldx]]--
+
+local treatments = utilities.storage.allocate()
+fonts.treatments = treatments
+local treatmentdata = { }
+treatments.data = treatmentdata
+treatments.filename = "treatments.lfg"
+
+-- function treatments.load(name)
+-- local filename = resolvers.findfile(name)
+-- if filename and filename ~= "" then
+-- local goodies = dofile(filename)
+-- if goodies then
+-- local treatments = goodies.treatments
+-- if treatments then
+-- for name, data in next, treatments do
+-- treatmentdata[name] = data -- always wins
+-- end
+-- end
+-- end
+-- end
+-- end
+
+table.setmetatableindex(treatmentdata,function(t,k)
+ local files = resolvers.findfiles(treatments.filename)
+ if files then
+ for i=1,#files do
+ local goodies = dofile(files[i])
+ if goodies then
+ local treatments = goodies.treatments
+ if treatments then
+ for name, data in next, treatments do
+ if not rawget(t,name) then
+ t[name] = data
+ end
+ end
+ end
+ end
+ end
+ end
+ table.setmetatableindex(treatmentdata,nil)
+ return treatmentdata[k]
+end)
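+
+-- A minimal sketch of what a treatments goodie file could look like (the file
+-- name and comment are made up); the name scanner in font-syn.lua consults the
+-- ignored and comment fields of such entries:
+--
+--   -- treatments.lfg
+--
+--   return {
+--       name       = "treatments",
+--       comment    = "some font treatments",
+--       treatments = {
+--           ["brokenfont.ttf"] = {
+--               ignored = true,
+--               comment = "bad tables",
+--           },
+--       },
+--   }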
diff --git a/tex/context/base/font-vf.lua b/tex/context/base/font-vf.lua
index 1fe6dd71c..bc6ed400e 100644
--- a/tex/context/base/font-vf.lua
+++ b/tex/context/base/font-vf.lua
@@ -1,205 +1,205 @@
-if not modules then modules = { } end modules ['font-vf'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-This is very experimental code! Not yet adapted to recent changes. This will change.
---ldx]]--
-
--- present in the backend but unspecified:
---
--- vf.rule vf.special vf.right vf.push vf.down vf.char vf.node vf.fontid vf.pop vf.image vf.nop
-
-local next = next
-
-local allocate = utilities.storage.allocate
-local setmetatableindex = table.setmetatableindex
-local fastcopy = table.fastcopy
-
-local fonts = fonts
-local constructors = fonts.constructors
-local vf = constructors.newhandler("vf")
-
--- general code
-
-function vf.find(name)
- name = file.removesuffix(file.basename(name))
- if constructors.resolvevirtualtoo then
- local format = fonts.loggers.format(name)
- if format == 'tfm' or format == 'ofm' then
- if trace_defining then
- report_defining("locating vf for %a",name)
- end
- return findbinfile(name,"ovf")
- else
- if trace_defining then
- report_defining("vf for %a is already taken care of",name)
- end
- return nil -- ""
- end
- else
- if trace_defining then
- report_defining("locating vf for %a",name)
- end
- return findbinfile(name,"ovf")
- end
-end
-
---[[ldx--
-
-We overload the reader.
---ldx]]--
-
-callbacks.register('find_vf_file', vf.find, "locating virtual fonts, insofar needed") -- not that relevant any more
-
--- specific code (will move to other module)
-
-local definers = fonts.definers
-local methods = definers.methods
-
-local variants = allocate()
-local combinations = { }
-local combiner = { }
-local whatever = allocate()
-local helpers = allocate()
-local predefined = allocate {
- dummy = { "comment" },
- push = { "push" },
- pop = { "pop" },
-}
-
-methods.variants = variants -- todo .. wrong namespace
-vf.combinations = combinations
-vf.combiner = combiner
-vf.whatever = whatever
-vf.helpers = helpers
-vf.predefined = predefined
-
-setmetatableindex(whatever, function(t,k) local v = { } t[k] = v return v end)
-
-local function checkparameters(g,f)
- if f and g and not g.parameters and #g.fonts > 0 then
- local p = { }
- for k,v in next, f.parameters do
- p[k] = v
- end
- g.parameters = p
- setmetatable(p, getmetatable(f.parameters))
- end
-end
-
-function methods.install(tag, rules)
- vf.combinations[tag] = rules
- variants[tag] = function(specification)
- return vf.combine(specification,tag)
- end
-end
-
-local function combine_load(g,name)
- return constructors.readanddefine(name or g.specification.name,g.specification.size)
-end
-
-local function combine_assign(g, name, from, to, start, force)
- local f, id = combine_load(g,name)
- if f and id then
- -- optimize for whole range, then just g = f
- if not from then from, to = 0, 0xFF00 end
- if not to then to = from end
- if not start then start = from end
- local fc, gc = f.characters, g.characters
- local fd, gd = f.descriptions, g.descriptions
- local hn = #g.fonts+1
- g.fonts[hn] = { id = id } -- no need to be sparse
- for i=from,to do
- if fc[i] and (force or not gc[i]) then
- gc[i] = fastcopy(fc[i],true) -- can be optimized
- gc[i].commands = { { 'slot', hn, start } }
- gd[i] = fd[i]
- end
- start = start + 1
- end
- checkparameters(g,f)
- end
-end
-
-local function combine_process(g,list)
- if list then
- for _,v in next, list do
- (combiner.commands[v[1]] or nop)(g,v)
- end
- end
-end
-
-local function combine_names(g,name,force)
- local f, id = constructors.readanddefine(name,g.specification.size)
- if f and id then
- local fc, gc = f.characters, g.characters
- local fd, gd = f.descriptions, g.descriptions
- g.fonts[#g.fonts+1] = { id = id } -- no need to be sparse
- local hn = #g.fonts
- for k, v in next, fc do
- if force or not gc[k] then
- gc[k] = fastcopy(v,true)
- gc[k].commands = { { 'slot', hn, k } }
- gd[k] = fd[k]
- end
- end
- checkparameters(g,f)
- end
-end
-
-local combine_feature = function(g,v)
- local key, value = v[2], v[3]
- if key then
- if value == nil then
- value = true
- end
- local specification = g.specification
- if specification then
- local normalfeatures = specification.features.normal
- if normalfeatures then
- normalfeatures[key] = value -- otf?
- end
- end
- end
-end
-
---~ combiner.load = combine_load
---~ combiner.assign = combine_assign
---~ combiner.process = combine_process
---~ combiner.names = combine_names
---~ combiner.feature = combine_feature
-
-combiner.commands = allocate {
- ["initialize"] = function(g,v) combine_assign (g,g.properties.name) end,
- ["include-method"] = function(g,v) combine_process (g,combinations[v[2]]) end, -- name
- -- ["copy-parameters"] = function(g,v) combine_parameters(g,v[2]) end, -- name
- ["copy-range"] = function(g,v) combine_assign (g,v[2],v[3],v[4],v[5],true) end, -- name, from-start, from-end, to-start
- ["copy-char"] = function(g,v) combine_assign (g,v[2],v[3],v[3],v[4],true) end, -- name, from, to
- ["fallback-range"] = function(g,v) combine_assign (g,v[2],v[3],v[4],v[5],false) end, -- name, from-start, from-end, to-start
- ["fallback-char"] = function(g,v) combine_assign (g,v[2],v[3],v[3],v[4],false) end, -- name, from, to
- ["copy-names"] = function(g,v) combine_names (g,v[2],true) end,
- ["fallback-names"] = function(g,v) combine_names (g,v[2],false) end,
- ["feature"] = combine_feature,
-}
-
-function vf.combine(specification,tag)
- local g = {
- name = specification.name,
- properties = {
- virtualized = true,
- },
- fonts = {
- },
- characters = {
- },
- descriptions = {
- },
- specification = fastcopy(specification),
- }
- combine_process(g,combinations[tag])
- return g
-end
+if not modules then modules = { } end modules ['font-vf'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[ldx--
+
+This is very experimental code! Not yet adapted to recent changes. This will change.
+--ldx]]--
+
+-- present in the backend but unspecified:
+--
+-- vf.rule vf.special vf.right vf.push vf.down vf.char vf.node vf.fontid vf.pop vf.image vf.nop
+
+local next = next
+
+local allocate = utilities.storage.allocate
+local setmetatableindex = table.setmetatableindex
+local fastcopy = table.fastcopy
+
+local fonts = fonts
+local constructors = fonts.constructors
+local vf = constructors.newhandler("vf")
+
+-- general code
+
+function vf.find(name)
+ name = file.removesuffix(file.basename(name))
+ if constructors.resolvevirtualtoo then
+ local format = fonts.loggers.format(name)
+ if format == 'tfm' or format == 'ofm' then
+ if trace_defining then
+ report_defining("locating vf for %a",name)
+ end
+ return findbinfile(name,"ovf")
+ else
+ if trace_defining then
+ report_defining("vf for %a is already taken care of",name)
+ end
+ return nil -- ""
+ end
+ else
+ if trace_defining then
+ report_defining("locating vf for %a",name)
+ end
+ return findbinfile(name,"ovf")
+ end
+end
+
+--[[ldx--
+
+We overload the reader.
+--ldx]]--
+
+callbacks.register('find_vf_file', vf.find, "locating virtual fonts, insofar needed") -- not that relevant any more
+
+-- specific code (will move to other module)
+
+local definers = fonts.definers
+local methods = definers.methods
+
+local variants = allocate()
+local combinations = { }
+local combiner = { }
+local whatever = allocate()
+local helpers = allocate()
+local predefined = allocate {
+ dummy = { "comment" },
+ push = { "push" },
+ pop = { "pop" },
+}
+
+methods.variants = variants -- todo .. wrong namespace
+vf.combinations = combinations
+vf.combiner = combiner
+vf.whatever = whatever
+vf.helpers = helpers
+vf.predefined = predefined
+
+setmetatableindex(whatever, function(t,k) local v = { } t[k] = v return v end)
+
+local function checkparameters(g,f)
+ if f and g and not g.parameters and #g.fonts > 0 then
+ local p = { }
+ for k,v in next, f.parameters do
+ p[k] = v
+ end
+ g.parameters = p
+ setmetatable(p, getmetatable(f.parameters))
+ end
+end
+
+function methods.install(tag, rules)
+ vf.combinations[tag] = rules
+ variants[tag] = function(specification)
+ return vf.combine(specification,tag)
+ end
+end
+
+local function combine_load(g,name)
+ return constructors.readanddefine(name or g.specification.name,g.specification.size)
+end
+
+local function combine_assign(g, name, from, to, start, force)
+ local f, id = combine_load(g,name)
+ if f and id then
+ -- optimize for whole range, then just g = f
+ if not from then from, to = 0, 0xFF00 end
+ if not to then to = from end
+ if not start then start = from end
+ local fc, gc = f.characters, g.characters
+ local fd, gd = f.descriptions, g.descriptions
+ local hn = #g.fonts+1
+ g.fonts[hn] = { id = id } -- no need to be sparse
+ for i=from,to do
+ if fc[i] and (force or not gc[i]) then
+ gc[i] = fastcopy(fc[i],true) -- can be optimized
+ gc[i].commands = { { 'slot', hn, start } }
+ gd[i] = fd[i]
+ end
+ start = start + 1
+ end
+ checkparameters(g,f)
+ end
+end
+
+local function combine_process(g,list)
+ if list then
+ for _,v in next, list do
+ (combiner.commands[v[1]] or nop)(g,v)
+ end
+ end
+end
+
+local function combine_names(g,name,force)
+ local f, id = constructors.readanddefine(name,g.specification.size)
+ if f and id then
+ local fc, gc = f.characters, g.characters
+ local fd, gd = f.descriptions, g.descriptions
+ g.fonts[#g.fonts+1] = { id = id } -- no need to be sparse
+ local hn = #g.fonts
+ for k, v in next, fc do
+ if force or not gc[k] then
+ gc[k] = fastcopy(v,true)
+ gc[k].commands = { { 'slot', hn, k } }
+ gd[k] = fd[k]
+ end
+ end
+ checkparameters(g,f)
+ end
+end
+
+local combine_feature = function(g,v)
+ local key, value = v[2], v[3]
+ if key then
+ if value == nil then
+ value = true
+ end
+ local specification = g.specification
+ if specification then
+ local normalfeatures = specification.features.normal
+ if normalfeatures then
+ normalfeatures[key] = value -- otf?
+ end
+ end
+ end
+end
+
+--~ combiner.load = combine_load
+--~ combiner.assign = combine_assign
+--~ combiner.process = combine_process
+--~ combiner.names = combine_names
+--~ combiner.feature = combine_feature
+
+combiner.commands = allocate {
+ ["initialize"] = function(g,v) combine_assign (g,g.properties.name) end,
+ ["include-method"] = function(g,v) combine_process (g,combinations[v[2]]) end, -- name
+ -- ["copy-parameters"] = function(g,v) combine_parameters(g,v[2]) end, -- name
+ ["copy-range"] = function(g,v) combine_assign (g,v[2],v[3],v[4],v[5],true) end, -- name, from-start, from-end, to-start
+ ["copy-char"] = function(g,v) combine_assign (g,v[2],v[3],v[3],v[4],true) end, -- name, from, to
+ ["fallback-range"] = function(g,v) combine_assign (g,v[2],v[3],v[4],v[5],false) end, -- name, from-start, from-end, to-start
+ ["fallback-char"] = function(g,v) combine_assign (g,v[2],v[3],v[3],v[4],false) end, -- name, from, to
+ ["copy-names"] = function(g,v) combine_names (g,v[2],true) end,
+ ["fallback-names"] = function(g,v) combine_names (g,v[2],false) end,
+ ["feature"] = combine_feature,
+}
+
+function vf.combine(specification,tag)
+ local g = {
+ name = specification.name,
+ properties = {
+ virtualized = true,
+ },
+ fonts = {
+ },
+ characters = {
+ },
+ descriptions = {
+ },
+ specification = fastcopy(specification),
+ }
+ combine_process(g,combinations[tag])
+ return g
+end
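+
+-- For illustration only, a hedged sketch (not from this file): the commands
+-- table above is a small rule language that methods.install ties to a variant
+-- tag. The tag and fallback font name below are hypothetical; argument order
+-- follows the comments next to combiner.commands.
+--
+-- methods.install("demo", {
+--     { "initialize" },                                          -- copy the font named in the specification
+--     { "fallback-range", "somefallbackfont", 0x0100, 0x017F },  -- name, from, to (start defaults to from)
+-- })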
diff --git a/tex/context/base/grph-epd.lua b/tex/context/base/grph-epd.lua
index 4f9d46097..49022e464 100644
--- a/tex/context/base/grph-epd.lua
+++ b/tex/context/base/grph-epd.lua
@@ -1,25 +1,25 @@
-if not modules then modules = { } end modules ['grph-epd'] = {
- version = 1.001,
- comment = "companion to grph-epd.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local variables = interfaces.variables
-local settings_to_hash = utilities.parsers.settings_to_hash
-
--- todo: page, name, file, url
-
-local codeinjections = backends.codeinjections
-
-function figures.mergegoodies(optionlist)
- local options = settings_to_hash(optionlist)
- local all = options[variables.all] or options[variables.yes]
- if all or options[variables.reference] then
- codeinjections.mergereferences()
- end
- if all or options[variables.layer] then
- codeinjections.mergeviewerlayers()
- end
-end
+if not modules then modules = { } end modules ['grph-epd'] = {
+ version = 1.001,
+ comment = "companion to grph-epd.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local variables = interfaces.variables
+local settings_to_hash = utilities.parsers.settings_to_hash
+
+-- todo: page, name, file, url
+
+local codeinjections = backends.codeinjections
+
+function figures.mergegoodies(optionlist)
+ local options = settings_to_hash(optionlist)
+ local all = options[variables.all] or options[variables.yes]
+ if all or options[variables.reference] then
+ codeinjections.mergereferences()
+ end
+ if all or options[variables.layer] then
+ codeinjections.mergeviewerlayers()
+ end
+end
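+
+-- For illustration only, a hedged sketch (not from this file): the option list
+-- is a comma separated string; with the English interface the keys checked
+-- above resolve to "all", "yes", "reference" and "layer".
+--
+-- figures.mergegoodies("reference,layer") -- merge references and viewer layers
+-- figures.mergegoodies("all")             -- merge everything handled above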
diff --git a/tex/context/base/grph-fil.lua b/tex/context/base/grph-fil.lua
index 3449f1779..9ee90b07a 100644
--- a/tex/context/base/grph-fil.lua
+++ b/tex/context/base/grph-fil.lua
@@ -1,71 +1,71 @@
-if not modules then modules = { } end modules ['grph-fil'] = {
- version = 1.001,
- comment = "companion to grph-fig.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local type = type
-
-local trace_run = false trackers.register("graphic.runfile",function(v) trace_run = v end)
-local report_run = logs.reporter("graphics","run")
-
--- Historically running files is part of graphics processing, so this is why it
--- sits here but is part of the job namespace.
-
-local allocate = utilities.storage.allocate
-
-local collected = allocate()
-local tobesaved = allocate()
-
-local jobfiles = {
- collected = collected,
- tobesaved = tobesaved,
- forcerun = false, -- maybe a directive some day
-}
-
-job.files = jobfiles
-
-local function initializer()
- tobesaved = jobfiles.tobesaved
- collected = jobfiles.collected
-end
-
-job.register('job.files.collected', tobesaved, initializer)
-
-function jobfiles.run(name,action)
- local oldchecksum = collected[name]
- local newchecksum = file.checksum(name)
- if jobfiles.forcerun or not oldchecksum or oldchecksum ~= newchecksum then
- if trace_run then
- report_run("processing file, changes in %a, processing forced",name)
- end
- local ta = type(action)
- if ta == "function" then
- action(name)
- elseif ta == "string" and action ~= "" then
- os.execute(action)
- else
- report_run("processing file, no action given for processing %a",name)
- end
- elseif trace_run then
- report_run("processing file, no changes in %a, not processed",name)
- end
- tobesaved[name] = newchecksum
-end
-
---
-
-function jobfiles.context(name,options)
- if type(name) == "table" then
- local result = { }
- for i=1,#name do
- result[#result+1] = jobfiles.context(name[i],options)
- end
- return result
- else
- jobfiles.run(name,"context ".. (options or "") .. " " .. name)
- return file.replacesuffix(name,"pdf")
- end
-end
+if not modules then modules = { } end modules ['grph-fil'] = {
+ version = 1.001,
+ comment = "companion to grph-fig.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local type = type
+
+local trace_run = false trackers.register("graphic.runfile",function(v) trace_run = v end)
+local report_run = logs.reporter("graphics","run")
+
+-- Historically running files is part of graphics processing, so this is why it
+-- sits here but is part of the job namespace.
+
+local allocate = utilities.storage.allocate
+
+local collected = allocate()
+local tobesaved = allocate()
+
+local jobfiles = {
+ collected = collected,
+ tobesaved = tobesaved,
+ forcerun = false, -- maybe a directive some day
+}
+
+job.files = jobfiles
+
+local function initializer()
+ tobesaved = jobfiles.tobesaved
+ collected = jobfiles.collected
+end
+
+job.register('job.files.collected', tobesaved, initializer)
+
+function jobfiles.run(name,action)
+ local oldchecksum = collected[name]
+ local newchecksum = file.checksum(name)
+ if jobfiles.forcerun or not oldchecksum or oldchecksum ~= newchecksum then
+ if trace_run then
+ report_run("processing file, changes in %a, processing forced",name)
+ end
+ local ta = type(action)
+ if ta == "function" then
+ action(name)
+ elseif ta == "string" and action ~= "" then
+ os.execute(action)
+ else
+ report_run("processing file, no action given for processing %a",name)
+ end
+ elseif trace_run then
+ report_run("processing file, no changes in %a, not processed",name)
+ end
+ tobesaved[name] = newchecksum
+end
+
+--
+
+function jobfiles.context(name,options)
+ if type(name) == "table" then
+ local result = { }
+ for i=1,#name do
+ result[#result+1] = jobfiles.context(name[i],options)
+ end
+ return result
+ else
+ jobfiles.run(name,"context ".. (options or "") .. " " .. name)
+ return file.replacesuffix(name,"pdf")
+ end
+end
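+
+-- For illustration only, a hedged sketch (not from this file): jobfiles.run
+-- only (re)runs the action when the checksum stored in the previous run
+-- differs from the current one; the filenames below are hypothetical.
+--
+-- job.files.run("graphic.tex", "context graphic.tex")        -- string action: run a command
+-- job.files.run("data.lua", function(name) dofile(name) end) -- function action
+-- local pdf = job.files.context("graphic.tex")                -- wrapper, returns "graphic.pdf"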
diff --git a/tex/context/base/grph-inc.lua b/tex/context/base/grph-inc.lua
index 9603419ae..ae4d5642d 100644
--- a/tex/context/base/grph-inc.lua
+++ b/tex/context/base/grph-inc.lua
@@ -1,1609 +1,1609 @@
-if not modules then modules = { } end modules ['grph-inc'] = {
- version = 1.001,
- comment = "companion to grph-inc.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- todo: empty filename or only suffix always false (not found)
--- lowercase types
--- mps tex tmp svg
--- partly qualified
--- dimensions
--- use metatables
--- figures.boxnumber can go as we now can use names
--- avoid push
--- move some to command namespace
-
---[[
-The ConTeXt figure inclusion mechanisms are among the oldest code
-in ConTeXt and evolved into a complex whole. One reason is that we
-deal with the backend in an abstract way. What complicates matters is
-that we deal with internal graphics as well: TeX code, MetaPost code,
-etc. Later on figure databases were introduced, which resulted in
-a plug-in model for locating images. On top of that runs a conversion
-mechanism (with caching) and resource logging.
-
-Porting that to Lua is not that trivial because quite some
-status information is kept between all these stages. Of course, image
-reuse also has some price, and so I decided to implement the graphics
-inclusion in several layers: detection, loading, inclusion, etc.
-
-Object sharing and scaling can happen at each stage, depending on the
-way the resource is dealt with.
-
-The TeX-Lua mix is suboptimal. This has to do with the fact that we cannot
-run TeX code from within Lua. Some more functionality will move to Lua.
-]]--
-
-local format, lower, find, match, gsub, gmatch = string.format, string.lower, string.find, string.match, string.gsub, string.gmatch
-local texbox = tex.box
-local contains = table.contains
-local concat, insert, remove = table.concat, table.insert, table.remove
-local todimen = string.todimen
-local collapsepath = file.collapsepath
-local formatters = string.formatters
-local longtostring = string.longtostring
-local expandfilename = dir.expandname
-
-local P, lpegmatch = lpeg.P, lpeg.match
-
-local settings_to_array = utilities.parsers.settings_to_array
-local settings_to_hash = utilities.parsers.settings_to_hash
-local allocate = utilities.storage.allocate
-local setmetatableindex = table.setmetatableindex
-local replacetemplate = utilities.templates.replace
-
-local variables = interfaces.variables
-local codeinjections = backends.codeinjections
-local nodeinjections = backends.nodeinjections
-
-local trace_figures = false trackers.register("graphics.locating", function(v) trace_figures = v end)
-local trace_bases = false trackers.register("graphics.bases", function(v) trace_bases = v end)
-local trace_programs = false trackers.register("graphics.programs", function(v) trace_programs = v end)
-local trace_conversion = false trackers.register("graphics.conversion", function(v) trace_conversion = v end)
-local trace_inclusion = false trackers.register("graphics.inclusion", function(v) trace_inclusion = v end)
-
-local report_inclusion = logs.reporter("graphics","inclusion")
-
-local context, img = context, img
-
-local f_hash_part = formatters["%s->%s->%s"]
-local f_hash_full = formatters["%s->%s->%s->%s->%s->%s->%s"]
-
-local v_yes = variables.yes
-local v_low = variables.low
-local v_medium = variables.medium
-local v_high = variables.high
-local v_global = variables["global"]
-local v_local = variables["local"]
-local v_default = variables.default
-
-local maxdimen = 2^30-1
-
-function img.check(figure)
- if figure then
- local width = figure.width
- local height = figure.height
- if height > width then
- if height > maxdimen then
- figure.height = maxdimen
- figure.width = width * maxdimen/height
- report_inclusion("limiting natural dimensions of %a (%s)",figure.filename,"height")
- end
- elseif width > maxdimen then
- figure.width = maxdimen
- figure.height = height * maxdimen/width
- report_inclusion("limiting natural dimensions of %a (%s)",figure.filename,"width")
- end
- return figure
- end
-end
-
---- some extra img functions --- can become luat-img.lua
-
-local imgkeys = img.keys()
-
-function img.totable(imgtable)
- local result = { }
- for k=1,#imgkeys do
- local key = imgkeys[k]
- result[key] = imgtable[key]
- end
- return result
-end
-
-function img.serialize(i,...)
- return table.serialize(img.totable(i),...)
-end
-
-function img.print(i,...)
- return table.print(img.totable(i),...)
-end
-
-function img.clone(i,data)
- i.width = data.width or i.width
- i.height = data.height or i.height
- -- attr etc
- return i
-end
-
-local validsizes = table.tohash(img.boxes())
-local validtypes = table.tohash(img.types())
-
-function img.checksize(size)
- if size then
- size = gsub(size,"box","")
- return validsizes[size] and size or "crop"
- else
- return "crop"
- end
-end
-
-local indexed = { }
-
-function img.ofindex(n)
- return indexed[n]
-end
-
---- we can consider an grph-ini file
-
-figures = figures or { }
-local figures = figures
-
-figures.boxnumber = figures.boxnumber or 0
-figures.defaultsearch = true
-figures.defaultwidth = 0
-figures.defaultheight = 0
-figures.defaultdepth = 0
-figures.nofprocessed = 0
-figures.preferquality = true -- quality over location
-
-local figures_loaded = allocate() figures.loaded = figures_loaded
-local figures_used = allocate() figures.used = figures_used
-local figures_found = allocate() figures.found = figures_found
-local figures_suffixes = allocate() figures.suffixes = figures_suffixes
-local figures_patterns = allocate() figures.patterns = figures_patterns
-local figures_resources = allocate() figures.resources = figures_resources
-
-local existers = allocate() figures.existers = existers
-local checkers = allocate() figures.checkers = checkers
-local includers = allocate() figures.includers = includers
-local converters = allocate() figures.converters = converters
-local identifiers = allocate() figures.identifiers = identifiers
-local programs = allocate() figures.programs = programs
-
-local defaultformat = "pdf"
-local defaultprefix = "m_k_i_v_"
-
-figures.localpaths = allocate {
- ".", "..", "../.."
-}
-
-figures.cachepaths = allocate {
- prefix = "",
- path = ".",
- subpath = ".",
-}
-
-local figure_paths = allocate(table.copy(figures.localpaths))
-figures.paths = figure_paths
-
-local figures_order = allocate {
- "pdf", "mps", "jpg", "png", "jp2", "jbig", "svg", "eps", "tif", "gif", "mov", "buffer", "tex", "cld", "auto",
-}
-
-local figures_formats = allocate { -- magic and order will move here
- ["pdf"] = { list = { "pdf" } },
- ["mps"] = { patterns = { "mps", "%d+" } },
- ["jpg"] = { list = { "jpg", "jpeg" } },
- ["png"] = { list = { "png" } },
- ["jp2"] = { list = { "jp2" } },
- ["jbig"] = { list = { "jbig", "jbig2", "jb2" } },
- ["svg"] = { list = { "svg", "svgz" } },
- ["eps"] = { list = { "eps", "ai" } },
- ["gif"] = { list = { "gif" } },
- ["tif"] = { list = { "tif", "tiff" } },
- ["mov"] = { list = { "mov", "flv", "mp4" } }, -- "avi" is not supported
- ["buffer"] = { list = { "tmp", "buffer", "buf" } },
- ["tex"] = { list = { "tex" } },
- ["cld"] = { list = { "cld" } },
- ["auto"] = { list = { "auto" } },
-}
-
-local figures_magics = allocate {
- { format = "png", pattern = P("\137PNG\013\010\026\010") }, -- 89 50 4E 47 0D 0A 1A 0A,
- { format = "jpg", pattern = P("\255\216\255") }, -- FF D8 FF
- { format = "jp2", pattern = P("\000\000\000\012\106\080\032\032\013\010"), }, -- 00 00 00 0C 6A 50 20 20 0D 0A },
- { format = "gif", pattern = P("GIF") },
- { format = "pdf", pattern = (1 - P("%PDF"))^0 * P("%PDF") },
-}
-
-figures.formats = figures_formats -- frozen
-figures.magics = figures_magics -- frozen
-figures.order = figures_order -- frozen
-
--- We can set the order but only indirectly so that we can check for support.
-
-function figures.setorder(list) -- can be table or string
- if type(list) == "string" then
- list = settings_to_array(list)
- end
- if list and #list > 0 then
- figures_order = allocate()
- figures.order = figures_order
- local done = { } -- just to be sure in case the list is generated
- for i=1,#list do
- local l = lower(list[i])
- if figures_formats[l] and not done[l] then
- figures_order[#figures_order+1] = l
- done[l] = true
- end
- end
- report_inclusion("lookup order % a",figures_order)
- else
- -- invalid list
- end
-end
-
-function figures.guess(filename)
- local f = io.open(filename,'rb')
- if f then
- local str = f:read(100)
- f:close()
- if str then
- for i=1,#figures_magics do
- local pattern = figures_magics[i]
- if lpegmatch(pattern.pattern,str) then
- local format = pattern.format
- if trace_figures then
- report_inclusion("file %a has format %a",filename,format)
- end
- return format
- end
- end
- end
- end
-end
-
-local function setlookups() -- to be redone .. just set locals
- figures_suffixes = allocate()
- figures_patterns = allocate()
- for _, format in next, figures_order do
- local data = figures_formats[format]
- local list = data.list
- if list then
- for i=1,#list do
- figures_suffixes[list[i]] = format -- hash
- end
- else
- figures_suffixes[format] = format
- end
- local patterns = data.patterns
- if patterns then
- for i=1,#patterns do
- figures_patterns[#figures_patterns+1] = { patterns[i], format } -- array
- end
- end
- end
- figures.suffixes = figures_suffixes
- figures.patterns = figures_patterns
-end
-
-setlookups()
-
-figures.setlookups = setlookups
-
-function figures.registerresource(t)
- local n = #figures_resources + 1
- figures_resources[n] = t
- return n
-end
-
-local function register(tag,target,what)
- local data = figures_formats[target] -- resolver etc
- if not data then
- data = { }
- figures_formats[target] = data
- end
- local d = data[tag] -- list or pattern
- if d and not contains(d,what) then
- d[#d+1] = what -- suffix or patternspec
- else
- data[tag] = { what }
- end
- if not contains(figures_order,target) then
- figures_order[#figures_order+1] = target
- end
- setlookups()
-end
-
-function figures.registersuffix (suffix, target) register('list', target,suffix ) end
-function figures.registerpattern(pattern,target) register('pattern',target,pattern) end
-
-local last_locationset = last_locationset or nil
-local last_pathlist = last_pathlist or nil
-
-function figures.setpaths(locationset,pathlist)
- if last_locationset == locationset and last_pathlist == pathlist then
- -- this function can be called each graphic so we provide this optimization
- return
- end
- local t, h = figure_paths, settings_to_hash(locationset)
- if last_locationset ~= locationset then
- -- change == reset (actually, a 'reset' would indeed reset the list)
- if h[v_local] then
- t = table.fastcopy(figures.localpaths or { })
- else
- t = { }
- end
- figures.defaultsearch = h[v_default]
- last_locationset = locationset
- end
- if h[v_global] then
- local list = settings_to_array(pathlist)
- for i=1,#list do
- local s = list[i]
- if not contains(t,s) then
- t[#t+1] = s
- end
- end
- end
- figure_paths = t
- last_pathlist = pathlist
- figures.paths = figure_paths
- if trace_figures then
- report_inclusion("using locations %a",last_locationset)
- report_inclusion("using paths % a",figure_paths)
- end
-end
-
--- check conversions and handle it here
-
-function figures.hash(data)
- local status = data and data.status
- return (status and status.hash or tostring(status.private)) or "nohash" -- the
-end
-
--- interfacing to tex
-
-local function new() -- we could use metatables status -> used -> request but it needs testing
- local request = {
- name = false,
- label = false,
- format = false,
- page = false,
- width = false,
- height = false,
- preview = false,
- ["repeat"] = false,
- controls = false,
- display = false,
- mask = false,
- conversion = false,
- resolution = false,
- cache = false,
- prefix = false,
- size = false,
- }
- local used = {
- fullname = false,
- format = false,
- name = false,
- path = false,
- suffix = false,
- width = false,
- height = false,
- }
- local status = {
- status = 0,
- converted = false,
- cached = false,
- fullname = false,
- format = false,
- }
- -- this needs checking because we might check for nil, the test case
- -- is getfiguredimensions which then should return ~= 0
- -- setmetatableindex(status, used)
- -- setmetatableindex(used, request)
- return {
- request = request,
- used = used,
- status = status,
- }
-end
-
--- use table.insert|remove
-
-local lastfiguredata = nil -- will be topofstack or last so no { } (else problems with getfiguredimensions)
-local callstack = { }
-
-function figures.initialize(request)
- local figuredata = new()
- if request then
- -- request.width/height are strings and are only used when no natural dimensions
- -- can be determined; at some point the handlers might set them to numbers instead
- local w = tonumber(request.width) or 0
- local h = tonumber(request.height) or 0
- request.width = w > 0 and w or nil
- request.height = h > 0 and h or nil
- --
- request.page = math.max(tonumber(request.page) or 1,1)
- request.size = img.checksize(request.size)
- request.object = request.object == v_yes
- request["repeat"] = request["repeat"] == v_yes
- request.preview = request.preview == v_yes
- request.cache = request.cache ~= "" and request.cache
- request.prefix = request.prefix ~= "" and request.prefix
- request.format = request.format ~= "" and request.format
- table.merge(figuredata.request,request)
- end
- return figuredata
-end
-
-function figures.push(request)
- statistics.starttiming(figures)
- local figuredata = figures.initialize(request)
- insert(callstack,figuredata)
- lastfiguredata = figuredata
- return figuredata
-end
-
-function figures.pop()
- lastfiguredata = remove(callstack) or lastfiguredata
- statistics.stoptiming(figures)
-end
-
-function figures.current()
- return callstack[#callstack] or lastfiguredata
-end
-
-local function get(category,tag,default)
- local value = lastfiguredata and lastfiguredata[category]
- value = value and value[tag]
- if not value or value == "" or value == true then
- return default or ""
- else
- return value
- end
-end
-
-figures.get = get
-
-function commands.figurevariable(category,tag,default)
- context(get(category,tag,default))
-end
-
-function commands.figurestatus (tag,default) context(get("status", tag,default)) end
-function commands.figurerequest(tag,default) context(get("request",tag,default)) end
-function commands.figureused (tag,default) context(get("used", tag,default)) end
-
-function commands.figurefilepath() context(file.dirname (get("used","fullname"))) end
-function commands.figurefilename() context(file.nameonly(get("used","fullname"))) end
-function commands.figurefiletype() context(file.extname (get("used","fullname"))) end
-
--- todo: local path or cache path
-
-local function forbiddenname(filename)
- if not filename or filename == "" then
- return false
- end
- local expandedfullname = collapsepath(filename,true)
- local expandedinputname = collapsepath(file.addsuffix(environment.jobfilename,environment.jobfilesuffix),true)
- if expandedfullname == expandedinputname then
- report_inclusion("skipping graphic with same name as input filename %a, enforce suffix",expandedinputname)
- return true
- end
- local expandedoutputname = collapsepath(codeinjections.getoutputfilename(),true)
- if expandedfullname == expandedoutputname then
- report_inclusion("skipping graphic with same name as output filename %a, enforce suffix",expandedoutputname)
- return true
- end
-end
-
-local function register(askedname,specification)
- if not specification then
- specification = { }
- elseif forbiddenname(specification.fullname) then
- specification = { }
- else
- local format = specification.format
- if format then
- local conversion = specification.conversion
- local resolution = specification.resolution
- if conversion == "" then
- conversion = nil
- end
- if resolution == "" then
- resolution = nil
- end
- local newformat = conversion
- if not newformat or newformat == "" then
- newformat = defaultformat
- end
- if trace_conversion then
- report_inclusion("checking conversion of %a, fullname %a, old format %a, new format %a, conversion %a, resolution %a",
- askedname,specification.fullname,format,newformat,conversion or "default",resolution or "default")
- end
- -- quick hack
- local converter = (newformat ~= format or resolution) and converters[format]
- if converter then
- if converter[newformat] then
- converter = converter[newformat]
- else
- newformat = defaultformat
- if converter[newformat] then
- converter = converter[newformat]
- else
- converter = nil
- newformat = defaultformat
- end
- end
- elseif trace_conversion then
- report_inclusion("no converter for %a to %a",format,newformat)
- end
- if converter then
- local oldname = specification.fullname
- local newpath = file.dirname(oldname)
- local oldbase = file.basename(oldname)
- --
- -- problem: we can have weird filenames, like a.b.c (no suffix) and a.b.c.gif
- -- so we cannot safely remove a suffix (unless we do that for known suffixes)
- --
- -- local newbase = file.removesuffix(oldbase) -- assumes a known suffix
- --
- -- so we now have (also see *):
- --
- local newbase = oldbase
- --
- local fc = specification.cache or figures.cachepaths.path
- if fc and fc ~= "" and fc ~= "." then
- newpath = fc
- else
- newbase = defaultprefix .. newbase
- end
- if not file.is_writable(newpath) then
- if trace_conversion then
- report_inclusion("path %a is not writable, forcing conversion path %a",newpath,".")
- end
- newpath = "."
- end
- local subpath = specification.subpath or figures.cachepaths.subpath
- if subpath and subpath ~= "" and subpath ~= "." then
- newpath = newpath .. "/" .. subpath
- end
- local prefix = specification.prefix or figures.cachepaths.prefix
- if prefix and prefix ~= "" then
- newbase = prefix .. newbase
- end
- if resolution and resolution ~= "" then -- the order might change
- newbase = newbase .. "_" .. resolution
- end
- --
- -- see *, we had:
- --
- -- local newbase = file.addsuffix(newbase,newformat)
- --
- -- but now have (result of Aditya's web image testing):
- --
- -- as a side effect we can now have multiple fetches with different
- -- original figures_formats, not that it matters much (apart from older conversions
- -- sticking around)
- --
- local newbase = newbase .. "." .. newformat
- --
- local newname = file.join(newpath,newbase)
- dir.makedirs(newpath)
- oldname = collapsepath(oldname)
- newname = collapsepath(newname)
- local oldtime = lfs.attributes(oldname,'modification') or 0
- local newtime = lfs.attributes(newname,'modification') or 0
- if newtime == 0 or oldtime > newtime then
- if trace_conversion then
- report_inclusion("converting %a (%a) from %a to %a",askedname,oldname,format,newformat)
- end
- converter(oldname,newname,resolution or "")
- else
- if trace_conversion then
- report_inclusion("no need to convert %a (%a) from %a to %a",askedname,oldname,format,newformat)
- end
- end
- if io.exists(newname) and io.size(newname) > 0 then
- specification.foundname = oldname
- specification.fullname = newname
- specification.prefix = prefix
- specification.subpath = subpath
- specification.converted = true
- format = newformat
- if not figures_suffixes[format] then
- -- maybe the new format is lowres.png (saves entry in suffixes)
- -- so let's do this extra check
- local suffix = file.suffix(newformat)
- if figures_suffixes[suffix] then
- if trace_figures then
- report_inclusion("using suffix %a as format for %a",suffix,format)
- end
- format = suffix
- end
- end
- elseif io.exists(oldname) then
- specification.fullname = oldname -- was newname
- specification.converted = false
- end
- end
- end
- local found = figures_suffixes[format] -- validtypes[format]
- if not found then
- specification.found = false
- if trace_figures then
- report_inclusion("format %a is not supported",format)
- end
- else
- specification.found = true
- if trace_figures then
- if validtypes[format] then -- format?
- report_inclusion("format %a natively supported by backend",format)
- else
- report_inclusion("format %a supported by output file format",format)
- end
- end
- end
- end
- specification.foundname = specification.foundname or specification.fullname
- local askedhash = f_hash_part(askedname,specification.conversion or "default",specification.resolution or "default")
- figures_found[askedhash] = specification
- return specification
-end
-
-local resolve_too = false -- true
-
-local internalschemes = {
- file = true,
-}
-
-local function locate(request) -- name, format, cache
- -- not resolvers.cleanpath(request.name) as it fails on a!b.pdf and b~c.pdf
- -- todo: more restricted cleanpath
- local askedname = request.name
- local askedhash = f_hash_part(askedname,request.conversion or "default",request.resolution or "default")
- local foundname = figures_found[askedhash]
- if foundname then
- return foundname
- end
- --
- local askedcache = request.cache
- local askedconversion = request.conversion
- local askedresolution = request.resolution
- --
- if request.format == "" or request.format == "unknown" then
- request.format = nil
- end
- -- protocol check
- local hashed = url.hashed(askedname)
- if not hashed then
- -- go on
- elseif internalschemes[hashed.scheme] then
- local path = hashed.path
- if path and path ~= "" then
- askedname = path
- end
- else
- local foundname = resolvers.findbinfile(askedname)
- if not foundname or not lfs.isfile(foundname) then -- foundname can be dummy
- if trace_figures then
- report_inclusion("unknown url %a",askedname)
- end
- -- url not found
- return register(askedname)
- end
- local askedformat = request.format or file.suffix(askedname) or ""
- local guessedformat = figures.guess(foundname)
- if askedformat ~= guessedformat then
- if trace_figures then
- report_inclusion("url %a has unknown format",askedname)
- end
- -- url found, but wrong format
- return register(askedname)
- else
- if trace_figures then
- report_inclusion("url %a is resolved to %a",askedname,foundname)
- end
- return register(askedname, {
- askedname = askedname,
- fullname = foundname,
- format = askedformat,
- cache = askedcache,
- conversion = askedconversion,
- resolution = askedresolution,
- })
- end
- end
- -- we could use the hashed data instead
- local askedpath = file.is_rootbased_path(askedname)
- local askedbase = file.basename(askedname)
- local askedformat = request.format or file.suffix(askedname) or ""
- if askedformat ~= "" then
- askedformat = lower(askedformat)
- if trace_figures then
- report_inclusion("forcing format %a",askedformat)
- end
- local format = figures_suffixes[askedformat]
- if not format then
- for i=1,#figures_patterns do
- local pattern = figures_patterns[i]
- if find(askedformat,pattern[1]) then
- format = pattern[2]
- break
- end
- end
- end
- if format then
- local foundname, quitscanning, forcedformat = figures.exists(askedname,format,resolve_too) -- not askedformat
- if foundname then
- return register(askedname, {
- askedname = askedname,
- fullname = foundname, -- askedname,
- format = forcedformat or format,
- cache = askedcache,
- -- foundname = foundname, -- no
- conversion = askedconversion,
- resolution = askedresolution,
- })
- elseif quitscanning then
- return register(askedname)
- end
- elseif trace_figures then
- report_inclusion("unknown format %a",askedformat)
- end
- if askedpath then
- -- path and type given, todo: strip pieces of path
- local foundname, quitscanning, forcedformat = figures.exists(askedname,askedformat,resolve_too)
- if foundname then
- return register(askedname, {
- askedname = askedname,
- fullname = foundname, -- askedname,
- format = forcedformat or askedformat,
- cache = askedcache,
- conversion = askedconversion,
- resolution = askedresolution,
- })
- end
- else
- -- type given
- for i=1,#figure_paths do
- local path = figure_paths[i]
- local check = path .. "/" .. askedname
- -- we pass 'true' as it can be an url as well, as the type
- -- is given we don't waste much time
- local foundname, quitscanning, forcedformat = figures.exists(check,askedformat,resolve_too)
- if foundname then
- return register(check, {
- askedname = askedname,
- fullname = check,
- format = askedformat,
- cache = askedcache,
- conversion = askedconversion,
- resolution = askedresolution,
- })
- end
- end
- if figures.defaultsearch then
- local check = resolvers.findfile(askedname)
- if check and check ~= "" then
- return register(askedname, {
- askedname = askedname,
- fullname = check,
- format = askedformat,
- cache = askedcache,
- conversion = askedconversion,
- resolution = askedresolution,
- })
- end
- end
- end
- elseif askedpath then
- if trace_figures then
- report_inclusion("using rootbased path")
- end
- for i=1,#figures_order do
- local format = figures_order[i]
- local list = figures_formats[format].list or { format }
- for j=1,#list do
- local suffix = list[j]
- local check = file.addsuffix(askedname,suffix)
- local foundname, quitscanning, forcedformat = figures.exists(check,format,resolve_too)
- if foundname then
- return register(askedname, {
- askedname = askedname,
- fullname = foundname, -- check,
- format = forcedformat or format,
- cache = askedcache,
- conversion = askedconversion,
- resolution = askedresolution,
- })
- end
- end
- end
- else
- if figures.preferquality then
- if trace_figures then
- report_inclusion("unknown format, quality preferred")
- end
- for j=1,#figures_order do
- local format = figures_order[j]
- local list = figures_formats[format].list or { format }
- for k=1,#list do
- local suffix = list[k]
- -- local name = file.replacesuffix(askedbase,suffix)
- local name = file.replacesuffix(askedname,suffix)
- for i=1,#figure_paths do
- local path = figure_paths[i]
- local check = path .. "/" .. name
- local isfile = url.hashed(check).scheme == "file"
- if not isfile then
- if trace_figures then
- report_inclusion("warning: skipping path %a",path)
- end
- else
- local foundname, quitscanning, forcedformat = figures.exists(check,format,resolve_too) -- true)
- if foundname then
- return register(askedname, {
- askedname = askedname,
- fullname = foundname, -- check
- format = forcedformat or format,
- cache = askedcache,
- conversion = askedconversion,
- resolution = askedresolution,
- })
- end
- end
- end
- end
- end
- else -- 'location'
- if trace_figures then
- report_inclusion("unknown format, using path strategy")
- end
- for i=1,#figure_paths do
- local path = figure_paths[i]
- for j=1,#figures_order do
- local format = figures_order[j]
- local list = figures_formats[format].list or { format }
- for k=1,#list do
- local suffix = list[k]
- local check = path .. "/" .. file.replacesuffix(askedbase,suffix)
- local foundname, quitscanning, forcedformat = figures.exists(check,format,resolve_too)
- if foundname then
- return register(askedname, {
- askedname = askedname,
- fullname = foundname, -- check,
- format = forcedformat or format,
- cache = askedcache,
- conversion = askedconversion,
- resolution = askedresolution,
- })
- end
- end
- end
- end
- end
- if figures.defaultsearch then
- if trace_figures then
- report_inclusion("using default tex path")
- end
- for j=1,#figures_order do
- local format = figures_order[j]
- local list = figures_formats[format].list or { format }
- for k=1,#list do
- local suffix = list[k]
- local check = resolvers.findfile(file.replacesuffix(askedname,suffix))
- if check and check ~= "" then
- return register(askedname, {
- askedname = askedname,
- fullname = check,
- format = format,
- cache = askedcache,
- conversion = askedconversion,
- resolution = askedresolution,
- })
- end
- end
- end
- end
- end
- return register(askedname, { -- these two are needed for hashing 'found'
- conversion = askedconversion,
- resolution = askedresolution,
- })
-end
-
--- -- -- plugins -- -- --
-
-function identifiers.default(data)
- local dr, du, ds = data.request, data.used, data.status
- local l = locate(dr)
- local foundname = l.foundname
- local fullname = l.fullname or foundname
- if fullname then
- du.format = l.format or false
- du.fullname = fullname -- can be cached
- ds.fullname = foundname -- original
- ds.format = l.format
- ds.status = (l.found and 10) or 0
- end
- return data
-end
-
-function figures.identify(data)
- data = data or callstack[#callstack] or lastfiguredata
- local list = identifiers.list -- defined at the end
- for i=1,#list do
- local identifier = list[i]
- data = identifier(data)
- if data.status.status > 0 then
- break
- end
- end
- return data
-end
-
-function figures.exists(askedname,format,resolve)
- return (existers[format] or existers.generic)(askedname,resolve)
-end
-
-function figures.check(data)
- data = data or callstack[#callstack] or lastfiguredata
- return (checkers[data.status.format] or checkers.generic)(data)
-end
-
-function figures.include(data)
- data = data or callstack[#callstack] or lastfiguredata
- return (includers[data.status.format] or includers.generic)(data)
-end
-
-function figures.scale(data) -- will become lua code
- context.doscalefigure()
- return data
-end
-
-function figures.done(data)
- figures.nofprocessed = figures.nofprocessed + 1
- data = data or callstack[#callstack] or lastfiguredata
- local dr, du, ds, nr = data.request, data.used, data.status, figures.boxnumber
- local box = texbox[nr]
- ds.width = box.width
- ds.height = box.height
- ds.xscale = ds.width /(du.width or 1)
- ds.yscale = ds.height/(du.height or 1)
- ds.page = ds.page or du.page or dr.page -- sort of redundant but can be limited
- return data
-end
-
-function figures.dummy(data)
- data = data or callstack[#callstack] or lastfiguredata
- local dr, du, nr = data.request, data.used, figures.boxnumber
- local box = node.hpack(node.new("hlist")) -- we need to set the dir (luatex 0.60 buglet)
- du.width = du.width or figures.defaultwidth
- du.height = du.height or figures.defaultheight
- du.depth = du.depth or figures.defaultdepth
- -- box.dir = "TLT"
- box.width = du.width
- box.height = du.height
- box.depth = du.depth
- texbox[nr] = box -- hm, should be global (to be checked for consistency)
-end
-
--- -- -- generic -- -- --
-
-function existers.generic(askedname,resolve)
- -- not findbinfile
- local result
- if lfs.isfile(askedname) then
- result = askedname
- elseif resolve then
- result = resolvers.findbinfile(askedname) or ""
- if result == "" then result = false end
- end
- if trace_figures then
- if result then
- report_inclusion("%a resolved to %a",askedname,result)
- else
- report_inclusion("%a cannot be resolved",askedname)
- end
- end
- return result
-end
-
-function checkers.generic(data)
- local dr, du, ds = data.request, data.used, data.status
- local name = du.fullname or "unknown generic"
- local page = du.page or dr.page
- local size = dr.size or "crop"
- local color = dr.color or "natural"
- local mask = dr.mask or "none"
- local conversion = dr.conversion
- local resolution = dr.resolution
- if not conversion or conversion == "" then
- conversion = "unknown"
- end
- if not resolution or resolution == "" then
- resolution = "unknown"
- end
- local hash = f_hash_full(name,page,size,color,conversion,resolution,mask)
- local figure = figures_loaded[hash]
- if figure == nil then
- figure = img.new {
- filename = name,
- page = page,
- pagebox = dr.size,
- -- visiblefilename = "", -- this prohibits the full filename ending up in the file
- }
- codeinjections.setfigurecolorspace(data,figure)
- codeinjections.setfiguremask(data,figure)
- figure = figure and img.check(img.scan(figure)) or false
- local f, d = codeinjections.setfigurealternative(data,figure)
- figure, data = f or figure, d or data
- figures_loaded[hash] = figure
- if trace_conversion then
- report_inclusion("new graphic, using hash %a",hash)
- end
- else
- if trace_conversion then
- report_inclusion("existing graphic, using hash %a",hash)
- end
- end
- if figure then
- du.width = figure.width
- du.height = figure.height
- du.pages = figure.pages
- du.depth = figure.depth or 0
- du.colordepth = figure.colordepth or 0
- du.xresolution = figure.xres or 0
- du.yresolution = figure.yres or 0
- du.xsize = figure.xsize or 0
- du.ysize = figure.ysize or 0
- ds.private = figure
- ds.hash = hash
- end
- return data
-end
-
-function includers.generic(data)
- local dr, du, ds = data.request, data.used, data.status
- -- here we set the 'natural dimensions'
- dr.width = du.width
- dr.height = du.height
- local hash = figures.hash(data)
- local figure = figures_used[hash]
- -- figures.registerresource {
- -- filename = du.fullname,
- -- width = dr.width,
- -- height = dr.height,
- -- }
- if figure == nil then
- figure = ds.private
- if figure then
- figure = img.copy(figure)
- figure = figure and img.clone(figure,data.request) or false
- end
- figures_used[hash] = figure
- end
- if figure then
- local nr = figures.boxnumber
- -- it looks like we have a leak in attributes here .. todo
- local box = node.hpack(img.node(figure)) -- img.node(figure) no longer valid
- indexed[figure.index] = figure
- box.width, box.height, box.depth = figure.width, figure.height, 0 -- new, hm, tricky, we need to do that in tex (yet)
- texbox[nr] = box
- ds.objectnumber = figure.objnum
- context.relocateexternalfigure()
- end
- return data
-end
-
--- -- -- nongeneric -- -- --
-
-local function checkers_nongeneric(data,command) -- todo: macros and context.*
- local dr, du, ds = data.request, data.used, data.status
- local name = du.fullname or "unknown nongeneric"
- local hash = name
- if dr.object then
- -- hm, bugged ... waiting for an xform interface
- if not job.objects.get("FIG::"..hash) then
- if type(command) == "function" then
- command()
- end
- context.dosetfigureobject(hash)
- end
- context.doboxfigureobject(hash)
- elseif type(command) == "function" then
- command()
- end
- return data
-end
-
-local function includers_nongeneric(data)
- return data
-end
-
-checkers.nongeneric = checkers_nongeneric
-includers.nongeneric = includers_nongeneric
-
--- -- -- mov -- -- --
-
-function checkers.mov(data)
- local dr, du, ds = data.request, data.used, data.status
- local width = todimen(dr.width or figures.defaultwidth)
- local height = todimen(dr.height or figures.defaultheight)
- local foundname = du.fullname
- dr.width, dr.height = width, height
- du.width, du.height, du.foundname = width, height, foundname
- if trace_inclusion then
- report_inclusion("including movie %a, width %p, height %p",foundname,width,height)
- end
- -- we need to push the node.write in between ... we could make a shared helper for this
- context.startfoundexternalfigure(width .. "sp",height .. "sp")
- context(function()
- nodeinjections.insertmovie {
- width = width,
- height = height,
- factor = number.dimenfactors.bp,
- ["repeat"] = dr["repeat"],
- controls = dr.controls,
- preview = dr.preview,
- label = dr.label,
- foundname = foundname,
- }
- end)
- context.stopfoundexternalfigure()
- return data
-end
-
-includers.mov = includers.nongeneric
-
--- -- -- mps -- -- --
-
-internalschemes.mprun = true
-
-local function internal(askedname)
- local spec, mprun, mpnum = match(lower(askedname),"mprun([:%.]?)(.-)%.(%d+)")
- if spec ~= "" then
- return mprun, mpnum
- else
- return "", mpnum
- end
-end
-
-function existers.mps(askedname)
- local mprun, mpnum = internal(askedname)
- if mpnum then
- return askedname
- else
- return existers.generic(askedname)
- end
-end
-
-function checkers.mps(data)
- local mprun, mpnum = internal(data.used.fullname)
- if mpnum then
- return checkers_nongeneric(data,function() context.docheckfiguremprun(mprun,mpnum) end)
- else
- return checkers_nongeneric(data,function() context.docheckfiguremps(data.used.fullname) end)
- end
-end
-
-includers.mps = includers.nongeneric
-
--- -- -- tex -- -- --
-
-function existers.tex(askedname)
- askedname = resolvers.findfile(askedname)
- return askedname ~= "" and askedname or false
-end
-
-function checkers.tex(data)
- return checkers_nongeneric(data,function() context.docheckfiguretex(data.used.fullname) end)
-end
-
-includers.tex = includers.nongeneric
-
--- -- -- buffer -- -- --
-
-function existers.buffer(askedname)
- local name = file.nameonly(askedname)
- local okay = buffers.exists(name)
- return okay and name, true -- always quit scanning
-end
-
-function checkers.buffer(data)
- return checkers_nongeneric(data,function() context.docheckfigurebuffer(file.nameonly(data.used.fullname)) end)
-end
-
-includers.buffer = includers.nongeneric
-
--- -- -- auto -- -- --
-
-function existers.auto(askedname)
- local name = gsub(askedname, ".auto$", "")
- local format = figures.guess(name)
- if format then
- report_inclusion("format guess %a for %a",format,name)
- else
- report_inclusion("format guess for %a is not possible",name)
- end
- return format and name, true, format
-end
-
-checkers.auto = checkers.generic
-includers.auto = includers.generic
-
--- -- -- cld -- -- --
-
-existers.cld = existers.tex
-
-function checkers.cld(data)
- return checkers_nongeneric(data,function() context.docheckfigurecld(data.used.fullname) end)
-end
-
-includers.cld = includers.nongeneric
-
--- -- -- converters -- -- --
-
-local function makeoptions(options)
- local to = type(options)
- return (to == "table" and concat(options," ")) or (to == "string" and options) or ""
-end
-
--- programs.makeoptions = makeoptions
-
-local function runprogram(binary,argument,variables)
- local binary = match(binary,"[%S]+") -- to be sure
- if type(argument) == "table" then
- argument = concat(argument," ") -- for old times sake
- end
- if not os.which(binary) then
- report_inclusion("program %a is not installed, not running command: %s",binary,command)
- elseif not argument or argument == "" then
- report_inclusion("nothing to run, unknown program %a",binary)
- else
- local command = format([["%s" %s]],binary,replacetemplate(longtostring(argument),variables))
- if trace_conversion or trace_programs then
- report_inclusion("running command: %s",command)
- end
- os.spawn(command)
- end
-end
-
-programs.run = runprogram
-
--- -- -- eps & pdf -- -- --
---
--- \externalfigure[cow.eps]
--- \externalfigure[cow.pdf][conversion=stripped]
-
-local epsconverter = converters.eps or { }
-converters.eps = epsconverter
-converters.ps = epsconverter
-
-local epstopdf = {
- resolutions = {
- [v_low] = "screen",
- [v_medium] = "ebook",
- [v_high] = "prepress",
- },
- command = os.type == "windows" and "gswin32c" or "gs",
- -- -dProcessDSCComments=false
- argument = [[
- -q
- -sDEVICE=pdfwrite
- -dNOPAUSE
- -dNOCACHE
- -dBATCH
- -dAutoRotatePages=/None
- -dPDFSETTINGS=/%presets%
- -dEPSCrop
- -sOutputFile=%newname%
- %oldname%
- -c quit
- ]],
-}
-
-programs.epstopdf = epstopdf
-programs.gs = epstopdf
-
-function epsconverter.pdf(oldname,newname,resolution) -- the resolution interface might change
- local epstopdf = programs.epstopdf -- can be changed
- local presets = epstopdf.resolutions[resolution or ""] or epstopdf.resolutions.high
- runprogram(epstopdf.command, epstopdf.argument, {
- newname = newname,
- oldname = oldname,
- presets = presets,
- } )
-end
-
-epsconverter.default = epsconverter.pdf
-
-local pdfconverter = converters.pdf or { }
-converters.pdf = pdfconverter
-
-programs.pdftoeps = {
- command = "pdftops",
- argument = [[-eps "%oldname%" "%newname%"]],
-}
-
-pdfconverter.stripped = function(oldname,newname)
- local pdftoeps = programs.pdftoeps -- can be changed
- local epstopdf = programs.epstopdf -- can be changed
- local presets = epstopdf.resolutions[resolution or ""] or epstopdf.resolutions.high
- local tmpname = newname .. ".tmp"
- runprogram(pdftoeps.command, pdftoeps.argument, { oldname = oldname, newname = tmpname, presets = presets })
- runprogram(epstopdf.command, epstopdf.argument, { oldname = tmpname, newname = newname, presets = presets })
- os.remove(tmpname)
-end
-
-figures.registersuffix("stripped","pdf")
-
--- -- -- svg -- -- --
-
-local svgconverter = { }
-converters.svg = svgconverter
-converters.svgz = svgconverter
-
--- inkscape on windows only works with complete paths
-
-programs.inkscape = {
- command = "inkscape",
- pdfargument = [[
- "%oldname%"
- --export-dpi=600
- -A
- "%newname%"
- ]],
- pngargument = [[
- "%oldname%"
- --export-dpi=600
- --export-png="%newname%"
- ]],
-}
-
-function svgconverter.pdf(oldname,newname)
- local inkscape = programs.inkscape -- can be changed
- runprogram(inkscape.command, inkscape.pdfargument, {
- newname = expandfilename(newname),
- oldname = expandfilename(oldname),
- } )
-end
-
-function svgconverter.png(oldname,newname)
- local inkscape = programs.inkscape
- runprogram(inkscape.command, inkscape.pngargument, {
- newname = expandfilename(newname),
- oldname = expandfilename(oldname),
- } )
-end
-
-svgconverter.default = svgconverter.pdf
-
--- -- -- gif -- -- --
--- -- -- tif -- -- --
-
-local gifconverter = converters.gif or { }
-local tifconverter = converters.tif or { }
-local bmpconverter = converters.bmp or { }
-
-converters.gif = gifconverter
-converters.tif = tifconverter
-converters.bmp = bmpconverter
-
-programs.convert = {
- command = "gm", -- graphicmagick
- argument = [[convert "%oldname%" "%newname%"]],
-}
-
-local function converter(oldname,newname)
- local convert = programs.convert
- runprogram(convert.command, convert.argument, {
- newname = newname,
- oldname = oldname,
- } )
-end
-
-tifconverter.pdf = converter
-gifconverter.pdf = converter
-bmpconverter.pdf = converter
-
-gifconverter.default = converter
-tifconverter.default = converter
-bmpconverter.default = converter
-
--- todo: lowres
-
--- -- -- bases -- -- --
-
-local bases = allocate()
-figures.bases = bases
-
-local bases_list = nil -- index => { basename, fullname, xmlroot }
-local bases_used = nil -- [basename] => { basename, fullname, xmlroot } -- pointer to list
-local bases_found = nil
-local bases_enabled = false
-
-local function reset()
- bases_list = allocate()
- bases_used = allocate()
- bases_found = allocate()
- bases_enabled = false
- bases.list = bases_list
- bases.used = bases_used
- bases.found = bases_found
-end
-
-reset()
-
-function bases.use(basename)
- if basename == "reset" then
- reset()
- else
- basename = file.addsuffix(basename,"xml")
- if not bases_used[basename] then
- local t = { basename, nil, nil }
- bases_used[basename] = t
- bases_list[#bases_list+1] = t
- if not bases_enabled then
- bases_enabled = true
- xml.registerns("rlx","http://www.pragma-ade.com/schemas/rlx") -- we should be able to do this per xml file
- end
- if trace_bases then
- report_inclusion("registering base %a",basename)
- end
- end
- end
-end
-
-local function bases_find(basename,askedlabel)
- if trace_bases then
- report_inclusion("checking for %a in base %a",askedlabel,basename)
- end
- basename = file.addsuffix(basename,"xml")
- local t = bases_found[askedlabel]
- if t == nil then
- local base = bases_used[basename]
- local page = 0
- if base[2] == nil then
- -- no yet located
- for i=1,#figure_paths do
- local path = figure_paths[i]
- local xmlfile = path .. "/" .. basename
- if io.exists(xmlfile) then
- base[2] = xmlfile
- base[3] = xml.load(xmlfile)
- if trace_bases then
- report_inclusion("base %a loaded",xmlfile)
- end
- break
- end
- end
- end
- t = false
- if base[2] and base[3] then -- rlx:library
- for e in xml.collected(base[3],"/(*:library|figurelibrary)/*:figure/*:label") do
- page = page + 1
- if xml.text(e) == askedlabel then
- t = {
- base = file.replacesuffix(base[2],"pdf"),
- format = "pdf",
- name = xml.text(e,"../*:file"), -- to be checked
- page = page,
- }
- bases_found[askedlabel] = t
- if trace_bases then
- report_inclusion("figure %a found in base %a",askedlabel,base[2])
- end
- return t
- end
- end
- if trace_bases and not t then
- report_inclusion("figure %a not found in base %a",askedlabel,base[2])
- end
- end
- end
- return t
-end
-
--- we can access sequential or by name
-
-local function bases_locate(askedlabel)
- for i=1,#bases_list do
- local entry = bases_list[i]
- local t = bases_find(entry[1],askedlabel)
- if t then
- return t
- end
- end
- return false
-end
-
-function identifiers.base(data)
- if bases_enabled then
- local dr, du, ds = data.request, data.used, data.status
- local fbl = bases_locate(dr.name or dr.label)
- if fbl then
- du.page = fbl.page
- du.format = fbl.format
- du.fullname = fbl.base
- ds.fullname = fbl.name
- ds.format = fbl.format
- ds.page = fbl.page
- ds.status = 10
- end
- end
- return data
-end
-
-bases.locate = bases_locate
-bases.find = bases_find
-
-identifiers.list = {
- identifiers.base,
- identifiers.default
-}
-
--- tracing
-
-statistics.register("graphics processing time", function()
- local nofprocessed = figures.nofprocessed
- if nofprocessed > 0 then
- return format("%s seconds including tex, %s processed images", statistics.elapsedtime(figures),nofprocessed)
- else
- return nil
- end
-end)
-
--- helper
-
-function figures.applyratio(width,height,w,h) -- width and height are strings and w and h are numbers
- if not width or width == "" then
- if not height or height == "" then
- return figures.defaultwidth, figures.defaultheight
- else
- height = todimen(height)
- if w and h then
- return height * w/h, height
- else
- return figures.defaultwidth, height
- end
- end
- else
- width = todimen(width)
- if not height or height == "" then
- if w and h then
- return width, width * h/w
- else
- return width, figures.defaultheight
- end
- else
- return width, todimen(height)
- end
- end
-end
-
--- example of simple plugins:
---
--- figures.converters.png = {
--- png = function(oldname,newname,resolution)
--- local command = string.format('gm convert -depth 1 "%s" "%s"',oldname,newname)
--- logs.report(string.format("running command %s",command))
--- os.execute(command)
--- end,
--- }
-
--- local fig = figures.push { name = pdffile }
--- figures.identify()
--- figures.check()
--- local nofpages = fig.used.pages
--- figures.pop()
-
--- interfacing
-
-commands.setfigurelookuporder = figures.setorder
+if not modules then modules = { } end modules ['grph-inc'] = {
+ version = 1.001,
+ comment = "companion to grph-inc.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- todo: empty filename or only suffix always false (not found)
+-- lowercase types
+-- mps tex tmp svg
+-- partly qualified
+-- dimensions
+-- use metatables
+-- figures.boxnumber can go as we now can use names
+-- avoid push
+-- move some to command namespace
+
+--[[
+The ConTeXt figure inclusion mechanisms are among the oldest code
+in ConTeXt and evolved into a complex whole. One reason is that we
+deal with the backend in an abstract way. What complicates matters is
+that we deal with internal graphics as well: TeX code, MetaPost code,
+etc. Later on figure databases were introduced, which resulted in
+a plug-in model for locating images. On top of that runs a conversion
+mechanism (with caching) and resource logging.
+
+Porting that to Lua is not that trivial because quite some
+status information is kept between all these stages. Of course, image
+reuse also comes at some price, and so I decided to implement the graphics
+inclusion in several layers: detection, loading, inclusion, etc.
+
+Object sharing and scaling can happen at each stage, depending on the
+way the resource is dealt with.
+
+The TeX-Lua mix is suboptimal. This has to do with the fact that we cannot
+run TeX code from within Lua. Some more functionality will move to Lua.
+]]--
+
+local format, lower, find, match, gsub, gmatch = string.format, string.lower, string.find, string.match, string.gsub, string.gmatch
+local texbox = tex.box
+local contains = table.contains
+local concat, insert, remove = table.concat, table.insert, table.remove
+local todimen = string.todimen
+local collapsepath = file.collapsepath
+local formatters = string.formatters
+local longtostring = string.longtostring
+local expandfilename = dir.expandname
+
+local P, lpegmatch = lpeg.P, lpeg.match
+
+local settings_to_array = utilities.parsers.settings_to_array
+local settings_to_hash = utilities.parsers.settings_to_hash
+local allocate = utilities.storage.allocate
+local setmetatableindex = table.setmetatableindex
+local replacetemplate = utilities.templates.replace
+
+local variables = interfaces.variables
+local codeinjections = backends.codeinjections
+local nodeinjections = backends.nodeinjections
+
+local trace_figures = false trackers.register("graphics.locating", function(v) trace_figures = v end)
+local trace_bases = false trackers.register("graphics.bases", function(v) trace_bases = v end)
+local trace_programs = false trackers.register("graphics.programs", function(v) trace_programs = v end)
+local trace_conversion = false trackers.register("graphics.conversion", function(v) trace_conversion = v end)
+local trace_inclusion = false trackers.register("graphics.inclusion", function(v) trace_inclusion = v end)
+
+local report_inclusion = logs.reporter("graphics","inclusion")
+
+local context, img = context, img
+
+local f_hash_part = formatters["%s->%s->%s"]
+local f_hash_full = formatters["%s->%s->%s->%s->%s->%s->%s"]
+
+local v_yes = variables.yes
+local v_low = variables.low
+local v_medium = variables.medium
+local v_high = variables.high
+local v_global = variables["global"]
+local v_local = variables["local"]
+local v_default = variables.default
+
+local maxdimen = 2^30-1
+
+function img.check(figure)
+ if figure then
+ local width = figure.width
+ local height = figure.height
+ if height > width then
+ if height > maxdimen then
+ figure.height = maxdimen
+ figure.width = width * maxdimen/height
+ report_inclusion("limiting natural dimensions of %a (%s)",figure.filename,"height")
+ end
+ elseif width > maxdimen then
+ figure.width = maxdimen
+ figure.height = height * maxdimen/width
+ report_inclusion("limiting natural dimensions of %a (%s)",figure.filename,"width")
+ end
+ return figure
+ end
+end
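+
+-- For instance, a sketch of the typical flow (the filename is hypothetical):
+--
+-- local figure = img.check(img.scan { filename = "very-big.png" })
+-- -- figure.width and figure.height now fit within maxdimen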
+
+--- some extra img functions --- can become luat-img.lua
+
+local imgkeys = img.keys()
+
+function img.totable(imgtable)
+ local result = { }
+ for k=1,#imgkeys do
+ local key = imgkeys[k]
+ result[key] = imgtable[key]
+ end
+ return result
+end
+
+function img.serialize(i,...)
+ return table.serialize(img.totable(i),...)
+end
+
+function img.print(i,...)
+ return table.print(img.totable(i),...)
+end
+
+function img.clone(i,data)
+ i.width = data.width or i.width
+ i.height = data.height or i.height
+ -- attr etc
+ return i
+end
+
+local validsizes = table.tohash(img.boxes())
+local validtypes = table.tohash(img.types())
+
+function img.checksize(size)
+ if size then
+ size = gsub(size,"box","")
+ return validsizes[size] and size or "crop"
+ else
+ return "crop"
+ end
+end
+
+local indexed = { }
+
+function img.ofindex(n)
+ return indexed[n]
+end
+
+--- we could consider a grph-ini file
+
+figures = figures or { }
+local figures = figures
+
+figures.boxnumber = figures.boxnumber or 0
+figures.defaultsearch = true
+figures.defaultwidth = 0
+figures.defaultheight = 0
+figures.defaultdepth = 0
+figures.nofprocessed = 0
+figures.preferquality = true -- quality over location
+
+local figures_loaded = allocate() figures.loaded = figures_loaded
+local figures_used = allocate() figures.used = figures_used
+local figures_found = allocate() figures.found = figures_found
+local figures_suffixes = allocate() figures.suffixes = figures_suffixes
+local figures_patterns = allocate() figures.patterns = figures_patterns
+local figures_resources = allocate() figures.resources = figures_resources
+
+local existers = allocate() figures.existers = existers
+local checkers = allocate() figures.checkers = checkers
+local includers = allocate() figures.includers = includers
+local converters = allocate() figures.converters = converters
+local identifiers = allocate() figures.identifiers = identifiers
+local programs = allocate() figures.programs = programs
+
+local defaultformat = "pdf"
+local defaultprefix = "m_k_i_v_"
+
+figures.localpaths = allocate {
+ ".", "..", "../.."
+}
+
+figures.cachepaths = allocate {
+ prefix = "",
+ path = ".",
+ subpath = ".",
+}
+
+local figure_paths = allocate(table.copy(figures.localpaths))
+figures.paths = figure_paths
+
+local figures_order = allocate {
+ "pdf", "mps", "jpg", "png", "jp2", "jbig", "svg", "eps", "tif", "gif", "mov", "buffer", "tex", "cld", "auto",
+}
+
+local figures_formats = allocate { -- magic and order will move here
+ ["pdf"] = { list = { "pdf" } },
+ ["mps"] = { patterns = { "mps", "%d+" } },
+ ["jpg"] = { list = { "jpg", "jpeg" } },
+ ["png"] = { list = { "png" } },
+ ["jp2"] = { list = { "jp2" } },
+ ["jbig"] = { list = { "jbig", "jbig2", "jb2" } },
+ ["svg"] = { list = { "svg", "svgz" } },
+ ["eps"] = { list = { "eps", "ai" } },
+ ["gif"] = { list = { "gif" } },
+ ["tif"] = { list = { "tif", "tiff" } },
+ ["mov"] = { list = { "mov", "flv", "mp4" } }, -- "avi" is not supported
+ ["buffer"] = { list = { "tmp", "buffer", "buf" } },
+ ["tex"] = { list = { "tex" } },
+ ["cld"] = { list = { "cld" } },
+ ["auto"] = { list = { "auto" } },
+}
+
+local figures_magics = allocate {
+ { format = "png", pattern = P("\137PNG\013\010\026\010") }, -- 89 50 4E 47 0D 0A 1A 0A,
+ { format = "jpg", pattern = P("\255\216\255") }, -- FF D8 FF
+ { format = "jp2", pattern = P("\000\000\000\012\106\080\032\032\013\010"), }, -- 00 00 00 0C 6A 50 20 20 0D 0A },
+ { format = "gif", pattern = P("GIF") },
+ { format = "pdf", pattern = (1 - P("%PDF"))^0 * P("%PDF") },
+}
+
+figures.formats = figures_formats -- frozen
+figures.magics = figures_magics -- frozen
+figures.order = figures_order -- frozen
+
+-- We can set the order but only indirectly so that we can check for support.
+
+function figures.setorder(list) -- can be table or string
+ if type(list) == "string" then
+ list = settings_to_array(list)
+ end
+ if list and #list > 0 then
+ figures_order = allocate()
+ figures.order = figures_order
+ local done = { } -- just to be sure in case the list is generated
+ for i=1,#list do
+ local l = lower(list[i])
+ if figures_formats[l] and not done[l] then
+ figures_order[#figures_order+1] = l
+ done[l] = true
+ end
+ end
+ report_inclusion("lookup order % a",figures_order)
+ else
+ -- invalid list
+ end
+end
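+
+-- Both call forms are accepted; entries for unsupported formats, like the
+-- made-up "xyz" below, are silently dropped:
+--
+-- figures.setorder("pdf,png,jpg,xyz")
+-- figures.setorder { "pdf", "mps", "png" }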
+
+function figures.guess(filename)
+ local f = io.open(filename,'rb')
+ if f then
+ local str = f:read(100)
+ f:close()
+ if str then
+ for i=1,#figures_magics do
+ local pattern = figures_magics[i]
+ if lpegmatch(pattern.pattern,str) then
+ local format = pattern.format
+ if trace_figures then
+ report_inclusion("file %a has format %a",filename,format)
+ end
+ return format
+ end
+ end
+ end
+ end
+end
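+
+-- The first 100 bytes are matched against the magic patterns above, so the
+-- suffix may lie; a sketch (the filename is hypothetical):
+--
+-- local format = figures.guess("graphics/cow.pdf") -- "pdf" when the content really is pdf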
+
+local function setlookups() -- to be redone .. just set locals
+ figures_suffixes = allocate()
+ figures_patterns = allocate()
+ for _, format in next, figures_order do
+ local data = figures_formats[format]
+ local list = data.list
+ if list then
+ for i=1,#list do
+ figures_suffixes[list[i]] = format -- hash
+ end
+ else
+ figures_suffixes[format] = format
+ end
+ local patterns = data.patterns
+ if patterns then
+ for i=1,#patterns do
+ figures_patterns[#figures_patterns+1] = { patterns[i], format } -- array
+ end
+ end
+ end
+ figures.suffixes = figures_suffixes
+ figures.patterns = figures_patterns
+end
+
+setlookups()
+
+figures.setlookups = setlookups
+
+function figures.registerresource(t)
+ local n = #figures_resources + 1
+ figures_resources[n] = t
+ return n
+end
+
+local function register(tag,target,what)
+ local data = figures_formats[target] -- resolver etc
+ if not data then
+ data = { }
+ figures_formats[target] = data
+ end
+    local d = data[tag] -- list or pattern
+    if not d then
+        data[tag] = { what }
+    elseif not contains(d,what) then
+        d[#d+1] = what -- suffix or patternspec
+    end
+ if not contains(figures_order,target) then
+ figures_order[#figures_order+1] = target
+ end
+ setlookups()
+end
+
+function figures.registersuffix (suffix, target) register('list', target,suffix ) end
+function figures.registerpattern(pattern,target) register('pattern',target,pattern) end
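+
+-- Hypothetical registrations as a sketch; registersuffix is used further on
+-- for "stripped" and "swf", and patterns are matched against the asked
+-- format (compare the "%d+" entry for mps above):
+--
+-- figures.registersuffix ("jfif","jpg")
+-- figures.registerpattern("^%d+$","mps")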
+
+local last_locationset = last_locationset or nil
+local last_pathlist = last_pathlist or nil
+
+function figures.setpaths(locationset,pathlist)
+ if last_locationset == locationset and last_pathlist == pathlist then
+        -- this function can be called for each graphic, so we provide this optimization
+ return
+ end
+ local t, h = figure_paths, settings_to_hash(locationset)
+ if last_locationset ~= locationset then
+        -- change == reset: a changed locationset starts from a fresh path list
+ if h[v_local] then
+ t = table.fastcopy(figures.localpaths or { })
+ else
+ t = { }
+ end
+ figures.defaultsearch = h[v_default]
+ last_locationset = locationset
+ end
+ if h[v_global] then
+ local list = settings_to_array(pathlist)
+ for i=1,#list do
+ local s = list[i]
+ if not contains(t,s) then
+ t[#t+1] = s
+ end
+ end
+ end
+ figure_paths = t
+ last_pathlist = pathlist
+ figures.paths = figure_paths
+ if trace_figures then
+ report_inclusion("using locations %a",last_locationset)
+ report_inclusion("using paths % a",figure_paths)
+ end
+end
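+
+-- Normally this is driven from the TeX end; a direct call would look like
+-- this (the directories are hypothetical, the keywords are the English
+-- interface values):
+--
+-- figures.setpaths("local,global","images,../images")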
+
+-- check conversions and handle it here
+
+function figures.hash(data)
+ local status = data and data.status
+    return (status and status.hash or tostring(status.private)) or "nohash" -- the hash ends up as key in the figures_used cache
+end
+
+-- interfacing to tex
+
+local function new() -- we could use metatables status -> used -> request but it needs testing
+ local request = {
+ name = false,
+ label = false,
+ format = false,
+ page = false,
+ width = false,
+ height = false,
+ preview = false,
+ ["repeat"] = false,
+ controls = false,
+ display = false,
+ mask = false,
+ conversion = false,
+ resolution = false,
+ cache = false,
+ prefix = false,
+ size = false,
+ }
+ local used = {
+ fullname = false,
+ format = false,
+ name = false,
+ path = false,
+ suffix = false,
+ width = false,
+ height = false,
+ }
+ local status = {
+ status = 0,
+ converted = false,
+ cached = false,
+ fullname = false,
+ format = false,
+ }
+    -- this needs checking because we might check for nil; the test case
+    -- is getfiguredimensions, which should then return ~= 0
+ -- setmetatableindex(status, used)
+ -- setmetatableindex(used, request)
+ return {
+ request = request,
+ used = used,
+ status = status,
+ }
+end
+
+-- use table.insert|remove
+
+local lastfiguredata = nil -- will be the top of the stack or the last one popped, so not { } (else problems with getfiguredimensions)
+local callstack = { }
+
+function figures.initialize(request)
+ local figuredata = new()
+ if request then
+ -- request.width/height are strings and are only used when no natural dimensions
+ -- can be determined; at some point the handlers might set them to numbers instead
+ local w = tonumber(request.width) or 0
+ local h = tonumber(request.height) or 0
+ request.width = w > 0 and w or nil
+ request.height = h > 0 and h or nil
+ --
+ request.page = math.max(tonumber(request.page) or 1,1)
+ request.size = img.checksize(request.size)
+ request.object = request.object == v_yes
+ request["repeat"] = request["repeat"] == v_yes
+ request.preview = request.preview == v_yes
+ request.cache = request.cache ~= "" and request.cache
+ request.prefix = request.prefix ~= "" and request.prefix
+ request.format = request.format ~= "" and request.format
+ table.merge(figuredata.request,request)
+ end
+ return figuredata
+end
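+
+-- A sketch of the normalization (the request values are hypothetical and
+-- mimic the strings that come from the TeX end):
+--
+-- local data = figures.initialize { name = "cow.pdf", page = "2", width = "", object = "yes" }
+-- -- data.request.page   -> 2 (a number, at least 1)
+-- -- data.request.width  -> nil (no explicit width requested)
+-- -- data.request.object -> true (compared against the interface value for yes)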
+
+function figures.push(request)
+ statistics.starttiming(figures)
+ local figuredata = figures.initialize(request)
+ insert(callstack,figuredata)
+ lastfiguredata = figuredata
+ return figuredata
+end
+
+function figures.pop()
+ lastfiguredata = remove(callstack) or lastfiguredata
+ statistics.stoptiming(figures)
+end
+
+function figures.current()
+ return callstack[#callstack] or lastfiguredata
+end
+
+local function get(category,tag,default)
+ local value = lastfiguredata and lastfiguredata[category]
+ value = value and value[tag]
+ if not value or value == "" or value == true then
+ return default or ""
+ else
+ return value
+ end
+end
+
+figures.get = get
+
+function commands.figurevariable(category,tag,default)
+ context(get(category,tag,default))
+end
+
+function commands.figurestatus (tag,default) context(get("status", tag,default)) end
+function commands.figurerequest(tag,default) context(get("request",tag,default)) end
+function commands.figureused (tag,default) context(get("used", tag,default)) end
+
+function commands.figurefilepath() context(file.dirname (get("used","fullname"))) end
+function commands.figurefilename() context(file.nameonly(get("used","fullname"))) end
+function commands.figurefiletype() context(file.extname (get("used","fullname"))) end
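+
+-- On the Lua side the same accessors can be used directly; a sketch (the
+-- fallback values are just examples):
+--
+-- local fullname = figures.get("used","fullname","unknown")
+-- local page     = figures.get("status","page",1)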
+
+-- todo: local path or cache path
+
+local function forbiddenname(filename)
+ if not filename or filename == "" then
+ return false
+ end
+ local expandedfullname = collapsepath(filename,true)
+ local expandedinputname = collapsepath(file.addsuffix(environment.jobfilename,environment.jobfilesuffix),true)
+ if expandedfullname == expandedinputname then
+ report_inclusion("skipping graphic with same name as input filename %a, enforce suffix",expandedinputname)
+ return true
+ end
+ local expandedoutputname = collapsepath(codeinjections.getoutputfilename(),true)
+ if expandedfullname == expandedoutputname then
+ report_inclusion("skipping graphic with same name as output filename %a, enforce suffix",expandedoutputname)
+ return true
+ end
+end
+
+local function register(askedname,specification)
+ if not specification then
+ specification = { }
+ elseif forbiddenname(specification.fullname) then
+ specification = { }
+ else
+ local format = specification.format
+ if format then
+ local conversion = specification.conversion
+ local resolution = specification.resolution
+ if conversion == "" then
+ conversion = nil
+ end
+ if resolution == "" then
+ resolution = nil
+ end
+ local newformat = conversion
+ if not newformat or newformat == "" then
+ newformat = defaultformat
+ end
+ if trace_conversion then
+ report_inclusion("checking conversion of %a, fullname %a, old format %a, new format %a, conversion %a, resolution %a",
+ askedname,specification.fullname,format,newformat,conversion or "default",resolution or "default")
+ end
+ -- quick hack
+ local converter = (newformat ~= format or resolution) and converters[format]
+ if converter then
+ if converter[newformat] then
+ converter = converter[newformat]
+ else
+ newformat = defaultformat
+ if converter[newformat] then
+ converter = converter[newformat]
+ else
+ converter = nil
+ newformat = defaultformat
+ end
+ end
+ elseif trace_conversion then
+ report_inclusion("no converter for %a to %a",format,newformat)
+ end
+ if converter then
+ local oldname = specification.fullname
+ local newpath = file.dirname(oldname)
+ local oldbase = file.basename(oldname)
+ --
+ -- problem: we can have weird filenames, like a.b.c (no suffix) and a.b.c.gif
+ -- so we cannot safely remove a suffix (unless we do that for known suffixes)
+ --
+ -- local newbase = file.removesuffix(oldbase) -- assumes a known suffix
+ --
+ -- so we now have (also see *):
+ --
+ local newbase = oldbase
+ --
+ local fc = specification.cache or figures.cachepaths.path
+ if fc and fc ~= "" and fc ~= "." then
+ newpath = fc
+ else
+ newbase = defaultprefix .. newbase
+ end
+ if not file.is_writable(newpath) then
+ if trace_conversion then
+ report_inclusion("path %a is not writable, forcing conversion path %a",newpath,".")
+ end
+ newpath = "."
+ end
+ local subpath = specification.subpath or figures.cachepaths.subpath
+ if subpath and subpath ~= "" and subpath ~= "." then
+ newpath = newpath .. "/" .. subpath
+ end
+ local prefix = specification.prefix or figures.cachepaths.prefix
+ if prefix and prefix ~= "" then
+ newbase = prefix .. newbase
+ end
+ if resolution and resolution ~= "" then -- the order might change
+ newbase = newbase .. "_" .. resolution
+ end
+ --
+ -- see *, we had:
+ --
+ -- local newbase = file.addsuffix(newbase,newformat)
+ --
+ -- but now have (result of Aditya's web image testing):
+ --
+ -- as a side effect we can now have multiple fetches with different
+ -- original figures_formats, not that it matters much (apart from older conversions
+ -- sticking around)
+ --
+ local newbase = newbase .. "." .. newformat
+ --
+ local newname = file.join(newpath,newbase)
+ dir.makedirs(newpath)
+ oldname = collapsepath(oldname)
+ newname = collapsepath(newname)
+ local oldtime = lfs.attributes(oldname,'modification') or 0
+ local newtime = lfs.attributes(newname,'modification') or 0
+ if newtime == 0 or oldtime > newtime then
+ if trace_conversion then
+ report_inclusion("converting %a (%a) from %a to %a",askedname,oldname,format,newformat)
+ end
+ converter(oldname,newname,resolution or "")
+ else
+ if trace_conversion then
+ report_inclusion("no need to convert %a (%a) from %a to %a",askedname,oldname,format,newformat)
+ end
+ end
+ if io.exists(newname) and io.size(newname) > 0 then
+ specification.foundname = oldname
+ specification.fullname = newname
+ specification.prefix = prefix
+ specification.subpath = subpath
+ specification.converted = true
+ format = newformat
+ if not figures_suffixes[format] then
+ -- maybe the new format is lowres.png (saves entry in suffixes)
+                        -- so let's do this extra check
+ local suffix = file.suffix(newformat)
+ if figures_suffixes[suffix] then
+ if trace_figures then
+ report_inclusion("using suffix %a as format for %a",suffix,format)
+ end
+ format = suffix
+ end
+ end
+ elseif io.exists(oldname) then
+ specification.fullname = oldname -- was newname
+ specification.converted = false
+ end
+ end
+ end
+ local found = figures_suffixes[format] -- validtypes[format]
+ if not found then
+ specification.found = false
+ if trace_figures then
+ report_inclusion("format %a is not supported",format)
+ end
+ else
+ specification.found = true
+ if trace_figures then
+ if validtypes[format] then -- format?
+ report_inclusion("format %a natively supported by backend",format)
+ else
+ report_inclusion("format %a supported by output file format",format)
+ end
+ end
+ end
+ end
+ specification.foundname = specification.foundname or specification.fullname
+ local askedhash = f_hash_part(askedname,specification.conversion or "default",specification.resolution or "default")
+ figures_found[askedhash] = specification
+ return specification
+end
+
+local resolve_too = false -- true
+
+local internalschemes = {
+ file = true,
+}
+
+local function locate(request) -- name, format, cache
+ -- not resolvers.cleanpath(request.name) as it fails on a!b.pdf and b~c.pdf
+ -- todo: more restricted cleanpath
+ local askedname = request.name
+ local askedhash = f_hash_part(askedname,request.conversion or "default",request.resolution or "default")
+ local foundname = figures_found[askedhash]
+ if foundname then
+ return foundname
+ end
+ --
+ local askedcache = request.cache
+ local askedconversion = request.conversion
+ local askedresolution = request.resolution
+ --
+ if request.format == "" or request.format == "unknown" then
+ request.format = nil
+ end
+ -- protocol check
+ local hashed = url.hashed(askedname)
+ if not hashed then
+ -- go on
+ elseif internalschemes[hashed.scheme] then
+ local path = hashed.path
+ if path and path ~= "" then
+ askedname = path
+ end
+ else
+ local foundname = resolvers.findbinfile(askedname)
+ if not foundname or not lfs.isfile(foundname) then -- foundname can be dummy
+ if trace_figures then
+ report_inclusion("unknown url %a",askedname)
+ end
+ -- url not found
+ return register(askedname)
+ end
+ local askedformat = request.format or file.suffix(askedname) or ""
+ local guessedformat = figures.guess(foundname)
+ if askedformat ~= guessedformat then
+ if trace_figures then
+ report_inclusion("url %a has unknown format",askedname)
+ end
+ -- url found, but wrong format
+ return register(askedname)
+ else
+ if trace_figures then
+ report_inclusion("url %a is resolved to %a",askedname,foundname)
+ end
+ return register(askedname, {
+ askedname = askedname,
+ fullname = foundname,
+ format = askedformat,
+ cache = askedcache,
+ conversion = askedconversion,
+ resolution = askedresolution,
+ })
+ end
+ end
+ -- we could use the hashed data instead
+    local askedpath = file.is_rootbased_path(askedname)
+ local askedbase = file.basename(askedname)
+ local askedformat = request.format or file.suffix(askedname) or ""
+ if askedformat ~= "" then
+ askedformat = lower(askedformat)
+ if trace_figures then
+ report_inclusion("forcing format %a",askedformat)
+ end
+ local format = figures_suffixes[askedformat]
+ if not format then
+ for i=1,#figures_patterns do
+ local pattern = figures_patterns[i]
+ if find(askedformat,pattern[1]) then
+ format = pattern[2]
+ break
+ end
+ end
+ end
+ if format then
+ local foundname, quitscanning, forcedformat = figures.exists(askedname,format,resolve_too) -- not askedformat
+ if foundname then
+ return register(askedname, {
+ askedname = askedname,
+ fullname = foundname, -- askedname,
+ format = forcedformat or format,
+ cache = askedcache,
+ -- foundname = foundname, -- no
+ conversion = askedconversion,
+ resolution = askedresolution,
+ })
+ elseif quitscanning then
+ return register(askedname)
+ end
+ elseif trace_figures then
+ report_inclusion("unknown format %a",askedformat)
+ end
+ if askedpath then
+ -- path and type given, todo: strip pieces of path
+ local foundname, quitscanning, forcedformat = figures.exists(askedname,askedformat,resolve_too)
+ if foundname then
+ return register(askedname, {
+ askedname = askedname,
+ fullname = foundname, -- askedname,
+ format = forcedformat or askedformat,
+ cache = askedcache,
+ conversion = askedconversion,
+ resolution = askedresolution,
+ })
+ end
+ else
+ -- type given
+ for i=1,#figure_paths do
+ local path = figure_paths[i]
+ local check = path .. "/" .. askedname
+                -- we pass 'true' as it can be a url as well; since the type
+                -- is given we don't waste much time
+ local foundname, quitscanning, forcedformat = figures.exists(check,askedformat,resolve_too)
+ if foundname then
+ return register(check, {
+ askedname = askedname,
+ fullname = check,
+ format = askedformat,
+ cache = askedcache,
+ conversion = askedconversion,
+ resolution = askedresolution,
+ })
+ end
+ end
+ if figures.defaultsearch then
+ local check = resolvers.findfile(askedname)
+ if check and check ~= "" then
+ return register(askedname, {
+ askedname = askedname,
+ fullname = check,
+ format = askedformat,
+ cache = askedcache,
+ conversion = askedconversion,
+ resolution = askedresolution,
+ })
+ end
+ end
+ end
+ elseif askedpath then
+ if trace_figures then
+ report_inclusion("using rootbased path")
+ end
+ for i=1,#figures_order do
+ local format = figures_order[i]
+ local list = figures_formats[format].list or { format }
+ for j=1,#list do
+ local suffix = list[j]
+ local check = file.addsuffix(askedname,suffix)
+ local foundname, quitscanning, forcedformat = figures.exists(check,format,resolve_too)
+ if foundname then
+ return register(askedname, {
+ askedname = askedname,
+ fullname = foundname, -- check,
+ format = forcedformat or format,
+ cache = askedcache,
+ conversion = askedconversion,
+ resolution = askedresolution,
+ })
+ end
+ end
+ end
+ else
+ if figures.preferquality then
+ if trace_figures then
+ report_inclusion("unknown format, quality preferred")
+ end
+ for j=1,#figures_order do
+ local format = figures_order[j]
+ local list = figures_formats[format].list or { format }
+ for k=1,#list do
+ local suffix = list[k]
+ -- local name = file.replacesuffix(askedbase,suffix)
+ local name = file.replacesuffix(askedname,suffix)
+ for i=1,#figure_paths do
+ local path = figure_paths[i]
+ local check = path .. "/" .. name
+ local isfile = url.hashed(check).scheme == "file"
+ if not isfile then
+ if trace_figures then
+ report_inclusion("warning: skipping path %a",path)
+ end
+ else
+ local foundname, quitscanning, forcedformat = figures.exists(check,format,resolve_too) -- true)
+ if foundname then
+ return register(askedname, {
+ askedname = askedname,
+ fullname = foundname, -- check
+ format = forcedformat or format,
+ cache = askedcache,
+ conversion = askedconversion,
+ resolution = askedresolution,
+ })
+ end
+ end
+ end
+ end
+ end
+ else -- 'location'
+ if trace_figures then
+ report_inclusion("unknown format, using path strategy")
+ end
+ for i=1,#figure_paths do
+ local path = figure_paths[i]
+ for j=1,#figures_order do
+ local format = figures_order[j]
+ local list = figures_formats[format].list or { format }
+ for k=1,#list do
+ local suffix = list[k]
+ local check = path .. "/" .. file.replacesuffix(askedbase,suffix)
+ local foundname, quitscanning, forcedformat = figures.exists(check,format,resolve_too)
+ if foundname then
+ return register(askedname, {
+ askedname = askedname,
+                                fullname = foundname, -- check,
+ format = forcedformat or format,
+ cache = askedcache,
+ conversion = askedconversion,
+ resolution = askedresolution,
+ })
+ end
+ end
+ end
+ end
+ end
+ if figures.defaultsearch then
+ if trace_figures then
+ report_inclusion("using default tex path")
+ end
+ for j=1,#figures_order do
+ local format = figures_order[j]
+ local list = figures_formats[format].list or { format }
+ for k=1,#list do
+ local suffix = list[k]
+ local check = resolvers.findfile(file.replacesuffix(askedname,suffix))
+ if check and check ~= "" then
+ return register(askedname, {
+ askedname = askedname,
+ fullname = check,
+ format = format,
+ cache = askedcache,
+ conversion = askedconversion,
+ resolution = askedresolution,
+ })
+ end
+ end
+ end
+ end
+ end
+ return register(askedname, { -- these two are needed for hashing 'found'
+ conversion = askedconversion,
+ resolution = askedresolution,
+ })
+end
+
+-- -- -- plugins -- -- --
+
+function identifiers.default(data)
+ local dr, du, ds = data.request, data.used, data.status
+ local l = locate(dr)
+ local foundname = l.foundname
+ local fullname = l.fullname or foundname
+ if fullname then
+ du.format = l.format or false
+ du.fullname = fullname -- can be cached
+ ds.fullname = foundname -- original
+ ds.format = l.format
+ ds.status = (l.found and 10) or 0
+ end
+ return data
+end
+
+function figures.identify(data)
+ data = data or callstack[#callstack] or lastfiguredata
+ local list = identifiers.list -- defined at the end
+ for i=1,#list do
+ local identifier = list[i]
+ data = identifier(data)
+ if data.status.status > 0 then
+ break
+ end
+ end
+ return data
+end
+
+function figures.exists(askedname,format,resolve)
+ return (existers[format] or existers.generic)(askedname,resolve)
+end
+
+function figures.check(data)
+ data = data or callstack[#callstack] or lastfiguredata
+ return (checkers[data.status.format] or checkers.generic)(data)
+end
+
+function figures.include(data)
+ data = data or callstack[#callstack] or lastfiguredata
+ return (includers[data.status.format] or includers.generic)(data)
+end
+
+function figures.scale(data) -- will become lua code
+ context.doscalefigure()
+ return data
+end
+
+function figures.done(data)
+ figures.nofprocessed = figures.nofprocessed + 1
+ data = data or callstack[#callstack] or lastfiguredata
+ local dr, du, ds, nr = data.request, data.used, data.status, figures.boxnumber
+ local box = texbox[nr]
+ ds.width = box.width
+ ds.height = box.height
+ ds.xscale = ds.width /(du.width or 1)
+ ds.yscale = ds.height/(du.height or 1)
+ ds.page = ds.page or du.page or dr.page -- sort of redundant but can be limited
+ return data
+end
+
+function figures.dummy(data)
+ data = data or callstack[#callstack] or lastfiguredata
+ local dr, du, nr = data.request, data.used, figures.boxnumber
+ local box = node.hpack(node.new("hlist")) -- we need to set the dir (luatex 0.60 buglet)
+ du.width = du.width or figures.defaultwidth
+ du.height = du.height or figures.defaultheight
+ du.depth = du.depth or figures.defaultdepth
+ -- box.dir = "TLT"
+ box.width = du.width
+ box.height = du.height
+ box.depth = du.depth
+ texbox[nr] = box -- hm, should be global (to be checked for consistency)
+end
+
+-- -- -- generic -- -- --
+
+function existers.generic(askedname,resolve)
+ -- not findbinfile
+ local result
+ if lfs.isfile(askedname) then
+ result = askedname
+ elseif resolve then
+ result = resolvers.findbinfile(askedname) or ""
+ if result == "" then result = false end
+ end
+ if trace_figures then
+ if result then
+ report_inclusion("%a resolved to %a",askedname,result)
+ else
+ report_inclusion("%a cannot be resolved",askedname)
+ end
+ end
+ return result
+end
+
+function checkers.generic(data)
+ local dr, du, ds = data.request, data.used, data.status
+ local name = du.fullname or "unknown generic"
+ local page = du.page or dr.page
+ local size = dr.size or "crop"
+ local color = dr.color or "natural"
+ local mask = dr.mask or "none"
+ local conversion = dr.conversion
+ local resolution = dr.resolution
+ if not conversion or conversion == "" then
+ conversion = "unknown"
+ end
+ if not resolution or resolution == "" then
+ resolution = "unknown"
+ end
+ local hash = f_hash_full(name,page,size,color,conversion,resolution,mask)
+ local figure = figures_loaded[hash]
+ if figure == nil then
+ figure = img.new {
+ filename = name,
+ page = page,
+ pagebox = dr.size,
+ -- visiblefilename = "", -- this prohibits the full filename ending up in the file
+ }
+ codeinjections.setfigurecolorspace(data,figure)
+ codeinjections.setfiguremask(data,figure)
+ figure = figure and img.check(img.scan(figure)) or false
+ local f, d = codeinjections.setfigurealternative(data,figure)
+ figure, data = f or figure, d or data
+ figures_loaded[hash] = figure
+ if trace_conversion then
+ report_inclusion("new graphic, using hash %a",hash)
+ end
+ else
+ if trace_conversion then
+ report_inclusion("existing graphic, using hash %a",hash)
+ end
+ end
+ if figure then
+ du.width = figure.width
+ du.height = figure.height
+ du.pages = figure.pages
+ du.depth = figure.depth or 0
+ du.colordepth = figure.colordepth or 0
+ du.xresolution = figure.xres or 0
+ du.yresolution = figure.yres or 0
+ du.xsize = figure.xsize or 0
+ du.ysize = figure.ysize or 0
+ ds.private = figure
+ ds.hash = hash
+ end
+ return data
+end
+
+function includers.generic(data)
+ local dr, du, ds = data.request, data.used, data.status
+ -- here we set the 'natural dimensions'
+ dr.width = du.width
+ dr.height = du.height
+ local hash = figures.hash(data)
+ local figure = figures_used[hash]
+ -- figures.registerresource {
+ -- filename = du.fullname,
+ -- width = dr.width,
+ -- height = dr.height,
+ -- }
+ if figure == nil then
+ figure = ds.private
+ if figure then
+ figure = img.copy(figure)
+ figure = figure and img.clone(figure,data.request) or false
+ end
+ figures_used[hash] = figure
+ end
+ if figure then
+ local nr = figures.boxnumber
+ -- it looks like we have a leak in attributes here .. todo
+        local box = node.hpack(img.node(figure)) -- img.node(figure) no longer valid
+ indexed[figure.index] = figure
+ box.width, box.height, box.depth = figure.width, figure.height, 0 -- new, hm, tricky, we need to do that in tex (yet)
+ texbox[nr] = box
+ ds.objectnumber = figure.objnum
+ context.relocateexternalfigure()
+ end
+ return data
+end
+
+-- -- -- nongeneric -- -- --
+
+local function checkers_nongeneric(data,command) -- todo: macros and context.*
+ local dr, du, ds = data.request, data.used, data.status
+ local name = du.fullname or "unknown nongeneric"
+ local hash = name
+ if dr.object then
+ -- hm, bugged ... waiting for an xform interface
+ if not job.objects.get("FIG::"..hash) then
+ if type(command) == "function" then
+ command()
+ end
+ context.dosetfigureobject(hash)
+ end
+ context.doboxfigureobject(hash)
+ elseif type(command) == "function" then
+ command()
+ end
+ return data
+end
+
+local function includers_nongeneric(data)
+ return data
+end
+
+checkers.nongeneric = checkers_nongeneric
+includers.nongeneric = includers_nongeneric
+
+-- -- -- mov -- -- --
+
+function checkers.mov(data)
+ local dr, du, ds = data.request, data.used, data.status
+ local width = todimen(dr.width or figures.defaultwidth)
+ local height = todimen(dr.height or figures.defaultheight)
+ local foundname = du.fullname
+ dr.width, dr.height = width, height
+ du.width, du.height, du.foundname = width, height, foundname
+ if trace_inclusion then
+ report_inclusion("including movie %a, width %p, height %p",foundname,width,height)
+ end
+ -- we need to push the node.write in between ... we could make a shared helper for this
+ context.startfoundexternalfigure(width .. "sp",height .. "sp")
+ context(function()
+ nodeinjections.insertmovie {
+ width = width,
+ height = height,
+ factor = number.dimenfactors.bp,
+ ["repeat"] = dr["repeat"],
+ controls = dr.controls,
+ preview = dr.preview,
+ label = dr.label,
+ foundname = foundname,
+ }
+ end)
+ context.stopfoundexternalfigure()
+ return data
+end
+
+includers.mov = includers.nongeneric
+
+-- -- -- mps -- -- --
+
+internalschemes.mprun = true
+
+local function internal(askedname)
+ local spec, mprun, mpnum = match(lower(askedname),"mprun([:%.]?)(.-)%.(%d+)")
+ if spec ~= "" then
+ return mprun, mpnum
+ else
+ return "", mpnum
+ end
+end
+
+function existers.mps(askedname)
+ local mprun, mpnum = internal(askedname)
+ if mpnum then
+ return askedname
+ else
+ return existers.generic(askedname)
+ end
+end
+
+function checkers.mps(data)
+ local mprun, mpnum = internal(data.used.fullname)
+ if mpnum then
+ return checkers_nongeneric(data,function() context.docheckfiguremprun(mprun,mpnum) end)
+ else
+ return checkers_nongeneric(data,function() context.docheckfiguremps(data.used.fullname) end)
+ end
+end
+
+includers.mps = includers.nongeneric
+
+-- -- -- tex -- -- --
+
+function existers.tex(askedname)
+ askedname = resolvers.findfile(askedname)
+ return askedname ~= "" and askedname or false
+end
+
+function checkers.tex(data)
+ return checkers_nongeneric(data,function() context.docheckfiguretex(data.used.fullname) end)
+end
+
+includers.tex = includers.nongeneric
+
+-- -- -- buffer -- -- --
+
+function existers.buffer(askedname)
+ local name = file.nameonly(askedname)
+ local okay = buffers.exists(name)
+ return okay and name, true -- always quit scanning
+end
+
+function checkers.buffer(data)
+ return checkers_nongeneric(data,function() context.docheckfigurebuffer(file.nameonly(data.used.fullname)) end)
+end
+
+includers.buffer = includers.nongeneric
+
+-- -- -- auto -- -- --
+
+function existers.auto(askedname)
+    local name = gsub(askedname, "%.auto$", "")
+ local format = figures.guess(name)
+ if format then
+ report_inclusion("format guess %a for %a",format,name)
+ else
+ report_inclusion("format guess for %a is not possible",name)
+ end
+ return format and name, true, format
+end
+
+checkers.auto = checkers.generic
+includers.auto = includers.generic
+
+-- -- -- cld -- -- --
+
+existers.cld = existers.tex
+
+function checkers.cld(data)
+ return checkers_nongeneric(data,function() context.docheckfigurecld(data.used.fullname) end)
+end
+
+includers.cld = includers.nongeneric
+
+-- -- -- converters -- -- --
+
+local function makeoptions(options)
+ local to = type(options)
+ return (to == "table" and concat(options," ")) or (to == "string" and options) or ""
+end
+
+-- programs.makeoptions = makeoptions
+
+local function runprogram(binary,argument,variables)
+ local binary = match(binary,"[%S]+") -- to be sure
+ if type(argument) == "table" then
+ argument = concat(argument," ") -- for old times sake
+ end
+ if not os.which(binary) then
+        report_inclusion("program %a is not installed",binary)
+ elseif not argument or argument == "" then
+ report_inclusion("nothing to run, unknown program %a",binary)
+ else
+ local command = format([["%s" %s]],binary,replacetemplate(longtostring(argument),variables))
+ if trace_conversion or trace_programs then
+ report_inclusion("running command: %s",command)
+ end
+ os.spawn(command)
+ end
+end
+
+programs.run = runprogram
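+
+-- A sketch of a converter built on this runner; the program name and its
+-- arguments are hypothetical, real ones follow below. The %name% fields are
+-- filled in by replacetemplate.
+--
+-- programs.mytool = {
+--     command  = "mytool",
+--     argument = [[--convert "%oldname%" --output "%newname%"]],
+-- }
+--
+-- programs.run(programs.mytool.command, programs.mytool.argument, {
+--     oldname = "input.xyz",
+--     newname = "output.pdf",
+-- })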
+
+-- -- -- eps & pdf -- -- --
+--
+-- \externalfigure[cow.eps]
+-- \externalfigure[cow.pdf][conversion=stripped]
+
+local epsconverter = converters.eps or { }
+converters.eps = epsconverter
+converters.ps = epsconverter
+
+local epstopdf = {
+ resolutions = {
+ [v_low] = "screen",
+ [v_medium] = "ebook",
+ [v_high] = "prepress",
+ },
+ command = os.type == "windows" and "gswin32c" or "gs",
+ -- -dProcessDSCComments=false
+ argument = [[
+ -q
+ -sDEVICE=pdfwrite
+ -dNOPAUSE
+ -dNOCACHE
+ -dBATCH
+ -dAutoRotatePages=/None
+ -dPDFSETTINGS=/%presets%
+ -dEPSCrop
+ -sOutputFile=%newname%
+ %oldname%
+ -c quit
+ ]],
+}
+
+programs.epstopdf = epstopdf
+programs.gs = epstopdf
+
+function epsconverter.pdf(oldname,newname,resolution) -- the resolution interface might change
+ local epstopdf = programs.epstopdf -- can be changed
+ local presets = epstopdf.resolutions[resolution or ""] or epstopdf.resolutions.high
+ runprogram(epstopdf.command, epstopdf.argument, {
+ newname = newname,
+ oldname = oldname,
+ presets = presets,
+ } )
+end
+
+epsconverter.default = epsconverter.pdf
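+
+-- The resolution keyword maps onto a ghostscript PDFSETTINGS preset; the
+-- mapping can be adapted, for instance (a hypothetical tweak, "printer" is
+-- a valid ghostscript preset):
+--
+-- programs.epstopdf.resolutions[v_medium] = "printer"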
+
+local pdfconverter = converters.pdf or { }
+converters.pdf = pdfconverter
+
+programs.pdftoeps = {
+ command = "pdftops",
+    argument = [[-eps "%oldname%" "%newname%"]],
+}
+
+pdfconverter.stripped = function(oldname,newname,resolution)
+ local pdftoeps = programs.pdftoeps -- can be changed
+ local epstopdf = programs.epstopdf -- can be changed
+ local presets = epstopdf.resolutions[resolution or ""] or epstopdf.resolutions.high
+ local tmpname = newname .. ".tmp"
+ runprogram(pdftoeps.command, pdftoeps.argument, { oldname = oldname, newname = tmpname, presets = presets })
+ runprogram(epstopdf.command, epstopdf.argument, { oldname = tmpname, newname = newname, presets = presets })
+ os.remove(tmpname)
+end
+
+figures.registersuffix("stripped","pdf")
+
+-- -- -- svg -- -- --
+
+local svgconverter = { }
+converters.svg = svgconverter
+converters.svgz = svgconverter
+
+-- inkscape on windows only works with complete paths
+
+programs.inkscape = {
+ command = "inkscape",
+ pdfargument = [[
+ "%oldname%"
+ --export-dpi=600
+ -A
+ "%newname%"
+ ]],
+ pngargument = [[
+ "%oldname%"
+ --export-dpi=600
+ --export-png="%newname%"
+ ]],
+}
+
+function svgconverter.pdf(oldname,newname)
+ local inkscape = programs.inkscape -- can be changed
+ runprogram(inkscape.command, inkscape.pdfargument, {
+ newname = expandfilename(newname),
+ oldname = expandfilename(oldname),
+ } )
+end
+
+function svgconverter.png(oldname,newname)
+ local inkscape = programs.inkscape
+ runprogram(inkscape.command, inkscape.pngargument, {
+ newname = expandfilename(newname),
+ oldname = expandfilename(oldname),
+ } )
+end
+
+svgconverter.default = svgconverter.pdf
+
+-- -- -- gif -- -- --
+-- -- -- tif -- -- --
+
+local gifconverter = converters.gif or { }
+local tifconverter = converters.tif or { }
+local bmpconverter = converters.bmp or { }
+
+converters.gif = gifconverter
+converters.tif = tifconverter
+converters.bmp = bmpconverter
+
+programs.convert = {
+    command = "gm", -- graphicsmagick
+ argument = [[convert "%oldname%" "%newname%"]],
+}
+
+local function converter(oldname,newname)
+ local convert = programs.convert
+ runprogram(convert.command, convert.argument, {
+ newname = newname,
+ oldname = oldname,
+ } )
+end
+
+tifconverter.pdf = converter
+gifconverter.pdf = converter
+bmpconverter.pdf = converter
+
+gifconverter.default = converter
+tifconverter.default = converter
+bmpconverter.default = converter
+
+-- todo: lowres
+
+-- -- -- bases -- -- --
+
+local bases = allocate()
+figures.bases = bases
+
+local bases_list = nil -- index => { basename, fullname, xmlroot }
+local bases_used = nil -- [basename] => { basename, fullname, xmlroot } -- pointer to list
+local bases_found = nil
+local bases_enabled = false
+
+local function reset()
+ bases_list = allocate()
+ bases_used = allocate()
+ bases_found = allocate()
+ bases_enabled = false
+ bases.list = bases_list
+ bases.used = bases_used
+ bases.found = bases_found
+end
+
+reset()
+
+function bases.use(basename)
+ if basename == "reset" then
+ reset()
+ else
+ basename = file.addsuffix(basename,"xml")
+ if not bases_used[basename] then
+ local t = { basename, nil, nil }
+ bases_used[basename] = t
+ bases_list[#bases_list+1] = t
+ if not bases_enabled then
+ bases_enabled = true
+ xml.registerns("rlx","http://www.pragma-ade.com/schemas/rlx") -- we should be able to do this per xml file
+ end
+ if trace_bases then
+ report_inclusion("registering base %a",basename)
+ end
+ end
+ end
+end
+
+local function bases_find(basename,askedlabel)
+ if trace_bases then
+ report_inclusion("checking for %a in base %a",askedlabel,basename)
+ end
+ basename = file.addsuffix(basename,"xml")
+ local t = bases_found[askedlabel]
+ if t == nil then
+ local base = bases_used[basename]
+ local page = 0
+ if base[2] == nil then
+            -- not yet located
+ for i=1,#figure_paths do
+ local path = figure_paths[i]
+ local xmlfile = path .. "/" .. basename
+ if io.exists(xmlfile) then
+ base[2] = xmlfile
+ base[3] = xml.load(xmlfile)
+ if trace_bases then
+ report_inclusion("base %a loaded",xmlfile)
+ end
+ break
+ end
+ end
+ end
+ t = false
+ if base[2] and base[3] then -- rlx:library
+ for e in xml.collected(base[3],"/(*:library|figurelibrary)/*:figure/*:label") do
+ page = page + 1
+ if xml.text(e) == askedlabel then
+ t = {
+ base = file.replacesuffix(base[2],"pdf"),
+ format = "pdf",
+ name = xml.text(e,"../*:file"), -- to be checked
+ page = page,
+ }
+ bases_found[askedlabel] = t
+ if trace_bases then
+ report_inclusion("figure %a found in base %a",askedlabel,base[2])
+ end
+ return t
+ end
+ end
+ if trace_bases and not t then
+ report_inclusion("figure %a not found in base %a",askedlabel,base[2])
+ end
+ end
+ end
+ return t
+end
+
+-- we can access sequentially or by name
+
+local function bases_locate(askedlabel)
+ for i=1,#bases_list do
+ local entry = bases_list[i]
+ local t = bases_find(entry[1],askedlabel)
+ if t then
+ return t
+ end
+ end
+ return false
+end
+
+function identifiers.base(data)
+ if bases_enabled then
+ local dr, du, ds = data.request, data.used, data.status
+ local fbl = bases_locate(dr.name or dr.label)
+ if fbl then
+ du.page = fbl.page
+ du.format = fbl.format
+ du.fullname = fbl.base
+ ds.fullname = fbl.name
+ ds.format = fbl.format
+ ds.page = fbl.page
+ ds.status = 10
+ end
+ end
+ return data
+end
+
+bases.locate = bases_locate
+bases.find = bases_find
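+
+-- A usage sketch (the base name and label are hypothetical); once a base is
+-- registered, label lookups go through identifiers.base (see the list below):
+--
+-- figures.bases.use("productfigures")       -- registers productfigures.xml
+-- local hit = bases_locate("connector-x12") -- false or { base, format, name, page }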
+
+identifiers.list = {
+ identifiers.base,
+ identifiers.default
+}
+
+-- tracing
+
+statistics.register("graphics processing time", function()
+ local nofprocessed = figures.nofprocessed
+ if nofprocessed > 0 then
+ return format("%s seconds including tex, %s processed images", statistics.elapsedtime(figures),nofprocessed)
+ else
+ return nil
+ end
+end)
+
+-- helper
+
+function figures.applyratio(width,height,w,h) -- width and height are strings and w and h are numbers
+ if not width or width == "" then
+ if not height or height == "" then
+ return figures.defaultwidth, figures.defaultheight
+ else
+ height = todimen(height)
+ if w and h then
+ return height * w/h, height
+ else
+ return figures.defaultwidth, height
+ end
+ end
+ else
+ width = todimen(width)
+ if not height or height == "" then
+ if w and h then
+ return width, width * h/w
+ else
+ return width, figures.defaultheight
+ end
+ else
+ return width, todimen(height)
+ end
+ end
+end
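+
+-- For example, when only the width is given the height follows from the
+-- natural ratio (dimensions become scaled points via string.todimen):
+--
+-- local wd, ht = figures.applyratio("4cm","",600,300)
+-- -- wd == string.todimen("4cm"), ht == wd/2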
+
+-- example of simple plugins:
+--
+-- figures.converters.png = {
+-- png = function(oldname,newname,resolution)
+-- local command = string.format('gm convert -depth 1 "%s" "%s"',oldname,newname)
+-- logs.report(string.format("running command %s",command))
+-- os.execute(command)
+-- end,
+-- }
+
+-- local fig = figures.push { name = pdffile }
+-- figures.identify()
+-- figures.check()
+-- local nofpages = fig.used.pages
+-- figures.pop()
+
+-- interfacing
+
+commands.setfigurelookuporder = figures.setorder
diff --git a/tex/context/base/grph-raw.lua b/tex/context/base/grph-raw.lua
index 4c5b031ea..e2ffb689f 100644
--- a/tex/context/base/grph-raw.lua
+++ b/tex/context/base/grph-raw.lua
@@ -1,42 +1,42 @@
-if not modules then modules = { } end modules ['grph-raw'] = {
- version = 1.001,
- comment = "companion to grph-raw.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This module is for Mojca, who wanted something like this for
--- her gnuplot project. It's somewhat premliminary code but it
--- works ok for that purpose.
-
-local tonumber = tonumber
-
-local report_bitmap = logs.reporter("graphics","bitmaps")
-
-local context = context
-local texsp = tex.sp
-
-function figures.bitmapimage(t)
- local data = t.data
- local xresolution = tonumber(t.xresolution)
- local yresolution = tonumber(t.yresolution)
- if data and xresolution and yresolution then
- local width, height = t.width or "", t.height or ""
- local n = backends.nodeinjections.injectbitmap {
- xresolution = xresolution,
- yresolution = yresolution,
- width = width ~= "" and texsp(width) or nil,
- height = height ~= "" and texsp(height) or nil,
- data = data,
- colorspace = t.colorspace,
- }
- if n then
- context.hbox(n)
- else
- report_bitmap("format no supported by backend")
- end
- else
- report_bitmap("invalid specification")
- end
-end
+if not modules then modules = { } end modules ['grph-raw'] = {
+ version = 1.001,
+ comment = "companion to grph-raw.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This module is for Mojca, who wanted something like this for
+-- her gnuplot project. It's somewhat preliminary code but it
+-- works ok for that purpose.
+
+local tonumber = tonumber
+
+local report_bitmap = logs.reporter("graphics","bitmaps")
+
+local context = context
+local texsp = tex.sp
+
+function figures.bitmapimage(t)
+ local data = t.data
+ local xresolution = tonumber(t.xresolution)
+ local yresolution = tonumber(t.yresolution)
+ if data and xresolution and yresolution then
+ local width, height = t.width or "", t.height or ""
+ local n = backends.nodeinjections.injectbitmap {
+ xresolution = xresolution,
+ yresolution = yresolution,
+ width = width ~= "" and texsp(width) or nil,
+ height = height ~= "" and texsp(height) or nil,
+ data = data,
+ colorspace = t.colorspace,
+ }
+ if n then
+ context.hbox(n)
+ else
+            report_bitmap("format not supported by backend")
+ end
+ else
+ report_bitmap("invalid specification")
+ end
+end
diff --git a/tex/context/base/grph-swf.lua b/tex/context/base/grph-swf.lua
index 8c28b76af..58136f7fc 100644
--- a/tex/context/base/grph-swf.lua
+++ b/tex/context/base/grph-swf.lua
@@ -1,94 +1,94 @@
-if not modules then modules = { } end modules ['grph-swf'] = {
- version = 1.001,
- comment = "companion to grph-inc.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- maybe: backends.codeinjections.insertswf
-
-local sub, format, match, byte = string.sub, string.format, string.match, string.byte
-local concat = table.concat
-local floor = math.floor
-local tonumber = tonumber
-
-local readstring = io.readstring
-local readnumber = io.readnumber
-local tobitstring = number.tobitstring
-local todimen = number.todimen
-local nodeinjections = backends.nodeinjections
-local figures = figures
-local context = context
-
-local function getheader(name)
- local f = io.open(name,"rb")
- if not f then
- return
- end
- local signature = readstring(f,3) -- F=uncompressed, C=compressed (zlib)
- local version = readnumber(f,1)
- local filelength = readnumber(f,-4)
- local compressed = sub(signature,1,1) == "C"
- local buffer
- if compressed then
- buffer = zlib.decompress(f:read('*a'))
- else
- buffer = f:read(20) -- ('*a')
- end
- f:close()
- buffer = { match(buffer,"(.)(.)(.)(.)(.)(.)(.)(.)(.)(.)(.)(.)(.)") }
- for i=1,9 do
- buffer[i] = tobitstring(byte(buffer[i]))
- end
- local framebits = concat(buffer,"",1,9)
- local n = tonumber(sub(framebits,1,5),2)
- local frame = { } -- xmin xmax ymin ymax
- local xmin = tonumber(sub(framebits,6, 5 + n),2)
- local xmax = tonumber(sub(framebits,6 + 1*n,5 + 2*n),2)
- local ymin = tonumber(sub(framebits,6 + 2*n,5 + 3*n),2)
- local ymax = tonumber(sub(framebits,6 + 3*n,5 + 4*n),2)
- return {
- filename = name,
- version = version,
- filelength = filelength,
- framerate = tonumber(byte(buffer[10]) * 256 + byte(buffer[11])),
- framecount = tonumber(byte(buffer[12]) * 256 + byte(buffer[13])),
- -- framebits = framebits,
- compressed = compressed,
- width = floor((xmax - xmin) / 20),
- height = floor((ymax - ymin) / 20),
- rectangle = {
- xmin = xmin,
- xmax = xmax,
- ymin = ymin,
- ymax = ymax,
- }
- }
-end
-
-function figures.checkers.swf(data)
- local dr, du, ds = data.request, data.used, data.status
- local foundname = du.fullname
- local header = getheader(foundname)
- local width, height = figures.applyratio(dr.width,dr.height,header.width,header.height)
- dr.width, dr.height = width, height
- du.width, du.height, du.foundname = width, height, foundname
- context.startfoundexternalfigure(todimen(width),todimen(height))
- nodeinjections.insertswf {
- foundname = foundname,
- width = width,
- height = height,
- -- factor = number.dimenfactors.bp,
- display = dr.display,
- controls = dr.controls,
- -- label = dr.label,
- resources = dr.resources,
- }
- context.stopfoundexternalfigure()
- return data
-end
-
-figures.includers.swf = figures.includers.nongeneric
-
-figures.registersuffix("swf","swf")
+if not modules then modules = { } end modules ['grph-swf'] = {
+ version = 1.001,
+ comment = "companion to grph-inc.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- maybe: backends.codeinjections.insertswf
+
+local sub, format, match, byte = string.sub, string.format, string.match, string.byte
+local concat = table.concat
+local floor = math.floor
+local tonumber = tonumber
+
+local readstring = io.readstring
+local readnumber = io.readnumber
+local tobitstring = number.tobitstring
+local todimen = number.todimen
+local nodeinjections = backends.nodeinjections
+local figures = figures
+local context = context
+
+local function getheader(name)
+ local f = io.open(name,"rb")
+ if not f then
+ return
+ end
+ local signature = readstring(f,3) -- F=uncompressed, C=compressed (zlib)
+ local version = readnumber(f,1)
+ local filelength = readnumber(f,-4)
+ local compressed = sub(signature,1,1) == "C"
+ local buffer
+ if compressed then
+ buffer = zlib.decompress(f:read('*a'))
+ else
+ buffer = f:read(20) -- ('*a')
+ end
+ f:close()
+ buffer = { match(buffer,"(.)(.)(.)(.)(.)(.)(.)(.)(.)(.)(.)(.)(.)") }
+ for i=1,9 do
+ buffer[i] = tobitstring(byte(buffer[i]))
+ end
+ local framebits = concat(buffer,"",1,9)
+ local n = tonumber(sub(framebits,1,5),2)
+ local frame = { } -- xmin xmax ymin ymax
+ local xmin = tonumber(sub(framebits,6, 5 + n),2)
+ local xmax = tonumber(sub(framebits,6 + 1*n,5 + 2*n),2)
+ local ymin = tonumber(sub(framebits,6 + 2*n,5 + 3*n),2)
+ local ymax = tonumber(sub(framebits,6 + 3*n,5 + 4*n),2)
+ return {
+ filename = name,
+ version = version,
+ filelength = filelength,
+ framerate = tonumber(byte(buffer[10]) * 256 + byte(buffer[11])),
+ framecount = tonumber(byte(buffer[12]) * 256 + byte(buffer[13])),
+ -- framebits = framebits,
+ compressed = compressed,
+ width = floor((xmax - xmin) / 20),
+ height = floor((ymax - ymin) / 20),
+ rectangle = {
+ xmin = xmin,
+ xmax = xmax,
+ ymin = ymin,
+ ymax = ymax,
+ }
+ }
+end
+
+function figures.checkers.swf(data)
+ local dr, du, ds = data.request, data.used, data.status
+ local foundname = du.fullname
+ local header = getheader(foundname)
+ local width, height = figures.applyratio(dr.width,dr.height,header.width,header.height)
+ dr.width, dr.height = width, height
+ du.width, du.height, du.foundname = width, height, foundname
+ context.startfoundexternalfigure(todimen(width),todimen(height))
+ nodeinjections.insertswf {
+ foundname = foundname,
+ width = width,
+ height = height,
+ -- factor = number.dimenfactors.bp,
+ display = dr.display,
+ controls = dr.controls,
+ -- label = dr.label,
+ resources = dr.resources,
+ }
+ context.stopfoundexternalfigure()
+ return data
+end
+
+figures.includers.swf = figures.includers.nongeneric
+
+figures.registersuffix("swf","swf")
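The header parser above reads the 3 byte signature, the version byte and the file length, and then slices the movie RECT out of a bit string: a 5 bit field gives the width n in bits of the four twips coordinates that follow, and dividing by 20 converts twips to points. A small standalone restatement of that bit slicing, kept separate for illustration (it is not used by the module):

local tobitstring = number.tobitstring
local sub, byte   = string.sub, string.byte
local concat      = table.concat

local function decoderect(bytes) -- bytes: enough raw bytes to cover the rect
    local bits = { }
    for i=1,#bytes do
        bits[i] = tobitstring(byte(bytes,i))
    end
    bits = concat(bits)
    local n    = tonumber(sub(bits,1,5),2)             -- bits per coordinate
    local xmin = tonumber(sub(bits,6,      5 +   n),2)
    local xmax = tonumber(sub(bits,6 +   n,5 + 2*n),2)
    local ymin = tonumber(sub(bits,6 + 2*n,5 + 3*n),2)
    local ymax = tonumber(sub(bits,6 + 3*n,5 + 4*n),2)
    return (xmax - xmin)/20, (ymax - ymin)/20          -- twips to points
end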
diff --git a/tex/context/base/grph-u3d.lua b/tex/context/base/grph-u3d.lua
index 6961c5503..d141dc080 100644
--- a/tex/context/base/grph-u3d.lua
+++ b/tex/context/base/grph-u3d.lua
@@ -1,51 +1,51 @@
-if not modules then modules = { } end modules ['grph-u3d'] = {
- version = 1.001,
- comment = "companion to grph-inc.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- see lpdf-u3d.lua for comment
-
--- maybe: backends.codeinjections.insertu3d
-
-local trace_inclusion = false trackers.register("figures.inclusion", function(v) trace_inclusion = v end)
-
-local report_u3d = logs.reporter("graphics","u3d")
-
-local figures = figures
-local context = context
-local nodeinjections = backends.nodeinjections
-local todimen = string.todimen
-
-function figures.checkers.u3d(data)
- local dr, du, ds = data.request, data.used, data.status
- local width = todimen(dr.width or figures.defaultwidth)
- local height = todimen(dr.height or figures.defaultheight)
- local foundname = du.fullname
- dr.width, dr.height = width, height
- du.width, du.height, du.foundname = width, height, foundname
- if trace_inclusion then
- report_u3d("including u3d %a, width %p, height %p",foundname,width,height)
- end
- context.startfoundexternalfigure(width .. "sp",height .. "sp")
- context(function()
- nodeinjections.insertu3d {
- foundname = foundname,
- width = width,
- height = height,
- factor = number.dimenfactors.bp,
- display = dr.display,
- controls = dr.controls,
- label = dr.label,
- }
- end)
- context.stopfoundexternalfigure()
- return data
-end
-
-figures.includers.u3d = figures.includers.nongeneric
-
-figures.registersuffix("u3d","u3d")
-figures.registersuffix("prc","u3d")
+if not modules then modules = { } end modules ['grph-u3d'] = {
+ version = 1.001,
+ comment = "companion to grph-inc.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- see lpdf-u3d.lua for comment
+
+-- maybe: backends.codeinjections.insertu3d
+
+local trace_inclusion = false trackers.register("figures.inclusion", function(v) trace_inclusion = v end)
+
+local report_u3d = logs.reporter("graphics","u3d")
+
+local figures = figures
+local context = context
+local nodeinjections = backends.nodeinjections
+local todimen = string.todimen
+
+function figures.checkers.u3d(data)
+ local dr, du, ds = data.request, data.used, data.status
+ local width = todimen(dr.width or figures.defaultwidth)
+ local height = todimen(dr.height or figures.defaultheight)
+ local foundname = du.fullname
+ dr.width, dr.height = width, height
+ du.width, du.height, du.foundname = width, height, foundname
+ if trace_inclusion then
+ report_u3d("including u3d %a, width %p, height %p",foundname,width,height)
+ end
+ context.startfoundexternalfigure(width .. "sp",height .. "sp")
+ context(function()
+ nodeinjections.insertu3d {
+ foundname = foundname,
+ width = width,
+ height = height,
+ factor = number.dimenfactors.bp,
+ display = dr.display,
+ controls = dr.controls,
+ label = dr.label,
+ }
+ end)
+ context.stopfoundexternalfigure()
+ return data
+end
+
+figures.includers.u3d = figures.includers.nongeneric
+
+figures.registersuffix("u3d","u3d")
+figures.registersuffix("prc","u3d")
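The swf and u3d modules follow the same recipe for adding a figure format: define a checker that sets up dimensions and injects backend specific content, reuse the nongeneric includer, and register the suffixes. A hedged sketch of that recipe for a hypothetical "xyz" format (the suffix, the checker and the idea of its injection are made up; only the figures.* plumbing mirrors the code above):

local figures = figures
local context = context
local todimen = string.todimen

function figures.checkers.xyz(data) -- hypothetical format
    local dr, du = data.request, data.used
    local width  = todimen(dr.width  or figures.defaultwidth)
    local height = todimen(dr.height or figures.defaultheight)
    dr.width, dr.height = width, height
    du.width, du.height, du.foundname = width, height, du.fullname
    context.startfoundexternalfigure(width .. "sp",height .. "sp")
    -- a real checker would call a backend injection here
    context.stopfoundexternalfigure()
    return data
end

figures.includers.xyz = figures.includers.nongeneric
figures.registersuffix("xyz","xyz")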
diff --git a/tex/context/base/grph-wnd.lua b/tex/context/base/grph-wnd.lua
index ebb9b1169..8b005b123 100644
--- a/tex/context/base/grph-wnd.lua
+++ b/tex/context/base/grph-wnd.lua
@@ -1,47 +1,47 @@
-if not modules then modules = { } end modules ['grph-wnd'] = {
- version = 1.001,
- comment = "companion to grph-inc.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- Thanks to Luigi Scarso for making graphic magic work in luatex.
---
--- \externalfigure[hacker.jpeg][width=4cm,conversion=gray.jpg]
-
-local converters, suffixes = figures.converters, figures.suffixes
-
-local trace_conversion = false trackers.register("figures.conversion", function(v) trace_conversion = v end)
-
-local report_wand = logs.reporter("graphics","wand")
-
-local function togray(oldname,newname)
- if lfs.isfile(oldname) then
- require("gmwand")
- if trace_conversion then
- report_wand("converting %a to %a using gmwand",oldname,newname)
- end
- gmwand.InitializeMagick("./") -- What does this path do?
- local wand = gmwand.NewMagickWand()
- gmwand.MagickReadImage(wand,oldname)
- gmwand.MagickSetImageColorspace(wand,gmwand.GRAYColorspace)
- gmwand.MagickWriteImages(wand,newname,1)
- gmwand.DestroyMagickWand(wand)
- else
- report_wand("unable to convert %a to %a using gmwand",oldname,newname)
- end
-end
-
-local formats = { "png", "jpg", "gif" }
-
-for i=1,#formats do
- local oldformat = formats[i]
- local newformat = "gray." .. oldformat
- if trace_conversion then
- report_wand("installing converter for %a to %a",oldformat,newformat)
- end
- converters[oldformat] = converters[oldformat] or { }
- converters[oldformat][newformat] = togray
- suffixes [newformat] = oldformat
-end
+if not modules then modules = { } end modules ['grph-wnd'] = {
+ version = 1.001,
+ comment = "companion to grph-inc.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- Thanks to Luigi Scarso for making graphic magic work in luatex.
+--
+-- \externalfigure[hacker.jpeg][width=4cm,conversion=gray.jpg]
+
+local converters, suffixes = figures.converters, figures.suffixes
+
+local trace_conversion = false trackers.register("figures.conversion", function(v) trace_conversion = v end)
+
+local report_wand = logs.reporter("graphics","wand")
+
+local function togray(oldname,newname)
+ if lfs.isfile(oldname) then
+ require("gmwand")
+ if trace_conversion then
+ report_wand("converting %a to %a using gmwand",oldname,newname)
+ end
+ gmwand.InitializeMagick("./") -- What does this path do?
+ local wand = gmwand.NewMagickWand()
+ gmwand.MagickReadImage(wand,oldname)
+ gmwand.MagickSetImageColorspace(wand,gmwand.GRAYColorspace)
+ gmwand.MagickWriteImages(wand,newname,1)
+ gmwand.DestroyMagickWand(wand)
+ else
+ report_wand("unable to convert %a to %a using gmwand",oldname,newname)
+ end
+end
+
+local formats = { "png", "jpg", "gif" }
+
+for i=1,#formats do
+ local oldformat = formats[i]
+ local newformat = "gray." .. oldformat
+ if trace_conversion then
+ report_wand("installing converter for %a to %a",oldformat,newformat)
+ end
+ converters[oldformat] = converters[oldformat] or { }
+ converters[oldformat][newformat] = togray
+ suffixes [newformat] = oldformat
+end
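The loop above wires up png, jpg and gif so that conversion=gray.<suffix> triggers the gmwand based grayscale pass. Registering another on-the-fly conversion follows the same converters/suffixes pattern; the sketch below is hypothetical and shells out to a gm binary instead of using the gmwand binding, so the command line flags are an assumption, not something taken from the module:

local converters, suffixes = figures.converters, figures.suffixes

local function tonegative(oldname,newname) -- hypothetical conversion
    if lfs.isfile(oldname) then
        -- assumption: a gm binary on the path that understands -negate
        os.execute(string.format('gm convert "%s" -negate "%s"',oldname,newname))
    end
end

local formats = { "png", "jpg", "gif" }

for i=1,#formats do
    local oldformat = formats[i]
    local newformat = "negative." .. oldformat
    converters[oldformat] = converters[oldformat] or { }
    converters[oldformat][newformat] = tonegative
    suffixes [newformat] = oldformat
end

After that a request like conversion=negative.png selects this converter, analogous to the conversion=gray.jpg example in the comment at the top of the module.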
diff --git a/tex/context/base/java-ini.lua b/tex/context/base/java-ini.lua
index 321e4e24d..3f1fbd6cf 100644
--- a/tex/context/base/java-ini.lua
+++ b/tex/context/base/java-ini.lua
@@ -1,226 +1,226 @@
-if not modules then modules = { } end modules ['java-ini'] = {
- version = 1.001,
- comment = "companion to java-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format = string.format
-local concat = table.concat
-local lpegmatch, P, S, C, Carg, Cc = lpeg.match, lpeg.P, lpeg.S, lpeg.C, lpeg.Carg, lpeg.Cc
-
-local allocate = utilities.storage.allocate
-local settings_to_array = utilities.parsers.settings_to_array
-local variables = interfaces.variables
-local formatters = string.formatters
-
--- todo: don't flush scripts if no JS key
-
-local trace_javascript = false trackers.register("backends.javascript", function(v) trace_javascript = v end)
-
-local report_javascripts = logs.reporter ("interactions","javascripts")
-local status_javascripts = logs.messenger("interactions","javascripts")
-
-interactions.javascripts = interactions.javascripts or { }
-local javascripts = interactions.javascripts
-
-javascripts.codes = allocate()
-javascripts.preambles = allocate()
-javascripts.functions = allocate()
-
-local codes, preambles, functions = javascripts.codes, javascripts.preambles, javascripts.functions
-
-local preambled = { }
-
-local function storefunction(s,preamble)
- if trace_javascript then
- report_javascripts("found function %a",s)
- end
- functions[s] = preamble
-end
-
-local uses = P("uses")
-local used = P("used")
-local left = P("{")
-local right = P("}")
-local space = S(" \r\n")
-local spaces = space^0
-local braced = left * C((1-right-space)^1) * right
-local unbraced = C((1-space)^1)
-local name = spaces * (braced + unbraced) * spaces
-local any = P(1)
-local script = C(any^1)
-local funct = P("function")
-local leftp = P("(")
-local rightp = P(")")
-local fname = spaces * funct * spaces * (C((1-space-left-leftp)^1) * Carg(1) / storefunction) * spaces * leftp
-
-local parsecode = name * ((uses * name) + Cc("")) * spaces * script
-local parsepreamble = name * ((used * name) + Cc("")) * spaces * script
-local parsefunctions = (fname + any)^0
-
-function javascripts.storecode(str)
- local name, uses, script = lpegmatch(parsecode,str)
- if name and name ~= "" then
- codes[name] = { uses, script }
- end
-end
-
-function javascripts.storepreamble(str) -- now later
- local name, used, script = lpegmatch(parsepreamble,str)
- if name and name ~= "" and not preambled[name] then
- local n = #preambles + 1
- preambles[n] = { name, used, script }
- preambled[name] = n
- if trace_javascript then
- report_javascripts("stored preamble %a, state %a, order %a",name,used,n)
- end
- lpegmatch(parsefunctions,script,1,n)
- end
-end
-
-function javascripts.setpreamble(name,script) -- now later
- if name and name ~= "" and not preambled[name] then
- local n = #preambles + 1
- preambles[n] = { name, "now", script }
- preambled[name] = n
- if trace_javascript then
- report_javascripts("adapted preamble %a, state %a, order %a",name,"now",n)
- end
- lpegmatch(parsefunctions,script,1,n)
- end
-end
-
-function javascripts.addtopreamble(name,script)
- if name and name ~= "" then
- local p = preambled[name]
- if p then
- preambles[p] = { "now", preambles[p] .. " ;\n" .. script }
- if trace_javascript then
- report_javascripts("extended preamble %a, state %a, order %a",name,"now",p)
- end
- else
- local n = #preambles + 1
- preambles[n] = { name, "now", script }
- preambled[name] = n
- if trace_javascript then
- report_javascripts("stored preamble %a, state %a, order %a",name,"now",n)
- end
- lpegmatch(parsefunctions,script,1,n)
- end
- end
-end
-
-function javascripts.usepreamblenow(name) -- now later
- if name and name ~= "" and name ~= variables.reset then -- todo: reset
- local names = settings_to_array(name)
- for i=1,#names do
- local somename = names[i]
- if not preambled[somename] then
- preambles[preambled[somename]][2] = "now"
- if trace_javascript then
- report_javascripts("used preamble %a, state %a, order %a",somename,"now","auto")
- end
- end
- end
- end
-end
-
-local splitter = lpeg.tsplitat(lpeg.patterns.commaspacer)
-
-local used, reported = false, { } -- we can cache more
-
-function javascripts.code(name,arguments)
- local c = codes[name]
- if c then
- local u, code = c[1], c[2]
- if u ~= "" then
- local p = preambled[u]
- if p then
- preambles[p][2] = "now"
- if trace_javascript and not reported[name] then
- reported[name] = true
- report_javascripts("used code %a, preamble %a",name,u)
- end
- elseif trace_javascript and not reported[name] then
- reported[name] = true
- report_javascripts("used code %a",name)
- end
- elseif trace_javascript and not reported[name] then
- reported[name] = true
- report_javascripts("used code %a",name)
- end
- used = true
- return code
- end
- local f = functions[name]
- if f then
- used = true
- if trace_javascript and not reported[name] then
- reported[name] = true
- report_javascripts("used function %a",name)
- end
- preambles[f][2] = "now" -- automatically tag preambles that define the function (as later)
- if arguments then
- local args = lpegmatch(splitter,arguments)
- for i=1,#args do -- can be a helper
- args[i] = formatters["%q"](args[i])
- end
- return formatters["%s(%s)"](name,concat(args,","))
- else
- return formatters["%s()"](name)
- end
- end
-end
-
-function javascripts.flushpreambles()
- local t = { }
--- if used then -- we want to be able to enforce inclusion
- for i=1,#preambles do
- local preamble = preambles[i]
- if preamble[2] == "now" then
- if trace_javascript then
- report_javascripts("flushed preamble %a",preamble[1])
- end
- t[#t+1] = { preamble[1], preamble[3] }
- end
- end
--- end
- return t
-end
-
-local patterns = { "java-imp-%s.mkiv", "java-imp-%s.tex", "java-%s.mkiv", "java-%s.tex" }
-
-local function action(name,foundname)
- context.startnointerference()
- context.startreadingfile()
- context.input(foundname)
- status_javascripts("loaded: library %a",name)
- context.stopreadingfile()
- context.stopnointerference()
-end
-
-local function failure(name)
- report_javascripts("unknown library %a",name)
-end
-
-function javascripts.usescripts(name)
- if name ~= variables.reset then -- reset is obsolete
- commands.uselibrary {
- name = name,
- patterns = patterns,
- action = action,
- failure = failure,
- onlyonce = true,
- }
- end
-end
-
--- interface
-
-commands.storejavascriptcode = interactions.javascripts.storecode
-commands.storejavascriptpreamble = interactions.javascripts.storepreamble
-commands.addtojavascriptpreamble = interactions.javascripts.addtopreamble
-commands.usejavascriptpreamble = interactions.javascripts.usepreamblenow
-commands.usejavascriptscripts = interactions.javascripts.usescripts
+if not modules then modules = { } end modules ['java-ini'] = {
+ version = 1.001,
+ comment = "companion to java-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format = string.format
+local concat = table.concat
+local lpegmatch, P, S, C, Carg, Cc = lpeg.match, lpeg.P, lpeg.S, lpeg.C, lpeg.Carg, lpeg.Cc
+
+local allocate = utilities.storage.allocate
+local settings_to_array = utilities.parsers.settings_to_array
+local variables = interfaces.variables
+local formatters = string.formatters
+
+-- todo: don't flush scripts if no JS key
+
+local trace_javascript = false trackers.register("backends.javascript", function(v) trace_javascript = v end)
+
+local report_javascripts = logs.reporter ("interactions","javascripts")
+local status_javascripts = logs.messenger("interactions","javascripts")
+
+interactions.javascripts = interactions.javascripts or { }
+local javascripts = interactions.javascripts
+
+javascripts.codes = allocate()
+javascripts.preambles = allocate()
+javascripts.functions = allocate()
+
+local codes, preambles, functions = javascripts.codes, javascripts.preambles, javascripts.functions
+
+local preambled = { }
+
+local function storefunction(s,preamble)
+ if trace_javascript then
+ report_javascripts("found function %a",s)
+ end
+ functions[s] = preamble
+end
+
+local uses = P("uses")
+local used = P("used")
+local left = P("{")
+local right = P("}")
+local space = S(" \r\n")
+local spaces = space^0
+local braced = left * C((1-right-space)^1) * right
+local unbraced = C((1-space)^1)
+local name = spaces * (braced + unbraced) * spaces
+local any = P(1)
+local script = C(any^1)
+local funct = P("function")
+local leftp = P("(")
+local rightp = P(")")
+local fname = spaces * funct * spaces * (C((1-space-left-leftp)^1) * Carg(1) / storefunction) * spaces * leftp
+
+local parsecode = name * ((uses * name) + Cc("")) * spaces * script
+local parsepreamble = name * ((used * name) + Cc("")) * spaces * script
+local parsefunctions = (fname + any)^0
+
+function javascripts.storecode(str)
+ local name, uses, script = lpegmatch(parsecode,str)
+ if name and name ~= "" then
+ codes[name] = { uses, script }
+ end
+end
+
+function javascripts.storepreamble(str) -- now later
+ local name, used, script = lpegmatch(parsepreamble,str)
+ if name and name ~= "" and not preambled[name] then
+ local n = #preambles + 1
+ preambles[n] = { name, used, script }
+ preambled[name] = n
+ if trace_javascript then
+ report_javascripts("stored preamble %a, state %a, order %a",name,used,n)
+ end
+ lpegmatch(parsefunctions,script,1,n)
+ end
+end
+
+function javascripts.setpreamble(name,script) -- now later
+ if name and name ~= "" and not preambled[name] then
+ local n = #preambles + 1
+ preambles[n] = { name, "now", script }
+ preambled[name] = n
+ if trace_javascript then
+ report_javascripts("adapted preamble %a, state %a, order %a",name,"now",n)
+ end
+ lpegmatch(parsefunctions,script,1,n)
+ end
+end
+
+function javascripts.addtopreamble(name,script)
+ if name and name ~= "" then
+ local p = preambled[name]
+ if p then
+ preambles[p] = { "now", preambles[p] .. " ;\n" .. script }
+ if trace_javascript then
+ report_javascripts("extended preamble %a, state %a, order %a",name,"now",p)
+ end
+ else
+ local n = #preambles + 1
+ preambles[n] = { name, "now", script }
+ preambled[name] = n
+ if trace_javascript then
+ report_javascripts("stored preamble %a, state %a, order %a",name,"now",n)
+ end
+ lpegmatch(parsefunctions,script,1,n)
+ end
+ end
+end
+
+function javascripts.usepreamblenow(name) -- now later
+ if name and name ~= "" and name ~= variables.reset then -- todo: reset
+ local names = settings_to_array(name)
+ for i=1,#names do
+ local somename = names[i]
+ if not preambled[somename] then
+ preambles[preambled[somename]][2] = "now"
+ if trace_javascript then
+ report_javascripts("used preamble %a, state %a, order %a",somename,"now","auto")
+ end
+ end
+ end
+ end
+end
+
+local splitter = lpeg.tsplitat(lpeg.patterns.commaspacer)
+
+local used, reported = false, { } -- we can cache more
+
+function javascripts.code(name,arguments)
+ local c = codes[name]
+ if c then
+ local u, code = c[1], c[2]
+ if u ~= "" then
+ local p = preambled[u]
+ if p then
+ preambles[p][2] = "now"
+ if trace_javascript and not reported[name] then
+ reported[name] = true
+ report_javascripts("used code %a, preamble %a",name,u)
+ end
+ elseif trace_javascript and not reported[name] then
+ reported[name] = true
+ report_javascripts("used code %a",name)
+ end
+ elseif trace_javascript and not reported[name] then
+ reported[name] = true
+ report_javascripts("used code %a",name)
+ end
+ used = true
+ return code
+ end
+ local f = functions[name]
+ if f then
+ used = true
+ if trace_javascript and not reported[name] then
+ reported[name] = true
+ report_javascripts("used function %a",name)
+ end
+ preambles[f][2] = "now" -- automatically tag preambles that define the function (as later)
+ if arguments then
+ local args = lpegmatch(splitter,arguments)
+ for i=1,#args do -- can be a helper
+ args[i] = formatters["%q"](args[i])
+ end
+ return formatters["%s(%s)"](name,concat(args,","))
+ else
+ return formatters["%s()"](name)
+ end
+ end
+end
+
+function javascripts.flushpreambles()
+ local t = { }
+-- if used then -- we want to be able to enforce inclusion
+ for i=1,#preambles do
+ local preamble = preambles[i]
+ if preamble[2] == "now" then
+ if trace_javascript then
+ report_javascripts("flushed preamble %a",preamble[1])
+ end
+ t[#t+1] = { preamble[1], preamble[3] }
+ end
+ end
+-- end
+ return t
+end
+
+local patterns = { "java-imp-%s.mkiv", "java-imp-%s.tex", "java-%s.mkiv", "java-%s.tex" }
+
+local function action(name,foundname)
+ context.startnointerference()
+ context.startreadingfile()
+ context.input(foundname)
+ status_javascripts("loaded: library %a",name)
+ context.stopreadingfile()
+ context.stopnointerference()
+end
+
+local function failure(name)
+ report_javascripts("unknown library %a",name)
+end
+
+function javascripts.usescripts(name)
+ if name ~= variables.reset then -- reset is obsolete
+ commands.uselibrary {
+ name = name,
+ patterns = patterns,
+ action = action,
+ failure = failure,
+ onlyonce = true,
+ }
+ end
+end
+
+-- interface
+
+commands.storejavascriptcode = interactions.javascripts.storecode
+commands.storejavascriptpreamble = interactions.javascripts.storepreamble
+commands.addtojavascriptpreamble = interactions.javascripts.addtopreamble
+commands.usejavascriptpreamble = interactions.javascripts.usepreamblenow
+commands.usejavascriptscripts = interactions.javascripts.usescripts
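The parsers above expect a name, an optional uses/used clause and then the script body; functions found in a preamble are remembered so that javascripts.code can later resolve a bare function name. A hedged sketch of exercising that from the Lua end (normally the strings arrive via the TeX interface; the MyLib preamble and its script are made up):

local javascripts = interactions.javascripts

-- a made-up preamble; "used later" keeps it out of the document until needed
javascripts.storepreamble([[MyLib used later
function Hello(msg) { console.println(msg) ; }]])

-- referencing the function flips the MyLib preamble to "now" and returns the
-- call with quoted arguments
local call = javascripts.code("Hello","it works") -- Hello("it works")

-- preambles in state "now" come back as { name, script } pairs
local flushed = javascripts.flushpreambles()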
diff --git a/tex/context/base/l-boolean.lua b/tex/context/base/l-boolean.lua
index f087f1a4c..ddac9b8a0 100644
--- a/tex/context/base/l-boolean.lua
+++ b/tex/context/base/l-boolean.lua
@@ -1,69 +1,69 @@
-if not modules then modules = { } end modules ['l-boolean'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local type, tonumber = type, tonumber
-
-boolean = boolean or { }
-local boolean = boolean
-
-function boolean.tonumber(b)
- if b then return 1 else return 0 end -- test and return or return
-end
-
-function toboolean(str,tolerant) -- global
- if str == nil then
- return false
- elseif str == false then
- return false
- elseif str == true then
- return true
- elseif str == "true" then
- return true
- elseif str == "false" then
- return false
- elseif not tolerant then
- return false
- elseif str == 0 then
- return false
- elseif (tonumber(str) or 0) > 0 then
- return true
- else
- return str == "yes" or str == "on" or str == "t"
- end
-end
-
-string.toboolean = toboolean
-
-function string.booleanstring(str)
- if str == "0" then
- return false
- elseif str == "1" then
- return true
- elseif str == "" then
- return false
- elseif str == "false" then
- return false
- elseif str == "true" then
- return true
- elseif (tonumber(str) or 0) > 0 then
- return true
- else
- return str == "yes" or str == "on" or str == "t"
- end
-end
-
-function string.is_boolean(str,default)
- if type(str) == "string" then
- if str == "true" or str == "yes" or str == "on" or str == "t" then
- return true
- elseif str == "false" or str == "no" or str == "off" or str == "f" then
- return false
- end
- end
- return default
-end
+if not modules then modules = { } end modules ['l-boolean'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local type, tonumber = type, tonumber
+
+boolean = boolean or { }
+local boolean = boolean
+
+function boolean.tonumber(b)
+ if b then return 1 else return 0 end -- test and return or return
+end
+
+function toboolean(str,tolerant) -- global
+ if str == nil then
+ return false
+ elseif str == false then
+ return false
+ elseif str == true then
+ return true
+ elseif str == "true" then
+ return true
+ elseif str == "false" then
+ return false
+ elseif not tolerant then
+ return false
+ elseif str == 0 then
+ return false
+ elseif (tonumber(str) or 0) > 0 then
+ return true
+ else
+ return str == "yes" or str == "on" or str == "t"
+ end
+end
+
+string.toboolean = toboolean
+
+function string.booleanstring(str)
+ if str == "0" then
+ return false
+ elseif str == "1" then
+ return true
+ elseif str == "" then
+ return false
+ elseif str == "false" then
+ return false
+ elseif str == "true" then
+ return true
+ elseif (tonumber(str) or 0) > 0 then
+ return true
+ else
+ return str == "yes" or str == "on" or str == "t"
+ end
+end
+
+function string.is_boolean(str,default)
+ if type(str) == "string" then
+ if str == "true" or str == "yes" or str == "on" or str == "t" then
+ return true
+ elseif str == "false" or str == "no" or str == "off" or str == "f" then
+ return false
+ end
+ end
+ return default
+end
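The difference between the strict and the tolerant variants is easiest to see with a few calls; the expected results follow directly from the definitions above:

-- strict: only booleans themselves and "true"/"false" are accepted
print(toboolean("true"))       -- true
print(toboolean("yes"))        -- false (tolerant flag not set)

-- tolerant: positive numbers and yes/on/t also count as true
print(toboolean("yes",true))   -- true
print(toboolean("1",true))     -- true
print(toboolean("0",true))     -- false

-- is_boolean falls back to the given default for anything unrecognized
print(string.is_boolean("off"))         -- false
print(string.is_boolean("maybe","huh")) -- huh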
diff --git a/tex/context/base/l-dir.lua b/tex/context/base/l-dir.lua
index 3d0576eeb..a58e5302e 100644
--- a/tex/context/base/l-dir.lua
+++ b/tex/context/base/l-dir.lua
@@ -1,470 +1,470 @@
-if not modules then modules = { } end modules ['l-dir'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- dir.expandname will be merged with cleanpath and collapsepath
-
-local type, select = type, select
-local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub
-local concat, insert, remove, unpack = table.concat, table.insert, table.remove, table.unpack
-local lpegmatch = lpeg.match
-
-local P, S, R, C, Cc, Cs, Ct, Cv, V = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Cv, lpeg.V
-
-dir = dir or { }
-local dir = dir
-local lfs = lfs
-
-local attributes = lfs.attributes
-local walkdir = lfs.dir
-local isdir = lfs.isdir
-local isfile = lfs.isfile
-local currentdir = lfs.currentdir
-local chdir = lfs.chdir
-
--- in case we load outside luatex
-
-if not isdir then
- function isdir(name)
- local a = attributes(name)
- return a and a.mode == "directory"
- end
- lfs.isdir = isdir
-end
-
-if not isfile then
- function isfile(name)
- local a = attributes(name)
- return a and a.mode == "file"
- end
- lfs.isfile = isfile
-end
-
--- handy
-
-function dir.current()
- return (gsub(currentdir(),"\\","/"))
-end
-
--- optimizing for no find (*) does not save time
-
---~ local function globpattern(path,patt,recurse,action) -- fails in recent luatex due to some change in lfs
---~ local ok, scanner
---~ if path == "/" then
---~ ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
---~ else
---~ ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
---~ end
---~ if ok and type(scanner) == "function" then
---~ if not find(path,"/$") then path = path .. '/' end
---~ for name in scanner do
---~ local full = path .. name
---~ local mode = attributes(full,'mode')
---~ if mode == 'file' then
---~ if find(full,patt) then
---~ action(full)
---~ end
---~ elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
---~ globpattern(full,patt,recurse,action)
---~ end
---~ end
---~ end
---~ end
-
-local lfsisdir = isdir
-
-local function isdir(path)
- path = gsub(path,"[/\\]+$","")
- return lfsisdir(path)
-end
-
-lfs.isdir = isdir
-
-local function globpattern(path,patt,recurse,action)
- if path == "/" then
- path = path .. "."
- elseif not find(path,"/$") then
- path = path .. '/'
- end
- if isdir(path) then -- lfs.isdir does not like trailing /
- for name in walkdir(path) do -- lfs.dir accepts trailing /
- local full = path .. name
- local mode = attributes(full,'mode')
- if mode == 'file' then
- if find(full,patt) then
- action(full)
- end
- elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
- globpattern(full,patt,recurse,action)
- end
- end
- end
-end
-
-dir.globpattern = globpattern
-
-local function collectpattern(path,patt,recurse,result)
- local ok, scanner
- result = result or { }
- if path == "/" then
- ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
- else
- ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
- end
- if ok and type(scanner) == "function" then
- if not find(path,"/$") then path = path .. '/' end
- for name in scanner, first do
- local full = path .. name
- local attr = attributes(full)
- local mode = attr.mode
- if mode == 'file' then
- if find(full,patt) then
- result[name] = attr
- end
- elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
- attr.list = collectpattern(full,patt,recurse)
- result[name] = attr
- end
- end
- end
- return result
-end
-
-dir.collectpattern = collectpattern
-
-local pattern = Ct {
- [1] = (C(P(".") + P("/")^1) + C(R("az","AZ") * P(":") * P("/")^0) + Cc("./")) * V(2) * V(3),
- [2] = C(((1-S("*?/"))^0 * P("/"))^0),
- [3] = C(P(1)^0)
-}
-
-local filter = Cs ( (
- P("**") / ".*" +
- P("*") / "[^/]*" +
- P("?") / "[^/]" +
- P(".") / "%%." +
- P("+") / "%%+" +
- P("-") / "%%-" +
- P(1)
-)^0 )
-
-local function glob(str,t)
- if type(t) == "function" then
- if type(str) == "table" then
- for s=1,#str do
- glob(str[s],t)
- end
- elseif isfile(str) then
- t(str)
- else
- local split = lpegmatch(pattern,str) -- we could use the file splitter
- if split then
- local root, path, base = split[1], split[2], split[3]
- local recurse = find(base,"%*%*")
- local start = root .. path
- local result = lpegmatch(filter,start .. base)
- globpattern(start,result,recurse,t)
- end
- end
- else
- if type(str) == "table" then
- local t = t or { }
- for s=1,#str do
- glob(str[s],t)
- end
- return t
- elseif isfile(str) then
- if t then
- t[#t+1] = str
- return t
- else
- return { str }
- end
- else
- local split = lpegmatch(pattern,str) -- we could use the file splitter
- if split then
- local t = t or { }
- local action = action or function(name) t[#t+1] = name end
- local root, path, base = split[1], split[2], split[3]
- local recurse = find(base,"%*%*")
- local start = root .. path
- local result = lpegmatch(filter,start .. base)
- globpattern(start,result,recurse,action)
- return t
- else
- return { }
- end
- end
- end
-end
-
-dir.glob = glob
-
---~ list = dir.glob("**/*.tif")
---~ list = dir.glob("/**/*.tif")
---~ list = dir.glob("./**/*.tif")
---~ list = dir.glob("oeps/**/*.tif")
---~ list = dir.glob("/oeps/**/*.tif")
-
-local function globfiles(path,recurse,func,files) -- func == pattern or function
- if type(func) == "string" then
- local s = func
- func = function(name) return find(name,s) end
- end
- files = files or { }
- local noffiles = #files
- for name in walkdir(path) do
- if find(name,"^%.") then
- --- skip
- else
- local mode = attributes(name,'mode')
- if mode == "directory" then
- if recurse then
- globfiles(path .. "/" .. name,recurse,func,files)
- end
- elseif mode == "file" then
- if not func or func(name) then
- noffiles = noffiles + 1
- files[noffiles] = path .. "/" .. name
- end
- end
- end
- end
- return files
-end
-
-dir.globfiles = globfiles
-
--- t = dir.glob("c:/data/develop/context/sources/**/????-*.tex")
--- t = dir.glob("c:/data/develop/tex/texmf/**/*.tex")
--- t = dir.glob("c:/data/develop/context/texmf/**/*.tex")
--- t = dir.glob("f:/minimal/tex/**/*")
--- print(dir.ls("f:/minimal/tex/**/*"))
--- print(dir.ls("*.tex"))
-
-function dir.ls(pattern)
- return concat(glob(pattern),"\n")
-end
-
---~ mkdirs("temp")
---~ mkdirs("a/b/c")
---~ mkdirs(".","/a/b/c")
---~ mkdirs("a","b","c")
-
-local make_indeed = true -- false
-
-local onwindows = os.type == "windows" or find(os.getenv("PATH"),";")
-
-if onwindows then
-
- function dir.mkdirs(...)
- local str, pth = "", ""
- for i=1,select("#",...) do
- local s = select(i,...)
- if s == "" then
- -- skip
- elseif str == "" then
- str = s
- else
- str = str .. "/" .. s
- end
- end
- local first, middle, last
- local drive = false
- first, middle, last = match(str,"^(//)(//*)(.*)$")
- if first then
- -- empty network path == local path
- else
- first, last = match(str,"^(//)/*(.-)$")
- if first then
- middle, last = match(str,"([^/]+)/+(.-)$")
- if middle then
- pth = "//" .. middle
- else
- pth = "//" .. last
- last = ""
- end
- else
- first, middle, last = match(str,"^([a-zA-Z]:)(/*)(.-)$")
- if first then
- pth, drive = first .. middle, true
- else
- middle, last = match(str,"^(/*)(.-)$")
- if not middle then
- last = str
- end
- end
- end
- end
- for s in gmatch(last,"[^/]+") do
- if pth == "" then
- pth = s
- elseif drive then
- pth, drive = pth .. s, false
- else
- pth = pth .. "/" .. s
- end
- if make_indeed and not isdir(pth) then
- lfs.mkdir(pth)
- end
- end
- return pth, (isdir(pth) == true)
- end
-
- --~ print(dir.mkdirs("","","a","c"))
- --~ print(dir.mkdirs("a"))
- --~ print(dir.mkdirs("a:"))
- --~ print(dir.mkdirs("a:/b/c"))
- --~ print(dir.mkdirs("a:b/c"))
- --~ print(dir.mkdirs("a:/bbb/c"))
- --~ print(dir.mkdirs("/a/b/c"))
- --~ print(dir.mkdirs("/aaa/b/c"))
- --~ print(dir.mkdirs("//a/b/c"))
- --~ print(dir.mkdirs("///a/b/c"))
- --~ print(dir.mkdirs("a/bbb//ccc/"))
-
-else
-
- function dir.mkdirs(...)
- local str, pth = "", ""
- for i=1,select("#",...) do
- local s = select(i,...)
- if s and s ~= "" then -- we catch nil and false
- if str ~= "" then
- str = str .. "/" .. s
- else
- str = s
- end
- end
- end
- str = gsub(str,"/+","/")
- if find(str,"^/") then
- pth = "/"
- for s in gmatch(str,"[^/]+") do
- local first = (pth == "/")
- if first then
- pth = pth .. s
- else
- pth = pth .. "/" .. s
- end
- if make_indeed and not first and not isdir(pth) then
- lfs.mkdir(pth)
- end
- end
- else
- pth = "."
- for s in gmatch(str,"[^/]+") do
- pth = pth .. "/" .. s
- if make_indeed and not isdir(pth) then
- lfs.mkdir(pth)
- end
- end
- end
- return pth, (isdir(pth) == true)
- end
-
- --~ print(dir.mkdirs("","","a","c"))
- --~ print(dir.mkdirs("a"))
- --~ print(dir.mkdirs("/a/b/c"))
- --~ print(dir.mkdirs("/aaa/b/c"))
- --~ print(dir.mkdirs("//a/b/c"))
- --~ print(dir.mkdirs("///a/b/c"))
- --~ print(dir.mkdirs("a/bbb//ccc/"))
-
-end
-
-dir.makedirs = dir.mkdirs
-
--- we can only define it here as it uses dir.current
-
-if onwindows then
-
- function dir.expandname(str) -- will be merged with cleanpath and collapsepath
- local first, nothing, last = match(str,"^(//)(//*)(.*)$")
- if first then
- first = dir.current() .. "/" -- dir.current sanitizes
- end
- if not first then
- first, last = match(str,"^(//)/*(.*)$")
- end
- if not first then
- first, last = match(str,"^([a-zA-Z]:)(.*)$")
- if first and not find(last,"^/") then
- local d = currentdir()
- if chdir(first) then
- first = dir.current()
- end
- chdir(d)
- end
- end
- if not first then
- first, last = dir.current(), str
- end
- last = gsub(last,"//","/")
- last = gsub(last,"/%./","/")
- last = gsub(last,"^/*","")
- first = gsub(first,"/*$","")
- if last == "" or last == "." then
- return first
- else
- return first .. "/" .. last
- end
- end
-
-else
-
- function dir.expandname(str) -- will be merged with cleanpath and collapsepath
- if not find(str,"^/") then
- str = currentdir() .. "/" .. str
- end
- str = gsub(str,"//","/")
- str = gsub(str,"/%./","/")
- str = gsub(str,"(.)/%.$","%1")
- return str
- end
-
-end
-
-file.expandname = dir.expandname -- for convenience
-
-local stack = { }
-
-function dir.push(newdir)
- insert(stack,currentdir())
- if newdir and newdir ~= "" then
- chdir(newdir)
- end
-end
-
-function dir.pop()
- local d = remove(stack)
- if d then
- chdir(d)
- end
- return d
-end
-
-local function found(...) -- can have nil entries
- for i=1,select("#",...) do
- local path = select(i,...)
- local kind = type(path)
- if kind == "string" then
- if isdir(path) then
- return path
- end
- elseif kind == "table" then
- -- here we assume no holes, i.e. an indexed table
- local path = found(unpack(path))
- if path then
- return path
- end
- end
- end
- -- return nil -- if we want print("crappath") to show something
-end
-
-dir.found = found
+if not modules then modules = { } end modules ['l-dir'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- dir.expandname will be merged with cleanpath and collapsepath
+
+local type, select = type, select
+local find, gmatch, match, gsub = string.find, string.gmatch, string.match, string.gsub
+local concat, insert, remove, unpack = table.concat, table.insert, table.remove, table.unpack
+local lpegmatch = lpeg.match
+
+local P, S, R, C, Cc, Cs, Ct, Cv, V = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Cv, lpeg.V
+
+dir = dir or { }
+local dir = dir
+local lfs = lfs
+
+local attributes = lfs.attributes
+local walkdir = lfs.dir
+local isdir = lfs.isdir
+local isfile = lfs.isfile
+local currentdir = lfs.currentdir
+local chdir = lfs.chdir
+
+-- in case we load outside luatex
+
+if not isdir then
+ function isdir(name)
+ local a = attributes(name)
+ return a and a.mode == "directory"
+ end
+ lfs.isdir = isdir
+end
+
+if not isfile then
+ function isfile(name)
+ local a = attributes(name)
+ return a and a.mode == "file"
+ end
+ lfs.isfile = isfile
+end
+
+-- handy
+
+function dir.current()
+ return (gsub(currentdir(),"\\","/"))
+end
+
+-- optimizing for no find (*) does not save time
+
+--~ local function globpattern(path,patt,recurse,action) -- fails in recent luatex due to some change in lfs
+--~ local ok, scanner
+--~ if path == "/" then
+--~ ok, scanner = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+--~ else
+--~ ok, scanner = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+--~ end
+--~ if ok and type(scanner) == "function" then
+--~ if not find(path,"/$") then path = path .. '/' end
+--~ for name in scanner do
+--~ local full = path .. name
+--~ local mode = attributes(full,'mode')
+--~ if mode == 'file' then
+--~ if find(full,patt) then
+--~ action(full)
+--~ end
+--~ elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
+--~ globpattern(full,patt,recurse,action)
+--~ end
+--~ end
+--~ end
+--~ end
+
+local lfsisdir = isdir
+
+local function isdir(path)
+ path = gsub(path,"[/\\]+$","")
+ return lfsisdir(path)
+end
+
+lfs.isdir = isdir
+
+local function globpattern(path,patt,recurse,action)
+ if path == "/" then
+ path = path .. "."
+ elseif not find(path,"/$") then
+ path = path .. '/'
+ end
+ if isdir(path) then -- lfs.isdir does not like trailing /
+ for name in walkdir(path) do -- lfs.dir accepts trailing /
+ local full = path .. name
+ local mode = attributes(full,'mode')
+ if mode == 'file' then
+ if find(full,patt) then
+ action(full)
+ end
+ elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
+ globpattern(full,patt,recurse,action)
+ end
+ end
+ end
+end
+
+dir.globpattern = globpattern
+
+local function collectpattern(path,patt,recurse,result)
+ local ok, scanner
+ result = result or { }
+ if path == "/" then
+ ok, scanner, first = xpcall(function() return walkdir(path..".") end, function() end) -- kepler safe
+ else
+ ok, scanner, first = xpcall(function() return walkdir(path) end, function() end) -- kepler safe
+ end
+ if ok and type(scanner) == "function" then
+ if not find(path,"/$") then path = path .. '/' end
+ for name in scanner, first do
+ local full = path .. name
+ local attr = attributes(full)
+ local mode = attr.mode
+ if mode == 'file' then
+ if find(full,patt) then
+ result[name] = attr
+ end
+ elseif recurse and (mode == "directory") and (name ~= '.') and (name ~= "..") then
+ attr.list = collectpattern(full,patt,recurse)
+ result[name] = attr
+ end
+ end
+ end
+ return result
+end
+
+dir.collectpattern = collectpattern
+
+local pattern = Ct {
+ [1] = (C(P(".") + P("/")^1) + C(R("az","AZ") * P(":") * P("/")^0) + Cc("./")) * V(2) * V(3),
+ [2] = C(((1-S("*?/"))^0 * P("/"))^0),
+ [3] = C(P(1)^0)
+}
+
+local filter = Cs ( (
+ P("**") / ".*" +
+ P("*") / "[^/]*" +
+ P("?") / "[^/]" +
+ P(".") / "%%." +
+ P("+") / "%%+" +
+ P("-") / "%%-" +
+ P(1)
+)^0 )
+
+local function glob(str,t)
+ if type(t) == "function" then
+ if type(str) == "table" then
+ for s=1,#str do
+ glob(str[s],t)
+ end
+ elseif isfile(str) then
+ t(str)
+ else
+ local split = lpegmatch(pattern,str) -- we could use the file splitter
+ if split then
+ local root, path, base = split[1], split[2], split[3]
+ local recurse = find(base,"%*%*")
+ local start = root .. path
+ local result = lpegmatch(filter,start .. base)
+ globpattern(start,result,recurse,t)
+ end
+ end
+ else
+ if type(str) == "table" then
+ local t = t or { }
+ for s=1,#str do
+ glob(str[s],t)
+ end
+ return t
+ elseif isfile(str) then
+ if t then
+ t[#t+1] = str
+ return t
+ else
+ return { str }
+ end
+ else
+ local split = lpegmatch(pattern,str) -- we could use the file splitter
+ if split then
+ local t = t or { }
+ local action = action or function(name) t[#t+1] = name end
+ local root, path, base = split[1], split[2], split[3]
+ local recurse = find(base,"%*%*")
+ local start = root .. path
+ local result = lpegmatch(filter,start .. base)
+ globpattern(start,result,recurse,action)
+ return t
+ else
+ return { }
+ end
+ end
+ end
+end
+
+dir.glob = glob
+
+--~ list = dir.glob("**/*.tif")
+--~ list = dir.glob("/**/*.tif")
+--~ list = dir.glob("./**/*.tif")
+--~ list = dir.glob("oeps/**/*.tif")
+--~ list = dir.glob("/oeps/**/*.tif")
+
+local function globfiles(path,recurse,func,files) -- func == pattern or function
+ if type(func) == "string" then
+ local s = func
+ func = function(name) return find(name,s) end
+ end
+ files = files or { }
+ local noffiles = #files
+ for name in walkdir(path) do
+ if find(name,"^%.") then
+ --- skip
+ else
+ local mode = attributes(name,'mode')
+ if mode == "directory" then
+ if recurse then
+ globfiles(path .. "/" .. name,recurse,func,files)
+ end
+ elseif mode == "file" then
+ if not func or func(name) then
+ noffiles = noffiles + 1
+ files[noffiles] = path .. "/" .. name
+ end
+ end
+ end
+ end
+ return files
+end
+
+dir.globfiles = globfiles
+
+-- t = dir.glob("c:/data/develop/context/sources/**/????-*.tex")
+-- t = dir.glob("c:/data/develop/tex/texmf/**/*.tex")
+-- t = dir.glob("c:/data/develop/context/texmf/**/*.tex")
+-- t = dir.glob("f:/minimal/tex/**/*")
+-- print(dir.ls("f:/minimal/tex/**/*"))
+-- print(dir.ls("*.tex"))
+
+function dir.ls(pattern)
+ return concat(glob(pattern),"\n")
+end
+
+--~ mkdirs("temp")
+--~ mkdirs("a/b/c")
+--~ mkdirs(".","/a/b/c")
+--~ mkdirs("a","b","c")
+
+local make_indeed = true -- false
+
+local onwindows = os.type == "windows" or find(os.getenv("PATH"),";")
+
+if onwindows then
+
+ function dir.mkdirs(...)
+ local str, pth = "", ""
+ for i=1,select("#",...) do
+ local s = select(i,...)
+ if s == "" then
+ -- skip
+ elseif str == "" then
+ str = s
+ else
+ str = str .. "/" .. s
+ end
+ end
+ local first, middle, last
+ local drive = false
+ first, middle, last = match(str,"^(//)(//*)(.*)$")
+ if first then
+ -- empty network path == local path
+ else
+ first, last = match(str,"^(//)/*(.-)$")
+ if first then
+ middle, last = match(str,"([^/]+)/+(.-)$")
+ if middle then
+ pth = "//" .. middle
+ else
+ pth = "//" .. last
+ last = ""
+ end
+ else
+ first, middle, last = match(str,"^([a-zA-Z]:)(/*)(.-)$")
+ if first then
+ pth, drive = first .. middle, true
+ else
+ middle, last = match(str,"^(/*)(.-)$")
+ if not middle then
+ last = str
+ end
+ end
+ end
+ end
+ for s in gmatch(last,"[^/]+") do
+ if pth == "" then
+ pth = s
+ elseif drive then
+ pth, drive = pth .. s, false
+ else
+ pth = pth .. "/" .. s
+ end
+ if make_indeed and not isdir(pth) then
+ lfs.mkdir(pth)
+ end
+ end
+ return pth, (isdir(pth) == true)
+ end
+
+ --~ print(dir.mkdirs("","","a","c"))
+ --~ print(dir.mkdirs("a"))
+ --~ print(dir.mkdirs("a:"))
+ --~ print(dir.mkdirs("a:/b/c"))
+ --~ print(dir.mkdirs("a:b/c"))
+ --~ print(dir.mkdirs("a:/bbb/c"))
+ --~ print(dir.mkdirs("/a/b/c"))
+ --~ print(dir.mkdirs("/aaa/b/c"))
+ --~ print(dir.mkdirs("//a/b/c"))
+ --~ print(dir.mkdirs("///a/b/c"))
+ --~ print(dir.mkdirs("a/bbb//ccc/"))
+
+else
+
+ function dir.mkdirs(...)
+ local str, pth = "", ""
+ for i=1,select("#",...) do
+ local s = select(i,...)
+ if s and s ~= "" then -- we catch nil and false
+ if str ~= "" then
+ str = str .. "/" .. s
+ else
+ str = s
+ end
+ end
+ end
+ str = gsub(str,"/+","/")
+ if find(str,"^/") then
+ pth = "/"
+ for s in gmatch(str,"[^/]+") do
+ local first = (pth == "/")
+ if first then
+ pth = pth .. s
+ else
+ pth = pth .. "/" .. s
+ end
+ if make_indeed and not first and not isdir(pth) then
+ lfs.mkdir(pth)
+ end
+ end
+ else
+ pth = "."
+ for s in gmatch(str,"[^/]+") do
+ pth = pth .. "/" .. s
+ if make_indeed and not isdir(pth) then
+ lfs.mkdir(pth)
+ end
+ end
+ end
+ return pth, (isdir(pth) == true)
+ end
+
+ --~ print(dir.mkdirs("","","a","c"))
+ --~ print(dir.mkdirs("a"))
+ --~ print(dir.mkdirs("/a/b/c"))
+ --~ print(dir.mkdirs("/aaa/b/c"))
+ --~ print(dir.mkdirs("//a/b/c"))
+ --~ print(dir.mkdirs("///a/b/c"))
+ --~ print(dir.mkdirs("a/bbb//ccc/"))
+
+end
+
+dir.makedirs = dir.mkdirs
+
+-- we can only define it here as it uses dir.current
+
+if onwindows then
+
+ function dir.expandname(str) -- will be merged with cleanpath and collapsepath
+ local first, nothing, last = match(str,"^(//)(//*)(.*)$")
+ if first then
+ first = dir.current() .. "/" -- dir.current sanitizes
+ end
+ if not first then
+ first, last = match(str,"^(//)/*(.*)$")
+ end
+ if not first then
+ first, last = match(str,"^([a-zA-Z]:)(.*)$")
+ if first and not find(last,"^/") then
+ local d = currentdir()
+ if chdir(first) then
+ first = dir.current()
+ end
+ chdir(d)
+ end
+ end
+ if not first then
+ first, last = dir.current(), str
+ end
+ last = gsub(last,"//","/")
+ last = gsub(last,"/%./","/")
+ last = gsub(last,"^/*","")
+ first = gsub(first,"/*$","")
+ if last == "" or last == "." then
+ return first
+ else
+ return first .. "/" .. last
+ end
+ end
+
+else
+
+ function dir.expandname(str) -- will be merged with cleanpath and collapsepath
+ if not find(str,"^/") then
+ str = currentdir() .. "/" .. str
+ end
+ str = gsub(str,"//","/")
+ str = gsub(str,"/%./","/")
+ str = gsub(str,"(.)/%.$","%1")
+ return str
+ end
+
+end
+
+file.expandname = dir.expandname -- for convenience
+
+local stack = { }
+
+function dir.push(newdir)
+ insert(stack,currentdir())
+ if newdir and newdir ~= "" then
+ chdir(newdir)
+ end
+end
+
+function dir.pop()
+ local d = remove(stack)
+ if d then
+ chdir(d)
+ end
+ return d
+end
+
+local function found(...) -- can have nil entries
+ for i=1,select("#",...) do
+ local path = select(i,...)
+ local kind = type(path)
+ if kind == "string" then
+ if isdir(path) then
+ return path
+ end
+ elseif kind == "table" then
+ -- here we assume no holes, i.e. an indexed table
+ local path = found(unpack(path))
+ if path then
+ return path
+ end
+ end
+ end
+ -- return nil -- if we want print("crappath") to show something
+end
+
+dir.found = found
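A short usage sketch of the helpers defined above; the paths are made up:

-- recursive globbing, either collected in a table or streamed to a callback
local files = dir.glob("sources/**/*.lua")
dir.glob("sources/**/*.lua",function(name)
    print(name)
end)

-- create nested directories in one go; returns the path and whether it exists
local path, made = dir.mkdirs("build","tmp","cache")

-- temporarily work somewhere else and come back
dir.push(path)
-- ... do some work in build/tmp/cache ...
dir.pop()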
diff --git a/tex/context/base/l-file.lua b/tex/context/base/l-file.lua
index f25490749..2e47a3d1f 100644
--- a/tex/context/base/l-file.lua
+++ b/tex/context/base/l-file.lua
@@ -1,590 +1,590 @@
-if not modules then modules = { } end modules ['l-file'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- needs a cleanup
-
-file = file or { }
-local file = file
-
-if not lfs then
- lfs = optionalrequire("lfs")
-end
-
-if not lfs then
-
- lfs = {
- getcurrentdir = function()
- return "."
- end,
- attributes = function()
- return nil
- end,
- isfile = function(name)
- local f = io.open(name,'rb')
- if f then
- f:close()
- return true
- end
- end,
- isdir = function(name)
- print("you need to load lfs")
- return false
- end
- }
-
-elseif not lfs.isfile then
-
- local attributes = lfs.attributes
-
- function lfs.isdir(name)
- return attributes(name,"mode") == "directory"
- end
-
- function lfs.isfile(name)
- return attributes(name,"mode") == "file"
- end
-
- -- function lfs.isdir(name)
- -- local a = attributes(name)
- -- return a and a.mode == "directory"
- -- end
-
- -- function lfs.isfile(name)
- -- local a = attributes(name)
- -- return a and a.mode == "file"
- -- end
-
-end
-
-local insert, concat = table.insert, table.concat
-local match, find, gmatch = string.match, string.find, string.gmatch
-local lpegmatch = lpeg.match
-local getcurrentdir, attributes = lfs.currentdir, lfs.attributes
-local checkedsplit = string.checkedsplit
-
--- local patterns = file.patterns or { }
--- file.patterns = patterns
-
-local P, R, S, C, Cs, Cp, Cc, Ct = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Cp, lpeg.Cc, lpeg.Ct
-
-local colon = P(":")
-local period = P(".")
-local periods = P("..")
-local fwslash = P("/")
-local bwslash = P("\\")
-local slashes = S("\\/")
-local noperiod = 1-period
-local noslashes = 1-slashes
-local name = noperiod^1
-local suffix = period/"" * (1-period-slashes)^1 * -1
-
------ pattern = C((noslashes^0 * slashes^1)^1)
-local pattern = C((1 - (slashes^1 * noslashes^1 * -1))^1) * P(1) -- there must be a more efficient way
-
-local function pathpart(name,default)
- return name and lpegmatch(pattern,name) or default or ""
-end
-
-local pattern = (noslashes^0 * slashes)^1 * C(noslashes^1) * -1
-
-local function basename(name)
- return name and lpegmatch(pattern,name) or name
-end
-
--- print(pathpart("file"))
--- print(pathpart("dir/file"))
--- print(pathpart("/dir/file"))
--- print(basename("file"))
--- print(basename("dir/file"))
--- print(basename("/dir/file"))
-
-local pattern = (noslashes^0 * slashes^1)^0 * Cs((1-suffix)^1) * suffix^0
-
-local function nameonly(name)
- return name and lpegmatch(pattern,name) or name
-end
-
-local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * C(noperiod^1) * -1
-
-local function suffixonly(name)
- return name and lpegmatch(pattern,name) or ""
-end
-
-file.pathpart = pathpart
-file.basename = basename
-file.nameonly = nameonly
-file.suffixonly = suffixonly
-file.suffix = suffixonly
-
-file.dirname = pathpart -- obsolete
-file.extname = suffixonly -- obsolete
-
--- actually these are schemes
-
-local drive = C(R("az","AZ")) * colon
-local path = C((noslashes^0 * slashes)^0)
-local suffix = period * C(P(1-period)^0 * P(-1))
-local base = C((1-suffix)^0)
-local rest = C(P(1)^0)
-
-drive = drive + Cc("")
-path = path + Cc("")
-base = base + Cc("")
-suffix = suffix + Cc("")
-
-local pattern_a = drive * path * base * suffix
-local pattern_b = path * base * suffix
-local pattern_c = C(drive * path) * C(base * suffix) -- trick: two extra captures
-local pattern_d = path * rest
-
-function file.splitname(str,splitdrive)
- if not str then
- -- error
- elseif splitdrive then
- return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
- else
- return lpegmatch(pattern_b,str) -- returns path, base, suffix
- end
-end
-
-function file.splitbase(str)
- if str then
- return lpegmatch(pattern_d,str) -- returns path, base+suffix (path has / appended, might change at some point)
- else
- return "", str -- assume no path
- end
-end
-
----- stripslash = C((1 - P("/")^1*P(-1))^0)
-
-function file.nametotable(str,splitdrive)
- if str then
- local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
- -- if path ~= "" then
- -- path = lpegmatch(stripslash,path) -- unfortunate hack, maybe this becomes default
- -- end
- if splitdrive then
- return {
- path = path,
- drive = drive,
- subpath = subpath,
- name = name,
- base = base,
- suffix = suffix,
- }
- else
- return {
- path = path,
- name = name,
- base = base,
- suffix = suffix,
- }
- end
- end
-end
-
--- print(file.splitname("file"))
--- print(file.splitname("dir/file"))
--- print(file.splitname("/dir/file"))
--- print(file.splitname("file"))
--- print(file.splitname("dir/file"))
--- print(file.splitname("/dir/file"))
-
--- inspect(file.nametotable("file.ext"))
--- inspect(file.nametotable("dir/file.ext"))
--- inspect(file.nametotable("/dir/file.ext"))
--- inspect(file.nametotable("file.ext"))
--- inspect(file.nametotable("dir/file.ext"))
--- inspect(file.nametotable("/dir/file.ext"))
-
------ pattern = Cs(((period * noperiod^1 * -1) / "" + 1)^1)
-local pattern = Cs(((period * (1-period-slashes)^1 * -1) / "" + 1)^1)
-
-function file.removesuffix(name)
- return name and lpegmatch(pattern,name)
-end
-
--- local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * Cp() * noperiod^1 * -1
---
--- function file.addsuffix(name, suffix)
--- local p = lpegmatch(pattern,name)
--- if p then
--- return name
--- else
--- return name .. "." .. suffix
--- end
--- end
-
-local suffix = period/"" * (1-period-slashes)^1 * -1
-local pattern = Cs((noslashes^0 * slashes^1)^0 * ((1-suffix)^1)) * Cs(suffix)
-
-function file.addsuffix(filename,suffix,criterium)
- if not filename or not suffix or suffix == "" then
- return filename
- elseif criterium == true then
- return filename .. "." .. suffix
- elseif not criterium then
- local n, s = lpegmatch(pattern,filename)
- if not s or s == "" then
- return filename .. "." .. suffix
- else
- return filename
- end
- else
- local n, s = lpegmatch(pattern,filename)
- if s and s ~= "" then
- local t = type(criterium)
- if t == "table" then
- -- keep if in criterium
- for i=1,#criterium do
- if s == criterium[i] then
- return filename
- end
- end
- elseif t == "string" then
- -- keep if criterium
- if s == criterium then
- return filename
- end
- end
- end
- return (n or filename) .. "." .. suffix
- end
-end
-
--- print("1 " .. file.addsuffix("name","new") .. " -> name.new")
--- print("2 " .. file.addsuffix("name.old","new") .. " -> name.old")
--- print("3 " .. file.addsuffix("name.old","new",true) .. " -> name.old.new")
--- print("4 " .. file.addsuffix("name.old","new","new") .. " -> name.new")
--- print("5 " .. file.addsuffix("name.old","new","old") .. " -> name.old")
--- print("6 " .. file.addsuffix("name.old","new","foo") .. " -> name.new")
--- print("7 " .. file.addsuffix("name.old","new",{"foo","bar"}) .. " -> name.new")
--- print("8 " .. file.addsuffix("name.old","new",{"old","bar"}) .. " -> name.old")
-
-local suffix = period * (1-period-slashes)^1 * -1
-local pattern = Cs((1-suffix)^0)
-
-function file.replacesuffix(name,suffix)
- if name and suffix and suffix ~= "" then
- return lpegmatch(pattern,name) .. "." .. suffix
- else
- return name
- end
-end
-
---
-
-local reslasher = lpeg.replacer(P("\\"),"/")
-
-function file.reslash(str)
- return str and lpegmatch(reslasher,str)
-end
-
--- We should be able to use:
---
--- local writable = P(1) * P("w") * Cc(true)
---
--- function file.is_writable(name)
--- local a = attributes(name) or attributes(pathpart(name,"."))
--- return a and lpegmatch(writable,a.permissions) or false
--- end
---
--- But after some testing Taco and I came up with the more robust
--- variant:
-
-function file.is_writable(name)
- if not name then
- -- error
- elseif lfs.isdir(name) then
- name = name .. "/m_t_x_t_e_s_t.tmp"
- local f = io.open(name,"wb")
- if f then
- f:close()
- os.remove(name)
- return true
- end
- elseif lfs.isfile(name) then
- local f = io.open(name,"ab")
- if f then
- f:close()
- return true
- end
- else
- local f = io.open(name,"ab")
- if f then
- f:close()
- os.remove(name)
- return true
- end
- end
- return false
-end
-
-local readable = P("r") * Cc(true)
-
-function file.is_readable(name)
- if name then
- local a = attributes(name)
- return a and lpegmatch(readable,a.permissions) or false
- else
- return false
- end
-end
-
-file.isreadable = file.is_readable -- deprecated
-file.iswritable = file.is_writable -- deprecated
-
-function file.size(name)
- if name then
- local a = attributes(name)
- return a and a.size or 0
- else
- return 0
- end
-end
-
-function file.splitpath(str,separator) -- string .. reslash is a bonus (we could do a direct split)
- return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator)
-end
-
-function file.joinpath(tab,separator) -- table
- return tab and concat(tab,separator or io.pathseparator) -- can have trailing //
-end
-
-local stripper = Cs(P(fwslash)^0/"" * reslasher)
-local isnetwork = fwslash * fwslash * (1-fwslash) + (1-fwslash-colon)^1 * colon
-local isroot = fwslash^1 * -1
-local hasroot = fwslash^1
-
-local deslasher = lpeg.replacer(S("\\/")^1,"/")
-
--- If we have a network or prefix then there is a chance that we end up with two
--- // in the middle ... we could prevent this if we (1) expand prefixes: and (2)
--- split and rebuild as url. Of course we could assume no network paths (which
--- makes sense) and assume either mapped drives (windows) or mounts (unix) but
--- then we still have to deal with urls ... anyhow, multiple // are never a real
--- problem but just ugly.
-
-function file.join(...)
- local lst = { ... }
- local one = lst[1]
- if lpegmatch(isnetwork,one) then
- local two = lpegmatch(deslasher,concat(lst,"/",2))
- return one .. "/" .. two
- elseif lpegmatch(isroot,one) then
- local two = lpegmatch(deslasher,concat(lst,"/",2))
- if lpegmatch(hasroot,two) then
- return two
- else
- return "/" .. two
- end
- elseif one == "" then
- return lpegmatch(stripper,concat(lst,"/",2))
- else
- return lpegmatch(deslasher,concat(lst,"/"))
- end
-end
-
--- print(file.join("c:/whatever","name"))
--- print(file.join("//","/y"))
--- print(file.join("/","/y"))
--- print(file.join("","/y"))
--- print(file.join("/x/","/y"))
--- print(file.join("x/","/y"))
--- print(file.join("http://","/y"))
--- print(file.join("http://a","/y"))
--- print(file.join("http:///a","/y"))
--- print(file.join("//nas-1","/y"))
-
--- The previous one fails on "a.b/c" so Taco came up with a split based
--- variant. After some skyping we got it sort of compatible with the old
--- one. After that the anchoring to currentdir was added in a better way.
--- Of course there are some optimizations too. Finally we had to deal with
--- windows drive prefixes and things like sys://. Eventually gsubs and
--- finds were replaced by lpegs.
-
-local drivespec = R("az","AZ")^1 * colon
-local anchors = fwslash + drivespec
-local untouched = periods + (1-period)^1 * P(-1)
-local splitstarter = (Cs(drivespec * (bwslash/"/" + fwslash)^0) + Cc(false)) * Ct(lpeg.splitat(S("/\\")^1))
-local absolute = fwslash
-
-function file.collapsepath(str,anchor) -- anchor: false|nil, true, "."
- if not str then
- return
- end
- if anchor == true and not lpegmatch(anchors,str) then
- str = getcurrentdir() .. "/" .. str
- end
- if str == "" or str =="." then
- return "."
- elseif lpegmatch(untouched,str) then
- return lpegmatch(reslasher,str)
- end
- local starter, oldelements = lpegmatch(splitstarter,str)
- local newelements = { }
- local i = #oldelements
- while i > 0 do
- local element = oldelements[i]
- if element == '.' then
- -- do nothing
- elseif element == '..' then
- local n = i - 1
- while n > 0 do
- local element = oldelements[n]
- if element ~= '..' and element ~= '.' then
- oldelements[n] = '.'
- break
- else
- n = n - 1
- end
- end
- if n < 1 then
- insert(newelements,1,'..')
- end
- elseif element ~= "" then
- insert(newelements,1,element)
- end
- i = i - 1
- end
- if #newelements == 0 then
- return starter or "."
- elseif starter then
- return starter .. concat(newelements, '/')
- elseif lpegmatch(absolute,str) then
- return "/" .. concat(newelements,'/')
- else
- newelements = concat(newelements, '/')
- if anchor == "." and find(str,"^%./") then
- return "./" .. newelements
- else
- return newelements
- end
- end
-end
-
--- local function test(str,...)
--- print(string.format("%-20s %-15s %-30s %-20s",str,file.collapsepath(str),file.collapsepath(str,true),file.collapsepath(str,".")))
--- end
--- test("a/b.c/d") test("b.c/d") test("b.c/..")
--- test("/") test("c:/..") test("sys://..")
--- test("") test("./") test(".") test("..") test("./..") test("../..")
--- test("a") test("./a") test("/a") test("a/../..")
--- test("a/./b/..") test("a/aa/../b/bb") test("a/.././././b/..") test("a/./././b/..")
--- test("a/b/c/../..") test("./a/b/c/../..") test("a/b/c/../..")
--- test("./a")
-
-local validchars = R("az","09","AZ","--","..")
-local pattern_a = lpeg.replacer(1-validchars)
-local pattern_a = Cs((validchars + P(1)/"-")^1)
-local whatever = P("-")^0 / ""
-local pattern_b = Cs(whatever * (1 - whatever * -1)^1)
-
-function file.robustname(str,strict)
- if str then
- str = lpegmatch(pattern_a,str) or str
- if strict then
- return lpegmatch(pattern_b,str) or str -- two step is cleaner (less backtracking)
- else
- return str
- end
- end
-end
-
-file.readdata = io.loaddata
-file.savedata = io.savedata
-
-function file.copy(oldname,newname)
- if oldname and newname then
- local data = io.loaddata(oldname)
- if data and data ~= "" then
- file.savedata(newname,data)
- end
- end
-end
-
--- also rewrite previous
-
-local letter = R("az","AZ") + S("_-+")
-local separator = P("://")
-
-local qualified = period^0 * fwslash
- + letter * colon
- + letter^1 * separator
- + letter^1 * fwslash
-local rootbased = fwslash
- + letter * colon
-
-lpeg.patterns.qualified = qualified
-lpeg.patterns.rootbased = rootbased
-
--- ./name ../name /name c: :// name/name
-
-function file.is_qualified_path(filename)
- return filename and lpegmatch(qualified,filename) ~= nil
-end
-
-function file.is_rootbased_path(filename)
- return filename and lpegmatch(rootbased,filename) ~= nil
-end
-
--- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
---
--- test { "c:", "c:/aa", "c:/aa/bb", "c:/aa/bb/cc", "c:/aa/bb/cc.dd", "c:/aa/bb/cc.dd.ee" }
--- test { "c:", "c:aa", "c:aa/bb", "c:aa/bb/cc", "c:aa/bb/cc.dd", "c:aa/bb/cc.dd.ee" }
--- test { "/aa", "/aa/bb", "/aa/bb/cc", "/aa/bb/cc.dd", "/aa/bb/cc.dd.ee" }
--- test { "aa", "aa/bb", "aa/bb/cc", "aa/bb/cc.dd", "aa/bb/cc.dd.ee" }
-
--- -- maybe:
---
--- if os.type == "windows" then
--- local currentdir = getcurrentdir
--- function getcurrentdir()
--- return lpegmatch(reslasher,currentdir())
--- end
--- end
-
--- for myself:
-
-function file.strip(name,dir)
- if name then
- local b, a = match(name,"^(.-)" .. dir .. "(.*)$")
- return a ~= "" and a or name
- end
-end
-
--- local debuglist = {
--- "pathpart", "basename", "nameonly", "suffixonly", "suffix", "dirname", "extname",
--- "addsuffix", "removesuffix", "replacesuffix", "join",
--- "strip","collapsepath", "joinpath", "splitpath",
--- }
-
--- for i=1,#debuglist do
--- local name = debuglist[i]
--- local f = file[name]
--- file[name] = function(...)
--- print(name,f(...))
--- return f(...)
--- end
--- end
-
--- a goodie: a dumb version of mkdirs:
-
-function lfs.mkdirs(path)
- local full
- for sub in gmatch(path,"([^\\/]+)") do
- if full then
- full = full .. "/" .. sub
- else
- full = sub
- end
- if not lfs.isdir(full) then
- lfs.mkdir(full)
- end
- end
-end
+if not modules then modules = { } end modules ['l-file'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- needs a cleanup
+
+file = file or { }
+local file = file
+
+if not lfs then
+ lfs = optionalrequire("lfs")
+end
+
+if not lfs then
+
+ lfs = {
+ getcurrentdir = function()
+ return "."
+ end,
+ attributes = function()
+ return nil
+ end,
+ isfile = function(name)
+ local f = io.open(name,'rb')
+ if f then
+ f:close()
+ return true
+ end
+ end,
+ isdir = function(name)
+ print("you need to load lfs")
+ return false
+ end
+ }
+
+elseif not lfs.isfile then
+
+ local attributes = lfs.attributes
+
+ function lfs.isdir(name)
+ return attributes(name,"mode") == "directory"
+ end
+
+ function lfs.isfile(name)
+ return attributes(name,"mode") == "file"
+ end
+
+ -- function lfs.isdir(name)
+ -- local a = attributes(name)
+ -- return a and a.mode == "directory"
+ -- end
+
+ -- function lfs.isfile(name)
+ -- local a = attributes(name)
+ -- return a and a.mode == "file"
+ -- end
+
+end
+
+local insert, concat = table.insert, table.concat
+local match, find, gmatch = string.match, string.find, string.gmatch
+local lpegmatch = lpeg.match
+local getcurrentdir, attributes = lfs.currentdir, lfs.attributes
+local checkedsplit = string.checkedsplit
+
+-- local patterns = file.patterns or { }
+-- file.patterns = patterns
+
+local P, R, S, C, Cs, Cp, Cc, Ct = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Cp, lpeg.Cc, lpeg.Ct
+
+local colon = P(":")
+local period = P(".")
+local periods = P("..")
+local fwslash = P("/")
+local bwslash = P("\\")
+local slashes = S("\\/")
+local noperiod = 1-period
+local noslashes = 1-slashes
+local name = noperiod^1
+local suffix = period/"" * (1-period-slashes)^1 * -1
+
+----- pattern = C((noslashes^0 * slashes^1)^1)
+local pattern = C((1 - (slashes^1 * noslashes^1 * -1))^1) * P(1) -- there must be a more efficient way
+
+local function pathpart(name,default)
+ return name and lpegmatch(pattern,name) or default or ""
+end
+
+local pattern = (noslashes^0 * slashes)^1 * C(noslashes^1) * -1
+
+local function basename(name)
+ return name and lpegmatch(pattern,name) or name
+end
+
+-- print(pathpart("file"))
+-- print(pathpart("dir/file"))
+-- print(pathpart("/dir/file"))
+-- print(basename("file"))
+-- print(basename("dir/file"))
+-- print(basename("/dir/file"))
+
+local pattern = (noslashes^0 * slashes^1)^0 * Cs((1-suffix)^1) * suffix^0
+
+local function nameonly(name)
+ return name and lpegmatch(pattern,name) or name
+end
+
+local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * C(noperiod^1) * -1
+
+local function suffixonly(name)
+ return name and lpegmatch(pattern,name) or ""
+end
+
+file.pathpart = pathpart
+file.basename = basename
+file.nameonly = nameonly
+file.suffixonly = suffixonly
+file.suffix = suffixonly
+
+file.dirname = pathpart -- obsolete
+file.extname = suffixonly -- obsolete
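+
+-- A usage sketch (editor's addition, not part of the module):
+--
+-- print(file.nameonly("dir/file.tex")) -- file
+-- print(file.suffixonly("dir/file.tex")) -- tex
+-- print(file.suffixonly("dir/file")) -- empty string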
+
+-- actually these are schemes
+
+local drive = C(R("az","AZ")) * colon
+local path = C((noslashes^0 * slashes)^0)
+local suffix = period * C(P(1-period)^0 * P(-1))
+local base = C((1-suffix)^0)
+local rest = C(P(1)^0)
+
+drive = drive + Cc("")
+path = path + Cc("")
+base = base + Cc("")
+suffix = suffix + Cc("")
+
+local pattern_a = drive * path * base * suffix
+local pattern_b = path * base * suffix
+local pattern_c = C(drive * path) * C(base * suffix) -- trick: two extra captures
+local pattern_d = path * rest
+
+function file.splitname(str,splitdrive)
+ if not str then
+ -- error
+ elseif splitdrive then
+ return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
+ else
+ return lpegmatch(pattern_b,str) -- returns path, base, suffix
+ end
+end
+
+function file.splitbase(str)
+ if str then
+ return lpegmatch(pattern_d,str) -- returns path, base+suffix (path has / appended, might change at some point)
+ else
+ return "", str -- assume no path
+ end
+end
+
+---- stripslash = C((1 - P("/")^1*P(-1))^0)
+
+function file.nametotable(str,splitdrive)
+ if str then
+ local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
+ -- if path ~= "" then
+ -- path = lpegmatch(stripslash,path) -- unfortunate hack, maybe this becomes default
+ -- end
+ if splitdrive then
+ return {
+ path = path,
+ drive = drive,
+ subpath = subpath,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ else
+ return {
+ path = path,
+ name = name,
+ base = base,
+ suffix = suffix,
+ }
+ end
+ end
+end
+
+-- print(file.splitname("file"))
+-- print(file.splitname("dir/file"))
+-- print(file.splitname("/dir/file"))
+-- print(file.splitname("file"))
+-- print(file.splitname("dir/file"))
+-- print(file.splitname("/dir/file"))
+
+-- inspect(file.nametotable("file.ext"))
+-- inspect(file.nametotable("dir/file.ext"))
+-- inspect(file.nametotable("/dir/file.ext"))
+-- inspect(file.nametotable("file.ext"))
+-- inspect(file.nametotable("dir/file.ext"))
+-- inspect(file.nametotable("/dir/file.ext"))
+
+----- pattern = Cs(((period * noperiod^1 * -1) / "" + 1)^1)
+local pattern = Cs(((period * (1-period-slashes)^1 * -1) / "" + 1)^1)
+
+function file.removesuffix(name)
+ return name and lpegmatch(pattern,name)
+end
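+
+-- A usage sketch (editor's addition, not part of the module); only the last
+-- suffix is stripped:
+--
+-- print(file.removesuffix("archive.tar.gz")) -- archive.tar
+-- print(file.removesuffix("doc/report")) -- doc/report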
+
+-- local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * Cp() * noperiod^1 * -1
+--
+-- function file.addsuffix(name, suffix)
+-- local p = lpegmatch(pattern,name)
+-- if p then
+-- return name
+-- else
+-- return name .. "." .. suffix
+-- end
+-- end
+
+local suffix = period/"" * (1-period-slashes)^1 * -1
+local pattern = Cs((noslashes^0 * slashes^1)^0 * ((1-suffix)^1)) * Cs(suffix)
+
+function file.addsuffix(filename,suffix,criterium)
+ if not filename or not suffix or suffix == "" then
+ return filename
+ elseif criterium == true then
+ return filename .. "." .. suffix
+ elseif not criterium then
+ local n, s = lpegmatch(pattern,filename)
+ if not s or s == "" then
+ return filename .. "." .. suffix
+ else
+ return filename
+ end
+ else
+ local n, s = lpegmatch(pattern,filename)
+ if s and s ~= "" then
+ local t = type(criterium)
+ if t == "table" then
+ -- keep if in criterium
+ for i=1,#criterium do
+ if s == criterium[i] then
+ return filename
+ end
+ end
+ elseif t == "string" then
+ -- keep if criterium
+ if s == criterium then
+ return filename
+ end
+ end
+ end
+ return (n or filename) .. "." .. suffix
+ end
+end
+
+-- print("1 " .. file.addsuffix("name","new") .. " -> name.new")
+-- print("2 " .. file.addsuffix("name.old","new") .. " -> name.old")
+-- print("3 " .. file.addsuffix("name.old","new",true) .. " -> name.old.new")
+-- print("4 " .. file.addsuffix("name.old","new","new") .. " -> name.new")
+-- print("5 " .. file.addsuffix("name.old","new","old") .. " -> name.old")
+-- print("6 " .. file.addsuffix("name.old","new","foo") .. " -> name.new")
+-- print("7 " .. file.addsuffix("name.old","new",{"foo","bar"}) .. " -> name.new")
+-- print("8 " .. file.addsuffix("name.old","new",{"old","bar"}) .. " -> name.old")
+
+local suffix = period * (1-period-slashes)^1 * -1
+local pattern = Cs((1-suffix)^0)
+
+function file.replacesuffix(name,suffix)
+ if name and suffix and suffix ~= "" then
+ return lpegmatch(pattern,name) .. "." .. suffix
+ else
+ return name
+ end
+end
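+
+-- A usage sketch (editor's addition, not part of the module); a suffix is added
+-- when there is none yet:
+--
+-- print(file.replacesuffix("report.txt","pdf")) -- report.pdf
+-- print(file.replacesuffix("report","pdf")) -- report.pdf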
+
+--
+
+local reslasher = lpeg.replacer(P("\\"),"/")
+
+function file.reslash(str)
+ return str and lpegmatch(reslasher,str)
+end
+
+-- We should be able to use:
+--
+-- local writable = P(1) * P("w") * Cc(true)
+--
+-- function file.is_writable(name)
+-- local a = attributes(name) or attributes(pathpart(name,"."))
+-- return a and lpegmatch(writable,a.permissions) or false
+-- end
+--
+-- But after some testing Taco and I came up with the more robust
+-- variant:
+
+function file.is_writable(name)
+ if not name then
+ -- error
+ elseif lfs.isdir(name) then
+ name = name .. "/m_t_x_t_e_s_t.tmp"
+ local f = io.open(name,"wb")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
+ end
+ elseif lfs.isfile(name) then
+ local f = io.open(name,"ab")
+ if f then
+ f:close()
+ return true
+ end
+ else
+ local f = io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
+ end
+ end
+ return false
+end
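+
+-- A usage sketch (editor's addition, not part of the module); note that the
+-- directory case really creates and removes a temporary file:
+--
+-- print(file.is_writable(".")) -- true when the current directory is writable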
+
+local readable = P("r") * Cc(true)
+
+function file.is_readable(name)
+ if name then
+ local a = attributes(name)
+ return a and lpegmatch(readable,a.permissions) or false
+ else
+ return false
+ end
+end
+
+file.isreadable = file.is_readable -- deprecated
+file.iswritable = file.is_writable -- deprecated
+
+function file.size(name)
+ if name then
+ local a = attributes(name)
+ return a and a.size or 0
+ else
+ return 0
+ end
+end
+
+function file.splitpath(str,separator) -- string .. reslash is a bonus (we could do a direct split)
+ return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator)
+end
+
+function file.joinpath(tab,separator) -- table
+ return tab and concat(tab,separator or io.pathseparator) -- can have trailing //
+end
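+
+-- A usage sketch (editor's addition, not part of the module); the default
+-- separator is io.pathseparator, so the outcome is platform dependent:
+--
+-- inspect(file.splitpath("/usr/bin:/usr/local/bin",":")) -- { "/usr/bin", "/usr/local/bin" }
+-- print(file.joinpath({ "/usr/bin", "/usr/local/bin" },":")) -- /usr/bin:/usr/local/bin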
+
+local stripper = Cs(P(fwslash)^0/"" * reslasher)
+local isnetwork = fwslash * fwslash * (1-fwslash) + (1-fwslash-colon)^1 * colon
+local isroot = fwslash^1 * -1
+local hasroot = fwslash^1
+
+local deslasher = lpeg.replacer(S("\\/")^1,"/")
+
+-- If we have a network or prefix then there is a chance that we end up with two
+-- // in the middle ... we could prevent this if we (1) expand prefixes: and (2)
+-- split and rebuild as url. Of course we could assume no network paths (which
+-- makes sense) and assume either mapped drives (windows) or mounts (unix) but
+-- then we still have to deal with urls ... anyhow, multiple // are never a real
+-- problem but just ugly.
+
+function file.join(...)
+ local lst = { ... }
+ local one = lst[1]
+ if lpegmatch(isnetwork,one) then
+ local two = lpegmatch(deslasher,concat(lst,"/",2))
+ return one .. "/" .. two
+ elseif lpegmatch(isroot,one) then
+ local two = lpegmatch(deslasher,concat(lst,"/",2))
+ if lpegmatch(hasroot,two) then
+ return two
+ else
+ return "/" .. two
+ end
+ elseif one == "" then
+ return lpegmatch(stripper,concat(lst,"/",2))
+ else
+ return lpegmatch(deslasher,concat(lst,"/"))
+ end
+end
+
+-- print(file.join("c:/whatever","name"))
+-- print(file.join("//","/y"))
+-- print(file.join("/","/y"))
+-- print(file.join("","/y"))
+-- print(file.join("/x/","/y"))
+-- print(file.join("x/","/y"))
+-- print(file.join("http://","/y"))
+-- print(file.join("http://a","/y"))
+-- print(file.join("http:///a","/y"))
+-- print(file.join("//nas-1","/y"))
+
+-- The previous one fails on "a.b/c" so Taco came up with a split based
+-- variant. After some skyping we got it sort of compatible with the old
+-- one. After that the anchoring to currentdir was added in a better way.
+-- Of course there are some optimizations too. Finally we had to deal with
+-- windows drive prefixes and things like sys://. Eventually gsubs and
+-- finds were replaced by lpegs.
+
+local drivespec = R("az","AZ")^1 * colon
+local anchors = fwslash + drivespec
+local untouched = periods + (1-period)^1 * P(-1)
+local splitstarter = (Cs(drivespec * (bwslash/"/" + fwslash)^0) + Cc(false)) * Ct(lpeg.splitat(S("/\\")^1))
+local absolute = fwslash
+
+function file.collapsepath(str,anchor) -- anchor: false|nil, true, "."
+ if not str then
+ return
+ end
+ if anchor == true and not lpegmatch(anchors,str) then
+ str = getcurrentdir() .. "/" .. str
+ end
+ if str == "" or str =="." then
+ return "."
+ elseif lpegmatch(untouched,str) then
+ return lpegmatch(reslasher,str)
+ end
+ local starter, oldelements = lpegmatch(splitstarter,str)
+ local newelements = { }
+ local i = #oldelements
+ while i > 0 do
+ local element = oldelements[i]
+ if element == '.' then
+ -- do nothing
+ elseif element == '..' then
+ local n = i - 1
+ while n > 0 do
+ local element = oldelements[n]
+ if element ~= '..' and element ~= '.' then
+ oldelements[n] = '.'
+ break
+ else
+ n = n - 1
+ end
+ end
+ if n < 1 then
+ insert(newelements,1,'..')
+ end
+ elseif element ~= "" then
+ insert(newelements,1,element)
+ end
+ i = i - 1
+ end
+ if #newelements == 0 then
+ return starter or "."
+ elseif starter then
+ return starter .. concat(newelements, '/')
+ elseif lpegmatch(absolute,str) then
+ return "/" .. concat(newelements,'/')
+ else
+ newelements = concat(newelements, '/')
+ if anchor == "." and find(str,"^%./") then
+ return "./" .. newelements
+ else
+ return newelements
+ end
+ end
+end
+
+-- local function test(str,...)
+-- print(string.format("%-20s %-15s %-30s %-20s",str,file.collapsepath(str),file.collapsepath(str,true),file.collapsepath(str,".")))
+-- end
+-- test("a/b.c/d") test("b.c/d") test("b.c/..")
+-- test("/") test("c:/..") test("sys://..")
+-- test("") test("./") test(".") test("..") test("./..") test("../..")
+-- test("a") test("./a") test("/a") test("a/../..")
+-- test("a/./b/..") test("a/aa/../b/bb") test("a/.././././b/..") test("a/./././b/..")
+-- test("a/b/c/../..") test("./a/b/c/../..") test("a/b/c/../..")
+-- test("./a")
+
+local validchars = R("az","09","AZ","--","..")
+local pattern_a = lpeg.replacer(1-validchars)
+local pattern_a = Cs((validchars + P(1)/"-")^1)
+local whatever = P("-")^0 / ""
+local pattern_b = Cs(whatever * (1 - whatever * -1)^1)
+
+function file.robustname(str,strict)
+ if str then
+ str = lpegmatch(pattern_a,str) or str
+ if strict then
+ return lpegmatch(pattern_b,str) or str -- two step is cleaner (less backtracking)
+ else
+ return str
+ end
+ end
+end
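+
+-- A usage sketch (editor's addition, not part of the module); unsupported
+-- characters become dashes and strict mode also strips leading and trailing dashes:
+--
+-- print(file.robustname("my file (v2).tex")) -- my-file--v2-.tex
+-- print(file.robustname("---test---",true)) -- test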
+
+file.readdata = io.loaddata
+file.savedata = io.savedata
+
+function file.copy(oldname,newname)
+ if oldname and newname then
+ local data = io.loaddata(oldname)
+ if data and data ~= "" then
+ file.savedata(newname,data)
+ end
+ end
+end
+
+-- also rewrite previous
+
+local letter = R("az","AZ") + S("_-+")
+local separator = P("://")
+
+local qualified = period^0 * fwslash
+ + letter * colon
+ + letter^1 * separator
+ + letter^1 * fwslash
+local rootbased = fwslash
+ + letter * colon
+
+lpeg.patterns.qualified = qualified
+lpeg.patterns.rootbased = rootbased
+
+-- ./name ../name /name c: :// name/name
+
+function file.is_qualified_path(filename)
+ return filename and lpegmatch(qualified,filename) ~= nil
+end
+
+function file.is_rootbased_path(filename)
+ return filename and lpegmatch(rootbased,filename) ~= nil
+end
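+
+-- A usage sketch (editor's addition, not part of the module):
+--
+-- print(file.is_qualified_path("./foo/bar.tex")) -- true
+-- print(file.is_qualified_path("foo.tex")) -- false
+-- print(file.is_rootbased_path("/etc/passwd")) -- true
+-- print(file.is_rootbased_path("foo/bar.tex")) -- false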
+
+-- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
+--
+-- test { "c:", "c:/aa", "c:/aa/bb", "c:/aa/bb/cc", "c:/aa/bb/cc.dd", "c:/aa/bb/cc.dd.ee" }
+-- test { "c:", "c:aa", "c:aa/bb", "c:aa/bb/cc", "c:aa/bb/cc.dd", "c:aa/bb/cc.dd.ee" }
+-- test { "/aa", "/aa/bb", "/aa/bb/cc", "/aa/bb/cc.dd", "/aa/bb/cc.dd.ee" }
+-- test { "aa", "aa/bb", "aa/bb/cc", "aa/bb/cc.dd", "aa/bb/cc.dd.ee" }
+
+-- -- maybe:
+--
+-- if os.type == "windows" then
+-- local currentdir = getcurrentdir
+-- function getcurrentdir()
+-- return lpegmatch(reslasher,currentdir())
+-- end
+-- end
+
+-- for myself:
+
+function file.strip(name,dir)
+ if name then
+ local b, a = match(name,"^(.-)" .. dir .. "(.*)$")
+ return a ~= "" and a or name
+ end
+end
+
+-- local debuglist = {
+-- "pathpart", "basename", "nameonly", "suffixonly", "suffix", "dirname", "extname",
+-- "addsuffix", "removesuffix", "replacesuffix", "join",
+-- "strip","collapsepath", "joinpath", "splitpath",
+-- }
+
+-- for i=1,#debuglist do
+-- local name = debuglist[i]
+-- local f = file[name]
+-- file[name] = function(...)
+-- print(name,f(...))
+-- return f(...)
+-- end
+-- end
+
+-- a goodie: a dumb version of mkdirs:
+
+function lfs.mkdirs(path)
+ local full
+ for sub in gmatch(path,"([^\\/]+)") do
+ if full then
+ full = full .. "/" .. sub
+ else
+ full = sub
+ end
+ if not lfs.isdir(full) then
+ lfs.mkdir(full)
+ end
+ end
+end
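+
+-- A usage sketch (editor's addition, not part of the module); intermediate levels
+-- are created as needed, relative to the current directory ("cache/fonts/data" is
+-- just a hypothetical path):
+--
+-- lfs.mkdirs("cache/fonts/data")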
diff --git a/tex/context/base/l-function.lua b/tex/context/base/l-function.lua
index 7ded8ceec..cdb1d3def 100644
--- a/tex/context/base/l-function.lua
+++ b/tex/context/base/l-function.lua
@@ -1,11 +1,11 @@
-if not modules then modules = { } end modules ['l-functions'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-functions = functions or { }
-
-function functions.dummy() end
+if not modules then modules = { } end modules ['l-functions'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+functions = functions or { }
+
+function functions.dummy() end
diff --git a/tex/context/base/l-io.lua b/tex/context/base/l-io.lua
index 06e1fb5ef..2ddfacaee 100644
--- a/tex/context/base/l-io.lua
+++ b/tex/context/base/l-io.lua
@@ -1,362 +1,362 @@
-if not modules then modules = { } end modules ['l-io'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local io = io
-local byte, find, gsub, format = string.byte, string.find, string.gsub, string.format
-local concat = table.concat
-local floor = math.floor
-local type = type
-
-if string.find(os.getenv("PATH"),";") then
- io.fileseparator, io.pathseparator = "\\", ";"
-else
- io.fileseparator, io.pathseparator = "/" , ":"
-end
-
-local function readall(f)
- return f:read("*all")
-end
-
--- The next one is up to 50% faster on large files and uses less memory due to
--- fewer intermediate large allocations. This phenomenon was discussed on the
--- luatex dev list.
-
-local function readall(f)
- local size = f:seek("end")
- if size == 0 then
- return ""
- elseif size < 1024*1024 then
- f:seek("set",0)
- return f:read('*all')
- else
- local done = f:seek("set",0)
- if size < 1024*1024 then
- step = 1024 * 1024
- elseif size > 16*1024*1024 then
- step = 16*1024*1024
- else
- step = floor(size/(1024*1024)) * 1024 * 1024 / 8
- end
- local data = { }
- while true do
- local r = f:read(step)
- if not r then
- return concat(data)
- else
- data[#data+1] = r
- end
- end
- end
-end
-
-io.readall = readall
-
-function io.loaddata(filename,textmode) -- return nil if empty
- local f = io.open(filename,(textmode and 'r') or 'rb')
- if f then
--- local data = f:read('*all')
- local data = readall(f)
- f:close()
- if #data > 0 then
- return data
- end
- end
-end
-
-function io.savedata(filename,data,joiner)
- local f = io.open(filename,"wb")
- if f then
- if type(data) == "table" then
- f:write(concat(data,joiner or ""))
- elseif type(data) == "function" then
- data(f)
- else
- f:write(data or "")
- end
- f:close()
- io.flush()
- return true
- else
- return false
- end
-end
-
--- we can also chunk this one if needed: io.lines(filename,chunksize,"*l")
-
-function io.loadlines(filename,n) -- return nil if empty
- local f = io.open(filename,'r')
- if not f then
- -- no file
- elseif n then
- local lines = { }
- for i=1,n do
- local line = f:read("*lines")
- if line then
- lines[#lines+1] = line
- else
- break
- end
- end
- f:close()
- lines = concat(lines,"\n")
- if #lines > 0 then
- return lines
- end
- else
- local line = f:read("*line") or ""
- f:close()
- if #line > 0 then
- return line
- end
- end
-end
-
-function io.loadchunk(filename,n)
- local f = io.open(filename,'rb')
- if f then
- local data = f:read(n or 1024)
- f:close()
- if #data > 0 then
- return data
- end
- end
-end
-
-function io.exists(filename)
- local f = io.open(filename)
- if f == nil then
- return false
- else
- f:close()
- return true
- end
-end
-
-function io.size(filename)
- local f = io.open(filename)
- if f == nil then
- return 0
- else
- local s = f:seek("end")
- f:close()
- return s
- end
-end
-
-function io.noflines(f)
- if type(f) == "string" then
- local f = io.open(f) -- f is the file name here
- if f then
- local n = f and io.noflines(f) or 0
- f:close()
- return n
- else
- return 0
- end
- else
- local n = 0
- for _ in f:lines() do
- n = n + 1
- end
- f:seek('set',0)
- return n
- end
-end
-
-local nextchar = {
- [ 4] = function(f)
- return f:read(1,1,1,1)
- end,
- [ 2] = function(f)
- return f:read(1,1)
- end,
- [ 1] = function(f)
- return f:read(1)
- end,
- [-2] = function(f)
- local a, b = f:read(1,1)
- return b, a
- end,
- [-4] = function(f)
- local a, b, c, d = f:read(1,1,1,1)
- return d, c, b, a
- end
-}
-
-function io.characters(f,n)
- if f then
- return nextchar[n or 1], f
- end
-end
-
-local nextbyte = {
- [4] = function(f)
- local a, b, c, d = f:read(1,1,1,1)
- if d then
- return byte(a), byte(b), byte(c), byte(d)
- end
- end,
- [3] = function(f)
- local a, b, c = f:read(1,1,1)
- if b then
- return byte(a), byte(b), byte(c)
- end
- end,
- [2] = function(f)
- local a, b = f:read(1,1)
- if b then
- return byte(a), byte(b)
- end
- end,
- [1] = function (f)
- local a = f:read(1)
- if a then
- return byte(a)
- end
- end,
- [-2] = function (f)
- local a, b = f:read(1,1)
- if b then
- return byte(b), byte(a)
- end
- end,
- [-3] = function(f)
- local a, b, c = f:read(1,1,1)
- if b then
- return byte(c), byte(b), byte(a)
- end
- end,
- [-4] = function(f)
- local a, b, c, d = f:read(1,1,1,1)
- if d then
- return byte(d), byte(c), byte(b), byte(a)
- end
- end
-}
-
-function io.bytes(f,n)
- if f then
- return nextbyte[n or 1], f
- else
- return nil, nil
- end
-end
-
-function io.ask(question,default,options)
- while true do
- io.write(question)
- if options then
- io.write(format(" [%s]",concat(options,"|")))
- end
- if default then
- io.write(format(" [%s]",default))
- end
- io.write(format(" "))
- io.flush()
- local answer = io.read()
- answer = gsub(answer,"^%s*(.*)%s*$","%1")
- if answer == "" and default then
- return default
- elseif not options then
- return answer
- else
- for k=1,#options do
- if options[k] == answer then
- return answer
- end
- end
- local pattern = "^" .. answer
- for k=1,#options do
- local v = options[k]
- if find(v,pattern) then
- return v
- end
- end
- end
- end
-end
-
-local function readnumber(f,n,m)
- if m then
- f:seek("set",n)
- n = m
- end
- if n == 1 then
- return byte(f:read(1))
- elseif n == 2 then
- local a, b = byte(f:read(2),1,2)
- return 256 * a + b
- elseif n == 3 then
- local a, b, c = byte(f:read(3),1,3)
- return 256*256 * a + 256 * b + c
- elseif n == 4 then
- local a, b, c, d = byte(f:read(4),1,4)
- return 256*256*256 * a + 256*256 * b + 256 * c + d
- elseif n == 8 then
- local a, b = readnumber(f,4), readnumber(f,4)
- return 256 * a + b
- elseif n == 12 then
- local a, b, c = readnumber(f,4), readnumber(f,4), readnumber(f,4)
- return 256*256 * a + 256 * b + c
- elseif n == -2 then
- local b, a = byte(f:read(2),1,2)
- return 256*a + b
- elseif n == -3 then
- local c, b, a = byte(f:read(3),1,3)
- return 256*256 * a + 256 * b + c
- elseif n == -4 then
- local d, c, b, a = byte(f:read(4),1,4)
- return 256*256*256 * a + 256*256 * b + 256*c + d
- elseif n == -8 then
- local h, g, f, e, d, c, b, a = byte(f:read(8),1,8)
- return 256*256*256*256*256*256*256 * a +
- 256*256*256*256*256*256 * b +
- 256*256*256*256*256 * c +
- 256*256*256*256 * d +
- 256*256*256 * e +
- 256*256 * f +
- 256 * g +
- h
- else
- return 0
- end
-end
-
-io.readnumber = readnumber
-
-function io.readstring(f,n,m)
- if m then
- f:seek("set",n)
- n = m
- end
- local str = gsub(f:read(n),"\000","")
- return str
-end
-
---
-
-if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
-if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
-
--- This works quite ok:
---
--- function io.piped(command,writer)
--- local pipe = io.popen(command)
--- -- for line in pipe:lines() do
--- -- print(line)
--- -- end
--- while true do
--- local line = pipe:read(1)
--- if not line then
--- break
--- elseif line ~= "\n" then
--- writer(line)
--- end
--- end
--- return pipe:close() -- ok, status, (error)code
--- end
+if not modules then modules = { } end modules ['l-io'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local io = io
+local byte, find, gsub, format = string.byte, string.find, string.gsub, string.format
+local concat = table.concat
+local floor = math.floor
+local type = type
+
+if string.find(os.getenv("PATH"),";") then
+ io.fileseparator, io.pathseparator = "\\", ";"
+else
+ io.fileseparator, io.pathseparator = "/" , ":"
+end
+
+local function readall(f)
+ return f:read("*all")
+end
+
+-- The next one is up to 50% faster on large files and uses less memory due to
+-- fewer intermediate large allocations. This phenomenon was discussed on the
+-- luatex dev list.
+
+local function readall(f)
+ local size = f:seek("end")
+ if size == 0 then
+ return ""
+ elseif size < 1024*1024 then
+ f:seek("set",0)
+ return f:read('*all')
+ else
+ local done = f:seek("set",0)
+ if size < 1024*1024 then
+ step = 1024 * 1024
+ elseif size > 16*1024*1024 then
+ step = 16*1024*1024
+ else
+ step = floor(size/(1024*1024)) * 1024 * 1024 / 8
+ end
+ local data = { }
+ while true do
+ local r = f:read(step)
+ if not r then
+ return concat(data)
+ else
+ data[#data+1] = r
+ end
+ end
+ end
+end
+
+io.readall = readall
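+
+-- A usage sketch (editor's addition, not part of the module); "somefile.bin" is a
+-- hypothetical file and the caller owns the handle:
+--
+-- local f = io.open("somefile.bin","rb")
+-- if f then
+--     local data = io.readall(f)
+--     f:close()
+-- end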
+
+function io.loaddata(filename,textmode) -- return nil if empty
+ local f = io.open(filename,(textmode and 'r') or 'rb')
+ if f then
+-- local data = f:read('*all')
+ local data = readall(f)
+ f:close()
+ if #data > 0 then
+ return data
+ end
+ end
+end
+
+function io.savedata(filename,data,joiner)
+ local f = io.open(filename,"wb")
+ if f then
+ if type(data) == "table" then
+ f:write(concat(data,joiner or ""))
+ elseif type(data) == "function" then
+ data(f)
+ else
+ f:write(data or "")
+ end
+ f:close()
+ io.flush()
+ return true
+ else
+ return false
+ end
+end
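+
+-- A usage sketch (editor's addition, not part of the module); "test.txt" is just a
+-- hypothetical file name:
+--
+-- io.savedata("test.txt",{ "one", "two" },"\n") -- writes the joined table
+-- print(io.loaddata("test.txt",true)) -- prints the two joined lines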
+
+-- we can also chunk this one if needed: io.lines(filename,chunksize,"*l")
+
+function io.loadlines(filename,n) -- return nil if empty
+ local f = io.open(filename,'r')
+ if not f then
+ -- no file
+ elseif n then
+ local lines = { }
+ for i=1,n do
+ local line = f:read("*lines")
+ if line then
+ lines[#lines+1] = line
+ else
+ break
+ end
+ end
+ f:close()
+ lines = concat(lines,"\n")
+ if #lines > 0 then
+ return lines
+ end
+ else
+ local line = f:read("*line") or ""
+ f:close()
+ if #line > 0 then
+ return line
+ end
+ end
+end
+
+function io.loadchunk(filename,n)
+ local f = io.open(filename,'rb')
+ if f then
+ local data = f:read(n or 1024)
+ f:close()
+ if #data > 0 then
+ return data
+ end
+ end
+end
+
+function io.exists(filename)
+ local f = io.open(filename)
+ if f == nil then
+ return false
+ else
+ f:close()
+ return true
+ end
+end
+
+function io.size(filename)
+ local f = io.open(filename)
+ if f == nil then
+ return 0
+ else
+ local s = f:seek("end")
+ f:close()
+ return s
+ end
+end
+
+function io.noflines(f)
+ if type(f) == "string" then
+ local f = io.open(f) -- f is the file name here
+ if f then
+ local n = f and io.noflines(f) or 0
+ f:close()
+ return n
+ else
+ return 0
+ end
+ else
+ local n = 0
+ for _ in f:lines() do
+ n = n + 1
+ end
+ f:seek('set',0)
+ return n
+ end
+end
+
+local nextchar = {
+ [ 4] = function(f)
+ return f:read(1,1,1,1)
+ end,
+ [ 2] = function(f)
+ return f:read(1,1)
+ end,
+ [ 1] = function(f)
+ return f:read(1)
+ end,
+ [-2] = function(f)
+ local a, b = f:read(1,1)
+ return b, a
+ end,
+ [-4] = function(f)
+ local a, b, c, d = f:read(1,1,1,1)
+ return d, c, b, a
+ end
+}
+
+function io.characters(f,n)
+ if f then
+ return nextchar[n or 1], f
+ end
+end
+
+local nextbyte = {
+ [4] = function(f)
+ local a, b, c, d = f:read(1,1,1,1)
+ if d then
+ return byte(a), byte(b), byte(c), byte(d)
+ end
+ end,
+ [3] = function(f)
+ local a, b, c = f:read(1,1,1)
+ if b then
+ return byte(a), byte(b), byte(c)
+ end
+ end,
+ [2] = function(f)
+ local a, b = f:read(1,1)
+ if b then
+ return byte(a), byte(b)
+ end
+ end,
+ [1] = function (f)
+ local a = f:read(1)
+ if a then
+ return byte(a)
+ end
+ end,
+ [-2] = function (f)
+ local a, b = f:read(1,1)
+ if b then
+ return byte(b), byte(a)
+ end
+ end,
+ [-3] = function(f)
+ local a, b, c = f:read(1,1,1)
+ if b then
+ return byte(c), byte(b), byte(a)
+ end
+ end,
+ [-4] = function(f)
+ local a, b, c, d = f:read(1,1,1,1)
+ if d then
+ return byte(d), byte(c), byte(b), byte(a)
+ end
+ end
+}
+
+function io.bytes(f,n)
+ if f then
+ return nextbyte[n or 1], f
+ else
+ return nil, nil
+ end
+end
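+
+-- A usage sketch (editor's addition, not part of the module); "data.bin" is a
+-- hypothetical file and a negative n reverses the byte order:
+--
+-- local f = io.open("data.bin","rb")
+-- if f then
+--     for b in io.bytes(f) do
+--         -- b is the next byte value (0..255)
+--     end
+--     f:close()
+-- end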
+
+function io.ask(question,default,options)
+ while true do
+ io.write(question)
+ if options then
+ io.write(format(" [%s]",concat(options,"|")))
+ end
+ if default then
+ io.write(format(" [%s]",default))
+ end
+ io.write(format(" "))
+ io.flush()
+ local answer = io.read()
+ answer = gsub(answer,"^%s*(.*)%s*$","%1")
+ if answer == "" and default then
+ return default
+ elseif not options then
+ return answer
+ else
+ for k=1,#options do
+ if options[k] == answer then
+ return answer
+ end
+ end
+ local pattern = "^" .. answer
+ for k=1,#options do
+ local v = options[k]
+ if find(v,pattern) then
+ return v
+ end
+ end
+ end
+ end
+end
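+
+-- A usage sketch (editor's addition, not part of the module); it prompts on the
+-- console and also accepts a prefix of one of the given options:
+--
+-- local answer = io.ask("overwrite?","no",{ "yes", "no" })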
+
+local function readnumber(f,n,m)
+ if m then
+ f:seek("set",n)
+ n = m
+ end
+ if n == 1 then
+ return byte(f:read(1))
+ elseif n == 2 then
+ local a, b = byte(f:read(2),1,2)
+ return 256 * a + b
+ elseif n == 3 then
+ local a, b, c = byte(f:read(3),1,3)
+ return 256*256 * a + 256 * b + c
+ elseif n == 4 then
+ local a, b, c, d = byte(f:read(4),1,4)
+ return 256*256*256 * a + 256*256 * b + 256 * c + d
+ elseif n == 8 then
+ local a, b = readnumber(f,4), readnumber(f,4)
+ return 256 * a + b
+ elseif n == 12 then
+ local a, b, c = readnumber(f,4), readnumber(f,4), readnumber(f,4)
+ return 256*256 * a + 256 * b + c
+ elseif n == -2 then
+ local b, a = byte(f:read(2),1,2)
+ return 256*a + b
+ elseif n == -3 then
+ local c, b, a = byte(f:read(3),1,3)
+ return 256*256 * a + 256 * b + c
+ elseif n == -4 then
+ local d, c, b, a = byte(f:read(4),1,4)
+ return 256*256*256 * a + 256*256 * b + 256*c + d
+ elseif n == -8 then
+ local h, g, f, e, d, c, b, a = byte(f:read(8),1,8)
+ return 256*256*256*256*256*256*256 * a +
+ 256*256*256*256*256*256 * b +
+ 256*256*256*256*256 * c +
+ 256*256*256*256 * d +
+ 256*256*256 * e +
+ 256*256 * f +
+ 256 * g +
+ h
+ else
+ return 0
+ end
+end
+
+io.readnumber = readnumber
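+
+-- A usage sketch (editor's addition, not part of the module); n bytes are read big
+-- endian (negative n means little endian) and an optional offset is applied first
+-- ("data.bin" is a hypothetical file):
+--
+-- local f = io.open("data.bin","rb")
+-- if f then
+--     print(io.readnumber(f,4)) -- the first four bytes as one number
+--     print(io.readnumber(f,16,2)) -- two bytes at offset 16
+--     f:close()
+-- end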
+
+function io.readstring(f,n,m)
+ if m then
+ f:seek("set",n)
+ n = m
+ end
+ local str = gsub(f:read(n),"\000","")
+ return str
+end
+
+--
+
+if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
+if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
+
+-- This works quite ok:
+--
+-- function io.piped(command,writer)
+-- local pipe = io.popen(command)
+-- -- for line in pipe:lines() do
+-- -- print(line)
+-- -- end
+-- while true do
+-- local line = pipe:read(1)
+-- if not line then
+-- break
+-- elseif line ~= "\n" then
+-- writer(line)
+-- end
+-- end
+-- return pipe:close() -- ok, status, (error)code
+-- end
diff --git a/tex/context/base/l-lpeg.lua b/tex/context/base/l-lpeg.lua
index 323c73b69..07926da86 100644
--- a/tex/context/base/l-lpeg.lua
+++ b/tex/context/base/l-lpeg.lua
@@ -1,852 +1,852 @@
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
-
--- move utf -> l-unicode
--- move string -> l-string or keep it here
-
-lpeg = require("lpeg")
-
--- tracing (only used when we encounter a problem in integration of lpeg in luatex)
-
--- some code will move to unicode and string
-
--- local lpmatch = lpeg.match
--- local lpprint = lpeg.print
--- local lpp = lpeg.P
--- local lpr = lpeg.R
--- local lps = lpeg.S
--- local lpc = lpeg.C
--- local lpb = lpeg.B
--- local lpv = lpeg.V
--- local lpcf = lpeg.Cf
--- local lpcb = lpeg.Cb
--- local lpcg = lpeg.Cg
--- local lpct = lpeg.Ct
--- local lpcs = lpeg.Cs
--- local lpcc = lpeg.Cc
--- local lpcmt = lpeg.Cmt
--- local lpcarg = lpeg.Carg
-
--- function lpeg.match(l,...) print("LPEG MATCH") lpprint(l) return lpmatch(l,...) end
-
--- function lpeg.P (l) local p = lpp (l) print("LPEG P =") lpprint(l) return p end
--- function lpeg.R (l) local p = lpr (l) print("LPEG R =") lpprint(l) return p end
--- function lpeg.S (l) local p = lps (l) print("LPEG S =") lpprint(l) return p end
--- function lpeg.C (l) local p = lpc (l) print("LPEG C =") lpprint(l) return p end
--- function lpeg.B (l) local p = lpb (l) print("LPEG B =") lpprint(l) return p end
--- function lpeg.V (l) local p = lpv (l) print("LPEG V =") lpprint(l) return p end
--- function lpeg.Cf (l) local p = lpcf (l) print("LPEG Cf =") lpprint(l) return p end
--- function lpeg.Cb (l) local p = lpcb (l) print("LPEG Cb =") lpprint(l) return p end
--- function lpeg.Cg (l) local p = lpcg (l) print("LPEG Cg =") lpprint(l) return p end
--- function lpeg.Ct (l) local p = lpct (l) print("LPEG Ct =") lpprint(l) return p end
--- function lpeg.Cs (l) local p = lpcs (l) print("LPEG Cs =") lpprint(l) return p end
--- function lpeg.Cc (l) local p = lpcc (l) print("LPEG Cc =") lpprint(l) return p end
--- function lpeg.Cmt (l) local p = lpcmt (l) print("LPEG Cmt =") lpprint(l) return p end
--- function lpeg.Carg (l) local p = lpcarg(l) print("LPEG Carg =") lpprint(l) return p end
-
-local type, next, tostring = type, next, tostring
-local byte, char, gmatch, format = string.byte, string.char, string.gmatch, string.format
------ mod, div = math.mod, math.div
-local floor = math.floor
-
-local P, R, S, V, Ct, C, Cs, Cc, Cp, Cmt = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Cp, lpeg.Cmt
-local lpegtype, lpegmatch, lpegprint = lpeg.type, lpeg.match, lpeg.print
-
--- let's start with an inspector:
-
-setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
-
--- Beware, we predefine a bunch of patterns here and one reason for doing so
--- is that we get consistent behaviour in some of the visualizers.
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-
-local anything = P(1)
-local endofstring = P(-1)
-local alwaysmatched = P(true)
-
-patterns.anything = anything
-patterns.endofstring = endofstring
-patterns.beginofstring = alwaysmatched
-patterns.alwaysmatched = alwaysmatched
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + S("\r\n") -- cr + lf
-local escaped = P("\\") * anything
-local squote = P("'")
-local dquote = P('"')
-local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
-local utfbom = utfbom_32_be + utfbom_32_le
- + utfbom_16_be + utfbom_16_le
- + utfbom_8
-local utftype = utfbom_32_be * Cc("utf-32-be") + utfbom_32_le * Cc("utf-32-le")
- + utfbom_16_be * Cc("utf-16-be") + utfbom_16_le * Cc("utf-16-le")
- + utfbom_8 * Cc("utf-8") + alwaysmatched * Cc("utf-8") -- assume utf8
-local utfoffset = utfbom_32_be * Cc(4) + utfbom_32_le * Cc(4)
- + utfbom_16_be * Cc(2) + utfbom_16_le * Cc(2)
- + utfbom_8 * Cc(3) + Cc(0)
-
-local utf8next = R("\128\191")
-
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8next
-patterns.utf8three = R("\224\239") * utf8next * utf8next
-patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
-patterns.utfbom = utfbom
-patterns.utftype = utftype
-patterns.utfoffset = utfoffset
-
-local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
-
-local utf8character = P(1) * R("\128\191")^0 -- unchecked but fast
-
-patterns.utf8 = utf8char
-patterns.utf8char = utf8char
-patterns.utf8character = utf8character -- this one can be used in most cases so we might use that one
-patterns.validutf8 = validutf8char
-patterns.validutf8char = validutf8char
-
-local eol = S("\n\r")
-local spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
-local whitespace = eol + spacer
-local nonspacer = 1 - spacer
-local nonwhitespace = 1 - whitespace
-
-patterns.eol = eol
-patterns.spacer = spacer
-patterns.whitespace = whitespace
-patterns.nonspacer = nonspacer
-patterns.nonwhitespace = nonwhitespace
-
-local stripper = spacer^0 * C((spacer^0 * nonspacer^1)^0) -- from example by roberto
-
------ collapser = Cs(spacer^0/"" * ((spacer^1 * P(-1) / "") + (spacer^1/" ") + P(1))^0)
-local collapser = Cs(spacer^0/"" * nonspacer^0 * ((spacer^0/" " * nonspacer^1)^0))
-
-patterns.stripper = stripper
-patterns.collapser = collapser
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.unsigned = digit^0 * P('.') * digit^1
-patterns.float = sign^0 * patterns.unsigned
-patterns.cunsigned = digit^0 * P(',') * digit^1
-patterns.cfloat = sign^0 * patterns.cunsigned
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = space
-patterns.tab = P("\t")
-patterns.spaceortab = patterns.space + patterns.tab
-patterns.newline = newline
-patterns.emptyline = newline^1
-patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * spacer^0
-patterns.period = P(".")
-patterns.colon = P(":")
-patterns.semicolon = P(";")
-patterns.underscore = P("_")
-patterns.escaped = escaped
-patterns.squote = squote
-patterns.dquote = dquote
-patterns.nosquote = (escaped + (1-squote))^0
-patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"") -- will change to C in the middle
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"") -- will change to C in the middle
-patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
-patterns.unspacer = ((patterns.spacer^1)/"")^0
-
-patterns.singlequoted = squote * patterns.nosquote * squote
-patterns.doublequoted = dquote * patterns.nodquote * dquote
-patterns.quoted = patterns.doublequoted + patterns.singlequoted
-
-patterns.propername = R("AZ","az","__") * R("09","AZ","az", "__")^0 * P(-1)
-
-patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
-patterns.beginline = #(1-newline)
-
-patterns.longtostring = Cs(whitespace^0/"" * nonwhitespace^0 * ((whitespace^0/" " * (patterns.quoted + nonwhitespace)^1)^0))
-
-local function anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) }
-end
-
-lpeg.anywhere = anywhere
-
-function lpeg.instringchecker(p)
- p = anywhere(p)
- return function(str)
- return lpegmatch(p,str) and true or false
- end
-end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
-end
-
-function lpeg.tsplitter(pattern, action)
- return Ct((((1-P(pattern))^1)/action+1)^0)
-end
-
--- problem: the separator can be an lpeg pattern and that does not hash too well, but
--- it's quite okay as the key is then not garbage collected
-
-local splitters_s, splitters_m, splitters_t = { }, { }, { }
-
-local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- local other = C((1 - separator)^0)
- if single then
- local any = anything
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
- else
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
- end
- return splitter
-end
-
-local function tsplitat(separator)
- local splitter = splitters_t[separator]
- if not splitter then
- splitter = Ct(splitat(separator))
- splitters_t[separator] = splitter
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-lpeg.tsplitat = tsplitat
-
-function string.splitup(str,separator)
- if not separator then
- separator = ","
- end
- return lpegmatch(splitters_m[separator] or splitat(separator),str)
-end
-
--- local p = splitat("->",false) print(lpegmatch(p,"oeps->what->more")) -- oeps what more
--- local p = splitat("->",true) print(lpegmatch(p,"oeps->what->more")) -- oeps what->more
--- local p = splitat("->",false) print(lpegmatch(p,"oeps")) -- oeps
--- local p = splitat("->",true) print(lpegmatch(p,"oeps")) -- oeps
-
-local cache = { }
-
-function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = tsplitat(separator)
- cache[separator] = c
- end
- return lpegmatch(c,str)
-end
-
-function string.split(str,separator)
- if separator then
- local c = cache[separator]
- if not c then
- c = tsplitat(separator)
- cache[separator] = c
- end
- return lpegmatch(c,str)
- else
- return { str }
- end
-end
-
-local spacing = patterns.spacer^0 * newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-patterns.textline = content
-
-local linesplitter = tsplitat(newline)
-
-patterns.linesplitter = linesplitter
-
-function string.splitlines(str)
- return lpegmatch(linesplitter,str)
-end
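-
--- a usage sketch (editor's addition, not part of the module):
---
--- inspect(string.splitlines("one\ntwo\nthree")) -- { "one", "two", "three" }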
-
--- lpeg.splitters = cache -- no longer public
-
-local cache = { }
-
-function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return lpegmatch(c,str)
-end
-
-function string.checkedsplit(str,separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return lpegmatch(c,str)
-end
-
--- from roberto's site:
-
-local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-patterns.utf8byte = utf8byte
-
---~ local str = " a b c d "
-
---~ local s = lpeg.stripper(lpeg.R("az")) print("["..lpegmatch(s,str).."]")
---~ local s = lpeg.keeper(lpeg.R("az")) print("["..lpegmatch(s,str).."]")
---~ local s = lpeg.stripper("ab") print("["..lpegmatch(s,str).."]")
---~ local s = lpeg.keeper("ab") print("["..lpegmatch(s,str).."]")
-
-local cache = { }
-
-function lpeg.stripper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs(((S(str)^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs(((str^1)/"" + 1)^0)
- end
-end
-
-local cache = { }
-
-function lpeg.keeper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs((((1-S(str))^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs((((1-str)^1)/"" + 1)^0)
- end
-end
-
-function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(anything^0)
-end
-
-function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * endofstring)^0)
-end
-
--- Just for fun I looked at the used bytecode and
--- p = (p and p + pp) or pp gets one more (testset).
-
--- todo: cache when string
-
-function lpeg.replacer(one,two,makefunction,isutf) -- in principle we should sort the keys
- local pattern
- local u = isutf and utf8char or 1
- if type(one) == "table" then
- local no = #one
- local p = P(false)
- if no == 0 then
- for k, v in next, one do
- p = p + P(k) / v
- end
- pattern = Cs((p + u)^0)
- elseif no == 1 then
- local o = one[1]
- one, two = P(o[1]), o[2]
- -- pattern = Cs(((1-one)^1 + one/two)^0)
- pattern = Cs((one/two + u)^0)
- else
- for i=1,no do
- local o = one[i]
- p = p + P(o[1]) / o[2]
- end
- pattern = Cs((p + u)^0)
- end
- else
- pattern = Cs((P(one)/(two or "") + u)^0)
- end
- if makefunction then
- return function(str)
- return lpegmatch(pattern,str)
- end
- else
- return pattern
- end
-end
-
-function lpeg.finder(lst,makefunction)
- local pattern
- if type(lst) == "table" then
- pattern = P(false)
- if #lst == 0 then
- for k, v in next, lst do
- pattern = pattern + P(k) -- ignore key, so we can use a replacer table
- end
- else
- for i=1,#lst do
- pattern = pattern + P(lst[i])
- end
- end
- else
- pattern = P(lst)
- end
- pattern = (1-pattern)^0 * pattern
- if makefunction then
- return function(str)
- return lpegmatch(pattern,str)
- end
- else
- return pattern
- end
-end
-
--- print(lpeg.match(lpeg.replacer("e","a"),"test test"))
--- print(lpeg.match(lpeg.replacer{{"e","a"}},"test test"))
--- print(lpeg.match(lpeg.replacer({ e = "a", t = "x" }),"test test"))
-
-local splitters_f, splitters_s = { }, { }
-
-function lpeg.firstofsplit(separator) -- always return value
- local splitter = splitters_f[separator]
- if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
- splitters_f[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.secondofsplit(separator) -- nil if not split
- local splitter = splitters_s[separator]
- if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
- splitters_s[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.balancer(left,right)
- left, right = P(left), P(right)
- return P { left * ((1 - left - right) + V(1))^0 * right }
-end
-
--- print(1,lpegmatch(lpeg.firstofsplit(":"),"bc:de"))
--- print(2,lpegmatch(lpeg.firstofsplit(":"),":de")) -- empty
--- print(3,lpegmatch(lpeg.firstofsplit(":"),"bc"))
--- print(4,lpegmatch(lpeg.secondofsplit(":"),"bc:de"))
--- print(5,lpegmatch(lpeg.secondofsplit(":"),"bc:")) -- empty
--- print(6,lpegmatch(lpeg.secondofsplit(":",""),"bc"))
--- print(7,lpegmatch(lpeg.secondofsplit(":"),"bc"))
--- print(9,lpegmatch(lpeg.secondofsplit(":","123"),"bc"))
-
--- -- slower:
---
--- function lpeg.counter(pattern)
--- local n, pattern = 0, (lpeg.P(pattern)/function() n = n + 1 end + lpeg.anything)^0
--- return function(str) n = 0 ; lpegmatch(pattern,str) ; return n end
--- end
-
-local nany = utf8char/""
-
-function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #lpegmatch(pattern,str)
- end
-end
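-
--- a usage sketch (editor's addition, not part of the module):
---
--- local count = lpeg.counter("ab")
--- print(count("ab ab b a")) -- 2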
-
--- utf extensions
-
-utf = utf or (unicode and unicode.utf8) or { }
-
-local utfcharacters = utf and utf.characters or string.utfcharacters
-local utfgmatch = utf and utf.gmatch
-local utfchar = utf and utf.char
-
-lpeg.UP = lpeg.P
-
-if utfcharacters then
-
- function lpeg.US(str)
- local p = P(false)
- for uc in utfcharacters(str) do
- p = p + P(uc)
- end
- return p
- end
-
-
-elseif utfgmatch then
-
- function lpeg.US(str)
- local p = P(false)
- for uc in utfgmatch(str,".") do
- p = p + P(uc)
- end
- return p
- end
-
-else
-
- function lpeg.US(str)
- local p = P(false)
- local f = function(uc)
- p = p + P(uc)
- end
- lpegmatch((utf8char/f)^0,str)
- return p
- end
-
-end
-
-local range = utf8byte * utf8byte + Cc(false) -- utf8byte is already a capture
-
-function lpeg.UR(str,more)
- local first, last
- if type(str) == "number" then
- first = str
- last = more or first
- else
- first, last = lpegmatch(range,str)
- if not last then
- return P(str)
- end
- end
- if first == last then
- return P(str)
- elseif utfchar and (last - first < 8) then -- a somewhat arbitrary criterium
- local p = P(false)
- for i=first,last do
- p = p + P(utfchar(i))
- end
- return p -- nil when invalid range
- else
- local f = function(b)
- return b >= first and b <= last
- end
- -- tricky, these nested captures
- return utf8byte / f -- nil when invalid range
- end
-end
-
--- print(lpeg.match(lpeg.Cs((C(lpeg.UR("αω"))/{ ["χ"] = "OEPS" })^0),"αωχαω"))
-
--- lpeg.print(lpeg.R("ab","cd","gh"))
--- lpeg.print(lpeg.P("a","b","c"))
--- lpeg.print(lpeg.S("a","b","c"))
-
--- print(lpeg.count("äáà a",lpeg.P("á") + lpeg.P("à ")))
--- print(lpeg.count("äáà a",lpeg.UP("áà ")))
--- print(lpeg.count("äáà a",lpeg.US("à á")))
--- print(lpeg.count("äáà a",lpeg.UR("aá")))
--- print(lpeg.count("äáà a",lpeg.UR("à á")))
--- print(lpeg.count("äáà a",lpeg.UR(0x0000,0xFFFF)))
-
-function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
-
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") -- assume proper order
- if type(list) ~= "table" then
- list = { list, ... }
- end
- -- table.sort(list) -- longest match first
- local p = P(list[1])
- for l=2,#list do
- p = p + P(list[l])
- end
- return p
-end
-
--- For the moment here, but it might move to utilities. Beware, we need to
--- have the longest keyword first, so 'aaa' comes beforte 'aa' which is why we
--- loop back from the end cq. prepend.
-
-local sort = table.sort
-
-local function copyindexed(old)
- local new = { }
- for i=1,#old do
- new[i] = old
- end
- return new
-end
-
-local function sortedkeys(tab)
- local keys, s = { }, 0
- for key,_ in next, tab do
- s = s + 1
- keys[s] = key
- end
- sort(keys)
- return keys
-end
-
-function lpeg.append(list,pp,delayed,checked)
- local p = pp
- if #list > 0 then
- local keys = copyindexed(list)
- sort(keys)
- for i=#keys,1,-1 do
- local k = keys[i]
- if p then
- p = P(k) + p
- else
- p = P(k)
- end
- end
- elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
- local keys = sortedkeys(list)
- if p then
- for i=1,#keys,1 do
- local k = keys[i]
- local v = list[k]
- p = P(k)/list + p
- end
- else
- for i=1,#keys do
- local k = keys[i]
- local v = list[k]
- if p then
- p = P(k) + p
- else
- p = P(k)
- end
- end
- if p then
- p = p / list
- end
- end
- elseif checked then
- -- problem: substitution gives a capture
- local keys = sortedkeys(list)
- for i=1,#keys do
- local k = keys[i]
- local v = list[k]
- if p then
- if k == v then
- p = P(k) + p
- else
- p = P(k)/v + p
- end
- else
- if k == v then
- p = P(k)
- else
- p = P(k)/v
- end
- end
- end
- else
- local keys = sortedkeys(list)
- for i=1,#keys do
- local k = keys[i]
- local v = list[k]
- if p then
- p = P(k)/v + p
- else
- p = P(k)/v
- end
- end
- end
- return p
-end
-
--- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
--- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
-
--- function lpeg.exact_match(words,case_insensitive)
--- local pattern = concat(words)
--- if case_insensitive then
--- local pattern = S(upper(characters)) + S(lower(characters))
--- local list = { }
--- for i=1,#words do
--- list[lower(words[i])] = true
--- end
--- return Cmt(pattern^1, function(_,i,s)
--- return list[lower(s)] and i
--- end)
--- else
--- local pattern = S(concat(words))
--- local list = { }
--- for i=1,#words do
--- list[words[i]] = true
--- end
--- return Cmt(pattern^1, function(_,i,s)
--- return list[s] and i
--- end)
--- end
--- end
-
--- experiment:
-
-local function make(t)
- local p
- local keys = sortedkeys(t)
- for i=1,#keys do
- local k = keys[i]
- local v = t[k]
- if not p then
- if next(v) then
- p = P(k) * make(v)
- else
- p = P(k)
- end
- else
- if next(v) then
- p = p + P(k) * make(v)
- else
- p = p + P(k)
- end
- end
- end
- return p
-end
-
-function lpeg.utfchartabletopattern(list) -- goes to util-lpg
- local tree = { }
- for i=1,#list do
- local t = tree
- for c in gmatch(list[i],".") do
- if not t[c] then
- t[c] = { }
- end
- t = t[c]
- end
- end
- return make(tree)
-end
-
--- inspect ( lpeg.utfchartabletopattern {
--- utfchar(0x00A0), -- nbsp
--- utfchar(0x2000), -- enquad
--- utfchar(0x2001), -- emquad
--- utfchar(0x2002), -- enspace
--- utfchar(0x2003), -- emspace
--- utfchar(0x2004), -- threeperemspace
--- utfchar(0x2005), -- fourperemspace
--- utfchar(0x2006), -- sixperemspace
--- utfchar(0x2007), -- figurespace
--- utfchar(0x2008), -- punctuationspace
--- utfchar(0x2009), -- breakablethinspace
--- utfchar(0x200A), -- hairspace
--- utfchar(0x200B), -- zerowidthspace
--- utfchar(0x202F), -- narrownobreakspace
--- utfchar(0x205F), -- math thinspace
--- } )
-
--- a few handy ones:
---
--- faster than find(str,"[\n\r]") when match and # > 7 and always faster when # > 3
-
-patterns.containseol = lpeg.finder(eol) -- (1-eol)^0 * eol
-
--- The next pattern^n variant is based on an approach suggested
--- by Roberto: constructing a big repetition in chunks.
---
--- Being sparse is not needed, and only complicate matters and
--- the number of redundant entries is not that large.
-
-local function nextstep(n,step,result)
- local m = n % step -- mod(n,step)
- local d = floor(n/step) -- div(n,step)
- if d > 0 then
- local v = V(tostring(step))
- local s = result.start
- for i=1,d do
- if s then
- s = v * s
- else
- s = v
- end
- end
- result.start = s
- end
- if step > 1 and result.start then
- local v = V(tostring(step/2))
- result[tostring(step)] = v * v
- end
- if step > 0 then
- return nextstep(m,step/2,result)
- else
- return result
- end
-end
-
-function lpeg.times(pattern,n)
- return P(nextstep(n,2^16,{ "start", ["1"] = pattern }))
-end
-
--- local p = lpeg.Cs((1 - lpeg.times(lpeg.P("AB"),25))^1)
--- local s = "12" .. string.rep("AB",20) .. "34" .. string.rep("AB",30) .. "56"
--- inspect(p)
--- print(lpeg.match(p,s))
-
--- moved here (before util-str)
-
-local digit = R("09")
-local period = P(".")
-local zero = P("0")
-local trailingzeros = zero^0 * -digit -- suggested by Roberto R
-local case_1 = period * trailingzeros / ""
-local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "")
-local number = digit^1 * (case_1 + case_2)
-local stripper = Cs((number + 1)^0)
-
-lpeg.patterns.stripzeros = stripper
-
--- local sample = "bla 11.00 bla 11 bla 0.1100 bla 1.00100 bla 0.00 bla 0.001 bla 1.1100 bla 0.100100100 bla 0.00100100100"
--- collectgarbage("collect")
--- str = string.rep(sample,10000)
--- local ts = os.clock()
--- lpegmatch(stripper,str)
--- print(#str, os.clock()-ts, lpegmatch(stripper,sample))
-
+if not modules then modules = { } end modules ['l-lpeg'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
+
+-- move utf -> l-unicode
+-- move string -> l-string or keep it here
+
+lpeg = require("lpeg")
+
+-- tracing (only used when we encounter a problem in integration of lpeg in luatex)
+
+-- some code will move to unicode and string
+
+-- local lpmatch = lpeg.match
+-- local lpprint = lpeg.print
+-- local lpp = lpeg.P
+-- local lpr = lpeg.R
+-- local lps = lpeg.S
+-- local lpc = lpeg.C
+-- local lpb = lpeg.B
+-- local lpv = lpeg.V
+-- local lpcf = lpeg.Cf
+-- local lpcb = lpeg.Cb
+-- local lpcg = lpeg.Cg
+-- local lpct = lpeg.Ct
+-- local lpcs = lpeg.Cs
+-- local lpcc = lpeg.Cc
+-- local lpcmt = lpeg.Cmt
+-- local lpcarg = lpeg.Carg
+
+-- function lpeg.match(l,...) print("LPEG MATCH") lpprint(l) return lpmatch(l,...) end
+
+-- function lpeg.P (l) local p = lpp (l) print("LPEG P =") lpprint(l) return p end
+-- function lpeg.R (l) local p = lpr (l) print("LPEG R =") lpprint(l) return p end
+-- function lpeg.S (l) local p = lps (l) print("LPEG S =") lpprint(l) return p end
+-- function lpeg.C (l) local p = lpc (l) print("LPEG C =") lpprint(l) return p end
+-- function lpeg.B (l) local p = lpb (l) print("LPEG B =") lpprint(l) return p end
+-- function lpeg.V (l) local p = lpv (l) print("LPEG V =") lpprint(l) return p end
+-- function lpeg.Cf (l) local p = lpcf (l) print("LPEG Cf =") lpprint(l) return p end
+-- function lpeg.Cb (l) local p = lpcb (l) print("LPEG Cb =") lpprint(l) return p end
+-- function lpeg.Cg (l) local p = lpcg (l) print("LPEG Cg =") lpprint(l) return p end
+-- function lpeg.Ct (l) local p = lpct (l) print("LPEG Ct =") lpprint(l) return p end
+-- function lpeg.Cs (l) local p = lpcs (l) print("LPEG Cs =") lpprint(l) return p end
+-- function lpeg.Cc (l) local p = lpcc (l) print("LPEG Cc =") lpprint(l) return p end
+-- function lpeg.Cmt (l) local p = lpcmt (l) print("LPEG Cmt =") lpprint(l) return p end
+-- function lpeg.Carg (l) local p = lpcarg(l) print("LPEG Carg =") lpprint(l) return p end
+
+local type, next, tostring = type, next, tostring
+local byte, char, gmatch, format = string.byte, string.char, string.gmatch, string.format
+----- mod, div = math.mod, math.div
+local floor = math.floor
+
+local P, R, S, V, Ct, C, Cs, Cc, Cp, Cmt = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Cp, lpeg.Cmt
+local lpegtype, lpegmatch, lpegprint = lpeg.type, lpeg.match, lpeg.print
+
+-- let's start with an inspector:
+
+setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
+
+-- Beware, we predefine a bunch of patterns here and one reason for doing so
+-- is that we get consistent behaviour in some of the visualizers.
+
+lpeg.patterns = lpeg.patterns or { } -- so that we can share
+local patterns = lpeg.patterns
+
+
+local anything = P(1)
+local endofstring = P(-1)
+local alwaysmatched = P(true)
+
+patterns.anything = anything
+patterns.endofstring = endofstring
+patterns.beginofstring = alwaysmatched
+patterns.alwaysmatched = alwaysmatched
+
+local digit, sign = R('09'), S('+-')
+local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
+local newline = crlf + S("\r\n") -- cr + lf
+local escaped = P("\\") * anything
+local squote = P("'")
+local dquote = P('"')
+local space = P(" ")
+
+local utfbom_32_be = P('\000\000\254\255')
+local utfbom_32_le = P('\255\254\000\000')
+local utfbom_16_be = P('\255\254')
+local utfbom_16_le = P('\254\255')
+local utfbom_8 = P('\239\187\191')
+local utfbom = utfbom_32_be + utfbom_32_le
+ + utfbom_16_be + utfbom_16_le
+ + utfbom_8
+local utftype = utfbom_32_be * Cc("utf-32-be") + utfbom_32_le * Cc("utf-32-le")
+ + utfbom_16_be * Cc("utf-16-be") + utfbom_16_le * Cc("utf-16-le")
+ + utfbom_8 * Cc("utf-8") + alwaysmatched * Cc("utf-8") -- assume utf8
+local utfoffset = utfbom_32_be * Cc(4) + utfbom_32_le * Cc(4)
+ + utfbom_16_be * Cc(2) + utfbom_16_le * Cc(2)
+ + utfbom_8 * Cc(3) + Cc(0)
+
+local utf8next = R("\128\191")
+
+patterns.utf8one = R("\000\127")
+patterns.utf8two = R("\194\223") * utf8next
+patterns.utf8three = R("\224\239") * utf8next * utf8next
+patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
+patterns.utfbom = utfbom
+patterns.utftype = utftype
+patterns.utfoffset = utfoffset
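+
+-- An added usage sketch (not in the original file): these patterns can be used
+-- to sniff the encoding and the byte offset of data read from file:
+--
+-- print(lpegmatch(patterns.utftype,"\239\187\191data"))   -- utf-8
+-- print(lpegmatch(patterns.utfoffset,"\239\187\191data")) -- 3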
+
+local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
+local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
+
+local utf8character = P(1) * R("\128\191")^0 -- unchecked but fast
+
+patterns.utf8 = utf8char
+patterns.utf8char = utf8char
+patterns.utf8character = utf8character -- this one can be used in most cases so we might use that one
+patterns.validutf8 = validutf8char
+patterns.validutf8char = validutf8char
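+
+-- For instance (an added illustration):
+--
+-- print(lpegmatch(patterns.validutf8,"åbc"))     -- true
+-- print(lpegmatch(patterns.validutf8,"\128abc")) -- false (stray continuation byte)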
+
+local eol = S("\n\r")
+local spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
+local whitespace = eol + spacer
+local nonspacer = 1 - spacer
+local nonwhitespace = 1 - whitespace
+
+patterns.eol = eol
+patterns.spacer = spacer
+patterns.whitespace = whitespace
+patterns.nonspacer = nonspacer
+patterns.nonwhitespace = nonwhitespace
+
+local stripper = spacer^0 * C((spacer^0 * nonspacer^1)^0) -- from example by roberto
+
+----- collapser = Cs(spacer^0/"" * ((spacer^1 * P(-1) / "") + (spacer^1/" ") + P(1))^0)
+local collapser = Cs(spacer^0/"" * nonspacer^0 * ((spacer^0/" " * nonspacer^1)^0))
+
+patterns.stripper = stripper
+patterns.collapser = collapser
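+
+-- A quick added illustration, in the bracketed style used further on:
+--
+-- print("[" .. lpegmatch(patterns.stripper,"  some   text  ") .. "]")  -- [some   text]
+-- print("[" .. lpegmatch(patterns.collapser,"  some   text  ") .. "]") -- [some text]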
+
+patterns.digit = digit
+patterns.sign = sign
+patterns.cardinal = sign^0 * digit^1
+patterns.integer = sign^0 * digit^1
+patterns.unsigned = digit^0 * P('.') * digit^1
+patterns.float = sign^0 * patterns.unsigned
+patterns.cunsigned = digit^0 * P(',') * digit^1
+patterns.cfloat = sign^0 * patterns.cunsigned
+patterns.number = patterns.float + patterns.integer
+patterns.cnumber = patterns.cfloat + patterns.integer
+patterns.oct = P("0") * R("07")^1
+patterns.octal = patterns.oct
+patterns.HEX = P("0x") * R("09","AF")^1
+patterns.hex = P("0x") * R("09","af")^1
+patterns.hexadecimal = P("0x") * R("09","AF","af")^1
+patterns.lowercase = R("az")
+patterns.uppercase = R("AZ")
+patterns.letter = patterns.lowercase + patterns.uppercase
+patterns.space = space
+patterns.tab = P("\t")
+patterns.spaceortab = patterns.space + patterns.tab
+patterns.newline = newline
+patterns.emptyline = newline^1
+patterns.equal = P("=")
+patterns.comma = P(",")
+patterns.commaspacer = P(",") * spacer^0
+patterns.period = P(".")
+patterns.colon = P(":")
+patterns.semicolon = P(";")
+patterns.underscore = P("_")
+patterns.escaped = escaped
+patterns.squote = squote
+patterns.dquote = dquote
+patterns.nosquote = (escaped + (1-squote))^0
+patterns.nodquote = (escaped + (1-dquote))^0
+patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"") -- will change to C in the middle
+patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"") -- will change to C in the middle
+patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
+patterns.unspacer = ((patterns.spacer^1)/"")^0
+
+patterns.singlequoted = squote * patterns.nosquote * squote
+patterns.doublequoted = dquote * patterns.nodquote * dquote
+patterns.quoted = patterns.doublequoted + patterns.singlequoted
+
+patterns.propername = R("AZ","az","__") * R("09","AZ","az", "__")^0 * P(-1)
+
+patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
+patterns.beginline = #(1-newline)
+
+patterns.longtostring = Cs(whitespace^0/"" * nonwhitespace^0 * ((whitespace^0/" " * (patterns.quoted + nonwhitespace)^1)^0))
+
+local function anywhere(pattern) -- slightly adapted from the lpeg documentation
+ return P { P(pattern) + 1 * V(1) }
+end
+
+lpeg.anywhere = anywhere
+
+function lpeg.instringchecker(p)
+ p = anywhere(p)
+ return function(str)
+ return lpegmatch(p,str) and true or false
+ end
+end
+
+function lpeg.splitter(pattern, action)
+ return (((1-P(pattern))^1)/action+1)^0
+end
+
+function lpeg.tsplitter(pattern, action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
+end
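+
+-- Added illustrations:
+--
+-- print(lpeg.instringchecker("foo")("barfoobaz"))              -- true
+-- inspect(lpegmatch(lpeg.tsplitter(",",string.upper),"a,b,c")) -- { "A", "B", "C" }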
+
+-- problem: the separator can be an lpeg pattern and such a pattern does not hash
+-- too well, but it's quite okay as the key is then not garbage collected
+
+local splitters_s, splitters_m, splitters_t = { }, { }, { }
+
+local function splitat(separator,single)
+ local splitter = (single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator = P(separator)
+ local other = C((1 - separator)^0)
+ if single then
+ local any = anything
+ splitter = other * (separator * C(any^0) + "") -- ?
+ splitters_s[separator] = splitter
+ else
+ splitter = other * (separator * other)^0
+ splitters_m[separator] = splitter
+ end
+ end
+ return splitter
+end
+
+local function tsplitat(separator)
+ local splitter = splitters_t[separator]
+ if not splitter then
+ splitter = Ct(splitat(separator))
+ splitters_t[separator] = splitter
+ end
+ return splitter
+end
+
+lpeg.splitat = splitat
+lpeg.tsplitat = tsplitat
+
+function string.splitup(str,separator)
+ if not separator then
+ separator = ","
+ end
+ return lpegmatch(splitters_m[separator] or splitat(separator),str)
+end
+
+-- local p = splitat("->",false) print(lpegmatch(p,"oeps->what->more")) -- oeps what more
+-- local p = splitat("->",true) print(lpegmatch(p,"oeps->what->more")) -- oeps what->more
+-- local p = splitat("->",false) print(lpegmatch(p,"oeps")) -- oeps
+-- local p = splitat("->",true) print(lpegmatch(p,"oeps")) -- oeps
+
+local cache = { }
+
+function lpeg.split(separator,str)
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return lpegmatch(c,str)
+end
+
+function string.split(str,separator)
+ if separator then
+ local c = cache[separator]
+ if not c then
+ c = tsplitat(separator)
+ cache[separator] = c
+ end
+ return lpegmatch(c,str)
+ else
+ return { str }
+ end
+end
+
+local spacing = patterns.spacer^0 * newline -- sort of strip
+local empty = spacing * Cc("")
+local nonempty = Cs((1-spacing)^1) * spacing^-1
+local content = (empty + nonempty)^1
+
+patterns.textline = content
+
+local linesplitter = tsplitat(newline)
+
+patterns.linesplitter = linesplitter
+
+function string.splitlines(str)
+ return lpegmatch(linesplitter,str)
+end
+
+-- lpeg.splitters = cache -- no longer public
+
+local cache = { }
+
+function lpeg.checkedsplit(separator,str)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return lpegmatch(c,str)
+end
+
+function string.checkedsplit(str,separator)
+ local c = cache[separator]
+ if not c then
+ separator = P(separator)
+ local other = C((1 - separator)^1)
+ c = Ct(separator^0 * other * (separator^1 * other)^0)
+ cache[separator] = c
+ end
+ return lpegmatch(c,str)
+end
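+
+-- The difference with the regular split is that empty strings are skipped; an
+-- added illustration:
+--
+-- inspect(lpeg.split(",","a,,b"))        -- { "a", "", "b" }
+-- inspect(lpeg.checkedsplit(",","a,,b")) -- { "a", "b" }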
+
+-- from roberto's site:
+
+local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
+local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
+local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
+
+local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
+
+patterns.utf8byte = utf8byte
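+
+-- An added check: the two byte sequence for à (U+00E0) indeed maps onto 224:
+--
+-- print(lpegmatch(patterns.utf8byte,"à")) -- 224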
+
+--~ local str = " a b c d "
+
+--~ local s = lpeg.stripper(lpeg.R("az")) print("["..lpegmatch(s,str).."]")
+--~ local s = lpeg.keeper(lpeg.R("az")) print("["..lpegmatch(s,str).."]")
+--~ local s = lpeg.stripper("ab") print("["..lpegmatch(s,str).."]")
+--~ local s = lpeg.keeper("ab") print("["..lpegmatch(s,str).."]")
+
+local cache = { }
+
+function lpeg.stripper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs(((S(str)^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs(((str^1)/"" + 1)^0)
+ end
+end
+
+local cache = { }
+
+function lpeg.keeper(str)
+ if type(str) == "string" then
+ local s = cache[str]
+ if not s then
+ s = Cs((((1-S(str))^1)/"" + 1)^0)
+ cache[str] = s
+ end
+ return s
+ else
+ return Cs((((1-str)^1)/"" + 1)^0)
+ end
+end
+
+function lpeg.frontstripper(str) -- or pattern (yet undocumented)
+ return (P(str) + P(true)) * Cs(anything^0)
+end
+
+function lpeg.endstripper(str) -- or pattern (yet undocumented)
+ return Cs((1 - P(str) * endofstring)^0)
+end
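+
+-- Two added illustrations:
+--
+-- print(lpegmatch(lpeg.frontstripper("--"),"--verbose"))  -- verbose
+-- print(lpegmatch(lpeg.endstripper(".lua"),"l-lpeg.lua")) -- l-lpeg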
+
+-- Just for fun I looked at the generated bytecode: the variant
+-- p = (p and p + pp) or pp needs one extra instruction (a testset).
+
+-- todo: cache when string
+
+function lpeg.replacer(one,two,makefunction,isutf) -- in principle we should sort the keys
+ local pattern
+ local u = isutf and utf8char or 1
+ if type(one) == "table" then
+ local no = #one
+ local p = P(false)
+ if no == 0 then
+ for k, v in next, one do
+ p = p + P(k) / v
+ end
+ pattern = Cs((p + u)^0)
+ elseif no == 1 then
+ local o = one[1]
+ one, two = P(o[1]), o[2]
+ -- pattern = Cs(((1-one)^1 + one/two)^0)
+ pattern = Cs((one/two + u)^0)
+ else
+ for i=1,no do
+ local o = one[i]
+ p = p + P(o[1]) / o[2]
+ end
+ pattern = Cs((p + u)^0)
+ end
+ else
+ pattern = Cs((P(one)/(two or "") + u)^0)
+ end
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
+
+function lpeg.finder(lst,makefunction)
+ local pattern
+ if type(lst) == "table" then
+ pattern = P(false)
+ if #lst == 0 then
+ for k, v in next, lst do
+ pattern = pattern + P(k) -- ignore key, so we can use a replacer table
+ end
+ else
+ for i=1,#lst do
+ pattern = pattern + P(lst[i])
+ end
+ end
+ else
+ pattern = P(lst)
+ end
+ pattern = (1-pattern)^0 * pattern
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
+
+-- print(lpeg.match(lpeg.replacer("e","a"),"test test"))
+-- print(lpeg.match(lpeg.replacer{{"e","a"}},"test test"))
+-- print(lpeg.match(lpeg.replacer({ e = "a", t = "x" }),"test test"))
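+
+-- And for the finder (added here): as with any lpeg match the return value is
+-- the position just after the first hit:
+--
+-- print(lpeg.match(lpeg.finder("ab"),"xxabxx"))     -- 5
+-- print(lpeg.finder({ "ab", "cd" },true)("xxcdxx")) -- 5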
+
+local splitters_f, splitters_s = { }, { }
+
+function lpeg.firstofsplit(separator) -- always return value
+ local splitter = splitters_f[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = C((1 - separator)^0)
+ splitters_f[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.secondofsplit(separator) -- nil if not split
+ local splitter = splitters_s[separator]
+ if not splitter then
+ separator = P(separator)
+ splitter = (1 - separator)^0 * separator * C(anything^0)
+ splitters_s[separator] = splitter
+ end
+ return splitter
+end
+
+function lpeg.balancer(left,right)
+ left, right = P(left), P(right)
+ return P { left * ((1 - left - right) + V(1))^0 * right }
+end
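+
+-- An added illustration: the balancer stops right after the balanced part:
+--
+-- print(lpegmatch(lpeg.balancer("(",")"),"(a(b)c)d")) -- 8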
+
+-- print(1,lpegmatch(lpeg.firstofsplit(":"),"bc:de"))
+-- print(2,lpegmatch(lpeg.firstofsplit(":"),":de")) -- empty
+-- print(3,lpegmatch(lpeg.firstofsplit(":"),"bc"))
+-- print(4,lpegmatch(lpeg.secondofsplit(":"),"bc:de"))
+-- print(5,lpegmatch(lpeg.secondofsplit(":"),"bc:")) -- empty
+-- print(6,lpegmatch(lpeg.secondofsplit(":",""),"bc"))
+-- print(7,lpegmatch(lpeg.secondofsplit(":"),"bc"))
+-- print(9,lpegmatch(lpeg.secondofsplit(":","123"),"bc"))
+
+-- -- slower:
+--
+-- function lpeg.counter(pattern)
+-- local n, pattern = 0, (lpeg.P(pattern)/function() n = n + 1 end + lpeg.anything)^0
+-- return function(str) n = 0 ; lpegmatch(pattern,str) ; return n end
+-- end
+
+local nany = utf8char/""
+
+function lpeg.counter(pattern)
+ pattern = Cs((P(pattern)/" " + nany)^0)
+ return function(str)
+ return #lpegmatch(pattern,str)
+ end
+end
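+
+-- For example (added):
+--
+-- print(lpeg.counter("ab")("zabzzabz")) -- 2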
+
+-- utf extensions
+
+utf = utf or (unicode and unicode.utf8) or { }
+
+local utfcharacters = utf and utf.characters or string.utfcharacters
+local utfgmatch = utf and utf.gmatch
+local utfchar = utf and utf.char
+
+lpeg.UP = lpeg.P
+
+if utfcharacters then
+
+ function lpeg.US(str)
+ local p = P(false)
+ for uc in utfcharacters(str) do
+ p = p + P(uc)
+ end
+ return p
+ end
+
+
+elseif utfgmatch then
+
+ function lpeg.US(str)
+ local p = P(false)
+ for uc in utfgmatch(str,".") do
+ p = p + P(uc)
+ end
+ return p
+ end
+
+else
+
+ function lpeg.US(str)
+ local p = P(false)
+ local f = function(uc)
+ p = p + P(uc)
+ end
+ lpegmatch((utf8char/f)^0,str)
+ return p
+ end
+
+end
+
+local range = utf8byte * utf8byte + Cc(false) -- utf8byte is already a capture
+
+function lpeg.UR(str,more)
+ local first, last
+ if type(str) == "number" then
+ first = str
+ last = more or first
+ else
+ first, last = lpegmatch(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first == last then
+ return P(str)
+    elseif utfchar and (last - first < 8) then -- a somewhat arbitrary criterion
+ local p = P(false)
+ for i=first,last do
+ p = p + P(utfchar(i))
+ end
+ return p -- nil when invalid range
+ else
+ local f = function(b)
+ return b >= first and b <= last
+ end
+ -- tricky, these nested captures
+ return utf8byte / f -- nil when invalid range
+ end
+end
+
+-- print(lpeg.match(lpeg.Cs((C(lpeg.UR("αω"))/{ ["χ"] = "OEPS" })^0),"αωχαω"))
+
+-- lpeg.print(lpeg.R("ab","cd","gh"))
+-- lpeg.print(lpeg.P("a","b","c"))
+-- lpeg.print(lpeg.S("a","b","c"))
+
+-- print(lpeg.count("äáà a",lpeg.P("á") + lpeg.P("à ")))
+-- print(lpeg.count("äáà a",lpeg.UP("áà ")))
+-- print(lpeg.count("äáà a",lpeg.US("à á")))
+-- print(lpeg.count("äáà a",lpeg.UR("aá")))
+-- print(lpeg.count("äáà a",lpeg.UR("à á")))
+-- print(lpeg.count("äáà a",lpeg.UR(0x0000,0xFFFF)))
+
+function lpeg.is_lpeg(p)
+ return p and lpegtype(p) == "pattern"
+end
+
+function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") -- assume proper order
+ if type(list) ~= "table" then
+ list = { list, ... }
+ end
+ -- table.sort(list) -- longest match first
+ local p = P(list[1])
+ for l=2,#list do
+ p = p + P(list[l])
+ end
+ return p
+end
+
+-- For the moment here, but it might move to utilities. Beware, we need to
+-- have the longest keyword first, so 'aaa' comes before 'aa', which is why we
+-- loop back from the end, that is, prepend.
+
+local sort = table.sort
+
+local function copyindexed(old)
+ local new = { }
+ for i=1,#old do
+        new[i] = old[i]
+ end
+ return new
+end
+
+local function sortedkeys(tab)
+ local keys, s = { }, 0
+ for key,_ in next, tab do
+ s = s + 1
+ keys[s] = key
+ end
+ sort(keys)
+ return keys
+end
+
+function lpeg.append(list,pp,delayed,checked)
+ local p = pp
+ if #list > 0 then
+ local keys = copyindexed(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k = keys[i]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
+ local keys = sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k = keys[i]
+ local v = list[k]
+ p = P(k)/list + p
+ end
+ else
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k) + p
+ else
+ p = P(k)
+ end
+ end
+ if p then
+ p = p / list
+ end
+ end
+ elseif checked then
+ -- problem: substitution gives a capture
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ if k == v then
+ p = P(k) + p
+ else
+ p = P(k)/v + p
+ end
+ else
+ if k == v then
+ p = P(k)
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ else
+ local keys = sortedkeys(list)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = list[k]
+ if p then
+ p = P(k)/v + p
+ else
+ p = P(k)/v
+ end
+ end
+ end
+ return p
+end
+
+-- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
+-- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
+
+-- function lpeg.exact_match(words,case_insensitive)
+-- local pattern = concat(words)
+-- if case_insensitive then
+-- local pattern = S(upper(characters)) + S(lower(characters))
+-- local list = { }
+-- for i=1,#words do
+-- list[lower(words[i])] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[lower(s)] and i
+-- end)
+-- else
+-- local pattern = S(concat(words))
+-- local list = { }
+-- for i=1,#words do
+-- list[words[i]] = true
+-- end
+-- return Cmt(pattern^1, function(_,i,s)
+-- return list[s] and i
+-- end)
+-- end
+-- end
+
+-- experiment:
+
+local function make(t)
+ local p
+ local keys = sortedkeys(t)
+ for i=1,#keys do
+ local k = keys[i]
+ local v = t[k]
+ if not p then
+ if next(v) then
+ p = P(k) * make(v)
+ else
+ p = P(k)
+ end
+ else
+ if next(v) then
+ p = p + P(k) * make(v)
+ else
+ p = p + P(k)
+ end
+ end
+ end
+ return p
+end
+
+function lpeg.utfchartabletopattern(list) -- goes to util-lpg
+ local tree = { }
+ for i=1,#list do
+ local t = tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c] = { }
+ end
+ t = t[c]
+ end
+ end
+ return make(tree)
+end
+
+-- inspect ( lpeg.utfchartabletopattern {
+-- utfchar(0x00A0), -- nbsp
+-- utfchar(0x2000), -- enquad
+-- utfchar(0x2001), -- emquad
+-- utfchar(0x2002), -- enspace
+-- utfchar(0x2003), -- emspace
+-- utfchar(0x2004), -- threeperemspace
+-- utfchar(0x2005), -- fourperemspace
+-- utfchar(0x2006), -- sixperemspace
+-- utfchar(0x2007), -- figurespace
+-- utfchar(0x2008), -- punctuationspace
+-- utfchar(0x2009), -- breakablethinspace
+-- utfchar(0x200A), -- hairspace
+-- utfchar(0x200B), -- zerowidthspace
+-- utfchar(0x202F), -- narrownobreakspace
+-- utfchar(0x205F), -- math thinspace
+-- } )
+
+-- a few handy ones:
+--
+-- faster than find(str,"[\n\r]") when there is a match and # > 7, and always faster when # > 3
+
+patterns.containseol = lpeg.finder(eol) -- (1-eol)^0 * eol
+
+-- The next pattern^n variant is based on an approach suggested
+-- by Roberto: constructing a big repetition in chunks.
+--
+-- Being sparse is not needed and only complicates matters, and
+-- the number of redundant entries is not that large.
+
+local function nextstep(n,step,result)
+ local m = n % step -- mod(n,step)
+ local d = floor(n/step) -- div(n,step)
+ if d > 0 then
+ local v = V(tostring(step))
+ local s = result.start
+ for i=1,d do
+ if s then
+ s = v * s
+ else
+ s = v
+ end
+ end
+ result.start = s
+ end
+ if step > 1 and result.start then
+ local v = V(tostring(step/2))
+ result[tostring(step)] = v * v
+ end
+ if step > 0 then
+ return nextstep(m,step/2,result)
+ else
+ return result
+ end
+end
+
+function lpeg.times(pattern,n)
+ return P(nextstep(n,2^16,{ "start", ["1"] = pattern }))
+end
+
+-- local p = lpeg.Cs((1 - lpeg.times(lpeg.P("AB"),25))^1)
+-- local s = "12" .. string.rep("AB",20) .. "34" .. string.rep("AB",30) .. "56"
+-- inspect(p)
+-- print(lpeg.match(p,s))
+
+-- moved here (before util-str)
+
+local digit = R("09")
+local period = P(".")
+local zero = P("0")
+local trailingzeros = zero^0 * -digit -- suggested by Roberto R
+local case_1 = period * trailingzeros / ""
+local case_2 = period * (digit - trailingzeros)^1 * (trailingzeros / "")
+local number = digit^1 * (case_1 + case_2)
+local stripper = Cs((number + 1)^0)
+
+lpeg.patterns.stripzeros = stripper
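+
+-- For example (added):
+--
+-- print(lpegmatch(lpeg.patterns.stripzeros,"1.200 and 0.50")) -- 1.2 and 0.5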
+
+-- local sample = "bla 11.00 bla 11 bla 0.1100 bla 1.00100 bla 0.00 bla 0.001 bla 1.1100 bla 0.100100100 bla 0.00100100100"
+-- collectgarbage("collect")
+-- str = string.rep(sample,10000)
+-- local ts = os.clock()
+-- lpegmatch(stripper,str)
+-- print(#str, os.clock()-ts, lpegmatch(stripper,sample))
+
diff --git a/tex/context/base/l-lua.lua b/tex/context/base/l-lua.lua
index fc05afa67..486c14a5f 100644
--- a/tex/context/base/l-lua.lua
+++ b/tex/context/base/l-lua.lua
@@ -1,150 +1,150 @@
-if not modules then modules = { } end modules ['l-lua'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- compatibility hacksand helpers
-
-local major, minor = string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$")
-
-_MAJORVERSION = tonumber(major) or 5
-_MINORVERSION = tonumber(minor) or 1
-_LUAVERSION = _MAJORVERSION + _MINORVERSION/10
-
--- lpeg
-
-if not lpeg then
- lpeg = require("lpeg")
-end
-
--- basics:
-
-if loadstring then
-
- local loadnormal = load
-
- function load(first,...)
- if type(first) == "string" then
- return loadstring(first,...)
- else
- return loadnormal(first,...)
- end
- end
-
-else
-
- loadstring = load
-
-end
-
--- table:
-
--- At some point it was announced that i[pairs would be dropped, which makes
--- sense. As we already used the for loop and # in most places the impact on
--- ConTeXt was not that large; the remaining ipairs already have been replaced.
--- Hm, actually ipairs was retained, but we no longer use it anyway (nor
--- pairs).
---
--- Just in case, we provide the fallbacks as discussed in Programming
--- in Lua (http://www.lua.org/pil/7.3.html):
-
-if not ipairs then
-
- -- for k, v in ipairs(t) do ... end
- -- for k=1,#t do local v = t[k] ... end
-
- local function iterate(a,i)
- i = i + 1
- local v = a[i]
- if v ~= nil then
- return i, v --, nil
- end
- end
-
- function ipairs(a)
- return iterate, a, 0
- end
-
-end
-
-if not pairs then
-
- -- for k, v in pairs(t) do ... end
- -- for k, v in next, t do ... end
-
- function pairs(t)
- return next, t -- , nil
- end
-
-end
-
--- The unpack function has been moved to the table table, and for compatiility
--- reasons we provide both now.
-
-if not table.unpack then
-
- table.unpack = _G.unpack
-
-elseif not unpack then
-
- _G.unpack = table.unpack
-
-end
-
--- package:
-
--- if not package.seachers then
---
--- package.searchers = package.loaders -- 5.2
---
--- elseif not package.loaders then
---
--- package.loaders = package.searchers
---
--- end
-
-if not package.loaders then -- brr, searchers is a special "loadlib function" userdata type
-
- package.loaders = package.searchers
-
-end
-
--- moved from util-deb to here:
-
-local print, select, tostring = print, select, tostring
-
-local inspectors = { }
-
-function setinspector(inspector) -- global function
- inspectors[#inspectors+1] = inspector
-end
-
-function inspect(...) -- global function
- for s=1,select("#",...) do
- local value = select(s,...)
- local done = false
- for i=1,#inspectors do
- done = inspectors[i](value)
- if done then
- break
- end
- end
- if not done then
- print(tostring(value))
- end
- end
-end
-
---
-
-local dummy = function() end
-
-function optionalrequire(...)
- local ok, result = xpcall(require,dummy,...)
- if ok then
- return result
- end
-end
+if not modules then modules = { } end modules ['l-lua'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- compatibility hacks and helpers
+
+local major, minor = string.match(_VERSION,"^[^%d]+(%d+)%.(%d+).*$")
+
+_MAJORVERSION = tonumber(major) or 5
+_MINORVERSION = tonumber(minor) or 1
+_LUAVERSION = _MAJORVERSION + _MINORVERSION/10
+
+-- lpeg
+
+if not lpeg then
+ lpeg = require("lpeg")
+end
+
+-- basics:
+
+if loadstring then
+
+ local loadnormal = load
+
+ function load(first,...)
+ if type(first) == "string" then
+ return loadstring(first,...)
+ else
+ return loadnormal(first,...)
+ end
+ end
+
+else
+
+ loadstring = load
+
+end
+
+-- table:
+
+-- At some point it was announced that ipairs would be dropped, which makes
+-- sense. As we already used the for loop and # in most places the impact on
+-- ConTeXt was not that large; the remaining ipairs already have been replaced.
+-- Hm, actually ipairs was retained, but we no longer use it anyway (nor
+-- pairs).
+--
+-- Just in case, we provide the fallbacks as discussed in Programming
+-- in Lua (http://www.lua.org/pil/7.3.html):
+
+if not ipairs then
+
+ -- for k, v in ipairs(t) do ... end
+ -- for k=1,#t do local v = t[k] ... end
+
+ local function iterate(a,i)
+ i = i + 1
+ local v = a[i]
+ if v ~= nil then
+ return i, v --, nil
+ end
+ end
+
+ function ipairs(a)
+ return iterate, a, 0
+ end
+
+end
+
+if not pairs then
+
+ -- for k, v in pairs(t) do ... end
+ -- for k, v in next, t do ... end
+
+ function pairs(t)
+ return next, t -- , nil
+ end
+
+end
+
+-- The unpack function has been moved to the table table, and for compatibility
+-- reasons we provide both now.
+
+if not table.unpack then
+
+ table.unpack = _G.unpack
+
+elseif not unpack then
+
+ _G.unpack = table.unpack
+
+end
+
+-- package:
+
+-- if not package.seachers then
+--
+-- package.searchers = package.loaders -- 5.2
+--
+-- elseif not package.loaders then
+--
+-- package.loaders = package.searchers
+--
+-- end
+
+if not package.loaders then -- brr, searchers is a special "loadlib function" userdata type
+
+ package.loaders = package.searchers
+
+end
+
+-- moved from util-deb to here:
+
+local print, select, tostring = print, select, tostring
+
+local inspectors = { }
+
+function setinspector(inspector) -- global function
+ inspectors[#inspectors+1] = inspector
+end
+
+function inspect(...) -- global function
+ for s=1,select("#",...) do
+ local value = select(s,...)
+ local done = false
+ for i=1,#inspectors do
+ done = inspectors[i](value)
+ if done then
+ break
+ end
+ end
+ if not done then
+ print(tostring(value))
+ end
+ end
+end
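+
+-- A usage sketch (added; the table inspector below is just an example):
+--
+-- setinspector(function(v)
+--     if type(v) == "table" then
+--         print("table with " .. #v .. " indexed entries")
+--         return true
+--     end
+-- end)
+--
+-- inspect({ 1, 2, 3 })   -- table with 3 indexed entries
+-- inspect("just a text") -- just a text (the print fallback)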
+
+--
+
+local dummy = function() end
+
+function optionalrequire(...)
+ local ok, result = xpcall(require,dummy,...)
+ if ok then
+ return result
+ end
+end
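+
+-- An added example: absent libraries can be probed without raising an error:
+--
+-- local zlib = optionalrequire("zlib") -- nil when the module cannot be loaded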
diff --git a/tex/context/base/l-math.lua b/tex/context/base/l-math.lua
index 43f60b56b..fb6bbbf5d 100644
--- a/tex/context/base/l-math.lua
+++ b/tex/context/base/l-math.lua
@@ -1,34 +1,34 @@
-if not modules then modules = { } end modules ['l-math'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local floor, sin, cos, tan = math.floor, math.sin, math.cos, math.tan
-
-if not math.round then
- function math.round(x) return floor(x + 0.5) end
-end
-
-if not math.div then
- function math.div(n,m) return floor(n/m) end
-end
-
-if not math.mod then
- function math.mod(n,m) return n % m end
-end
-
-local pipi = 2*math.pi/360
-
-if not math.sind then
- function math.sind(d) return sin(d*pipi) end
- function math.cosd(d) return cos(d*pipi) end
- function math.tand(d) return tan(d*pipi) end
-end
-
-if not math.odd then
- function math.odd (n) return n % 2 ~= 0 end
- function math.even(n) return n % 2 == 0 end
-end
+if not modules then modules = { } end modules ['l-math'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local floor, sin, cos, tan = math.floor, math.sin, math.cos, math.tan
+
+if not math.round then
+ function math.round(x) return floor(x + 0.5) end
+end
+
+if not math.div then
+ function math.div(n,m) return floor(n/m) end
+end
+
+if not math.mod then
+ function math.mod(n,m) return n % m end
+end
+
+local pipi = 2*math.pi/360
+
+if not math.sind then
+ function math.sind(d) return sin(d*pipi) end
+ function math.cosd(d) return cos(d*pipi) end
+ function math.tand(d) return tan(d*pipi) end
+end
+
+if not math.odd then
+ function math.odd (n) return n % 2 ~= 0 end
+ function math.even(n) return n % 2 == 0 end
+end
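+
+-- A few added checks (the exact floating point output can differ slightly):
+--
+-- print(math.round(2.5), math.div(7,2), math.mod(7,2)) -- 3  3  1
+-- print(math.sind(90), math.odd(3), math.even(3))      -- 1  true  false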
diff --git a/tex/context/base/l-md5.lua b/tex/context/base/l-md5.lua
index 8ac20a5a5..731dc3fbe 100644
--- a/tex/context/base/l-md5.lua
+++ b/tex/context/base/l-md5.lua
@@ -1,117 +1,117 @@
-if not modules then modules = { } end modules ['l-md5'] = {
- version = 1.001,
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This also provides file checksums and checkers.
-
-if not md5 then
- md5 = optionalrequire("md5")
-end
-
-if not md5 then
- md5 = {
- sum = function(str) print("error: md5 is not loaded (sum ignored)") return str end,
- sumhexa = function(str) print("error: md5 is not loaded (sumhexa ignored)") return str end,
- }
-end
-
-local md5, file = md5, file
-local gsub, format, byte = string.gsub, string.format, string.byte
-local md5sum = md5.sum
-
-local function convert(str,fmt)
- return (gsub(md5sum(str),".",function(chr) return format(fmt,byte(chr)) end))
-end
-
-if not md5.HEX then function md5.HEX(str) return convert(str,"%02X") end end
-if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end
-if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end
-
--- local P, Cs, lpegmatch = lpeg.P, lpeg.Cs,lpeg.match
---
--- if not md5.HEX then
--- local function remap(chr) return format("%02X",byte(chr)) end
--- function md5.HEX(str) return (gsub(md5.sum(str),".",remap)) end
--- end
---
--- if not md5.hex then
--- local function remap(chr) return format("%02x",byte(chr)) end
--- function md5.hex(str) return (gsub(md5.sum(str),".",remap)) end
--- end
---
--- if not md5.dec then
--- local function remap(chr) return format("%03i",byte(chr)) end
--- function md5.dec(str) return (gsub(md5.sum(str),".",remap)) end
--- end
-
--- if not md5.HEX then
--- local pattern_HEX = Cs( ( P(1) / function(chr) return format("%02X",byte(chr)) end)^0 )
--- function md5.HEX(str) return lpegmatch(pattern_HEX,md5.sum(str)) end
--- end
---
--- if not md5.hex then
--- local pattern_hex = Cs( ( P(1) / function(chr) return format("%02x",byte(chr)) end)^0 )
--- function md5.hex(str) return lpegmatch(pattern_hex,md5.sum(str)) end
--- end
---
--- if not md5.dec then
--- local pattern_dec = Cs( ( P(1) / function(chr) return format("%02i",byte(chr)) end)^0 )
--- function md5.dec(str) return lpegmatch(pattern_dec,md5.sum(str)) end
--- end
-
-function file.needsupdating(oldname,newname,threshold) -- size modification access change
- local oldtime = lfs.attributes(oldname,"modification")
- if oldtime then
- local newtime = lfs.attributes(newname,"modification")
- if not newtime then
- return true -- no new file, so no updating needed
- elseif newtime >= oldtime then
- return false -- new file definitely needs updating
- elseif oldtime - newtime < (threshold or 1) then
- return false -- new file is probably still okay
- else
- return true -- new file has to be updated
- end
- else
- return false -- no old file, so no updating needed
- end
-end
-
-file.needs_updating = file.needsupdating
-
-function file.syncmtimes(oldname,newname)
- local oldtime = lfs.attributes(oldname,"modification")
- if oldtime and lfs.isfile(newname) then
- lfs.touch(newname,oldtime,oldtime)
- end
-end
-
-function file.checksum(name)
- if md5 then
- local data = io.loaddata(name)
- if data then
- return md5.HEX(data)
- end
- end
- return nil
-end
-
-function file.loadchecksum(name)
- if md5 then
- local data = io.loaddata(name .. ".md5")
- return data and (gsub(data,"%s",""))
- end
- return nil
-end
-
-function file.savechecksum(name,checksum)
- if not checksum then checksum = file.checksum(name) end
- if checksum then
- io.savedata(name .. ".md5",checksum)
- return checksum
- end
- return nil
-end
+if not modules then modules = { } end modules ['l-md5'] = {
+ version = 1.001,
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This also provides file checksums and checkers.
+
+if not md5 then
+ md5 = optionalrequire("md5")
+end
+
+if not md5 then
+ md5 = {
+ sum = function(str) print("error: md5 is not loaded (sum ignored)") return str end,
+ sumhexa = function(str) print("error: md5 is not loaded (sumhexa ignored)") return str end,
+ }
+end
+
+local md5, file = md5, file
+local gsub, format, byte = string.gsub, string.format, string.byte
+local md5sum = md5.sum
+
+local function convert(str,fmt)
+ return (gsub(md5sum(str),".",function(chr) return format(fmt,byte(chr)) end))
+end
+
+if not md5.HEX then function md5.HEX(str) return convert(str,"%02X") end end
+if not md5.hex then function md5.hex(str) return convert(str,"%02x") end end
+if not md5.dec then function md5.dec(str) return convert(str,"%03i") end end
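+
+-- For example (added; "test" has a well known md5 sum):
+--
+-- print(md5.HEX("test")) -- 098F6BCD4621D373CADE4E832627B4F6
+-- print(md5.hex("test")) -- 098f6bcd4621d373cade4e832627b4f6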
+
+-- local P, Cs, lpegmatch = lpeg.P, lpeg.Cs,lpeg.match
+--
+-- if not md5.HEX then
+-- local function remap(chr) return format("%02X",byte(chr)) end
+-- function md5.HEX(str) return (gsub(md5.sum(str),".",remap)) end
+-- end
+--
+-- if not md5.hex then
+-- local function remap(chr) return format("%02x",byte(chr)) end
+-- function md5.hex(str) return (gsub(md5.sum(str),".",remap)) end
+-- end
+--
+-- if not md5.dec then
+-- local function remap(chr) return format("%03i",byte(chr)) end
+-- function md5.dec(str) return (gsub(md5.sum(str),".",remap)) end
+-- end
+
+-- if not md5.HEX then
+-- local pattern_HEX = Cs( ( P(1) / function(chr) return format("%02X",byte(chr)) end)^0 )
+-- function md5.HEX(str) return lpegmatch(pattern_HEX,md5.sum(str)) end
+-- end
+--
+-- if not md5.hex then
+-- local pattern_hex = Cs( ( P(1) / function(chr) return format("%02x",byte(chr)) end)^0 )
+-- function md5.hex(str) return lpegmatch(pattern_hex,md5.sum(str)) end
+-- end
+--
+-- if not md5.dec then
+-- local pattern_dec = Cs( ( P(1) / function(chr) return format("%02i",byte(chr)) end)^0 )
+-- function md5.dec(str) return lpegmatch(pattern_dec,md5.sum(str)) end
+-- end
+
+function file.needsupdating(oldname,newname,threshold) -- size modification access change
+ local oldtime = lfs.attributes(oldname,"modification")
+ if oldtime then
+ local newtime = lfs.attributes(newname,"modification")
+ if not newtime then
+            return true -- no new file yet, so it has to be (re)made
+        elseif newtime >= oldtime then
+            return false -- the new file is at least as recent as the old one, so no updating needed
+ elseif oldtime - newtime < (threshold or 1) then
+ return false -- new file is probably still okay
+ else
+ return true -- new file has to be updated
+ end
+ else
+ return false -- no old file, so no updating needed
+ end
+end
+
+file.needs_updating = file.needsupdating
+
+function file.syncmtimes(oldname,newname)
+ local oldtime = lfs.attributes(oldname,"modification")
+ if oldtime and lfs.isfile(newname) then
+ lfs.touch(newname,oldtime,oldtime)
+ end
+end
+
+function file.checksum(name)
+ if md5 then
+ local data = io.loaddata(name)
+ if data then
+ return md5.HEX(data)
+ end
+ end
+ return nil
+end
+
+function file.loadchecksum(name)
+ if md5 then
+ local data = io.loaddata(name .. ".md5")
+ return data and (gsub(data,"%s",""))
+ end
+ return nil
+end
+
+function file.savechecksum(name,checksum)
+ if not checksum then checksum = file.checksum(name) end
+ if checksum then
+ io.savedata(name .. ".md5",checksum)
+ return checksum
+ end
+ return nil
+end
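+
+-- A typical roundtrip (added sketch; "somefile.tex" is just a placeholder):
+--
+-- local sum = file.savechecksum("somefile.tex")   -- also saved in somefile.tex.md5
+-- print(sum == file.loadchecksum("somefile.tex")) -- true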
diff --git a/tex/context/base/l-number.lua b/tex/context/base/l-number.lua
index 001ca31f7..7db82173c 100644
--- a/tex/context/base/l-number.lua
+++ b/tex/context/base/l-number.lua
@@ -1,207 +1,207 @@
-if not modules then modules = { } end modules ['l-number'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- this module will be replaced when we have the bit library .. the number based sets
--- might go away
-
-local tostring, tonumber = tostring, tonumber
-local format, floor, match, rep = string.format, math.floor, string.match, string.rep
-local concat, insert = table.concat, table.insert
-local lpegmatch = lpeg.match
-
-number = number or { }
-local number = number
-
-if bit32 then -- I wonder if this is faster
-
- local btest, bor = bit32.btest, bit32.bor
-
- function number.bit(p)
- return 2 ^ (p - 1) -- 1-based indexing
- end
-
- number.hasbit = btest
- number.setbit = bor
-
- function number.setbit(x,p) -- why not bor?
- return btest(x,p) and x or x + p
- end
-
- function number.clearbit(x,p)
- return btest(x,p) and x - p or x
- end
-
-else
-
- -- http://ricilake.blogspot.com/2007/10/iterating-bits-in-lua.html
-
- function number.bit(p)
- return 2 ^ (p - 1) -- 1-based indexing
- end
-
- function number.hasbit(x, p) -- typical call: if hasbit(x, bit(3)) then ...
- return x % (p + p) >= p
- end
-
- function number.setbit(x, p)
- return (x % (p + p) >= p) and x or x + p
- end
-
- function number.clearbit(x, p)
- return (x % (p + p) >= p) and x - p or x
- end
-
-end
-
--- print(number.tobitstring(8))
--- print(number.tobitstring(14))
--- print(number.tobitstring(66))
--- print(number.tobitstring(0x00))
--- print(number.tobitstring(0xFF))
--- print(number.tobitstring(46260767936,4))
-
-if bit32 then
-
- local bextract = bit32.extract
-
- local t = {
- "0", "0", "0", "0", "0", "0", "0", "0",
- "0", "0", "0", "0", "0", "0", "0", "0",
- "0", "0", "0", "0", "0", "0", "0", "0",
- "0", "0", "0", "0", "0", "0", "0", "0",
- }
-
- function number.tobitstring(b,m)
- -- if really needed we can speed this one up
- -- because small numbers need less extraction
- local n = 32
- for i=0,31 do
- local v = bextract(b,i)
- local k = 32 - i
- if v == 1 then
- n = k
- t[k] = "1"
- else
- t[k] = "0"
- end
- end
- if m then
- m = 33 - m * 8
- if m < 1 then
- m = 1
- end
- return concat(t,"",m)
- elseif n < 8 then
- return concat(t)
- elseif n < 16 then
- return concat(t,"",9)
- elseif n < 24 then
- return concat(t,"",17)
- else
- return concat(t,"",25)
- end
- end
-
-else
-
- function number.tobitstring(n,m)
- if n > 0 then
- local t = { }
- while n > 0 do
- insert(t,1,n % 2 > 0 and 1 or 0)
- n = floor(n/2)
- end
- local nn = 8 - #t % 8
- if nn > 0 and nn < 8 then
- for i=1,nn do
- insert(t,1,0)
- end
- end
- if m then
- m = m * 8 - #t
- if m > 0 then
- insert(t,1,rep("0",m))
- end
- end
- return concat(t)
- elseif m then
- rep("00000000",m)
- else
- return "00000000"
- end
- end
-
-end
-
-function number.valid(str,default)
- return tonumber(str) or default or nil
-end
-
-function number.toevenhex(n)
- local s = format("%X",n)
- if #s % 2 == 0 then
- return s
- else
- return "0" .. s
- end
-end
-
--- a,b,c,d,e,f = number.toset(100101)
---
--- function number.toset(n)
--- return match(tostring(n),"(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)")
--- end
---
--- -- the lpeg way is slower on 8 digits, but faster on 4 digits, some 7.5%
--- -- on
---
--- for i=1,1000000 do
--- local a,b,c,d,e,f,g,h = number.toset(12345678)
--- local a,b,c,d = number.toset(1234)
--- local a,b,c = number.toset(123)
--- local a,b,c = number.toset("123")
--- end
-
-local one = lpeg.C(1-lpeg.S('')/tonumber)^1
-
-function number.toset(n)
- return lpegmatch(one,tostring(n))
-end
-
--- function number.bits(n,zero)
--- local t, i = { }, (zero and 0) or 1
--- while n > 0 do
--- local m = n % 2
--- if m > 0 then
--- insert(t,1,i)
--- end
--- n = floor(n/2)
--- i = i + 1
--- end
--- return t
--- end
---
--- -- a bit faster
-
-local function bits(n,i,...)
- if n > 0 then
- local m = n % 2
- local n = floor(n/2)
- if m > 0 then
- return bits(n, i+1, i, ...)
- else
- return bits(n, i+1, ...)
- end
- else
- return ...
- end
-end
-
-function number.bits(n)
- return { bits(n,1) }
-end
+if not modules then modules = { } end modules ['l-number'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- this module will be replaced when we have the bit library .. the number based sets
+-- might go away
+
+local tostring, tonumber = tostring, tonumber
+local format, floor, match, rep = string.format, math.floor, string.match, string.rep
+local concat, insert = table.concat, table.insert
+local lpegmatch = lpeg.match
+
+number = number or { }
+local number = number
+
+if bit32 then -- I wonder if this is faster
+
+ local btest, bor = bit32.btest, bit32.bor
+
+ function number.bit(p)
+ return 2 ^ (p - 1) -- 1-based indexing
+ end
+
+ number.hasbit = btest
+ number.setbit = bor
+
+ function number.setbit(x,p) -- why not bor?
+ return btest(x,p) and x or x + p
+ end
+
+ function number.clearbit(x,p)
+ return btest(x,p) and x - p or x
+ end
+
+else
+
+ -- http://ricilake.blogspot.com/2007/10/iterating-bits-in-lua.html
+
+ function number.bit(p)
+ return 2 ^ (p - 1) -- 1-based indexing
+ end
+
+ function number.hasbit(x, p) -- typical call: if hasbit(x, bit(3)) then ...
+ return x % (p + p) >= p
+ end
+
+ function number.setbit(x, p)
+ return (x % (p + p) >= p) and x or x + p
+ end
+
+ function number.clearbit(x, p)
+ return (x % (p + p) >= p) and x - p or x
+ end
+
+end
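+
+-- An added illustration (both branches give the same results):
+--
+-- local b3 = number.bit(3)     -- 4
+-- print(number.hasbit(5,b3))   -- true  (5 is binary 101)
+-- print(number.setbit(1,b3))   -- 5
+-- print(number.clearbit(5,b3)) -- 1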
+
+-- print(number.tobitstring(8))
+-- print(number.tobitstring(14))
+-- print(number.tobitstring(66))
+-- print(number.tobitstring(0x00))
+-- print(number.tobitstring(0xFF))
+-- print(number.tobitstring(46260767936,4))
+
+if bit32 then
+
+ local bextract = bit32.extract
+
+ local t = {
+ "0", "0", "0", "0", "0", "0", "0", "0",
+ "0", "0", "0", "0", "0", "0", "0", "0",
+ "0", "0", "0", "0", "0", "0", "0", "0",
+ "0", "0", "0", "0", "0", "0", "0", "0",
+ }
+
+ function number.tobitstring(b,m)
+ -- if really needed we can speed this one up
+ -- because small numbers need less extraction
+ local n = 32
+ for i=0,31 do
+ local v = bextract(b,i)
+ local k = 32 - i
+ if v == 1 then
+ n = k
+ t[k] = "1"
+ else
+ t[k] = "0"
+ end
+ end
+ if m then
+ m = 33 - m * 8
+ if m < 1 then
+ m = 1
+ end
+ return concat(t,"",m)
+ elseif n < 8 then
+ return concat(t)
+ elseif n < 16 then
+ return concat(t,"",9)
+ elseif n < 24 then
+ return concat(t,"",17)
+ else
+ return concat(t,"",25)
+ end
+ end
+
+else
+
+ function number.tobitstring(n,m)
+ if n > 0 then
+ local t = { }
+ while n > 0 do
+ insert(t,1,n % 2 > 0 and 1 or 0)
+ n = floor(n/2)
+ end
+ local nn = 8 - #t % 8
+ if nn > 0 and nn < 8 then
+ for i=1,nn do
+ insert(t,1,0)
+ end
+ end
+ if m then
+ m = m * 8 - #t
+ if m > 0 then
+ insert(t,1,rep("0",m))
+ end
+ end
+ return concat(t)
+ elseif m then
+            return rep("00000000",m)
+ else
+ return "00000000"
+ end
+ end
+
+end
+
+function number.valid(str,default)
+ return tonumber(str) or default or nil
+end
+
+function number.toevenhex(n)
+ local s = format("%X",n)
+ if #s % 2 == 0 then
+ return s
+ else
+ return "0" .. s
+ end
+end
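+
+-- Added examples:
+--
+-- print(number.toevenhex(15))  -- 0F
+-- print(number.toevenhex(255)) -- FF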
+
+-- a,b,c,d,e,f = number.toset(100101)
+--
+-- function number.toset(n)
+-- return match(tostring(n),"(.?)(.?)(.?)(.?)(.?)(.?)(.?)(.?)")
+-- end
+--
+-- -- the lpeg way is slower on 8 digits, but faster on 4 digits, some 7.5%
+-- -- on
+--
+-- for i=1,1000000 do
+-- local a,b,c,d,e,f,g,h = number.toset(12345678)
+-- local a,b,c,d = number.toset(1234)
+-- local a,b,c = number.toset(123)
+-- local a,b,c = number.toset("123")
+-- end
+
+local one = lpeg.C(1-lpeg.S('')/tonumber)^1
+
+function number.toset(n)
+ return lpegmatch(one,tostring(n))
+end
+
+-- function number.bits(n,zero)
+-- local t, i = { }, (zero and 0) or 1
+-- while n > 0 do
+-- local m = n % 2
+-- if m > 0 then
+-- insert(t,1,i)
+-- end
+-- n = floor(n/2)
+-- i = i + 1
+-- end
+-- return t
+-- end
+--
+-- -- a bit faster
+
+local function bits(n,i,...)
+ if n > 0 then
+ local m = n % 2
+ local n = floor(n/2)
+ if m > 0 then
+ return bits(n, i+1, i, ...)
+ else
+ return bits(n, i+1, ...)
+ end
+ else
+ return ...
+ end
+end
+
+function number.bits(n)
+ return { bits(n,1) }
+end
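+
+-- An added illustration: the 1-based positions of the set bits, highest first:
+--
+-- inspect(number.bits(13)) -- { 4, 3, 1 } (13 is binary 1101)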
diff --git a/tex/context/base/l-os.lua b/tex/context/base/l-os.lua
index 05ca0acdc..6b9ae12f9 100644
--- a/tex/context/base/l-os.lua
+++ b/tex/context/base/l-os.lua
@@ -1,474 +1,474 @@
-if not modules then modules = { } end modules ['l-os'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This file deals with some operating system issues. Please don't bother me
--- with the pros and cons of operating systems as they all have their flaws
--- and benefits. Bashing one of them won't help solving problems and fixing
--- bugs faster and is a waste of time and energy.
---
--- path separators: / or \ ... we can use / everywhere
--- suffixes : dll so exe ... no big deal
--- quotes : we can use "" in most cases
--- expansion : unless "" are used * might give side effects
--- piping/threads : somewhat different for each os
--- locations : specific user file locations and settings can change over time
---
--- os.type : windows | unix (new, we already guessed os.platform)
--- os.name : windows | msdos | linux | macosx | solaris | .. | generic (new)
--- os.platform : extended os.name with architecture
-
--- os.sleep() => socket.sleep()
--- math.randomseed(tonumber(string.sub(string.reverse(tostring(math.floor(socket.gettime()*10000))),1,6)))
-
--- maybe build io.flush in os.execute
-
-local os = os
-local date, time = os.date, os.time
-local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
-local concat = table.concat
-local random, ceil, randomseed = math.random, math.ceil, math.randomseed
-local rawget, rawset, type, getmetatable, setmetatable, tonumber, tostring = rawget, rawset, type, getmetatable, setmetatable, tonumber, tostring
-
--- The following code permits traversing the environment table, at least
--- in luatex. Internally all environment names are uppercase.
-
--- The randomseed in Lua is not that random, although this depends on the operating system as well
--- as the binary (Luatex is normally okay). But to be sure we set the seed anyway.
-
-math.initialseed = tonumber(string.sub(string.reverse(tostring(ceil(socket and socket.gettime()*10000 or time()))),1,6))
-
-randomseed(math.initialseed)
-
-if not os.__getenv__ then
-
- os.__getenv__ = os.getenv
- os.__setenv__ = os.setenv
-
- if os.env then
-
- local osgetenv = os.getenv
- local ossetenv = os.setenv
- local osenv = os.env local _ = osenv.PATH -- initialize the table
-
- function os.setenv(k,v)
- if v == nil then
- v = ""
- end
- local K = upper(k)
- osenv[K] = v
- if type(v) == "table" then
- v = concat(v,";") -- path
- end
- ossetenv(K,v)
- end
-
- function os.getenv(k)
- local K = upper(k)
- local v = osenv[K] or osenv[k] or osgetenv(K) or osgetenv(k)
- if v == "" then
- return nil
- else
- return v
- end
- end
-
- else
-
- local ossetenv = os.setenv
- local osgetenv = os.getenv
- local osenv = { }
-
- function os.setenv(k,v)
- if v == nil then
- v = ""
- end
- local K = upper(k)
- osenv[K] = v
- end
-
- function os.getenv(k)
- local K = upper(k)
- local v = osenv[K] or osgetenv(K) or osgetenv(k)
- if v == "" then
- return nil
- else
- return v
- end
- end
-
- local function __index(t,k)
- return os.getenv(k)
- end
- local function __newindex(t,k,v)
- os.setenv(k,v)
- end
-
- os.env = { }
-
- setmetatable(os.env, { __index = __index, __newindex = __newindex } )
-
- end
-
-end
-
--- end of environment hack
-
-local execute, spawn, exec, iopopen, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.popen, io.flush
-
-function os.execute(...) ioflush() return execute(...) end
-function os.spawn (...) ioflush() return spawn (...) end
-function os.exec (...) ioflush() return exec (...) end
-function io.popen (...) ioflush() return iopopen(...) end
-
-function os.resultof(command)
- local handle = io.popen(command,"r")
- return handle and handle:read("*all") or ""
-end
-
-if not io.fileseparator then
- if find(os.getenv("PATH"),";") then
- io.fileseparator, io.pathseparator, os.type = "\\", ";", os.type or "mswin"
- else
- io.fileseparator, io.pathseparator, os.type = "/" , ":", os.type or "unix"
- end
-end
-
-os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
-os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
-
-if os.type == "windows" then
- os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' }
-else
- os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
-end
-
-local launchers = {
- windows = "start %s",
- macosx = "open %s",
- unix = "$BROWSER %s &> /dev/null &",
-}
-
-function os.launch(str)
- os.execute(format(launchers[os.name] or launchers.unix,str))
-end
-
-if not os.times then -- ?
- -- utime = user time
- -- stime = system time
- -- cutime = children user time
- -- cstime = children system time
- function os.times()
- return {
- utime = os.gettimeofday(), -- user
- stime = 0, -- system
- cutime = 0, -- children user
- cstime = 0, -- children system
- }
- end
-end
-
-os.gettimeofday = os.gettimeofday or os.clock
-
-local startuptime = os.gettimeofday()
-
-function os.runtime()
- return os.gettimeofday() - startuptime
-end
-
---~ print(os.gettimeofday()-os.time())
---~ os.sleep(1.234)
---~ print (">>",os.runtime())
---~ print(os.date("%H:%M:%S",os.gettimeofday()))
---~ print(os.date("%H:%M:%S",os.time()))
-
--- no need for function anymore as we have more clever code and helpers now
--- this metatable trickery might as well disappear
-
-os.resolvers = os.resolvers or { } -- will become private
-
-local resolvers = os.resolvers
-
-setmetatable(os, { __index = function(t,k)
- local r = resolvers[k]
- return r and r(t,k) or nil -- no memoize
-end })
-
--- we can use HOSTTYPE on some platforms
-
-local name, platform = os.name or "linux", os.getenv("MTX_PLATFORM") or ""
-
-local function guess()
- local architecture = os.resultof("uname -m") or ""
- if architecture ~= "" then
- return architecture
- end
- architecture = os.getenv("HOSTTYPE") or ""
- if architecture ~= "" then
- return architecture
- end
- return os.resultof("echo $HOSTTYPE") or ""
-end
-
-if platform ~= "" then
-
- os.platform = platform
-
-elseif os.type == "windows" then
-
- -- we could set the variable directly, no function needed here
-
- function os.resolvers.platform(t,k)
- local platform, architecture = "", os.getenv("PROCESSOR_ARCHITECTURE") or ""
- if find(architecture,"AMD64") then
- platform = "mswin-64"
- else
- platform = "mswin"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-elseif name == "linux" then
-
- function os.resolvers.platform(t,k)
- -- we sometimes have HOSTTYPE set so let's check that first
- local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
- if find(architecture,"x86_64") then
- platform = "linux-64"
- elseif find(architecture,"ppc") then
- platform = "linux-ppc"
- else
- platform = "linux"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-elseif name == "macosx" then
-
- --[[
- Identifying the architecture of OSX is quite a mess and this
- is the best we can come up with. For some reason $HOSTTYPE is
- a kind of pseudo environment variable, not known to the current
- environment. And yes, uname cannot be trusted either, so there
- is a change that you end up with a 32 bit run on a 64 bit system.
- Also, some proper 64 bit intel macs are too cheap (low-end) and
- therefore not permitted to run the 64 bit kernel.
- ]]--
-
- function os.resolvers.platform(t,k)
- -- local platform, architecture = "", os.getenv("HOSTTYPE") or ""
- -- if architecture == "" then
- -- architecture = os.resultof("echo $HOSTTYPE") or ""
- -- end
- local platform, architecture = "", os.resultof("echo $HOSTTYPE") or ""
- if architecture == "" then
- -- print("\nI have no clue what kind of OSX you're running so let's assume an 32 bit intel.\n")
- platform = "osx-intel"
- elseif find(architecture,"i386") then
- platform = "osx-intel"
- elseif find(architecture,"x86_64") then
- platform = "osx-64"
- else
- platform = "osx-ppc"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-elseif name == "sunos" then
-
- function os.resolvers.platform(t,k)
- local platform, architecture = "", os.resultof("uname -m") or ""
- if find(architecture,"sparc") then
- platform = "solaris-sparc"
- else -- if architecture == 'i86pc'
- platform = "solaris-intel"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-elseif name == "freebsd" then
-
- function os.resolvers.platform(t,k)
- local platform, architecture = "", os.resultof("uname -m") or ""
- if find(architecture,"amd64") then
- platform = "freebsd-amd64"
- else
- platform = "freebsd"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-elseif name == "kfreebsd" then
-
- function os.resolvers.platform(t,k)
- -- we sometimes have HOSTTYPE set so let's check that first
- local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
- if find(architecture,"x86_64") then
- platform = "kfreebsd-amd64"
- else
- platform = "kfreebsd-i386"
- end
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-else
-
- -- platform = "linux"
- -- os.setenv("MTX_PLATFORM",platform)
- -- os.platform = platform
-
- function os.resolvers.platform(t,k)
- local platform = "linux"
- os.setenv("MTX_PLATFORM",platform)
- os.platform = platform
- return platform
- end
-
-end
-
--- beware, we set the randomseed
-
--- from wikipedia: Version 4 UUIDs use a scheme relying only on random numbers. This algorithm sets the
--- version number as well as two reserved bits. All other bits are set using a random or pseudorandom
--- data source. Version 4 UUIDs have the form xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx with hexadecimal
--- digits x and hexadecimal digits 8, 9, A, or B for y. e.g. f47ac10b-58cc-4372-a567-0e02b2c3d479.
---
--- as we don't call this function too often there is not so much risk on repetition
-
-local t = { 8, 9, "a", "b" }
-
-function os.uuid()
- return format("%04x%04x-4%03x-%s%03x-%04x-%04x%04x%04x",
- random(0xFFFF),random(0xFFFF),
- random(0x0FFF),
- t[ceil(random(4))] or 8,random(0x0FFF),
- random(0xFFFF),
- random(0xFFFF),random(0xFFFF),random(0xFFFF)
- )
-end
-
-local d
-
-function os.timezone(delta)
- d = d or tonumber(tonumber(date("%H")-date("!%H")))
- if delta then
- if d > 0 then
- return format("+%02i:00",d)
- else
- return format("-%02i:00",-d)
- end
- else
- return 1
- end
-end
-
-local timeformat = format("%%s%s",os.timezone(true))
-local dateformat = "!%Y-%m-%d %H:%M:%S"
-
-function os.fulltime(t,default)
- t = tonumber(t) or 0
- if t > 0 then
- -- valid time
- elseif default then
- return default
- else
- t = nil
- end
- return format(timeformat,date(dateformat,t))
-end
-
-local dateformat = "%Y-%m-%d %H:%M:%S"
-
-function os.localtime(t,default)
- t = tonumber(t) or 0
- if t > 0 then
- -- valid time
- elseif default then
- return default
- else
- t = nil
- end
- return date(dateformat,t)
-end
-
-function os.converttime(t,default)
- local t = tonumber(t)
- if t and t > 0 then
- return date(dateformat,t)
- else
- return default or "-"
- end
-end
-
-local memory = { }
-
-local function which(filename)
- local fullname = memory[filename]
- if fullname == nil then
- local suffix = file.suffix(filename)
- local suffixes = suffix == "" and os.binsuffixes or { suffix }
- for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do
- local df = file.join(directory,filename)
- for i=1,#suffixes do
- local dfs = file.addsuffix(df,suffixes[i])
- if io.exists(dfs) then
- fullname = dfs
- break
- end
- end
- end
- if not fullname then
- fullname = false
- end
- memory[filename] = fullname
- end
- return fullname
-end
-
-os.which = which
-os.where = which
-
-function os.today()
- return date("!*t") -- table with values
-end
-
-function os.now()
- return date("!%Y-%m-%d %H:%M:%S") -- 2011-12-04 14:59:12
-end
-
--- if not os.sleep and socket then
--- os.sleep = socket.sleep
--- end
-
-if not os.sleep then
- local socket = socket
- function os.sleep(n)
- if not socket then
- -- so we delay ... if os.sleep is really needed then one should also
- -- be sure that socket can be found
- socket = require("socket")
- end
- socket.sleep(n)
- end
-end
-
--- print(os.which("inkscape.exe"))
--- print(os.which("inkscape"))
--- print(os.which("gs.exe"))
--- print(os.which("ps2pdf"))
+if not modules then modules = { } end modules ['l-os'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This file deals with some operating system issues. Please don't bother me
+-- with the pros and cons of operating systems as they all have their flaws
+-- and benefits. Bashing one of them won't help solve problems or fix
+-- bugs faster and is a waste of time and energy.
+--
+-- path separators: / or \ ... we can use / everywhere
+-- suffixes : dll so exe ... no big deal
+-- quotes : we can use "" in most cases
+-- expansion : unless "" are used * might give side effects
+-- piping/threads : somewhat different for each os
+-- locations : specific user file locations and settings can change over time
+--
+-- os.type : windows | unix (new, we already guessed os.platform)
+-- os.name : windows | msdos | linux | macosx | solaris | .. | generic (new)
+-- os.platform : extended os.name with architecture
+
+-- os.sleep() => socket.sleep()
+-- math.randomseed(tonumber(string.sub(string.reverse(tostring(math.floor(socket.gettime()*10000))),1,6)))
+
+-- maybe build io.flush into os.execute
+
+local os = os
+local date, time = os.date, os.time
+local find, format, gsub, upper, gmatch = string.find, string.format, string.gsub, string.upper, string.gmatch
+local concat = table.concat
+local random, ceil, randomseed = math.random, math.ceil, math.randomseed
+local rawget, rawset, type, getmetatable, setmetatable, tonumber, tostring = rawget, rawset, type, getmetatable, setmetatable, tonumber, tostring
+
+-- The following code permits traversing the environment table, at least
+-- in luatex. Internally all environment names are uppercase.
+
+-- The randomseed in Lua is not that random, although this depends on the operating system as well
+-- as the binary (Luatex is normally okay). But to be sure we set the seed anyway.
+
+math.initialseed = tonumber(string.sub(string.reverse(tostring(ceil(socket and socket.gettime()*10000 or time()))),1,6))
+
+randomseed(math.initialseed)
+
+if not os.__getenv__ then
+
+ os.__getenv__ = os.getenv
+ os.__setenv__ = os.setenv
+
+ if os.env then
+
+ local osgetenv = os.getenv
+ local ossetenv = os.setenv
+ local osenv = os.env local _ = osenv.PATH -- initialize the table
+
+ function os.setenv(k,v)
+ if v == nil then
+ v = ""
+ end
+ local K = upper(k)
+ osenv[K] = v
+ if type(v) == "table" then
+ v = concat(v,";") -- path
+ end
+ ossetenv(K,v)
+ end
+
+ function os.getenv(k)
+ local K = upper(k)
+ local v = osenv[K] or osenv[k] or osgetenv(K) or osgetenv(k)
+ if v == "" then
+ return nil
+ else
+ return v
+ end
+ end
+
+ else
+
+ local ossetenv = os.setenv
+ local osgetenv = os.getenv
+ local osenv = { }
+
+ function os.setenv(k,v)
+ if v == nil then
+ v = ""
+ end
+ local K = upper(k)
+ osenv[K] = v
+ end
+
+ function os.getenv(k)
+ local K = upper(k)
+ local v = osenv[K] or osgetenv(K) or osgetenv(k)
+ if v == "" then
+ return nil
+ else
+ return v
+ end
+ end
+
+ local function __index(t,k)
+ return os.getenv(k)
+ end
+ local function __newindex(t,k,v)
+ os.setenv(k,v)
+ end
+
+ os.env = { }
+
+ setmetatable(os.env, { __index = __index, __newindex = __newindex } )
+
+ end
+
+end
+
+-- end of environment hack
+
+local execute, spawn, exec, iopopen, ioflush = os.execute, os.spawn or os.execute, os.exec or os.execute, io.popen, io.flush
+
+function os.execute(...) ioflush() return execute(...) end
+function os.spawn (...) ioflush() return spawn (...) end
+function os.exec (...) ioflush() return exec (...) end
+function io.popen (...) ioflush() return iopopen(...) end
+
+function os.resultof(command)
+ local handle = io.popen(command,"r")
+ return handle and handle:read("*all") or ""
+end
+
+if not io.fileseparator then
+ if find(os.getenv("PATH"),";") then
+ io.fileseparator, io.pathseparator, os.type = "\\", ";", os.type or "mswin"
+ else
+ io.fileseparator, io.pathseparator, os.type = "/" , ":", os.type or "unix"
+ end
+end
+
+os.type = os.type or (io.pathseparator == ";" and "windows") or "unix"
+os.name = os.name or (os.type == "windows" and "mswin" ) or "linux"
+
+if os.type == "windows" then
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'dll', 'exe', { 'exe', 'cmd', 'bat' }
+else
+ os.libsuffix, os.binsuffix, os.binsuffixes = 'so', '', { '' }
+end
+
+local launchers = {
+ windows = "start %s",
+ macosx = "open %s",
+ unix = "$BROWSER %s &> /dev/null &",
+}
+
+function os.launch(str)
+ os.execute(format(launchers[os.name] or launchers.unix,str))
+end
+
+if not os.times then -- ?
+ -- utime = user time
+ -- stime = system time
+ -- cutime = children user time
+ -- cstime = children system time
+ function os.times()
+ return {
+ utime = os.gettimeofday(), -- user
+ stime = 0, -- system
+ cutime = 0, -- children user
+ cstime = 0, -- children system
+ }
+ end
+end
+
+os.gettimeofday = os.gettimeofday or os.clock
+
+local startuptime = os.gettimeofday()
+
+function os.runtime()
+ return os.gettimeofday() - startuptime
+end
+
+--~ print(os.gettimeofday()-os.time())
+--~ os.sleep(1.234)
+--~ print (">>",os.runtime())
+--~ print(os.date("%H:%M:%S",os.gettimeofday()))
+--~ print(os.date("%H:%M:%S",os.time()))
+
+-- no need for a function anymore as we have more clever code and helpers now
+-- this metatable trickery might as well disappear
+
+os.resolvers = os.resolvers or { } -- will become private
+
+local resolvers = os.resolvers
+
+setmetatable(os, { __index = function(t,k)
+ local r = resolvers[k]
+ return r and r(t,k) or nil -- no memoize
+end })
+
+-- we can use HOSTTYPE on some platforms
+
+local name, platform = os.name or "linux", os.getenv("MTX_PLATFORM") or ""
+
+local function guess()
+ local architecture = os.resultof("uname -m") or ""
+ if architecture ~= "" then
+ return architecture
+ end
+ architecture = os.getenv("HOSTTYPE") or ""
+ if architecture ~= "" then
+ return architecture
+ end
+ return os.resultof("echo $HOSTTYPE") or ""
+end
+
+if platform ~= "" then
+
+ os.platform = platform
+
+elseif os.type == "windows" then
+
+ -- we could set the variable directly, no function needed here
+
+ function os.resolvers.platform(t,k)
+ local platform, architecture = "", os.getenv("PROCESSOR_ARCHITECTURE") or ""
+ if find(architecture,"AMD64") then
+ platform = "mswin-64"
+ else
+ platform = "mswin"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform = platform
+ return platform
+ end
+
+elseif name == "linux" then
+
+ function os.resolvers.platform(t,k)
+ -- we sometimes have HOSTTYPE set so let's check that first
+ local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
+ if find(architecture,"x86_64") then
+ platform = "linux-64"
+ elseif find(architecture,"ppc") then
+ platform = "linux-ppc"
+ else
+ platform = "linux"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform = platform
+ return platform
+ end
+
+elseif name == "macosx" then
+
+ --[[
+ Identifying the architecture of OSX is quite a mess and this
+ is the best we can come up with. For some reason $HOSTTYPE is
+ a kind of pseudo environment variable, not known to the current
+ environment. And yes, uname cannot be trusted either, so there
+        is a chance that you end up with a 32 bit run on a 64 bit system.
+        Also, some genuine 64 bit intel macs are low-end models that are
+        not permitted to run the 64 bit kernel.
+ ]]--
+
+ function os.resolvers.platform(t,k)
+ -- local platform, architecture = "", os.getenv("HOSTTYPE") or ""
+ -- if architecture == "" then
+ -- architecture = os.resultof("echo $HOSTTYPE") or ""
+ -- end
+ local platform, architecture = "", os.resultof("echo $HOSTTYPE") or ""
+ if architecture == "" then
+         -- print("\nI have no clue what kind of OSX you're running so let's assume a 32 bit intel.\n")
+ platform = "osx-intel"
+ elseif find(architecture,"i386") then
+ platform = "osx-intel"
+ elseif find(architecture,"x86_64") then
+ platform = "osx-64"
+ else
+ platform = "osx-ppc"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform = platform
+ return platform
+ end
+
+elseif name == "sunos" then
+
+ function os.resolvers.platform(t,k)
+ local platform, architecture = "", os.resultof("uname -m") or ""
+ if find(architecture,"sparc") then
+ platform = "solaris-sparc"
+ else -- if architecture == 'i86pc'
+ platform = "solaris-intel"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform = platform
+ return platform
+ end
+
+elseif name == "freebsd" then
+
+ function os.resolvers.platform(t,k)
+ local platform, architecture = "", os.resultof("uname -m") or ""
+ if find(architecture,"amd64") then
+ platform = "freebsd-amd64"
+ else
+ platform = "freebsd"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform = platform
+ return platform
+ end
+
+elseif name == "kfreebsd" then
+
+ function os.resolvers.platform(t,k)
+ -- we sometimes have HOSTTYPE set so let's check that first
+ local platform, architecture = "", os.getenv("HOSTTYPE") or os.resultof("uname -m") or ""
+ if find(architecture,"x86_64") then
+ platform = "kfreebsd-amd64"
+ else
+ platform = "kfreebsd-i386"
+ end
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform = platform
+ return platform
+ end
+
+else
+
+ -- platform = "linux"
+ -- os.setenv("MTX_PLATFORM",platform)
+ -- os.platform = platform
+
+ function os.resolvers.platform(t,k)
+ local platform = "linux"
+ os.setenv("MTX_PLATFORM",platform)
+ os.platform = platform
+ return platform
+ end
+
+end
+
+-- beware, we set the randomseed
+
+-- from wikipedia: Version 4 UUIDs use a scheme relying only on random numbers. This algorithm sets the
+-- version number as well as two reserved bits. All other bits are set using a random or pseudorandom
+-- data source. Version 4 UUIDs have the form xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx with hexadecimal
+-- digits x and hexadecimal digits 8, 9, A, or B for y. e.g. f47ac10b-58cc-4372-a567-0e02b2c3d479.
+--
+-- as we don't call this function very often there is not much risk of repetition
+
+local t = { 8, 9, "a", "b" }
+
+function os.uuid()
+ return format("%04x%04x-4%03x-%s%03x-%04x-%04x%04x%04x",
+ random(0xFFFF),random(0xFFFF),
+ random(0x0FFF),
+ t[ceil(random(4))] or 8,random(0x0FFF),
+ random(0xFFFF),
+ random(0xFFFF),random(0xFFFF),random(0xFFFF)
+ )
+end
+
+local d
+
+function os.timezone(delta)
+    d = d or tonumber(date("%H")) - tonumber(date("!%H"))
+ if delta then
+ if d > 0 then
+ return format("+%02i:00",d)
+ else
+ return format("-%02i:00",-d)
+ end
+ else
+ return 1
+ end
+end
+
+local timeformat = format("%%s%s",os.timezone(true))
+local dateformat = "!%Y-%m-%d %H:%M:%S"
+
+function os.fulltime(t,default)
+ t = tonumber(t) or 0
+ if t > 0 then
+ -- valid time
+ elseif default then
+ return default
+ else
+ t = nil
+ end
+ return format(timeformat,date(dateformat,t))
+end
+
+local dateformat = "%Y-%m-%d %H:%M:%S"
+
+function os.localtime(t,default)
+ t = tonumber(t) or 0
+ if t > 0 then
+ -- valid time
+ elseif default then
+ return default
+ else
+ t = nil
+ end
+ return date(dateformat,t)
+end
+
+function os.converttime(t,default)
+ local t = tonumber(t)
+ if t and t > 0 then
+ return date(dateformat,t)
+ else
+ return default or "-"
+ end
+end
+
+local memory = { }
+
+local function which(filename)
+ local fullname = memory[filename]
+ if fullname == nil then
+ local suffix = file.suffix(filename)
+ local suffixes = suffix == "" and os.binsuffixes or { suffix }
+ for directory in gmatch(os.getenv("PATH"),"[^" .. io.pathseparator .."]+") do
+ local df = file.join(directory,filename)
+ for i=1,#suffixes do
+ local dfs = file.addsuffix(df,suffixes[i])
+ if io.exists(dfs) then
+ fullname = dfs
+ break
+ end
+ end
+ end
+ if not fullname then
+ fullname = false
+ end
+ memory[filename] = fullname
+ end
+ return fullname
+end
+
+os.which = which
+os.where = which
+
+function os.today()
+ return date("!*t") -- table with values
+end
+
+function os.now()
+ return date("!%Y-%m-%d %H:%M:%S") -- 2011-12-04 14:59:12
+end
+
+-- if not os.sleep and socket then
+-- os.sleep = socket.sleep
+-- end
+
+if not os.sleep then
+ local socket = socket
+ function os.sleep(n)
+ if not socket then
+ -- so we delay ... if os.sleep is really needed then one should also
+ -- be sure that socket can be found
+ socket = require("socket")
+ end
+ socket.sleep(n)
+ end
+end
+
+-- print(os.which("inkscape.exe"))
+-- print(os.which("inkscape"))
+-- print(os.which("gs.exe"))
+-- print(os.which("ps2pdf"))
diff --git a/tex/context/base/l-package.lua b/tex/context/base/l-package.lua
index 579fd3941..51da9f25d 100644
--- a/tex/context/base/l-package.lua
+++ b/tex/context/base/l-package.lua
@@ -1,340 +1,340 @@
-if not modules then modules = { } end modules ['l-package'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- Code moved from data-lua and changed into a plug-in.
-
--- We overload the regular loader. We do so because we operate mostly in
--- tds and use our own loader code. Alternatively we could use a more
--- extensive definition of package.path and package.cpath but even then
--- we're not done. Also, we now have better tracing.
---
--- -- local mylib = require("libtest")
--- -- local mysql = require("luasql.mysql")
-
-local type = type
-local gsub, format = string.gsub, string.format
-
-local P, S, Cs, lpegmatch = lpeg.P, lpeg.S, lpeg.Cs, lpeg.match
-
-local package = package
-local searchers = package.searchers or package.loaders
-
--- dummies
-
-local filejoin = file and file.join or function(path,name) return path .. "/" .. name end
-local isreadable = file and file.is_readable or function(name) local f = io.open(name) if f then f:close() return true end end
-local addsuffix = file and file.addsuffix or function(name,suffix) return name .. "." .. suffix end
-
--- local separator, concatinator, placeholder, pathofexecutable, ignorebefore = string.match(package.config,"(.-)\n(.-)\n(.-)\n(.-)\n(.-)\n")
---
--- local config = {
--- separator = separator, -- \ or /
--- concatinator = concatinator, -- ;
--- placeholder = placeholder, -- ? becomes name
--- pathofexecutable = pathofexecutable, -- ! becomes executables dir (on windows)
--- ignorebefore = ignorebefore, -- - remove all before this when making lua_open
--- }
-
-local function cleanpath(path) -- hm, don't we have a helper for this?
- return path
-end
-
-local pattern = Cs((((1-S("\\/"))^0 * (S("\\/")^1/"/"))^0 * (P(".")^1/"/"+P(1))^1) * -1)
-
-local function lualibfile(name)
- return lpegmatch(pattern,name) or name
-end
-
-local offset = luarocks and 1 or 0 -- todo: also check other extras
-
-local helpers = package.helpers or {
- cleanpath = cleanpath,
- lualibfile = lualibfile,
- trace = false,
- report = function(...) print(format(...)) end,
- builtin = {
- ["preload table"] = searchers[1+offset], -- special case, built-in libs
- ["path specification"] = searchers[2+offset],
- ["cpath specification"] = searchers[3+offset],
- ["all in one fallback"] = searchers[4+offset], -- special case, combined libs
- },
- methods = {
- },
- sequence = {
- "already loaded",
- "preload table",
- "lua extra list",
- "lib extra list",
- "path specification",
- "cpath specification",
- "all in one fallback",
- "not loaded",
- }
-}
-
-package.helpers = helpers
-
-local methods = helpers.methods
-local builtin = helpers.builtin
-
--- extra tds/ctx paths ... a bit of overhead for efficient tracing
-
-local extraluapaths = { }
-local extralibpaths = { }
-local luapaths = nil -- delayed
-local libpaths = nil -- delayed
-local oldluapath = nil
-local oldlibpath = nil
-
-local nofextralua = -1
-local nofextralib = -1
-local nofpathlua = -1
-local nofpathlib = -1
-
-local function listpaths(what,paths)
- local nofpaths = #paths
- if nofpaths > 0 then
- for i=1,nofpaths do
- helpers.report("using %s path %i: %s",what,i,paths[i])
- end
- else
- helpers.report("no %s paths defined",what)
- end
- return nofpaths
-end
-
-local function getextraluapaths()
- if helpers.trace and #extraluapaths ~= nofextralua then
- nofextralua = listpaths("extra lua",extraluapaths)
- end
- return extraluapaths
-end
-
-local function getextralibpaths()
- if helpers.trace and #extralibpaths ~= nofextralib then
- nofextralib = listpaths("extra lib",extralibpaths)
- end
- return extralibpaths
-end
-
-local function getluapaths()
- local luapath = package.path or ""
- if oldluapath ~= luapath then
- luapaths = file.splitpath(luapath,";")
- oldluapath = luapath
- nofpathlua = -1
- end
- if helpers.trace and #luapaths ~= nofpathlua then
- nofpathlua = listpaths("builtin lua",luapaths)
- end
- return luapaths
-end
-
-local function getlibpaths()
- local libpath = package.cpath or ""
- if oldlibpath ~= libpath then
- libpaths = file.splitpath(libpath,";")
- oldlibpath = libpath
- nofpathlib = -1
- end
- if helpers.trace and #libpaths ~= nofpathlib then
- nofpathlib = listpaths("builtin lib",libpaths)
- end
- return libpaths
-end
-
-package.luapaths = getluapaths
-package.libpaths = getlibpaths
-package.extraluapaths = getextraluapaths
-package.extralibpaths = getextralibpaths
-
-local hashes = {
- lua = { },
- lib = { },
-}
-
-local function registerpath(tag,what,target,...)
- local pathlist = { ... }
- local cleanpath = helpers.cleanpath
- local trace = helpers.trace
- local report = helpers.report
- local hash = hashes[what]
- --
- local function add(path)
- local path = cleanpath(path)
- if not hash[path] then
- target[#target+1] = path
- hash[path] = true
- if trace then
- report("registered %s path %s: %s",tag,#target,path)
- end
- else
- if trace then
- report("duplicate %s path: %s",tag,path)
- end
- end
- end
- --
- for p=1,#pathlist do
- local path = pathlist[p]
- if type(path) == "table" then
- for i=1,#path do
- add(path[i])
- end
- else
- add(path)
- end
- end
- return paths
-end
-
-helpers.registerpath = registerpath
-
-function package.extraluapath(...)
- registerpath("extra lua","lua",extraluapaths,...)
-end
-
-function package.extralibpath(...)
- registerpath("extra lib","lib",extralibpaths,...)
-end
-
--- lib loader (used elsewhere)
-
-local function loadedaslib(resolved,rawname) -- todo: strip all before first -
- local base = gsub(rawname,"%.","_")
- -- so, we can do a require("foo/bar") and initialize bar
- -- local base = gsub(file.basename(rawname),"%.","_")
- local init = "luaopen_" .. gsub(base,"%.","_")
- if helpers.trace then
- helpers.report("calling loadlib with '%s' with init '%s'",resolved,init)
- end
- return package.loadlib(resolved,init)
-end
-
-helpers.loadedaslib = loadedaslib
-
--- wrapped and new loaders
-
-local function loadedbypath(name,rawname,paths,islib,what)
- local trace = helpers.trace
- for p=1,#paths do
- local path = paths[p]
- local resolved = filejoin(path,name)
- if trace then
- helpers.report("%s path, identifying '%s' on '%s'",what,name,path)
- end
- if isreadable(resolved) then
- if trace then
- helpers.report("%s path, '%s' found on '%s'",what,name,resolved)
- end
- if islib then
- return loadedaslib(resolved,rawname)
- else
- return loadfile(resolved)
- end
- end
- end
-end
-
-helpers.loadedbypath = loadedbypath
-
-methods["already loaded"] = function(name)
- return package.loaded[name]
-end
-
-methods["preload table"] = function(name)
- return builtin["preload table"](name)
-end
-
-methods["lua extra list"] = function(name)
- return loadedbypath(addsuffix(lualibfile(name),"lua" ),name,getextraluapaths(),false,"lua")
-end
-
-methods["lib extra list"] = function(name)
- return loadedbypath(addsuffix(lualibfile(name),os.libsuffix),name,getextralibpaths(),true, "lib")
-end
-
-methods["path specification"] = function(name)
- getluapaths() -- triggers list building and tracing
- return builtin["path specification"](name)
-end
-
-methods["cpath specification"] = function(name)
- getlibpaths() -- triggers list building and tracing
- return builtin["cpath specification"](name)
-end
-
-methods["all in one fallback"] = function(name)
- return builtin["all in one fallback"](name)
-end
-
-methods["not loaded"] = function(name)
- if helpers.trace then
- helpers.report("unable to locate '%s'",name or "?")
- end
- return nil
-end
-
-local level = 0
-local used = { }
-
-helpers.traceused = false
-
-function helpers.loaded(name)
- local sequence = helpers.sequence
- level = level + 1
- for i=1,#sequence do
- local method = sequence[i]
- if helpers.trace then
- helpers.report("%s, level '%s', method '%s', name '%s'","locating",level,method,name)
- end
- local result, rest = methods[method](name)
- if type(result) == "function" then
- if helpers.trace then
- helpers.report("%s, level '%s', method '%s', name '%s'","found",level,method,name)
- end
- if helpers.traceused then
- used[#used+1] = { level = level, name = name }
- end
- level = level - 1
- return result, rest
- end
- end
- -- safeguard, we never come here
- level = level - 1
- return nil
-end
-
-function helpers.showused()
- local n = #used
- if n > 0 then
- helpers.report("%s libraries loaded:",n)
- helpers.report()
- for i=1,n do
- local u = used[i]
- helpers.report("%i %a",u.level,u.name)
- end
- helpers.report()
- end
-end
-
-function helpers.unload(name)
- if helpers.trace then
- if package.loaded[name] then
- helpers.report("unloading, name '%s', %s",name,"done")
- else
- helpers.report("unloading, name '%s', %s",name,"not loaded")
- end
- end
- package.loaded[name] = nil
-end
-
--- overloading require does not work out well so we need to push it in
--- front ..
-
-table.insert(searchers,1,helpers.loaded)
+if not modules then modules = { } end modules ['l-package'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- Code moved from data-lua and changed into a plug-in.
+
+-- We overload the regular loader. We do so because we operate mostly in
+-- tds and use our own loader code. Alternatively we could use a more
+-- extensive definition of package.path and package.cpath but even then
+-- we're not done. Also, we now have better tracing.
+--
+-- -- local mylib = require("libtest")
+-- -- local mysql = require("luasql.mysql")
+
+local type = type
+local gsub, format = string.gsub, string.format
+
+local P, S, Cs, lpegmatch = lpeg.P, lpeg.S, lpeg.Cs, lpeg.match
+
+local package = package
+local searchers = package.searchers or package.loaders
+
+-- dummies
+
+local filejoin = file and file.join or function(path,name) return path .. "/" .. name end
+local isreadable = file and file.is_readable or function(name) local f = io.open(name) if f then f:close() return true end end
+local addsuffix = file and file.addsuffix or function(name,suffix) return name .. "." .. suffix end
+
+-- local separator, concatinator, placeholder, pathofexecutable, ignorebefore = string.match(package.config,"(.-)\n(.-)\n(.-)\n(.-)\n(.-)\n")
+--
+-- local config = {
+-- separator = separator, -- \ or /
+-- concatinator = concatinator, -- ;
+-- placeholder = placeholder, -- ? becomes name
+-- pathofexecutable = pathofexecutable, -- ! becomes executables dir (on windows)
+-- ignorebefore = ignorebefore, -- - remove all before this when making lua_open
+-- }
+
+local function cleanpath(path) -- hm, don't we have a helper for this?
+ return path
+end
+
+local pattern = Cs((((1-S("\\/"))^0 * (S("\\/")^1/"/"))^0 * (P(".")^1/"/"+P(1))^1) * -1)
+
+local function lualibfile(name)
+ return lpegmatch(pattern,name) or name
+end
+
+local offset = luarocks and 1 or 0 -- todo: also check other extras
+
+local helpers = package.helpers or {
+ cleanpath = cleanpath,
+ lualibfile = lualibfile,
+ trace = false,
+ report = function(...) print(format(...)) end,
+ builtin = {
+ ["preload table"] = searchers[1+offset], -- special case, built-in libs
+ ["path specification"] = searchers[2+offset],
+ ["cpath specification"] = searchers[3+offset],
+ ["all in one fallback"] = searchers[4+offset], -- special case, combined libs
+ },
+ methods = {
+ },
+ sequence = {
+ "already loaded",
+ "preload table",
+ "lua extra list",
+ "lib extra list",
+ "path specification",
+ "cpath specification",
+ "all in one fallback",
+ "not loaded",
+ }
+}
+
+package.helpers = helpers
+
+local methods = helpers.methods
+local builtin = helpers.builtin
+
+-- extra tds/ctx paths ... a bit of overhead for efficient tracing
+
+local extraluapaths = { }
+local extralibpaths = { }
+local luapaths = nil -- delayed
+local libpaths = nil -- delayed
+local oldluapath = nil
+local oldlibpath = nil
+
+local nofextralua = -1
+local nofextralib = -1
+local nofpathlua = -1
+local nofpathlib = -1
+
+local function listpaths(what,paths)
+ local nofpaths = #paths
+ if nofpaths > 0 then
+ for i=1,nofpaths do
+ helpers.report("using %s path %i: %s",what,i,paths[i])
+ end
+ else
+ helpers.report("no %s paths defined",what)
+ end
+ return nofpaths
+end
+
+local function getextraluapaths()
+ if helpers.trace and #extraluapaths ~= nofextralua then
+ nofextralua = listpaths("extra lua",extraluapaths)
+ end
+ return extraluapaths
+end
+
+local function getextralibpaths()
+ if helpers.trace and #extralibpaths ~= nofextralib then
+ nofextralib = listpaths("extra lib",extralibpaths)
+ end
+ return extralibpaths
+end
+
+local function getluapaths()
+ local luapath = package.path or ""
+ if oldluapath ~= luapath then
+ luapaths = file.splitpath(luapath,";")
+ oldluapath = luapath
+ nofpathlua = -1
+ end
+ if helpers.trace and #luapaths ~= nofpathlua then
+ nofpathlua = listpaths("builtin lua",luapaths)
+ end
+ return luapaths
+end
+
+local function getlibpaths()
+ local libpath = package.cpath or ""
+ if oldlibpath ~= libpath then
+ libpaths = file.splitpath(libpath,";")
+ oldlibpath = libpath
+ nofpathlib = -1
+ end
+ if helpers.trace and #libpaths ~= nofpathlib then
+ nofpathlib = listpaths("builtin lib",libpaths)
+ end
+ return libpaths
+end
+
+package.luapaths = getluapaths
+package.libpaths = getlibpaths
+package.extraluapaths = getextraluapaths
+package.extralibpaths = getextralibpaths
+
+local hashes = {
+ lua = { },
+ lib = { },
+}
+
+local function registerpath(tag,what,target,...)
+ local pathlist = { ... }
+ local cleanpath = helpers.cleanpath
+ local trace = helpers.trace
+ local report = helpers.report
+ local hash = hashes[what]
+ --
+ local function add(path)
+ local path = cleanpath(path)
+ if not hash[path] then
+ target[#target+1] = path
+ hash[path] = true
+ if trace then
+ report("registered %s path %s: %s",tag,#target,path)
+ end
+ else
+ if trace then
+ report("duplicate %s path: %s",tag,path)
+ end
+ end
+ end
+ --
+ for p=1,#pathlist do
+ local path = pathlist[p]
+ if type(path) == "table" then
+ for i=1,#path do
+ add(path[i])
+ end
+ else
+ add(path)
+ end
+ end
+    return target -- the updated list ('paths' does not exist in this scope)
+end
+
+helpers.registerpath = registerpath
+
+function package.extraluapath(...)
+ registerpath("extra lua","lua",extraluapaths,...)
+end
+
+function package.extralibpath(...)
+ registerpath("extra lib","lib",extralibpaths,...)
+end
+
+-- lib loader (used elsewhere)
+
+local function loadedaslib(resolved,rawname) -- todo: strip all before first -
+ local base = gsub(rawname,"%.","_")
+ -- so, we can do a require("foo/bar") and initialize bar
+ -- local base = gsub(file.basename(rawname),"%.","_")
+ local init = "luaopen_" .. gsub(base,"%.","_")
+ if helpers.trace then
+ helpers.report("calling loadlib with '%s' with init '%s'",resolved,init)
+ end
+ return package.loadlib(resolved,init)
+end
+
+helpers.loadedaslib = loadedaslib
+
+-- wrapped and new loaders
+
+local function loadedbypath(name,rawname,paths,islib,what)
+ local trace = helpers.trace
+ for p=1,#paths do
+ local path = paths[p]
+ local resolved = filejoin(path,name)
+ if trace then
+ helpers.report("%s path, identifying '%s' on '%s'",what,name,path)
+ end
+ if isreadable(resolved) then
+ if trace then
+ helpers.report("%s path, '%s' found on '%s'",what,name,resolved)
+ end
+ if islib then
+ return loadedaslib(resolved,rawname)
+ else
+ return loadfile(resolved)
+ end
+ end
+ end
+end
+
+helpers.loadedbypath = loadedbypath
+
+methods["already loaded"] = function(name)
+ return package.loaded[name]
+end
+
+methods["preload table"] = function(name)
+ return builtin["preload table"](name)
+end
+
+methods["lua extra list"] = function(name)
+ return loadedbypath(addsuffix(lualibfile(name),"lua" ),name,getextraluapaths(),false,"lua")
+end
+
+methods["lib extra list"] = function(name)
+ return loadedbypath(addsuffix(lualibfile(name),os.libsuffix),name,getextralibpaths(),true, "lib")
+end
+
+methods["path specification"] = function(name)
+ getluapaths() -- triggers list building and tracing
+ return builtin["path specification"](name)
+end
+
+methods["cpath specification"] = function(name)
+ getlibpaths() -- triggers list building and tracing
+ return builtin["cpath specification"](name)
+end
+
+methods["all in one fallback"] = function(name)
+ return builtin["all in one fallback"](name)
+end
+
+methods["not loaded"] = function(name)
+ if helpers.trace then
+ helpers.report("unable to locate '%s'",name or "?")
+ end
+ return nil
+end
+
+local level = 0
+local used = { }
+
+helpers.traceused = false
+
+function helpers.loaded(name)
+ local sequence = helpers.sequence
+ level = level + 1
+ for i=1,#sequence do
+ local method = sequence[i]
+ if helpers.trace then
+ helpers.report("%s, level '%s', method '%s', name '%s'","locating",level,method,name)
+ end
+ local result, rest = methods[method](name)
+ if type(result) == "function" then
+ if helpers.trace then
+ helpers.report("%s, level '%s', method '%s', name '%s'","found",level,method,name)
+ end
+ if helpers.traceused then
+ used[#used+1] = { level = level, name = name }
+ end
+ level = level - 1
+ return result, rest
+ end
+ end
+ -- safeguard, we never come here
+ level = level - 1
+ return nil
+end
+
+function helpers.showused()
+ local n = #used
+ if n > 0 then
+ helpers.report("%s libraries loaded:",n)
+ helpers.report()
+ for i=1,n do
+ local u = used[i]
+ helpers.report("%i %a",u.level,u.name)
+ end
+ helpers.report()
+ end
+end
+
+function helpers.unload(name)
+ if helpers.trace then
+ if package.loaded[name] then
+ helpers.report("unloading, name '%s', %s",name,"done")
+ else
+ helpers.report("unloading, name '%s', %s",name,"not loaded")
+ end
+ end
+ package.loaded[name] = nil
+end
+
+-- overloading require does not work out well so we push our own loader in
+-- front of the standard searchers ..
+
+table.insert(searchers,1,helpers.loaded)
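The searcher sequence defined above is easiest to follow with tracing enabled: helpers.loaded walks helpers.sequence, and the "lua extra list" / "lib extra list" steps consult the paths registered through package.extraluapath and package.extralibpath before the stock path/cpath searchers run. A minimal sketch, assuming l-package.lua is loaded; the directories and the module name "my.module" are purely hypothetical:

    package.helpers.trace     = true          -- report every lookup step
    package.helpers.traceused = true          -- remember what gets loaded

    package.extraluapath("t:/lua/modules")    -- hypothetical extra lua path
    package.extralibpath("t:/lua/libs")       -- hypothetical extra lib path

    -- require now goes through helpers.loaded first (inserted at position 1);
    -- pcall keeps the sketch harmless when the module does not exist.
    local ok, mod = pcall(require,"my.module")
    print(ok, mod)

    package.helpers.showused()                -- lists what was loaded so far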
diff --git a/tex/context/base/l-pdfview.lua b/tex/context/base/l-pdfview.lua
index 80033900f..643d538e7 100644
--- a/tex/context/base/l-pdfview.lua
+++ b/tex/context/base/l-pdfview.lua
@@ -1,143 +1,143 @@
-if not modules then modules = { } end modules ['l-pdfview'] = {
- version = 1.001,
- comment = "companion to mtx-context.lua",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- Todo: figure out pdfopen/pdfclose on linux. Calling e.g. okular directly
--- doesn't work in linux when issued from scite as it blocks the editor (no
--- & possible or so). Unfortunately pdfopen keeps changing with not keeping
--- downward compatibility (command line arguments and so).
-
--- no 2>&1 any more, needs checking on windows
-
-local format, concat = string.format, table.concat
-
-pdfview = pdfview or { }
-
-local opencalls, closecalls, allcalls, runner
-
-if os.type == "windows" then
-
- opencalls = {
- ['default'] = "pdfopen --rxi --file",
- ['acrobat'] = "pdfopen --rxi --file",
- ['fullacrobat'] = "pdfopen --axi --file",
- ['okular'] = 'start "test" "c:/data/system/kde/bin/okular.exe" --unique', -- todo!
- ['sumatra'] = 'start "test" "c:/data/system/sumatrapdf/sumatrapdf.exe" -reuse-instance',
- ['okular'] = 'start "test" "okular.exe" --unique',
- ['sumatra'] = 'start "test" "sumatrapdf.exe" -reuse-instance -bg-color 0xCCCCCC',
- }
- closecalls= {
- ['default'] = "pdfclose --file",
- ['acrobat'] = "pdfclose --file",
- ['okular'] = false,
- ['sumatra'] = false,
- }
- allcalls = {
- ['default'] = "pdfclose --all",
- ['acrobat'] = "pdfclose --all",
- ['okular'] = false,
- ['sumatra'] = false,
- }
-
- pdfview.method = "acrobat"
-
- runner = function(...)
--- os.spawn(...)
- os.execute(...)
- end
-
-else
-
- opencalls = {
- ['default'] = "pdfopen", -- we could pass the default here
- ['okular'] = 'okular --unique'
- }
- closecalls= {
- ['default'] = "pdfclose --file",
- ['okular'] = false,
- }
- allcalls = {
- ['default'] = "pdfclose --all",
- ['okular'] = false,
- }
-
- pdfview.method = "okular"
-
- runner = function(...)
- os.spawn(...)
- end
-
-end
-
-directives.register("pdfview.method", function(v)
- pdfview.method = (opencalls[v] and v) or 'default'
-end)
-
-function pdfview.setmethod(method)
- if method and opencalls[method] then
- pdfview.method = method
- end
-end
-
-function pdfview.methods()
- return concat(table.sortedkeys(opencalls), " ")
-end
-
-function pdfview.status()
- return format("pdfview methods: %s, current method: %s (directives_pdfview_method)",pdfview.methods(),tostring(pdfview.method))
-end
-
--- local openedfiles = { }
-
-local function fullname(name)
- return file.addsuffix(name,"pdf")
-end
-
-function pdfview.open(...)
- local opencall = opencalls[pdfview.method]
- if opencall then
- local t = { ... }
- for i=1,#t do
- local name = fullname(t[i])
- if io.exists(name) then
- runner(format('%s "%s"', opencall, name))
- -- openedfiles[name] = true
- end
- end
- end
-end
-
-function pdfview.close(...)
- local closecall = closecalls[pdfview.method]
- if closecall then
- local t = { ... }
- for i=1,#t do
- local name = fullname(t[i])
- -- if openedfiles[name] then
- runner(format('%s "%s"', closecall, name))
- -- openedfiles[name] = nil
- -- else
- -- pdfview.closeall()
- -- break
- -- end
- end
- end
-end
-
-function pdfview.closeall()
- local allcall = allcalls[pdfview.method]
- if allcall then
- runner(format('%s', allcall))
- end
- -- openedfiles = { }
-end
-
---~ pdfview.open("t:/document/show-exa.pdf")
---~ os.sleep(3)
---~ pdfview.close("t:/document/show-exa.pdf")
-
-return pdfview
+if not modules then modules = { } end modules ['l-pdfview'] = {
+ version = 1.001,
+ comment = "companion to mtx-context.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- Todo: figure out pdfopen/pdfclose on linux. Calling e.g. okular directly
+-- doesn't work on linux when issued from scite as it blocks the editor
+-- (backgrounding with & is not possible there). Unfortunately pdfopen keeps
+-- changing and doesn't keep backward compatibility (command line arguments and such).
+
+-- no 2>&1 any more, needs checking on windows
+
+local format, concat = string.format, table.concat
+
+pdfview = pdfview or { }
+
+local opencalls, closecalls, allcalls, runner
+
+if os.type == "windows" then
+
+ opencalls = {
+ ['default'] = "pdfopen --rxi --file",
+ ['acrobat'] = "pdfopen --rxi --file",
+ ['fullacrobat'] = "pdfopen --axi --file",
+     -- ['okular']      = 'start "test" "c:/data/system/kde/bin/okular.exe" --unique', -- todo! superseded by the generic entry below
+     -- ['sumatra']     = 'start "test" "c:/data/system/sumatrapdf/sumatrapdf.exe" -reuse-instance', -- superseded by the generic entry below
+ ['okular'] = 'start "test" "okular.exe" --unique',
+ ['sumatra'] = 'start "test" "sumatrapdf.exe" -reuse-instance -bg-color 0xCCCCCC',
+ }
+    closecalls = {
+ ['default'] = "pdfclose --file",
+ ['acrobat'] = "pdfclose --file",
+ ['okular'] = false,
+ ['sumatra'] = false,
+ }
+ allcalls = {
+ ['default'] = "pdfclose --all",
+ ['acrobat'] = "pdfclose --all",
+ ['okular'] = false,
+ ['sumatra'] = false,
+ }
+
+ pdfview.method = "acrobat"
+
+ runner = function(...)
+-- os.spawn(...)
+ os.execute(...)
+ end
+
+else
+
+ opencalls = {
+ ['default'] = "pdfopen", -- we could pass the default here
+ ['okular'] = 'okular --unique'
+ }
+    closecalls = {
+ ['default'] = "pdfclose --file",
+ ['okular'] = false,
+ }
+ allcalls = {
+ ['default'] = "pdfclose --all",
+ ['okular'] = false,
+ }
+
+ pdfview.method = "okular"
+
+ runner = function(...)
+ os.spawn(...)
+ end
+
+end
+
+directives.register("pdfview.method", function(v)
+ pdfview.method = (opencalls[v] and v) or 'default'
+end)
+
+function pdfview.setmethod(method)
+ if method and opencalls[method] then
+ pdfview.method = method
+ end
+end
+
+function pdfview.methods()
+ return concat(table.sortedkeys(opencalls), " ")
+end
+
+function pdfview.status()
+ return format("pdfview methods: %s, current method: %s (directives_pdfview_method)",pdfview.methods(),tostring(pdfview.method))
+end
+
+-- local openedfiles = { }
+
+local function fullname(name)
+ return file.addsuffix(name,"pdf")
+end
+
+function pdfview.open(...)
+ local opencall = opencalls[pdfview.method]
+ if opencall then
+ local t = { ... }
+ for i=1,#t do
+ local name = fullname(t[i])
+ if io.exists(name) then
+ runner(format('%s "%s"', opencall, name))
+ -- openedfiles[name] = true
+ end
+ end
+ end
+end
+
+function pdfview.close(...)
+ local closecall = closecalls[pdfview.method]
+ if closecall then
+ local t = { ... }
+ for i=1,#t do
+ local name = fullname(t[i])
+ -- if openedfiles[name] then
+ runner(format('%s "%s"', closecall, name))
+ -- openedfiles[name] = nil
+ -- else
+ -- pdfview.closeall()
+ -- break
+ -- end
+ end
+ end
+end
+
+function pdfview.closeall()
+ local allcall = allcalls[pdfview.method]
+ if allcall then
+ runner(format('%s', allcall))
+ end
+ -- openedfiles = { }
+end
+
+--~ pdfview.open("t:/document/show-exa.pdf")
+--~ os.sleep(3)
+--~ pdfview.close("t:/document/show-exa.pdf")
+
+return pdfview
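pdfview.setmethod silently ignores names that have no entry in opencalls, and pdfview.open only acts on files that exist (after adding the pdf suffix). A minimal usage sketch, assuming the companion l-table/l-io/l-os helpers are loaded, the chosen viewer is installed (the "sumatra" method only exists in the windows table), and the document name is made up:

    pdfview.setmethod("sumatra")   -- unknown names leave the current method untouched
    print(pdfview.status())        -- available methods plus the current one

    pdfview.open("mydocument")     -- "pdf" suffix is added; nothing happens if the file is absent
    os.sleep(3)                    -- socket based sleep from l-os
    pdfview.close("mydocument")    -- no-op for methods whose closecall is false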
diff --git a/tex/context/base/l-set.lua b/tex/context/base/l-set.lua
index 2370f0139..dfaf89284 100644
--- a/tex/context/base/l-set.lua
+++ b/tex/context/base/l-set.lua
@@ -1,87 +1,87 @@
-if not modules then modules = { } end modules ['l-set'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This will become obsolete when we have the bitset library embedded.
-
-set = set or { }
-
-local nums = { }
-local tabs = { }
-local concat = table.concat
-local next, type = next, type
-
-set.create = table.tohash
-
-function set.tonumber(t)
- if next(t) then
- local s = ""
- -- we could save mem by sorting, but it slows down
- for k, v in next, t do
- if v then
- -- why bother about the leading space
- s = s .. " " .. k
- end
- end
- local n = nums[s]
- if not n then
- n = #tabs + 1
- tabs[n] = t
- nums[s] = n
- end
- return n
- else
- return 0
- end
-end
-
-function set.totable(n)
- if n == 0 then
- return { }
- else
- return tabs[n] or { }
- end
-end
-
-function set.tolist(n)
- if n == 0 or not tabs[n] then
- return ""
- else
- local t, n = { }, 0
- for k, v in next, tabs[n] do
- if v then
- n = n + 1
- t[n] = k
- end
- end
- return concat(t," ")
- end
-end
-
-function set.contains(n,s)
- if type(n) == "table" then
- return n[s]
- elseif n == 0 then
- return false
- else
- local t = tabs[n]
- return t and t[s]
- end
-end
-
---~ local c = set.create{'aap','noot','mies'}
---~ local s = set.tonumber(c)
---~ local t = set.totable(s)
---~ print(t['aap'])
---~ local c = set.create{'zus','wim','jet'}
---~ local s = set.tonumber(c)
---~ local t = set.totable(s)
---~ print(t['aap'])
---~ print(t['jet'])
---~ print(set.contains(t,'jet'))
---~ print(set.contains(t,'aap'))
-
+if not modules then modules = { } end modules ['l-set'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This will become obsolete when we have the bitset library embedded.
+
+set = set or { }
+
+local nums = { }
+local tabs = { }
+local concat = table.concat
+local next, type = next, type
+
+set.create = table.tohash
+
+function set.tonumber(t)
+ if next(t) then
+ local s = ""
+ -- we could save mem by sorting, but it slows down
+ for k, v in next, t do
+ if v then
+ -- why bother about the leading space
+ s = s .. " " .. k
+ end
+ end
+ local n = nums[s]
+ if not n then
+ n = #tabs + 1
+ tabs[n] = t
+ nums[s] = n
+ end
+ return n
+ else
+ return 0
+ end
+end
+
+function set.totable(n)
+ if n == 0 then
+ return { }
+ else
+ return tabs[n] or { }
+ end
+end
+
+function set.tolist(n)
+ if n == 0 or not tabs[n] then
+ return ""
+ else
+        local t, i = { }, 0 -- use 'i' so we don't shadow the parameter 'n' (tabs[n] is used below)
+        for k, v in next, tabs[n] do
+            if v then
+                i = i + 1
+                t[i] = k
+ end
+ end
+ return concat(t," ")
+ end
+end
+
+function set.contains(n,s)
+ if type(n) == "table" then
+ return n[s]
+ elseif n == 0 then
+ return false
+ else
+ local t = tabs[n]
+ return t and t[s]
+ end
+end
+
+--~ local c = set.create{'aap','noot','mies'}
+--~ local s = set.tonumber(c)
+--~ local t = set.totable(s)
+--~ print(t['aap'])
+--~ local c = set.create{'zus','wim','jet'}
+--~ local s = set.tonumber(c)
+--~ local t = set.totable(s)
+--~ print(t['aap'])
+--~ print(t['jet'])
+--~ print(set.contains(t,'jet'))
+--~ print(set.contains(t,'aap'))
+
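set.tonumber registers a hash-style set once (keyed on its concatenated members) and returns a small number; the other functions resolve that number through the internal tabs registry. A short sketch, assuming the companion l-table helpers are loaded (set.create is an alias for table.tohash):

    local c = set.create { "aap", "noot", "mies" }  -- { aap=true, noot=true, mies=true }
    local n = set.tonumber(c)                       -- registers the set, returns its index

    print(set.contains(n,"noot"))                   -- true, resolved via the registry
    print(set.contains(n,"wim"))                    -- nil (not a member)
    print(set.tolist(n))                            -- "aap noot mies" (order not guaranteed)
    print(set.totable(n).mies)                      -- true: the original hash table is returned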
diff --git a/tex/context/base/l-string.lua b/tex/context/base/l-string.lua
index 77c076cc5..c87c57521 100644
--- a/tex/context/base/l-string.lua
+++ b/tex/context/base/l-string.lua
@@ -1,205 +1,205 @@
-if not modules then modules = { } end modules ['l-string'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local string = string
-local sub, gmatch, format, char, byte, rep, lower = string.sub, string.gmatch, string.format, string.char, string.byte, string.rep, string.lower
-local lpegmatch, patterns = lpeg.match, lpeg.patterns
-local P, S, C, Ct, Cc, Cs = lpeg.P, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cc, lpeg.Cs
-
--- Some functions are already defined in l-lpeg and maybe some from here will
--- move there (unless we also expose caches).
-
--- if not string.split then
---
--- function string.split(str,pattern)
--- local t = { }
--- if #str > 0 then
--- local n = 1
--- for s in gmatch(str..pattern,"(.-)"..pattern) do
--- t[n] = s
--- n = n + 1
--- end
--- end
--- return t
--- end
---
--- end
-
--- function string.unquoted(str)
--- return (gsub(str,"^([\"\'])(.*)%1$","%2")) -- interesting pattern
--- end
-
-local unquoted = patterns.squote * C(patterns.nosquote) * patterns.squote
- + patterns.dquote * C(patterns.nodquote) * patterns.dquote
-
-function string.unquoted(str)
- return lpegmatch(unquoted,str) or str
-end
-
--- print(string.unquoted("test"))
--- print(string.unquoted([["t\"est"]]))
--- print(string.unquoted([["t\"est"x]]))
--- print(string.unquoted("\'test\'"))
--- print(string.unquoted('"test"'))
--- print(string.unquoted('"test"'))
-
-function string.quoted(str)
- return format("%q",str) -- always double quote
-end
-
-function string.count(str,pattern) -- variant 3
- local n = 0
- for _ in gmatch(str,pattern) do -- not for utf
- n = n + 1
- end
- return n
-end
-
-function string.limit(str,n,sentinel) -- not utf proof
- if #str > n then
- sentinel = sentinel or "..."
- return sub(str,1,(n-#sentinel)) .. sentinel
- else
- return str
- end
-end
-
-local stripper = patterns.stripper
-local collapser = patterns.collapser
-local longtostring = patterns.longtostring
-
-function string.strip(str)
- return lpegmatch(stripper,str) or ""
-end
-
-function string.collapsespaces(str)
- return lpegmatch(collapser,str) or ""
-end
-
-function string.longtostring(str)
- return lpegmatch(longtostring,str) or ""
-end
-
--- function string.is_empty(str)
--- return not find(str,"%S")
--- end
-
-local pattern = P(" ")^0 * P(-1)
-
-function string.is_empty(str)
- if str == "" then
- return true
- else
- return lpegmatch(pattern,str) and true or false
- end
-end
-
--- if not string.escapedpattern then
---
--- local patterns_escapes = {
--- ["%"] = "%%",
--- ["."] = "%.",
--- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
--- ["["] = "%[", ["]"] = "%]",
--- ["("] = "%(", [")"] = "%)",
--- -- ["{"] = "%{", ["}"] = "%}"
--- -- ["^"] = "%^", ["$"] = "%$",
--- }
---
--- local simple_escapes = {
--- ["-"] = "%-",
--- ["."] = "%.",
--- ["?"] = ".",
--- ["*"] = ".*",
--- }
---
--- function string.escapedpattern(str,simple)
--- return (gsub(str,".",simple and simple_escapes or patterns_escapes))
--- end
---
--- function string.topattern(str,lowercase,strict)
--- if str == "" then
--- return ".*"
--- else
--- str = gsub(str,".",simple_escapes)
--- if lowercase then
--- str = lower(str)
--- end
--- if strict then
--- return "^" .. str .. "$"
--- else
--- return str
--- end
--- end
--- end
---
--- end
-
---- needs checking
-
-local anything = patterns.anything
-local allescapes = Cc("%") * S(".-+%?()[]*") -- also {} and ^$ ?
-local someescapes = Cc("%") * S(".-+%()[]") -- also {} and ^$ ?
-local matchescapes = Cc(".") * S("*?") -- wildcard and single match
-
-local pattern_a = Cs ( ( allescapes + anything )^0 )
-local pattern_b = Cs ( ( someescapes + matchescapes + anything )^0 )
-local pattern_c = Cs ( Cc("^") * ( someescapes + matchescapes + anything )^0 * Cc("$") )
-
-function string.escapedpattern(str,simple)
- return lpegmatch(simple and pattern_b or pattern_a,str)
-end
-
-function string.topattern(str,lowercase,strict)
- if str=="" or type(str) ~= "string" then
- return ".*"
- elseif strict then
- str = lpegmatch(pattern_c,str)
- else
- str = lpegmatch(pattern_b,str)
- end
- if lowercase then
- return lower(str)
- else
- return str
- end
-end
-
--- print(string.escapedpattern("12+34*.tex",false))
--- print(string.escapedpattern("12+34*.tex",true))
--- print(string.topattern ("12+34*.tex",false,false))
--- print(string.topattern ("12+34*.tex",false,true))
-
-function string.valid(str,default)
- return (type(str) == "string" and str ~= "" and str) or default or nil
-end
-
--- handy fallback
-
-string.itself = function(s) return s end
-
--- also handy (see utf variant)
-
-local pattern = Ct(C(1)^0) -- string and not utf !
-
-function string.totable(str)
- return lpegmatch(pattern,str)
-end
-
--- handy from within tex:
-
-local replacer = lpeg.replacer("@","%%") -- Watch the escaped % in lpeg!
-
-function string.tformat(fmt,...)
- return format(lpegmatch(replacer,fmt),...)
-end
-
--- obsolete names:
-
-string.quote = string.quoted
-string.unquote = string.unquoted
+if not modules then modules = { } end modules ['l-string'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local string = string
+local sub, gmatch, format, char, byte, rep, lower = string.sub, string.gmatch, string.format, string.char, string.byte, string.rep, string.lower
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
+local P, S, C, Ct, Cc, Cs = lpeg.P, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cc, lpeg.Cs
+
+-- Some functions are already defined in l-lpeg and maybe some from here will
+-- move there (unless we also expose caches).
+
+-- if not string.split then
+--
+-- function string.split(str,pattern)
+-- local t = { }
+-- if #str > 0 then
+-- local n = 1
+-- for s in gmatch(str..pattern,"(.-)"..pattern) do
+-- t[n] = s
+-- n = n + 1
+-- end
+-- end
+-- return t
+-- end
+--
+-- end
+
+-- function string.unquoted(str)
+-- return (gsub(str,"^([\"\'])(.*)%1$","%2")) -- interesting pattern
+-- end
+
+local unquoted = patterns.squote * C(patterns.nosquote) * patterns.squote
+ + patterns.dquote * C(patterns.nodquote) * patterns.dquote
+
+function string.unquoted(str)
+ return lpegmatch(unquoted,str) or str
+end
+
+-- print(string.unquoted("test"))
+-- print(string.unquoted([["t\"est"]]))
+-- print(string.unquoted([["t\"est"x]]))
+-- print(string.unquoted("\'test\'"))
+-- print(string.unquoted('"test"'))
+-- print(string.unquoted('"test"'))
+
+function string.quoted(str)
+ return format("%q",str) -- always double quote
+end
+
+function string.count(str,pattern) -- variant 3
+ local n = 0
+ for _ in gmatch(str,pattern) do -- not for utf
+ n = n + 1
+ end
+ return n
+end
+
+function string.limit(str,n,sentinel) -- not utf proof
+ if #str > n then
+ sentinel = sentinel or "..."
+ return sub(str,1,(n-#sentinel)) .. sentinel
+ else
+ return str
+ end
+end
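+
+-- a rough sketch of the expected results (count uses lua patterns, limit counts bytes):
+--
+-- string.count("hello world","o")  --> 2
+-- string.limit("abcdefgh",5)       --> "ab..."  (the sentinel counts toward n)
+-- string.limit("abc",5)            --> "abc"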
+
+local stripper = patterns.stripper
+local collapser = patterns.collapser
+local longtostring = patterns.longtostring
+
+function string.strip(str)
+ return lpegmatch(stripper,str) or ""
+end
+
+function string.collapsespaces(str)
+ return lpegmatch(collapser,str) or ""
+end
+
+function string.longtostring(str)
+ return lpegmatch(longtostring,str) or ""
+end
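+
+-- for instance (the exact patterns live in l-lpeg, so take this as a sketch):
+--
+-- string.strip("  x y  ")         --> "x y"
+-- string.collapsespaces("a   b")  --> "a b"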
+
+-- function string.is_empty(str)
+-- return not find(str,"%S")
+-- end
+
+local pattern = P(" ")^0 * P(-1)
+
+function string.is_empty(str)
+ if str == "" then
+ return true
+ else
+ return lpegmatch(pattern,str) and true or false
+ end
+end
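+
+-- so, roughly:
+--
+-- string.is_empty("")     --> true
+-- string.is_empty("   ")  --> true
+-- string.is_empty(" x ")  --> false
+--
+-- beware: only real spaces are checked, so a string with just a tab is not 'empty' here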
+
+-- if not string.escapedpattern then
+--
+-- local patterns_escapes = {
+-- ["%"] = "%%",
+-- ["."] = "%.",
+-- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
+-- ["["] = "%[", ["]"] = "%]",
+-- ["("] = "%(", [")"] = "%)",
+-- -- ["{"] = "%{", ["}"] = "%}"
+-- -- ["^"] = "%^", ["$"] = "%$",
+-- }
+--
+-- local simple_escapes = {
+-- ["-"] = "%-",
+-- ["."] = "%.",
+-- ["?"] = ".",
+-- ["*"] = ".*",
+-- }
+--
+-- function string.escapedpattern(str,simple)
+-- return (gsub(str,".",simple and simple_escapes or patterns_escapes))
+-- end
+--
+-- function string.topattern(str,lowercase,strict)
+-- if str == "" then
+-- return ".*"
+-- else
+-- str = gsub(str,".",simple_escapes)
+-- if lowercase then
+-- str = lower(str)
+-- end
+-- if strict then
+-- return "^" .. str .. "$"
+-- else
+-- return str
+-- end
+-- end
+-- end
+--
+-- end
+
+--- needs checking
+
+local anything = patterns.anything
+local allescapes = Cc("%") * S(".-+%?()[]*") -- also {} and ^$ ?
+local someescapes = Cc("%") * S(".-+%()[]") -- also {} and ^$ ?
+local matchescapes = Cc(".") * S("*?") -- wildcard and single match
+
+local pattern_a = Cs ( ( allescapes + anything )^0 )
+local pattern_b = Cs ( ( someescapes + matchescapes + anything )^0 )
+local pattern_c = Cs ( Cc("^") * ( someescapes + matchescapes + anything )^0 * Cc("$") )
+
+function string.escapedpattern(str,simple)
+ return lpegmatch(simple and pattern_b or pattern_a,str)
+end
+
+function string.topattern(str,lowercase,strict)
+ if str=="" or type(str) ~= "string" then
+ return ".*"
+ elseif strict then
+ str = lpegmatch(pattern_c,str)
+ else
+ str = lpegmatch(pattern_b,str)
+ end
+ if lowercase then
+ return lower(str)
+ else
+ return str
+ end
+end
+
+-- print(string.escapedpattern("12+34*.tex",false))
+-- print(string.escapedpattern("12+34*.tex",true))
+-- print(string.topattern ("12+34*.tex",false,false))
+-- print(string.topattern ("12+34*.tex",false,true))
+
+function string.valid(str,default)
+ return (type(str) == "string" and str ~= "" and str) or default or nil
+end
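+
+-- in other words:
+--
+-- string.valid("str")          --> "str"
+-- string.valid("","default")   --> "default"
+-- string.valid(nil,"default")  --> "default"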
+
+-- handy fallback
+
+string.itself = function(s) return s end
+
+-- also handy (see utf variant)
+
+local pattern = Ct(C(1)^0) -- string and not utf !
+
+function string.totable(str)
+ return lpegmatch(pattern,str)
+end
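+
+-- e.g. string.totable("abc") --> { "a", "b", "c" }  (per byte, so not for utf strings)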
+
+-- handy from within tex:
+
+local replacer = lpeg.replacer("@","%%") -- Watch the escaped % in lpeg!
+
+function string.tformat(fmt,...)
+ return format(lpegmatch(replacer,fmt),...)
+end
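+
+-- here @ is turned into % before formatting, so (roughly):
+--
+-- string.tformat("@s and @s","one","two") --> "one and two"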
+
+-- obsolete names:
+
+string.quote = string.quoted
+string.unquote = string.unquoted
diff --git a/tex/context/base/l-table.lua b/tex/context/base/l-table.lua
index 9a1b97fff..54c2b86e3 100644
--- a/tex/context/base/l-table.lua
+++ b/tex/context/base/l-table.lua
@@ -1,1362 +1,1362 @@
-if not modules then modules = { } end modules ['l-table'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local type, next, tostring, tonumber, ipairs, select = type, next, tostring, tonumber, ipairs, select
-local table, string = table, string
-local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
-local format, lower, dump = string.format, string.lower, string.dump
-local getmetatable, setmetatable = getmetatable, setmetatable
-local getinfo = debug.getinfo
-local lpegmatch, patterns = lpeg.match, lpeg.patterns
-local floor = math.floor
-
--- extra functions, some might go (when not used)
-
-local stripper = patterns.stripper
-
-function table.strip(tab)
- local lst, l = { }, 0
- for i=1,#tab do
- local s = lpegmatch(stripper,tab[i]) or ""
- if s == "" then
- -- skip this one
- else
- l = l + 1
- lst[l] = s
- end
- end
- return lst
-end
-
-function table.keys(t)
- if t then
- local keys, k = { }, 0
- for key, _ in next, t do
- k = k + 1
- keys[k] = key
- end
- return keys
- else
- return { }
- end
-end
-
-local function compare(a,b)
- local ta, tb = type(a), type(b) -- needed, else 11 < 2
- if ta == tb then
- return a < b
- else
- return tostring(a) < tostring(b)
- end
-end
-
-local function sortedkeys(tab)
- if tab then
- local srt, category, s = { }, 0, 0 -- 0=unknown 1=string, 2=number 3=mixed
- for key,_ in next, tab do
- s = s + 1
- srt[s] = key
- if category == 3 then
- -- no further check
- else
- local tkey = type(key)
- if tkey == "string" then
- category = (category == 2 and 3) or 1
- elseif tkey == "number" then
- category = (category == 1 and 3) or 2
- else
- category = 3
- end
- end
- end
- if category == 0 or category == 3 then
- sort(srt,compare)
- else
- sort(srt)
- end
- return srt
- else
- return { }
- end
-end
-
-local function sortedhashkeys(tab,cmp) -- fast one
- if tab then
- local srt, s = { }, 0
- for key,_ in next, tab do
- if key then
- s= s + 1
- srt[s] = key
- end
- end
- sort(srt,cmp)
- return srt
- else
- return { }
- end
-end
-
-function table.allkeys(t)
- local keys = { }
- for k, v in next, t do
- for k, v in next, v do
- keys[k] = true
- end
- end
- return sortedkeys(keys)
-end
-
-table.sortedkeys = sortedkeys
-table.sortedhashkeys = sortedhashkeys
-
-local function nothing() end
-
-local function sortedhash(t,cmp)
- if t then
- local s
- if cmp then
- -- it would be nice if the sort function would accept a third argument (or nicer, an optional first)
- s = sortedhashkeys(t,function(a,b) return cmp(t,a,b) end)
- else
- s = sortedkeys(t) -- the robust one
- end
- local n = 0
- local function kv(s)
- n = n + 1
- local k = s[n]
- return k, t[k]
- end
- return kv, s
- else
- return nothing
- end
-end
-
-table.sortedhash = sortedhash
-table.sortedpairs = sortedhash -- obsolete
-
-function table.append(t,list)
- local n = #t
- for i=1,#list do
- n = n + 1
- t[n] = list[i]
- end
- return t
-end
-
-function table.prepend(t, list)
- local nl = #list
- local nt = nl + #t
- for i=#t,1,-1 do
- t[nt] = t[i]
- nt = nt - 1
- end
- for i=1,#list do
- t[i] = list[i]
- end
- return t
-end
-
--- function table.merge(t, ...) -- first one is target
--- t = t or { }
--- local lst = { ... }
--- for i=1,#lst do
--- for k, v in next, lst[i] do
--- t[k] = v
--- end
--- end
--- return t
--- end
-
-function table.merge(t, ...) -- first one is target
- t = t or { }
- for i=1,select("#",...) do
- for k, v in next, (select(i,...)) do
- t[k] = v
- end
- end
- return t
-end
-
--- function table.merged(...)
--- local tmp, lst = { }, { ... }
--- for i=1,#lst do
--- for k, v in next, lst[i] do
--- tmp[k] = v
--- end
--- end
--- return tmp
--- end
-
-function table.merged(...)
- local t = { }
- for i=1,select("#",...) do
- for k, v in next, (select(i,...)) do
- t[k] = v
- end
- end
- return t
-end
-
--- function table.imerge(t, ...)
--- local lst, nt = { ... }, #t
--- for i=1,#lst do
--- local nst = lst[i]
--- for j=1,#nst do
--- nt = nt + 1
--- t[nt] = nst[j]
--- end
--- end
--- return t
--- end
-
-function table.imerge(t, ...)
- local nt = #t
- for i=1,select("#",...) do
- local nst = select(i,...)
- for j=1,#nst do
- nt = nt + 1
- t[nt] = nst[j]
- end
- end
- return t
-end
-
--- function table.imerged(...)
--- local tmp, ntmp, lst = { }, 0, {...}
--- for i=1,#lst do
--- local nst = lst[i]
--- for j=1,#nst do
--- ntmp = ntmp + 1
--- tmp[ntmp] = nst[j]
--- end
--- end
--- return tmp
--- end
-
-function table.imerged(...)
- local tmp, ntmp = { }, 0
- for i=1,select("#",...) do
- local nst = select(i,...)
- for j=1,#nst do
- ntmp = ntmp + 1
- tmp[ntmp] = nst[j]
- end
- end
- return tmp
-end
-
-local function fastcopy(old,metatabletoo) -- fast one
- if old then
- local new = { }
- for k, v in next, old do
- if type(v) == "table" then
- new[k] = fastcopy(v,metatabletoo) -- was just table.copy
- else
- new[k] = v
- end
- end
- if metatabletoo then
- -- optional second arg
- local mt = getmetatable(old)
- if mt then
- setmetatable(new,mt)
- end
- end
- return new
- else
- return { }
- end
-end
-
--- todo : copy without metatable
-
-local function copy(t, tables) -- taken from lua wiki, slightly adapted
- tables = tables or { }
- local tcopy = {}
- if not tables[t] then
- tables[t] = tcopy
- end
- for i,v in next, t do -- brrr, what happens with sparse indexed
- if type(i) == "table" then
- if tables[i] then
- i = tables[i]
- else
- i = copy(i, tables)
- end
- end
- if type(v) ~= "table" then
- tcopy[i] = v
- elseif tables[v] then
- tcopy[i] = tables[v]
- else
- tcopy[i] = copy(v, tables)
- end
- end
- local mt = getmetatable(t)
- if mt then
- setmetatable(tcopy,mt)
- end
- return tcopy
-end
-
-table.fastcopy = fastcopy
-table.copy = copy
-
-function table.derive(parent) -- for the moment not public
- local child = { }
- if parent then
- setmetatable(child,{ __index = parent })
- end
- return child
-end
-
-function table.tohash(t,value)
- local h = { }
- if t then
- if value == nil then value = true end
- for _, v in next, t do -- no ipairs here
- h[v] = value
- end
- end
- return h
-end
-
-function table.fromhash(t)
- local hsh, h = { }, 0
- for k, v in next, t do -- no ipairs here
- if v then
- h = h + 1
- hsh[h] = k
- end
- end
- return hsh
-end
-
-local noquotes, hexify, handle, reduce, compact, inline, functions
-
-local reserved = table.tohash { -- intercept a language inconvenience: no reserved words as key
- 'and', 'break', 'do', 'else', 'elseif', 'end', 'false', 'for', 'function', 'if',
- 'in', 'local', 'nil', 'not', 'or', 'repeat', 'return', 'then', 'true', 'until', 'while',
-}
-
-local function simple_table(t)
- if #t > 0 then
- local n = 0
- for _,v in next, t do
- n = n + 1
- end
- if n == #t then
- local tt, nt = { }, 0
- for i=1,#t do
- local v = t[i]
- local tv = type(v)
- if tv == "number" then
- nt = nt + 1
- if hexify then
- tt[nt] = format("0x%04X",v)
- else
- tt[nt] = tostring(v) -- tostring not needed
- end
- elseif tv == "boolean" then
- nt = nt + 1
- tt[nt] = tostring(v)
- elseif tv == "string" then
- nt = nt + 1
- tt[nt] = format("%q",v)
- else
- tt = nil
- break
- end
- end
- return tt
- end
- end
- return nil
-end
-
--- Because this is a core function of mkiv I moved some function calls
--- inline.
---
--- twice as fast in a test:
---
--- local propername = lpeg.P(lpeg.R("AZ","az","__") * lpeg.R("09","AZ","az", "__")^0 * lpeg.P(-1) )
-
--- problem: there is no good number_to_string converter with the best resolution
-
--- probably using .. is faster than format
--- maybe split in a few cases (yes/no hexify)
-
--- todo: %g faster on numbers than %s
-
--- we can speed this up with repeaters and formatters (is indeed faster)
-
-local propername = patterns.propername -- was find(name,"^%a[%w%_]*$")
-
-local function dummy() end
-
-local function do_serialize(root,name,depth,level,indexed)
- if level > 0 then
- depth = depth .. " "
- if indexed then
- handle(format("%s{",depth))
- else
- local tn = type(name)
- if tn == "number" then
- if hexify then
- handle(format("%s[0x%04X]={",depth,name))
- else
- handle(format("%s[%s]={",depth,name))
- end
- elseif tn == "string" then
- if noquotes and not reserved[name] and lpegmatch(propername,name) then
- handle(format("%s%s={",depth,name))
- else
- handle(format("%s[%q]={",depth,name))
- end
- elseif tn == "boolean" then
- handle(format("%s[%s]={",depth,tostring(name)))
- else
- handle(format("%s{",depth))
- end
- end
- end
- -- we could check for k (index) being number (cardinal)
- if root and next(root) then
- -- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
- -- if compact then
- -- -- NOT: for k=1,#root do (we need to quit at nil)
- -- for k,v in ipairs(root) do -- can we use next?
- -- if not first then first = k end
- -- last = last + 1
- -- end
- -- end
- local first, last = nil, 0
- if compact then
- last = #root
- for k=1,last do
- if root[k] == nil then
- last = k - 1
- break
- end
- end
- if last > 0 then
- first = 1
- end
- end
- local sk = sortedkeys(root)
- for i=1,#sk do
- local k = sk[i]
- local v = root[k]
- --~ if v == root then
- -- circular
- --~ else
- local t, tk = type(v), type(k)
- if compact and first and tk == "number" and k >= first and k <= last then
- if t == "number" then
- if hexify then
- handle(format("%s 0x%04X,",depth,v))
- else
- handle(format("%s %s,",depth,v)) -- %.99g
- end
- elseif t == "string" then
- if reduce and tonumber(v) then
- handle(format("%s %s,",depth,v))
- else
- handle(format("%s %q,",depth,v))
- end
- elseif t == "table" then
- if not next(v) then
- handle(format("%s {},",depth))
- elseif inline then -- and #t > 0
- local st = simple_table(v)
- if st then
- handle(format("%s { %s },",depth,concat(st,", ")))
- else
- do_serialize(v,k,depth,level+1,true)
- end
- else
- do_serialize(v,k,depth,level+1,true)
- end
- elseif t == "boolean" then
- handle(format("%s %s,",depth,tostring(v)))
- elseif t == "function" then
- if functions then
- handle(format('%s load(%q),',depth,dump(v)))
- else
- handle(format('%s "function",',depth))
- end
- else
- handle(format("%s %q,",depth,tostring(v)))
- end
- elseif k == "__p__" then -- parent
- if false then
- handle(format("%s __p__=nil,",depth))
- end
- elseif t == "number" then
- if tk == "number" then
- if hexify then
- handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
- else
- handle(format("%s [%s]=%s,",depth,k,v)) -- %.99g
- end
- elseif tk == "boolean" then
- if hexify then
- handle(format("%s [%s]=0x%04X,",depth,tostring(k),v))
- else
- handle(format("%s [%s]=%s,",depth,tostring(k),v)) -- %.99g
- end
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- if hexify then
- handle(format("%s %s=0x%04X,",depth,k,v))
- else
- handle(format("%s %s=%s,",depth,k,v)) -- %.99g
- end
- else
- if hexify then
- handle(format("%s [%q]=0x%04X,",depth,k,v))
- else
- handle(format("%s [%q]=%s,",depth,k,v)) -- %.99g
- end
- end
- elseif t == "string" then
- if reduce and tonumber(v) then
- if tk == "number" then
- if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,v))
- else
- handle(format("%s [%s]=%s,",depth,k,v))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]=%s,",depth,tostring(k),v))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%s,",depth,k,v))
- else
- handle(format("%s [%q]=%s,",depth,k,v))
- end
- else
- if tk == "number" then
- if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,v))
- else
- handle(format("%s [%s]=%q,",depth,k,v))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]=%q,",depth,tostring(k),v))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%q,",depth,k,v))
- else
- handle(format("%s [%q]=%q,",depth,k,v))
- end
- end
- elseif t == "table" then
- if not next(v) then
- if tk == "number" then
- if hexify then
- handle(format("%s [0x%04X]={},",depth,k))
- else
- handle(format("%s [%s]={},",depth,k))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]={},",depth,tostring(k)))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s={},",depth,k))
- else
- handle(format("%s [%q]={},",depth,k))
- end
- elseif inline then
- local st = simple_table(v)
- if st then
- if tk == "number" then
- if hexify then
- handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
- else
- handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", ")))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
- else
- handle(format("%s [%q]={ %s },",depth,k,concat(st,", ")))
- end
- else
- do_serialize(v,k,depth,level+1)
- end
- else
- do_serialize(v,k,depth,level+1)
- end
- elseif t == "boolean" then
- if tk == "number" then
- if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,tostring(v)))
- else
- handle(format("%s [%s]=%s,",depth,k,tostring(v)))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v)))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%s,",depth,k,tostring(v)))
- else
- handle(format("%s [%q]=%s,",depth,k,tostring(v)))
- end
- elseif t == "function" then
- if functions then
- local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
- -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
- if tk == "number" then
- if hexify then
- handle(format("%s [0x%04X]=load(%q),",depth,k,f))
- else
- handle(format("%s [%s]=load(%q),",depth,k,f))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]=load(%q),",depth,tostring(k),f))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=load(%q),",depth,k,f))
- else
- handle(format("%s [%q]=load(%q),",depth,k,f))
- end
- end
- else
- if tk == "number" then
- if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
- else
- handle(format("%s [%s]=%q,",depth,k,tostring(v)))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v)))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%q,",depth,k,tostring(v)))
- else
- handle(format("%s [%q]=%q,",depth,k,tostring(v)))
- end
- end
- --~ end
- end
- end
- if level > 0 then
- handle(format("%s},",depth))
- end
-end
-
--- replacing handle by a direct t[#t+1] = ... (plus test) is not much
--- faster (0.03 on 1.00 for zapfino.tma)
-
-local function serialize(_handle,root,name,specification) -- handle wins
- local tname = type(name)
- if type(specification) == "table" then
- noquotes = specification.noquotes
- hexify = specification.hexify
- handle = _handle or specification.handle or print
- reduce = specification.reduce or false
- functions = specification.functions
- compact = specification.compact
- inline = specification.inline and compact
- if functions == nil then
- functions = true
- end
- if compact == nil then
- compact = true
- end
- if inline == nil then
- inline = compact
- end
- else
- noquotes = false
- hexify = false
- handle = _handle or print
- reduce = false
- compact = true
- inline = true
- functions = true
- end
- if tname == "string" then
- if name == "return" then
- handle("return {")
- else
- handle(name .. "={")
- end
- elseif tname == "number" then
- if hexify then
- handle(format("[0x%04X]={",name))
- else
- handle("[" .. name .. "]={")
- end
- elseif tname == "boolean" then
- if name then
- handle("return {")
- else
- handle("{")
- end
- else
- handle("t={")
- end
- if root then
- -- The dummy access will initialize a table that has a delayed initialization
- -- using a metatable. (maybe explicitly test for metatable)
- if getmetatable(root) then -- todo: make this an option, maybe even per subtable
- local dummy = root._w_h_a_t_e_v_e_r_
- root._w_h_a_t_e_v_e_r_ = nil
- end
- -- Let's forget about empty tables.
- if next(root) then
- do_serialize(root,name,"",0)
- end
- end
- handle("}")
-end
-
--- -- This is some 20% faster than using format (because formatters are much faster), but
--- -- of course inlining the format using .. is again faster. Anyway, as we do some
--- -- pretty printing as well, there is not that much to gain unless we also make a 'fast'
--- -- ugly variant. But then we would have to move the formatter to l-string.
-
--- local formatters = string.formatters
-
--- local function do_serialize(root,name,level,indexed)
--- if level > 0 then
--- if indexed then
--- handle(formatters["%w{"](level))
--- else
--- local tn = type(name)
--- if tn == "number" then
--- if hexify then
--- handle(formatters["%w[%04H]={"](level,name))
--- else
--- handle(formatters["%w[%s]={"](level,name))
--- end
--- elseif tn == "string" then
--- if noquotes and not reserved[name] and lpegmatch(propername,name) then
--- handle(formatters["%w%s={"](level,name))
--- else
--- handle(formatters["%w[%q]={"](level,name))
--- end
--- elseif tn == "boolean" then
--- handle(formatters["%w[%S]={"](level,name))
--- else
--- handle(formatters["%w{"](level))
--- end
--- end
--- end
--- -- we could check for k (index) being number (cardinal)
--- if root and next(root) then
--- -- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
--- -- if compact then
--- -- -- NOT: for k=1,#root do (we need to quit at nil)
--- -- for k,v in ipairs(root) do -- can we use next?
--- -- if not first then first = k end
--- -- last = last + 1
--- -- end
--- -- end
--- local first, last = nil, 0
--- if compact then
--- last = #root
--- for k=1,last do
--- if root[k] == nil then
--- last = k - 1
--- break
--- end
--- end
--- if last > 0 then
--- first = 1
--- end
--- end
--- local sk = sortedkeys(root)
--- for i=1,#sk do
--- local k = sk[i]
--- local v = root[k]
--- --~ if v == root then
--- -- circular
--- --~ else
--- local t, tk = type(v), type(k)
--- if compact and first and tk == "number" and k >= first and k <= last then
--- if t == "number" then
--- if hexify then
--- handle(formatters["%w %04H,"](level,v))
--- else
--- handle(formatters["%w %s,"](level,v)) -- %.99g
--- end
--- elseif t == "string" then
--- if reduce and tonumber(v) then
--- handle(formatters["%w %s,"](level,v))
--- else
--- handle(formatters["%w %q,"](level,v))
--- end
--- elseif t == "table" then
--- if not next(v) then
--- handle(formatters["%w {},"](level))
--- elseif inline then -- and #t > 0
--- local st = simple_table(v)
--- if st then
--- handle(formatters["%w { %, t },"](level,st))
--- else
--- do_serialize(v,k,level+1,true)
--- end
--- else
--- do_serialize(v,k,level+1,true)
--- end
--- elseif t == "boolean" then
--- handle(formatters["%w %S,"](level,v))
--- elseif t == "function" then
--- if functions then
--- handle(formatters['%w load(%q),'](level,dump(v)))
--- else
--- handle(formatters['%w "function",'](level))
--- end
--- else
--- handle(formatters["%w %Q,"](level,v))
--- end
--- elseif k == "__p__" then -- parent
--- if false then
--- handle(formatters["%w __p__=nil,"](level))
--- end
--- elseif t == "number" then
--- if tk == "number" then
--- if hexify then
--- handle(formatters["%w [%04H]=%04H,"](level,k,v))
--- else
--- handle(formatters["%w [%s]=%s,"](level,k,v)) -- %.99g
--- end
--- elseif tk == "boolean" then
--- if hexify then
--- handle(formatters["%w [%S]=%04H,"](level,k,v))
--- else
--- handle(formatters["%w [%S]=%s,"](level,k,v)) -- %.99g
--- end
--- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
--- if hexify then
--- handle(formatters["%w %s=%04H,"](level,k,v))
--- else
--- handle(formatters["%w %s=%s,"](level,k,v)) -- %.99g
--- end
--- else
--- if hexify then
--- handle(formatters["%w [%q]=%04H,"](level,k,v))
--- else
--- handle(formatters["%w [%q]=%s,"](level,k,v)) -- %.99g
--- end
--- end
--- elseif t == "string" then
--- if reduce and tonumber(v) then
--- if tk == "number" then
--- if hexify then
--- handle(formatters["%w [%04H]=%s,"](level,k,v))
--- else
--- handle(formatters["%w [%s]=%s,"](level,k,v))
--- end
--- elseif tk == "boolean" then
--- handle(formatters["%w [%S]=%s,"](level,k,v))
--- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
--- handle(formatters["%w %s=%s,"](level,k,v))
--- else
--- handle(formatters["%w [%q]=%s,"](level,k,v))
--- end
--- else
--- if tk == "number" then
--- if hexify then
--- handle(formatters["%w [%04H]=%q,"](level,k,v))
--- else
--- handle(formatters["%w [%s]=%q,"](level,k,v))
--- end
--- elseif tk == "boolean" then
--- handle(formatters["%w [%S]=%q,"](level,k,v))
--- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
--- handle(formatters["%w %s=%q,"](level,k,v))
--- else
--- handle(formatters["%w [%q]=%q,"](level,k,v))
--- end
--- end
--- elseif t == "table" then
--- if not next(v) then
--- if tk == "number" then
--- if hexify then
--- handle(formatters["%w [%04H]={},"](level,k))
--- else
--- handle(formatters["%w [%s]={},"](level,k))
--- end
--- elseif tk == "boolean" then
--- handle(formatters["%w [%S]={},"](level,k))
--- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
--- handle(formatters["%w %s={},"](level,k))
--- else
--- handle(formatters["%w [%q]={},"](level,k))
--- end
--- elseif inline then
--- local st = simple_table(v)
--- if st then
--- if tk == "number" then
--- if hexify then
--- handle(formatters["%w [%04H]={ %, t },"](level,k,st))
--- else
--- handle(formatters["%w [%s]={ %, t },"](level,k,st))
--- end
--- elseif tk == "boolean" then
--- handle(formatters["%w [%S]={ %, t },"](level,k,st))
--- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
--- handle(formatters["%w %s={ %, t },"](level,k,st))
--- else
--- handle(formatters["%w [%q]={ %, t },"](level,k,st))
--- end
--- else
--- do_serialize(v,k,level+1)
--- end
--- else
--- do_serialize(v,k,level+1)
--- end
--- elseif t == "boolean" then
--- if tk == "number" then
--- if hexify then
--- handle(formatters["%w [%04H]=%S,"](level,k,v))
--- else
--- handle(formatters["%w [%s]=%S,"](level,k,v))
--- end
--- elseif tk == "boolean" then
--- handle(formatters["%w [%S]=%S,"](level,k,v))
--- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
--- handle(formatters["%w %s=%S,"](level,k,v))
--- else
--- handle(formatters["%w [%q]=%S,"](level,k,v))
--- end
--- elseif t == "function" then
--- if functions then
--- local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
--- -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
--- if tk == "number" then
--- if hexify then
--- handle(formatters["%w [%04H]=load(%q),"](level,k,f))
--- else
--- handle(formatters["%w [%s]=load(%q),"](level,k,f))
--- end
--- elseif tk == "boolean" then
--- handle(formatters["%w [%S]=load(%q),"](level,k,f))
--- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
--- handle(formatters["%w %s=load(%q),"](level,k,f))
--- else
--- handle(formatters["%w [%q]=load(%q),"](level,k,f))
--- end
--- end
--- else
--- if tk == "number" then
--- if hexify then
--- handle(formatters["%w [%04H]=%Q,"](level,k,v))
--- else
--- handle(formatters["%w [%s]=%Q,"](level,k,v))
--- end
--- elseif tk == "boolean" then
--- handle(formatters["%w [%S]=%Q,"](level,k,v))
--- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
--- handle(formatters["%w %s=%Q,"](level,k,v))
--- else
--- handle(formatters["%w [%q]=%Q,"](level,k,v))
--- end
--- end
--- --~ end
--- end
--- end
--- if level > 0 then
--- handle(formatters["%w}"](level))
--- end
--- end
-
--- local function serialize(_handle,root,name,specification) -- handle wins
--- local tname = type(name)
--- if type(specification) == "table" then
--- noquotes = specification.noquotes
--- hexify = specification.hexify
--- handle = _handle or specification.handle or print
--- reduce = specification.reduce or false
--- functions = specification.functions
--- compact = specification.compact
--- inline = specification.inline and compact
--- if functions == nil then
--- functions = true
--- end
--- if compact == nil then
--- compact = true
--- end
--- if inline == nil then
--- inline = compact
--- end
--- else
--- noquotes = false
--- hexify = false
--- handle = _handle or print
--- reduce = false
--- compact = true
--- inline = true
--- functions = true
--- end
--- if tname == "string" then
--- if name == "return" then
--- handle("return {")
--- else
--- handle(name .. "={")
--- end
--- elseif tname == "number" then
--- if hexify then
--- handle(format("[0x%04X]={",name))
--- else
--- handle("[" .. name .. "]={")
--- end
--- elseif tname == "boolean" then
--- if name then
--- handle("return {")
--- else
--- handle("{")
--- end
--- else
--- handle("t={")
--- end
--- if root then
--- -- The dummy access will initialize a table that has a delayed initialization
--- -- using a metatable. (maybe explicitly test for metatable)
--- if getmetatable(root) then -- todo: make this an option, maybe even per subtable
--- local dummy = root._w_h_a_t_e_v_e_r_
--- root._w_h_a_t_e_v_e_r_ = nil
--- end
--- -- Let's forget about empty tables.
--- if next(root) then
--- do_serialize(root,name,0)
--- end
--- end
--- handle("}")
--- end
-
--- name:
---
--- true : return { }
--- false : { }
--- nil : t = { }
--- string : string = { }
--- "return" : return { }
--- number : [number] = { }
-
-function table.serialize(root,name,specification)
- local t, n = { }, 0
- local function flush(s)
- n = n + 1
- t[n] = s
- end
- serialize(flush,root,name,specification)
- return concat(t,"\n")
-end
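-
--- a rough impression of the output with default options and a string name:
---
--- table.serialize({ a = 1, b = { 2, 3 } },"t") -->
---
--- t={
---  ["a"]=1,
---  ["b"]={ 2, 3 },
--- }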
-
--- local a = { e = { 1,2,3,4,5,6}, a = 1, b = 2, c = "ccc", d = { a = 1, b = 2, c = "ccc", d = { a = 1, b = 2, c = "ccc" } } }
--- local t = os.clock()
--- for i=1,10000 do
--- table.serialize(a)
--- end
--- print(os.clock()-t,table.serialize(a))
-
-table.tohandle = serialize
-
--- sometimes tables in real use are huge (zapfino extra pro is some 85M) in which
--- case a stepwise serialization is nice; actually, we could consider:
---
--- for line in table.serializer(root,name,reduce,noquotes) do
--- ...(line)
--- end
---
--- so this is on the todo list
-
-local maxtab = 2*1024
-
-function table.tofile(filename,root,name,specification)
- local f = io.open(filename,'w')
- if f then
- if maxtab > 1 then
- local t, n = { }, 0
- local function flush(s)
- n = n + 1
- t[n] = s
- if n > maxtab then
- f:write(concat(t,"\n"),"\n") -- hm, write(sometable) should be nice
- t, n = { }, 0 -- we could recycle t if needed
- end
- end
- serialize(flush,root,name,specification)
- f:write(concat(t,"\n"),"\n")
- else
- local function flush(s)
- f:write(s,"\n")
- end
- serialize(flush,root,name,specification)
- end
- f:close()
- io.flush()
- end
-end
-
-local function flattened(t,f,depth) -- also handles { nil, 1, nil, 2 }
- if f == nil then
- f = { }
- depth = 0xFFFF
- elseif tonumber(f) then
- -- assume that only two arguments are given
- depth = f
- f = { }
- elseif not depth then
- depth = 0xFFFF
- end
- for k, v in next, t do
- if type(k) ~= "number" then
- if depth > 0 and type(v) == "table" then
- flattened(v,f,depth-1)
- else
- f[#f+1] = v
- end
- end
- end
- for k=1,#t do
- local v = t[k]
- if depth > 0 and type(v) == "table" then
- flattened(v,f,depth-1)
- else
- f[#f+1] = v
- end
- end
- return f
-end
-
-table.flattened = flattened
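-
--- for instance (an optional numeric second argument limits the depth):
---
--- table.flattened({ 1, { 2, 3 }, 4 })    --> { 1, 2, 3, 4 }
--- table.flattened({ 1, { 2, { 3 } } },1) --> { 1, 2, { 3 } }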
-
-local function unnest(t,f) -- only used in mk, for old times sake
- if not f then -- and only relevant for token lists
- f = { } -- this one can become obsolete
- end
- for i=1,#t do
- local v = t[i]
- if type(v) == "table" then
- if type(v[1]) == "table" then
- unnest(v,f)
- else
- f[#f+1] = v
- end
- else
- f[#f+1] = v
- end
- end
- return f
-end
-
-function table.unnest(t) -- bad name
- return unnest(t)
-end
-
-local function are_equal(a,b,n,m) -- indexed
- if a and b and #a == #b then
- n = n or 1
- m = m or #a
- for i=n,m do
- local ai, bi = a[i], b[i]
- if ai==bi then
- -- same
- elseif type(ai) == "table" and type(bi) == "table" then
- if not are_equal(ai,bi) then
- return false
- end
- else
- return false
- end
- end
- return true
- else
- return false
- end
-end
-
-local function identical(a,b) -- assumes same structure
- for ka, va in next, a do
- local vb = b[ka]
- if va == vb then
- -- same
- elseif type(va) == "table" and type(vb) == "table" then
- if not identical(va,vb) then
- return false
- end
- else
- return false
- end
- end
- return true
-end
-
-table.identical = identical
-table.are_equal = are_equal
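-
--- beware of the asymmetry: identical only checks the keys present in the first table:
---
--- table.are_equal({ 1, { 2 } },{ 1, { 2 } })   --> true
--- table.identical({ a = 1 },{ a = 1, b = 2 })  --> true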
-
--- maybe also make a combined one
-
-function table.compact(t) -- remove empty tables, assumes subtables
- if t then
- for k, v in next, t do
- if not next(v) then -- no type checking
- t[k] = nil
- end
- end
- end
-end
-
-function table.contains(t, v)
- if t then
- for i=1, #t do
- if t[i] == v then
- return i
- end
- end
- end
- return false
-end
-
-function table.count(t)
- local n = 0
- for k, v in next, t do
- n = n + 1
- end
- return n
-end
-
-function table.swapped(t,s) -- hash
- local n = { }
- if s then
- for k, v in next, s do
- n[k] = v
- end
- end
- for k, v in next, t do
- n[v] = k
- end
- return n
-end
-
-function table.mirrored(t) -- hash
- local n = { }
- for k, v in next, t do
- n[v] = k
- n[k] = v
- end
- return n
-end
-
-function table.reversed(t)
- if t then
- local tt, tn = { }, #t
- if tn > 0 then
- local ttn = 0
- for i=tn,1,-1 do
- ttn = ttn + 1
- tt[ttn] = t[i]
- end
- end
- return tt
- end
-end
-
-function table.reverse(t)
- if t then
- local n = #t
- for i=1,floor(n/2) do
- local j = n - i + 1
- t[i], t[j] = t[j], t[i]
- end
- return t
- end
-end
-
-function table.sequenced(t,sep,simple) -- hash only
- if not t then
- return ""
- end
- local n = #t
- local s = { }
- if n > 0 then
- -- indexed
- for i=1,n do
- s[i] = tostring(t[i])
- end
- else
- -- hashed
- n = 0
- for k, v in sortedhash(t) do
- if simple then
- if v == true then
- n = n + 1
- s[n] = k
- elseif v and v~= "" then
- n = n + 1
- s[n] = k .. "=" .. tostring(v)
- end
- else
- n = n + 1
- s[n] = k .. "=" .. tostring(v)
- end
- end
- end
- return concat(s,sep or " | ")
-end
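-
--- indexed tables are just joined, hashes are sorted into key=value pairs, e.g.:
---
--- table.sequenced({ 1, 2, 3 },", ")    --> "1, 2, 3"
--- table.sequenced({ a = 1, b = "x" })  --> "a=1 | b=x"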
-
-function table.print(t,...)
- if type(t) ~= "table" then
- print(tostring(t))
- else
- serialize(print,t,...)
- end
-end
-
-setinspector(function(v) if type(v) == "table" then serialize(print,v,"table") return true end end)
-
--- -- -- obsolete but we keep them for a while and might comment them later -- -- --
-
--- roughly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack)
-
-function table.sub(t,i,j)
- return { unpack(t,i,j) }
-end
-
--- slower than #t on indexed tables (#t only returns the size of the numerically indexed slice)
-
-function table.is_empty(t)
- return not t or not next(t)
-end
-
-function table.has_one_entry(t)
- return t and not next(t,next(t))
-end
-
--- new
-
-function table.loweredkeys(t) -- maybe utf
- local l = { }
- for k, v in next, t do
- l[lower(k)] = v
- end
- return l
-end
-
--- new, might move (maybe duplicate)
-
-function table.unique(old)
- local hash = { }
- local new = { }
- local n = 0
- for i=1,#old do
- local oi = old[i]
- if not hash[oi] then
- n = n + 1
- new[n] = oi
- hash[oi] = true
- end
- end
- return new
-end
-
-function table.sorted(t,...)
- sort(t,...)
- return t -- still sorts in-place
-end
+if not modules then modules = { } end modules ['l-table'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local type, next, tostring, tonumber, ipairs, select = type, next, tostring, tonumber, ipairs, select
+local table, string = table, string
+local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
+local format, lower, dump = string.format, string.lower, string.dump
+local getmetatable, setmetatable = getmetatable, setmetatable
+local getinfo = debug.getinfo
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
+local floor = math.floor
+
+-- extra functions, some might go (when not used)
+
+local stripper = patterns.stripper
+
+function table.strip(tab)
+ local lst, l = { }, 0
+ for i=1,#tab do
+ local s = lpegmatch(stripper,tab[i]) or ""
+ if s == "" then
+ -- skip this one
+ else
+ l = l + 1
+ lst[l] = s
+ end
+ end
+ return lst
+end
+
+function table.keys(t)
+ if t then
+ local keys, k = { }, 0
+ for key, _ in next, t do
+ k = k + 1
+ keys[k] = key
+ end
+ return keys
+ else
+ return { }
+ end
+end
+
+local function compare(a,b)
+ local ta, tb = type(a), type(b) -- needed, else 11 < 2
+ if ta == tb then
+ return a < b
+ else
+ return tostring(a) < tostring(b)
+ end
+end
+
+local function sortedkeys(tab)
+ if tab then
+ local srt, category, s = { }, 0, 0 -- 0=unknown 1=string, 2=number 3=mixed
+ for key,_ in next, tab do
+ s = s + 1
+ srt[s] = key
+ if category == 3 then
+ -- no further check
+ else
+ local tkey = type(key)
+ if tkey == "string" then
+ category = (category == 2 and 3) or 1
+ elseif tkey == "number" then
+ category = (category == 1 and 3) or 2
+ else
+ category = 3
+ end
+ end
+ end
+ if category == 0 or category == 3 then
+ sort(srt,compare)
+ else
+ sort(srt)
+ end
+ return srt
+ else
+ return { }
+ end
+end
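+
+-- for example (mixed key types fall back to a tostring compare):
+--
+-- table.sortedkeys({ b = 2, a = 1, c = 3 }) --> { "a", "b", "c" }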
+
+local function sortedhashkeys(tab,cmp) -- fast one
+ if tab then
+ local srt, s = { }, 0
+ for key,_ in next, tab do
+ if key then
+ s= s + 1
+ srt[s] = key
+ end
+ end
+ sort(srt,cmp)
+ return srt
+ else
+ return { }
+ end
+end
+
+function table.allkeys(t)
+ local keys = { }
+ for k, v in next, t do
+ for k, v in next, v do
+ keys[k] = true
+ end
+ end
+ return sortedkeys(keys)
+end
+
+table.sortedkeys = sortedkeys
+table.sortedhashkeys = sortedhashkeys
+
+local function nothing() end
+
+local function sortedhash(t,cmp)
+ if t then
+ local s
+ if cmp then
+ -- it would be nice if the sort function would accept a third argument (or nicer, an optional first)
+ s = sortedhashkeys(t,function(a,b) return cmp(t,a,b) end)
+ else
+ s = sortedkeys(t) -- the robust one
+ end
+ local n = 0
+ local function kv(s)
+ n = n + 1
+ local k = s[n]
+ return k, t[k]
+ end
+ return kv, s
+ else
+ return nothing
+ end
+end
+
+table.sortedhash = sortedhash
+table.sortedpairs = sortedhash -- obsolete
+
+function table.append(t,list)
+ local n = #t
+ for i=1,#list do
+ n = n + 1
+ t[n] = list[i]
+ end
+ return t
+end
+
+function table.prepend(t, list)
+ local nl = #list
+ local nt = nl + #t
+ for i=#t,1,-1 do
+ t[nt] = t[i]
+ nt = nt - 1
+ end
+ for i=1,#list do
+ t[i] = list[i]
+ end
+ return t
+end
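+
+-- both work in place and return the (modified) first table, e.g.:
+--
+-- table.append ({ 1, 2 },{ 3, 4 }) --> { 1, 2, 3, 4 }
+-- table.prepend({ 3, 4 },{ 1, 2 }) --> { 1, 2, 3, 4 }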
+
+-- function table.merge(t, ...) -- first one is target
+-- t = t or { }
+-- local lst = { ... }
+-- for i=1,#lst do
+-- for k, v in next, lst[i] do
+-- t[k] = v
+-- end
+-- end
+-- return t
+-- end
+
+function table.merge(t, ...) -- first one is target
+ t = t or { }
+ for i=1,select("#",...) do
+ for k, v in next, (select(i,...)) do
+ t[k] = v
+ end
+ end
+ return t
+end
+
+-- function table.merged(...)
+-- local tmp, lst = { }, { ... }
+-- for i=1,#lst do
+-- for k, v in next, lst[i] do
+-- tmp[k] = v
+-- end
+-- end
+-- return tmp
+-- end
+
+function table.merged(...)
+ local t = { }
+ for i=1,select("#",...) do
+ for k, v in next, (select(i,...)) do
+ t[k] = v
+ end
+ end
+ return t
+end
+
+-- function table.imerge(t, ...)
+-- local lst, nt = { ... }, #t
+-- for i=1,#lst do
+-- local nst = lst[i]
+-- for j=1,#nst do
+-- nt = nt + 1
+-- t[nt] = nst[j]
+-- end
+-- end
+-- return t
+-- end
+
+function table.imerge(t, ...)
+ local nt = #t
+ for i=1,select("#",...) do
+ local nst = select(i,...)
+ for j=1,#nst do
+ nt = nt + 1
+ t[nt] = nst[j]
+ end
+ end
+ return t
+end
+
+-- function table.imerged(...)
+-- local tmp, ntmp, lst = { }, 0, {...}
+-- for i=1,#lst do
+-- local nst = lst[i]
+-- for j=1,#nst do
+-- ntmp = ntmp + 1
+-- tmp[ntmp] = nst[j]
+-- end
+-- end
+-- return tmp
+-- end
+
+function table.imerged(...)
+ local tmp, ntmp = { }, 0
+ for i=1,select("#",...) do
+ local nst = select(i,...)
+ for j=1,#nst do
+ ntmp = ntmp + 1
+ tmp[ntmp] = nst[j]
+ end
+ end
+ return tmp
+end
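+
+-- merge/imerge extend the first table, merged/imerged return a fresh one, e.g.:
+--
+-- table.merged ({ a = 1 },{ b = 2 },{ a = 3 }) --> { a = 3, b = 2 }  (later ones win)
+-- table.imerged({ 1, 2 },{ 3 })                --> { 1, 2, 3 }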
+
+local function fastcopy(old,metatabletoo) -- fast one
+ if old then
+ local new = { }
+ for k, v in next, old do
+ if type(v) == "table" then
+ new[k] = fastcopy(v,metatabletoo) -- was just table.copy
+ else
+ new[k] = v
+ end
+ end
+ if metatabletoo then
+ -- optional second arg
+ local mt = getmetatable(old)
+ if mt then
+ setmetatable(new,mt)
+ end
+ end
+ return new
+ else
+ return { }
+ end
+end
+
+-- todo : copy without metatable
+
+local function copy(t, tables) -- taken from lua wiki, slightly adapted
+ tables = tables or { }
+ local tcopy = {}
+ if not tables[t] then
+ tables[t] = tcopy
+ end
+ for i,v in next, t do -- brrr, what happens with sparse indexed
+ if type(i) == "table" then
+ if tables[i] then
+ i = tables[i]
+ else
+ i = copy(i, tables)
+ end
+ end
+ if type(v) ~= "table" then
+ tcopy[i] = v
+ elseif tables[v] then
+ tcopy[i] = tables[v]
+ else
+ tcopy[i] = copy(v, tables)
+ end
+ end
+ local mt = getmetatable(t)
+ if mt then
+ setmetatable(tcopy,mt)
+ end
+ return tcopy
+end
+
+table.fastcopy = fastcopy
+table.copy = copy
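+
+-- the difference in a nutshell (sketchy example):
+--
+-- local t = { } ; t.self = t
+-- local c = table.copy(t)     -- shared and cyclic references are preserved: c.self == c
+-- local f = table.fastcopy(t) -- no cycle tracking, so only meant for tree-like tables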
+
+function table.derive(parent) -- for the moment not public
+ local child = { }
+ if parent then
+ setmetatable(child,{ __index = parent })
+ end
+ return child
+end
+
+function table.tohash(t,value)
+ local h = { }
+ if t then
+ if value == nil then value = true end
+ for _, v in next, t do -- no ipairs here
+ h[v] = value
+ end
+ end
+ return h
+end
+
+function table.fromhash(t)
+ local hsh, h = { }, 0
+ for k, v in next, t do -- no ipairs here
+ if v then
+ h = h + 1
+ hsh[h] = k
+ end
+ end
+ return hsh
+end
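+
+-- these two are more or less each other's inverse, e.g.:
+--
+-- table.tohash  ({ "a", "b" })             --> { a = true, b = true }
+-- table.fromhash({ a = true, b = false })  --> { "a" }  (only truthy values, order undefined)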
+
+local noquotes, hexify, handle, reduce, compact, inline, functions
+
+local reserved = table.tohash { -- intercept a language inconvenience: no reserved words as key
+ 'and', 'break', 'do', 'else', 'elseif', 'end', 'false', 'for', 'function', 'if',
+ 'in', 'local', 'nil', 'not', 'or', 'repeat', 'return', 'then', 'true', 'until', 'while',
+}
+
+local function simple_table(t)
+ if #t > 0 then
+ local n = 0
+ for _,v in next, t do
+ n = n + 1
+ end
+ if n == #t then
+ local tt, nt = { }, 0
+ for i=1,#t do
+ local v = t[i]
+ local tv = type(v)
+ if tv == "number" then
+ nt = nt + 1
+ if hexify then
+ tt[nt] = format("0x%04X",v)
+ else
+ tt[nt] = tostring(v) -- tostring not needed
+ end
+ elseif tv == "boolean" then
+ nt = nt + 1
+ tt[nt] = tostring(v)
+ elseif tv == "string" then
+ nt = nt + 1
+ tt[nt] = format("%q",v)
+ else
+ tt = nil
+ break
+ end
+ end
+ return tt
+ end
+ end
+ return nil
+end
+
+-- Because this is a core function of mkiv I moved some function calls
+-- inline.
+--
+-- twice as fast in a test:
+--
+-- local propername = lpeg.P(lpeg.R("AZ","az","__") * lpeg.R("09","AZ","az", "__")^0 * lpeg.P(-1) )
+
+-- problem: there is no good number_to_string converter with the best resolution
+
+-- probably using .. is faster than format
+-- maybe split in a few cases (yes/no hexify)
+
+-- todo: %g faster on numbers than %s
+
+-- we can speed this up with repeaters and formatters (is indeed faster)
+
+local propername = patterns.propername -- was find(name,"^%a[%w%_]*$")
+
+local function dummy() end
+
+local function do_serialize(root,name,depth,level,indexed)
+ if level > 0 then
+ depth = depth .. " "
+ if indexed then
+ handle(format("%s{",depth))
+ else
+ local tn = type(name)
+ if tn == "number" then
+ if hexify then
+ handle(format("%s[0x%04X]={",depth,name))
+ else
+ handle(format("%s[%s]={",depth,name))
+ end
+ elseif tn == "string" then
+ if noquotes and not reserved[name] and lpegmatch(propername,name) then
+ handle(format("%s%s={",depth,name))
+ else
+ handle(format("%s[%q]={",depth,name))
+ end
+ elseif tn == "boolean" then
+ handle(format("%s[%s]={",depth,tostring(name)))
+ else
+ handle(format("%s{",depth))
+ end
+ end
+ end
+ -- we could check for k (index) being number (cardinal)
+ if root and next(root) then
+ -- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
+ -- if compact then
+ -- -- NOT: for k=1,#root do (we need to quit at nil)
+ -- for k,v in ipairs(root) do -- can we use next?
+ -- if not first then first = k end
+ -- last = last + 1
+ -- end
+ -- end
+ local first, last = nil, 0
+ if compact then
+ last = #root
+ for k=1,last do
+ if root[k] == nil then
+ last = k - 1
+ break
+ end
+ end
+ if last > 0 then
+ first = 1
+ end
+ end
+ local sk = sortedkeys(root)
+ for i=1,#sk do
+ local k = sk[i]
+ local v = root[k]
+ --~ if v == root then
+ -- circular
+ --~ else
+ local t, tk = type(v), type(k)
+ if compact and first and tk == "number" and k >= first and k <= last then
+ if t == "number" then
+ if hexify then
+ handle(format("%s 0x%04X,",depth,v))
+ else
+ handle(format("%s %s,",depth,v)) -- %.99g
+ end
+ elseif t == "string" then
+ if reduce and tonumber(v) then
+ handle(format("%s %s,",depth,v))
+ else
+ handle(format("%s %q,",depth,v))
+ end
+ elseif t == "table" then
+ if not next(v) then
+ handle(format("%s {},",depth))
+ elseif inline then -- and #t > 0
+ local st = simple_table(v)
+ if st then
+ handle(format("%s { %s },",depth,concat(st,", ")))
+ else
+ do_serialize(v,k,depth,level+1,true)
+ end
+ else
+ do_serialize(v,k,depth,level+1,true)
+ end
+ elseif t == "boolean" then
+ handle(format("%s %s,",depth,tostring(v)))
+ elseif t == "function" then
+ if functions then
+ handle(format('%s load(%q),',depth,dump(v)))
+ else
+ handle(format('%s "function",',depth))
+ end
+ else
+ handle(format("%s %q,",depth,tostring(v)))
+ end
+ elseif k == "__p__" then -- parent
+ if false then
+ handle(format("%s __p__=nil,",depth))
+ end
+ elseif t == "number" then
+ if tk == "number" then
+ if hexify then
+ handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
+ else
+ handle(format("%s [%s]=%s,",depth,k,v)) -- %.99g
+ end
+ elseif tk == "boolean" then
+ if hexify then
+ handle(format("%s [%s]=0x%04X,",depth,tostring(k),v))
+ else
+ handle(format("%s [%s]=%s,",depth,tostring(k),v)) -- %.99g
+ end
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ if hexify then
+ handle(format("%s %s=0x%04X,",depth,k,v))
+ else
+ handle(format("%s %s=%s,",depth,k,v)) -- %.99g
+ end
+ else
+ if hexify then
+ handle(format("%s [%q]=0x%04X,",depth,k,v))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v)) -- %.99g
+ end
+ end
+ elseif t == "string" then
+ if reduce and tonumber(v) then
+ if tk == "number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%s,",depth,k,v))
+ else
+ handle(format("%s [%s]=%s,",depth,k,v))
+ end
+ elseif tk == "boolean" then
+ handle(format("%s [%s]=%s,",depth,tostring(k),v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%s,",depth,k,v))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v))
+ end
+ else
+ if tk == "number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%q,",depth,k,v))
+ else
+ handle(format("%s [%s]=%q,",depth,k,v))
+ end
+ elseif tk == "boolean" then
+ handle(format("%s [%s]=%q,",depth,tostring(k),v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,v))
+ else
+ handle(format("%s [%q]=%q,",depth,k,v))
+ end
+ end
+ elseif t == "table" then
+ if not next(v) then
+ if tk == "number" then
+ if hexify then
+ handle(format("%s [0x%04X]={},",depth,k))
+ else
+ handle(format("%s [%s]={},",depth,k))
+ end
+ elseif tk == "boolean" then
+ handle(format("%s [%s]={},",depth,tostring(k)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={},",depth,k))
+ else
+ handle(format("%s [%q]={},",depth,k))
+ end
+ elseif inline then
+ local st = simple_table(v)
+ if st then
+ if tk == "number" then
+ if hexify then
+ handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
+ end
+ elseif tk == "boolean" then
+ handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", ")))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%q]={ %s },",depth,k,concat(st,", ")))
+ end
+ else
+ do_serialize(v,k,depth,level+1)
+ end
+ else
+ do_serialize(v,k,depth,level+1)
+ end
+ elseif t == "boolean" then
+ if tk == "number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%s,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%s]=%s,",depth,k,tostring(v)))
+ end
+ elseif tk == "boolean" then
+ handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%s,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%q]=%s,",depth,k,tostring(v)))
+ end
+ elseif t == "function" then
+ if functions then
+ local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
+ -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
+ if tk == "number" then
+ if hexify then
+ handle(format("%s [0x%04X]=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%s]=load(%q),",depth,k,f))
+ end
+ elseif tk == "boolean" then
+ handle(format("%s [%s]=load(%q),",depth,tostring(k),f))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%q]=load(%q),",depth,k,f))
+ end
+ end
+ else
+ if tk == "number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%s]=%q,",depth,k,tostring(v)))
+ end
+ elseif tk == "boolean" then
+ handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%q]=%q,",depth,k,tostring(v)))
+ end
+ end
+ --~ end
+ end
+ end
+ if level > 0 then
+ handle(format("%s},",depth))
+ end
+end
+
+-- replacing handle by a direct t[#t+1] = ... (plus test) is not much
+-- faster (0.03 on 1.00 for zapfino.tma)
+
+local function serialize(_handle,root,name,specification) -- handle wins
+ local tname = type(name)
+ if type(specification) == "table" then
+ noquotes = specification.noquotes
+ hexify = specification.hexify
+ handle = _handle or specification.handle or print
+ reduce = specification.reduce or false
+ functions = specification.functions
+ compact = specification.compact
+ inline = specification.inline and compact
+ if functions == nil then
+ functions = true
+ end
+ if compact == nil then
+ compact = true
+ end
+ if inline == nil then
+ inline = compact
+ end
+ else
+ noquotes = false
+ hexify = false
+ handle = _handle or print
+ reduce = false
+ compact = true
+ inline = true
+ functions = true
+ end
+ if tname == "string" then
+ if name == "return" then
+ handle("return {")
+ else
+ handle(name .. "={")
+ end
+ elseif tname == "number" then
+ if hexify then
+ handle(format("[0x%04X]={",name))
+ else
+ handle("[" .. name .. "]={")
+ end
+ elseif tname == "boolean" then
+ if name then
+ handle("return {")
+ else
+ handle("{")
+ end
+ else
+ handle("t={")
+ end
+ if root then
+ -- The dummy access will initialize a table that has a delayed initialization
+ -- using a metatable. (maybe explicitly test for metatable)
+ if getmetatable(root) then -- todo: make this an option, maybe even per subtable
+ local dummy = root._w_h_a_t_e_v_e_r_
+ root._w_h_a_t_e_v_e_r_ = nil
+ end
+ -- Let's forget about empty tables.
+ if next(root) then
+ do_serialize(root,name,"",0)
+ end
+ end
+ handle("}")
+end
+
+-- -- This is some 20% faster than using format (because formatters are much faster), but
+-- -- of course inlining the format using .. is again faster. Anyway, as we do some
+-- -- pretty printing as well, there is not that much to gain unless we also make a 'fast'
+-- -- ugly variant. But then we would have to move the formatter to l-string.
+
+-- local formatters = string.formatters
+
+-- local function do_serialize(root,name,level,indexed)
+-- if level > 0 then
+-- if indexed then
+-- handle(formatters["%w{"](level))
+-- else
+-- local tn = type(name)
+-- if tn == "number" then
+-- if hexify then
+-- handle(formatters["%w[%04H]={"](level,name))
+-- else
+-- handle(formatters["%w[%s]={"](level,name))
+-- end
+-- elseif tn == "string" then
+-- if noquotes and not reserved[name] and lpegmatch(propername,name) then
+-- handle(formatters["%w%s={"](level,name))
+-- else
+-- handle(formatters["%w[%q]={"](level,name))
+-- end
+-- elseif tn == "boolean" then
+-- handle(formatters["%w[%S]={"](level,name))
+-- else
+-- handle(formatters["%w{"](level))
+-- end
+-- end
+-- end
+-- -- we could check for k (index) being number (cardinal)
+-- if root and next(root) then
+-- -- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
+-- -- if compact then
+-- -- -- NOT: for k=1,#root do (we need to quit at nil)
+-- -- for k,v in ipairs(root) do -- can we use next?
+-- -- if not first then first = k end
+-- -- last = last + 1
+-- -- end
+-- -- end
+-- local first, last = nil, 0
+-- if compact then
+-- last = #root
+-- for k=1,last do
+-- if root[k] == nil then
+-- last = k - 1
+-- break
+-- end
+-- end
+-- if last > 0 then
+-- first = 1
+-- end
+-- end
+-- local sk = sortedkeys(root)
+-- for i=1,#sk do
+-- local k = sk[i]
+-- local v = root[k]
+-- --~ if v == root then
+-- -- circular
+-- --~ else
+-- local t, tk = type(v), type(k)
+-- if compact and first and tk == "number" and k >= first and k <= last then
+-- if t == "number" then
+-- if hexify then
+-- handle(formatters["%w %04H,"](level,v))
+-- else
+-- handle(formatters["%w %s,"](level,v)) -- %.99g
+-- end
+-- elseif t == "string" then
+-- if reduce and tonumber(v) then
+-- handle(formatters["%w %s,"](level,v))
+-- else
+-- handle(formatters["%w %q,"](level,v))
+-- end
+-- elseif t == "table" then
+-- if not next(v) then
+-- handle(formatters["%w {},"](level))
+-- elseif inline then -- and #t > 0
+-- local st = simple_table(v)
+-- if st then
+-- handle(formatters["%w { %, t },"](level,st))
+-- else
+-- do_serialize(v,k,level+1,true)
+-- end
+-- else
+-- do_serialize(v,k,level+1,true)
+-- end
+-- elseif t == "boolean" then
+-- handle(formatters["%w %S,"](level,v))
+-- elseif t == "function" then
+-- if functions then
+-- handle(formatters['%w load(%q),'](level,dump(v)))
+-- else
+-- handle(formatters['%w "function",'](level))
+-- end
+-- else
+-- handle(formatters["%w %Q,"](level,v))
+-- end
+-- elseif k == "__p__" then -- parent
+-- if false then
+-- handle(formatters["%w __p__=nil,"](level))
+-- end
+-- elseif t == "number" then
+-- if tk == "number" then
+-- if hexify then
+-- handle(formatters["%w [%04H]=%04H,"](level,k,v))
+-- else
+-- handle(formatters["%w [%s]=%s,"](level,k,v)) -- %.99g
+-- end
+-- elseif tk == "boolean" then
+-- if hexify then
+-- handle(formatters["%w [%S]=%04H,"](level,k,v))
+-- else
+-- handle(formatters["%w [%S]=%s,"](level,k,v)) -- %.99g
+-- end
+-- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+-- if hexify then
+-- handle(formatters["%w %s=%04H,"](level,k,v))
+-- else
+-- handle(formatters["%w %s=%s,"](level,k,v)) -- %.99g
+-- end
+-- else
+-- if hexify then
+-- handle(formatters["%w [%q]=%04H,"](level,k,v))
+-- else
+-- handle(formatters["%w [%q]=%s,"](level,k,v)) -- %.99g
+-- end
+-- end
+-- elseif t == "string" then
+-- if reduce and tonumber(v) then
+-- if tk == "number" then
+-- if hexify then
+-- handle(formatters["%w [%04H]=%s,"](level,k,v))
+-- else
+-- handle(formatters["%w [%s]=%s,"](level,k,v))
+-- end
+-- elseif tk == "boolean" then
+-- handle(formatters["%w [%S]=%s,"](level,k,v))
+-- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+-- handle(formatters["%w %s=%s,"](level,k,v))
+-- else
+-- handle(formatters["%w [%q]=%s,"](level,k,v))
+-- end
+-- else
+-- if tk == "number" then
+-- if hexify then
+-- handle(formatters["%w [%04H]=%q,"](level,k,v))
+-- else
+-- handle(formatters["%w [%s]=%q,"](level,k,v))
+-- end
+-- elseif tk == "boolean" then
+-- handle(formatters["%w [%S]=%q,"](level,k,v))
+-- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+-- handle(formatters["%w %s=%q,"](level,k,v))
+-- else
+-- handle(formatters["%w [%q]=%q,"](level,k,v))
+-- end
+-- end
+-- elseif t == "table" then
+-- if not next(v) then
+-- if tk == "number" then
+-- if hexify then
+-- handle(formatters["%w [%04H]={},"](level,k))
+-- else
+-- handle(formatters["%w [%s]={},"](level,k))
+-- end
+-- elseif tk == "boolean" then
+-- handle(formatters["%w [%S]={},"](level,k))
+-- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+-- handle(formatters["%w %s={},"](level,k))
+-- else
+-- handle(formatters["%w [%q]={},"](level,k))
+-- end
+-- elseif inline then
+-- local st = simple_table(v)
+-- if st then
+-- if tk == "number" then
+-- if hexify then
+-- handle(formatters["%w [%04H]={ %, t },"](level,k,st))
+-- else
+-- handle(formatters["%w [%s]={ %, t },"](level,k,st))
+-- end
+-- elseif tk == "boolean" then
+-- handle(formatters["%w [%S]={ %, t },"](level,k,st))
+-- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+-- handle(formatters["%w %s={ %, t },"](level,k,st))
+-- else
+-- handle(formatters["%w [%q]={ %, t },"](level,k,st))
+-- end
+-- else
+-- do_serialize(v,k,level+1)
+-- end
+-- else
+-- do_serialize(v,k,level+1)
+-- end
+-- elseif t == "boolean" then
+-- if tk == "number" then
+-- if hexify then
+-- handle(formatters["%w [%04H]=%S,"](level,k,v))
+-- else
+-- handle(formatters["%w [%s]=%S,"](level,k,v))
+-- end
+-- elseif tk == "boolean" then
+-- handle(formatters["%w [%S]=%S,"](level,k,v))
+-- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+-- handle(formatters["%w %s=%S,"](level,k,v))
+-- else
+-- handle(formatters["%w [%q]=%S,"](level,k,v))
+-- end
+-- elseif t == "function" then
+-- if functions then
+-- local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
+-- -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
+-- if tk == "number" then
+-- if hexify then
+-- handle(formatters["%w [%04H]=load(%q),"](level,k,f))
+-- else
+-- handle(formatters["%w [%s]=load(%q),"](level,k,f))
+-- end
+-- elseif tk == "boolean" then
+-- handle(formatters["%w [%S]=load(%q),"](level,k,f))
+-- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+-- handle(formatters["%w %s=load(%q),"](level,k,f))
+-- else
+-- handle(formatters["%w [%q]=load(%q),"](level,k,f))
+-- end
+-- end
+-- else
+-- if tk == "number" then
+-- if hexify then
+-- handle(formatters["%w [%04H]=%Q,"](level,k,v))
+-- else
+-- handle(formatters["%w [%s]=%Q,"](level,k,v))
+-- end
+-- elseif tk == "boolean" then
+-- handle(formatters["%w [%S]=%Q,"](level,k,v))
+-- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+-- handle(formatters["%w %s=%Q,"](level,k,v))
+-- else
+-- handle(formatters["%w [%q]=%Q,"](level,k,v))
+-- end
+-- end
+-- --~ end
+-- end
+-- end
+-- if level > 0 then
+-- handle(formatters["%w}"](level))
+-- end
+-- end
+
+-- local function serialize(_handle,root,name,specification) -- handle wins
+-- local tname = type(name)
+-- if type(specification) == "table" then
+-- noquotes = specification.noquotes
+-- hexify = specification.hexify
+-- handle = _handle or specification.handle or print
+-- reduce = specification.reduce or false
+-- functions = specification.functions
+-- compact = specification.compact
+-- inline = specification.inline and compact
+-- if functions == nil then
+-- functions = true
+-- end
+-- if compact == nil then
+-- compact = true
+-- end
+-- if inline == nil then
+-- inline = compact
+-- end
+-- else
+-- noquotes = false
+-- hexify = false
+-- handle = _handle or print
+-- reduce = false
+-- compact = true
+-- inline = true
+-- functions = true
+-- end
+-- if tname == "string" then
+-- if name == "return" then
+-- handle("return {")
+-- else
+-- handle(name .. "={")
+-- end
+-- elseif tname == "number" then
+-- if hexify then
+-- handle(format("[0x%04X]={",name))
+-- else
+-- handle("[" .. name .. "]={")
+-- end
+-- elseif tname == "boolean" then
+-- if name then
+-- handle("return {")
+-- else
+-- handle("{")
+-- end
+-- else
+-- handle("t={")
+-- end
+-- if root then
+-- -- The dummy access will initialize a table that has a delayed initialization
+-- -- using a metatable. (maybe explicitly test for metatable)
+-- if getmetatable(root) then -- todo: make this an option, maybe even per subtable
+-- local dummy = root._w_h_a_t_e_v_e_r_
+-- root._w_h_a_t_e_v_e_r_ = nil
+-- end
+-- -- Let's forget about empty tables.
+-- if next(root) then
+-- do_serialize(root,name,0)
+-- end
+-- end
+-- handle("}")
+-- end
+
+-- name:
+--
+-- true : return { }
+-- false : { }
+-- nil : t = { }
+-- string : string = { }
+-- "return" : return { }
+-- number : [number] = { }
+
+function table.serialize(root,name,specification)
+ local t, n = { }, 0
+ local function flush(s)
+ n = n + 1
+ t[n] = s
+ end
+ serialize(flush,root,name,specification)
+ return concat(t,"\n")
+end
+
+-- local a = { e = { 1,2,3,4,5,6}, a = 1, b = 2, c = "ccc", d = { a = 1, b = 2, c = "ccc", d = { a = 1, b = 2, c = "ccc" } } }
+-- local t = os.clock()
+-- for i=1,10000 do
+-- table.serialize(a)
+-- end
+-- print(os.clock()-t,table.serialize(a))
+
+table.tohandle = serialize
+
+-- sometimes tables are really huge (zapfino extra pro is some 85M) in which
+-- case a stepwise serialization is nice; actually, we could consider:
+--
+-- for line in table.serializer(root,name,reduce,noquotes) do
+-- ...(line)
+-- end
+--
+-- so this is on the todo list
+
+local maxtab = 2*1024
+
+function table.tofile(filename,root,name,specification)
+ local f = io.open(filename,'w')
+ if f then
+ if maxtab > 1 then
+ local t, n = { }, 0
+ local function flush(s)
+ n = n + 1
+ t[n] = s
+ if n > maxtab then
+ f:write(concat(t,"\n"),"\n") -- hm, write(sometable) should be nice
+ t, n = { }, 0 -- we could recycle t if needed
+ end
+ end
+ serialize(flush,root,name,specification)
+ f:write(concat(t,"\n"),"\n")
+ else
+ local function flush(s)
+ f:write(s,"\n")
+ end
+ serialize(flush,root,name,specification)
+ end
+ f:close()
+ io.flush()
+ end
+end
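+
+-- A minimal usage sketch (the filename is just an example): with name set to
+-- "return" the file starts with "return {", so it can be loaded back with
+-- dofile or loadfile; the maxtab buffering above only changes how often we
+-- flush to disk, not the result.
+--
+-- table.tofile("demo-settings.lua",{ alpha = 1, beta = "two" },"return")
+-- local reloaded = dofile("demo-settings.lua") -- reloaded.alpha == 1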
+
+local function flattened(t,f,depth) -- also handles { nil, 1, nil, 2 }
+ if f == nil then
+ f = { }
+ depth = 0xFFFF
+ elseif tonumber(f) then
+ -- assume that only two arguments are given
+ depth = f
+ f = { }
+ elseif not depth then
+ depth = 0xFFFF
+ end
+ for k, v in next, t do
+ if type(k) ~= "number" then
+ if depth > 0 and type(v) == "table" then
+ flattened(v,f,depth-1)
+ else
+ f[#f+1] = v
+ end
+ end
+ end
+ for k=1,#t do
+ local v = t[k]
+ if depth > 0 and type(v) == "table" then
+ flattened(v,f,depth-1)
+ else
+ f[#f+1] = v
+ end
+ end
+ return f
+end
+
+table.flattened = flattened
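+
+-- A small illustration of what flattened returns: the hash part is collected
+-- first, then the indexed part, and subtables are flattened up to the given
+-- depth (which defaults to 0xFFFF):
+--
+-- inspect(table.flattened { 1, { 2, 3 }, x = { 4, 5 } }) -- { 4, 5, 1, 2, 3 }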
+
+local function unnest(t,f) -- only used in mk, for old times sake
+ if not f then -- and only relevant for token lists
+ f = { } -- this one can become obsolete
+ end
+ for i=1,#t do
+ local v = t[i]
+ if type(v) == "table" then
+ if type(v[1]) == "table" then
+ unnest(v,f)
+ else
+ f[#f+1] = v
+ end
+ else
+ f[#f+1] = v
+ end
+ end
+ return f
+end
+
+function table.unnest(t) -- bad name
+ return unnest(t)
+end
+
+local function are_equal(a,b,n,m) -- indexed
+ if a and b and #a == #b then
+ n = n or 1
+ m = m or #a
+ for i=n,m do
+ local ai, bi = a[i], b[i]
+ if ai==bi then
+ -- same
+ elseif type(ai) == "table" and type(bi) == "table" then
+ if not are_equal(ai,bi) then
+ return false
+ end
+ else
+ return false
+ end
+ end
+ return true
+ else
+ return false
+ end
+end
+
+local function identical(a,b) -- assumes same structure
+ for ka, va in next, a do
+ local vb = b[ka]
+ if va == vb then
+ -- same
+ elseif type(va) == "table" and type(vb) == "table" then
+ if not identical(va,vb) then
+ return false
+ end
+ else
+ return false
+ end
+ end
+ return true
+end
+
+table.identical = identical
+table.are_equal = are_equal
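+
+-- Two quick checks (illustration only): are_equal compares the indexed parts
+-- of two tables, identical walks the keys of the first one and assumes both
+-- share the same structure.
+--
+-- print(table.are_equal({ 1, { 2 } }, { 1, { 2 } })) -- true
+-- print(table.identical({ a = { 1 } }, { a = { 1 } })) -- true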
+
+-- maybe also make a combined one
+
+function table.compact(t) -- remove empty tables, assumes subtables
+ if t then
+ for k, v in next, t do
+ if not next(v) then -- no type checking
+ t[k] = nil
+ end
+ end
+ end
+end
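+
+-- For instance (illustration only): subtables without content are removed,
+-- the rest stays untouched.
+--
+-- local t = { a = { }, b = { 1 } }
+-- table.compact(t) -- t.a is gone, t.b is kept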
+
+function table.contains(t, v)
+ if t then
+ for i=1, #t do
+ if t[i] == v then
+ return i
+ end
+ end
+ end
+ return false
+end
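+
+-- Note that contains returns the index of the first hit, not a boolean:
+--
+-- print(table.contains({ "x", "y" },"y")) -- 2
+-- print(table.contains({ "x", "y" },"z")) -- false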
+
+function table.count(t)
+ local n = 0
+ for k, v in next, t do
+ n = n + 1
+ end
+ return n
+end
+
+function table.swapped(t,s) -- hash
+ local n = { }
+ if s then
+ for k, v in next, s do
+ n[k] = v
+ end
+ end
+ for k, v in next, t do
+ n[v] = k
+ end
+ return n
+end
+
+function table.mirrored(t) -- hash
+ local n = { }
+ for k, v in next, t do
+ n[v] = k
+ n[k] = v
+ end
+ return n
+end
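+
+-- The difference in a nutshell (illustration only): swapped only maps values
+-- to keys, mirrored keeps the original entries as well.
+--
+-- inspect(table.swapped { "a", "b" }) -- { a = 1, b = 2 }
+-- inspect(table.mirrored { "a", "b" }) -- { "a", "b", a = 1, b = 2 }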
+
+function table.reversed(t)
+ if t then
+ local tt, tn = { }, #t
+ if tn > 0 then
+ local ttn = 0
+ for i=tn,1,-1 do
+ ttn = ttn + 1
+ tt[ttn] = t[i]
+ end
+ end
+ return tt
+ end
+end
+
+function table.reverse(t)
+ if t then
+ local n = #t
+ for i=1,floor(n/2) do
+ local j = n - i + 1
+ t[i], t[j] = t[j], t[i]
+ end
+ return t
+ end
+end
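+
+-- reversed returns a fresh copy while reverse flips the given table in place:
+--
+-- inspect(table.reversed { 1, 2, 3 }) -- { 3, 2, 1 } (original untouched)
+-- inspect(table.reverse { 1, 2, 3 }) -- { 3, 2, 1 } (same table, mutated)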
+
+function table.sequenced(t,sep,simple) -- hash only
+ if not t then
+ return ""
+ end
+ local n = #t
+ local s = { }
+ if n > 0 then
+ -- indexed
+ for i=1,n do
+ s[i] = tostring(t[i])
+ end
+ else
+ -- hashed
+ n = 0
+ for k, v in sortedhash(t) do
+ if simple then
+ if v == true then
+ n = n + 1
+ s[n] = k
+ elseif v and v~= "" then
+ n = n + 1
+ s[n] = k .. "=" .. tostring(v)
+ end
+ else
+ n = n + 1
+ s[n] = k .. "=" .. tostring(v)
+ end
+ end
+ end
+ return concat(s,sep or " | ")
+end
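+
+-- sequenced is mostly meant for tracing; in simple mode a true value shows up
+-- as just its key (illustration only):
+--
+-- print(table.sequenced({ a = 1, b = true },", ",true)) -- a=1, b
+-- print(table.sequenced({ "x", "y" },", ")) -- x, y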
+
+function table.print(t,...)
+ if type(t) ~= "table" then
+ print(tostring(t))
+ else
+ serialize(print,t,...)
+ end
+end
+
+setinspector(function(v) if type(v) == "table" then serialize(print,v,"table") return true end end)
+
+-- -- -- obsolete but we keep them for a while and might comment them later -- -- --
+
+-- roughly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack)
+
+function table.sub(t,i,j)
+ return { unpack(t,i,j) }
+end
+
+-- slower than #t on indexed tables (#t only returns the size of the numerically indexed slice)
+
+function table.is_empty(t)
+ return not t or not next(t)
+end
+
+function table.has_one_entry(t)
+ return t and not next(t,next(t))
+end
+
+-- new
+
+function table.loweredkeys(t) -- maybe utf
+ local l = { }
+ for k, v in next, t do
+ l[lower(k)] = v
+ end
+ return l
+end
+
+-- new, might move (maybe duplicate)
+
+function table.unique(old)
+ local hash = { }
+ local new = { }
+ local n = 0
+ for i=1,#old do
+ local oi = old[i]
+ if not hash[oi] then
+ n = n + 1
+ new[n] = oi
+ hash[oi] = true
+ end
+ end
+ return new
+end
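+
+-- For example (illustration only):
+--
+-- inspect(table.unique { "a", "b", "a", "c" }) -- { "a", "b", "c" }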
+
+function table.sorted(t,...)
+ sort(t,...)
+ return t -- still sorts in-place
+end
diff --git a/tex/context/base/l-unicode.lua b/tex/context/base/l-unicode.lua
index 813ffd54b..d38d4cbd1 100644
--- a/tex/context/base/l-unicode.lua
+++ b/tex/context/base/l-unicode.lua
@@ -1,942 +1,942 @@
-if not modules then modules = { } end modules ['l-unicode'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- this module will be reorganized
-
--- todo: utf.sub replacement (used in syst-aux)
-
--- we put these in the utf namespace:
-
-utf = utf or (unicode and unicode.utf8) or { }
-
-utf.characters = utf.characters or string.utfcharacters
-utf.values = utf.values or string.utfvalues
-
--- string.utfvalues
--- string.utfcharacters
--- string.characters
--- string.characterpairs
--- string.bytes
--- string.bytepairs
-
-local type = type
-local char, byte, format, sub = string.char, string.byte, string.format, string.sub
-local concat = table.concat
-local P, C, R, Cs, Ct, Cmt, Cc, Carg, Cp = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs, lpeg.Ct, lpeg.Cmt, lpeg.Cc, lpeg.Carg, lpeg.Cp
-local lpegmatch, patterns = lpeg.match, lpeg.patterns
-
-local bytepairs = string.bytepairs
-
-local finder = lpeg.finder
-local replacer = lpeg.replacer
-
-local utfvalues = utf.values
-local utfgmatch = utf.gmatch -- not always present
-
-local p_utftype = patterns.utftype
-local p_utfoffset = patterns.utfoffset
-local p_utf8char = patterns.utf8char
-local p_utf8byte = patterns.utf8byte
-local p_utfbom = patterns.utfbom
-local p_newline = patterns.newline
-local p_whitespace = patterns.whitespace
-
-if not unicode then
-
- unicode = { utf = utf } -- for a while
-
-end
-
-if not utf.char then
-
- local floor, char = math.floor, string.char
-
- function utf.char(n)
- if n < 0x80 then
- -- 0aaaaaaa : 0x80
- return char(n)
- elseif n < 0x800 then
- -- 110bbbaa : 0xC0 : n >> 6
- -- 10aaaaaa : 0x80 : n & 0x3F
- return char(
- 0xC0 + floor(n/0x40),
- 0x80 + (n % 0x40)
- )
- elseif n < 0x10000 then
- -- 1110bbbb : 0xE0 : n >> 12
- -- 10bbbbaa : 0x80 : (n >> 6) & 0x3F
- -- 10aaaaaa : 0x80 : n & 0x3F
- return char(
- 0xE0 + floor(n/0x1000),
- 0x80 + (floor(n/0x40) % 0x40),
- 0x80 + (n % 0x40)
- )
- elseif n < 0x200000 then
- -- 11110ccc : 0xF0 : n >> 18
- -- 10ccbbbb : 0x80 : (n >> 12) & 0x3F
- -- 10bbbbaa : 0x80 : (n >> 6) & 0x3F
- -- 10aaaaaa : 0x80 : n & 0x3F
- -- dddd : ccccc - 1
- return char(
- 0xF0 + floor(n/0x40000),
- 0x80 + (floor(n/0x1000) % 0x40),
- 0x80 + (floor(n/0x40) % 0x40),
- 0x80 + (n % 0x40)
- )
- else
- return ""
- end
- end
-
-end
-
-if not utf.byte then
-
- local utf8byte = patterns.utf8byte
-
- function utf.byte(c)
- return lpegmatch(utf8byte,c)
- end
-
-end
-
-local utfchar, utfbyte = utf.char, utf.byte
-
--- As we want to get rid of the (unmaintained) utf library we implement our own
--- variants (in due time an independent module):
-
-function utf.filetype(data)
- return data and lpegmatch(p_utftype,data) or "unknown"
-end
-
-local toentities = Cs (
- (
- patterns.utf8one
- + (
- patterns.utf8two
- + patterns.utf8three
- + patterns.utf8four
- ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("%X;",b) end end
- )^0
-)
-
-patterns.toentities = toentities
-
-function utf.toentities(str)
- return lpegmatch(toentities,str)
-end
-
--- local utfchr = { } -- 60K -> 2.638 M extra mem but currently not called that often (on latin)
---
--- setmetatable(utfchr, { __index = function(t,k) local v = utfchar(k) t[k] = v return v end } )
---
--- collectgarbage("collect")
--- local u = collectgarbage("count")*1024
--- local t = os.clock()
--- for i=1,1000 do
--- for i=1,600 do
--- local a = utfchr[i]
--- end
--- end
--- print(os.clock()-t,collectgarbage("count")*1024-u)
-
--- collectgarbage("collect")
--- local t = os.clock()
--- for i=1,1000 do
--- for i=1,600 do
--- local a = utfchar(i)
--- end
--- end
--- print(os.clock()-t,collectgarbage("count")*1024-u)
-
--- local byte = string.byte
--- local utfchar = utf.char
-
-local one = P(1)
-local two = C(1) * C(1)
-local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
-
--- actually one of them is already utf ... sort of useless this one
-
--- function utf.char(n)
--- if n < 0x80 then
--- return char(n)
--- elseif n < 0x800 then
--- return char(
--- 0xC0 + floor(n/0x40),
--- 0x80 + (n % 0x40)
--- )
--- elseif n < 0x10000 then
--- return char(
--- 0xE0 + floor(n/0x1000),
--- 0x80 + (floor(n/0x40) % 0x40),
--- 0x80 + (n % 0x40)
--- )
--- elseif n < 0x40000 then
--- return char(
--- 0xF0 + floor(n/0x40000),
--- 0x80 + floor(n/0x1000),
--- 0x80 + (floor(n/0x40) % 0x40),
--- 0x80 + (n % 0x40)
--- )
--- else
--- -- return char(
--- -- 0xF1 + floor(n/0x1000000),
--- -- 0x80 + floor(n/0x40000),
--- -- 0x80 + floor(n/0x1000),
--- -- 0x80 + (floor(n/0x40) % 0x40),
--- -- 0x80 + (n % 0x40)
--- -- )
--- return "?"
--- end
--- end
---
--- merge into:
-
-local pattern = P("\254\255") * Cs( (
- four / function(a,b,c,d)
- local ab = 0xFF * byte(a) + byte(b)
- local cd = 0xFF * byte(c) + byte(d)
- return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
- end
- + two / function(a,b)
- return utfchar(byte(a)*256 + byte(b))
- end
- + one
- )^1 )
- + P("\255\254") * Cs( (
- four / function(b,a,d,c)
- local ab = 0xFF * byte(a) + byte(b)
- local cd = 0xFF * byte(c) + byte(d)
- return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
- end
- + two / function(b,a)
- return utfchar(byte(a)*256 + byte(b))
- end
- + one
- )^1 )
-
-function string.toutf(s) -- in string namespace
- return lpegmatch(pattern,s) or s -- todo: utf32
-end
-
-local validatedutf = Cs (
- (
- patterns.utf8one
- + patterns.utf8two
- + patterns.utf8three
- + patterns.utf8four
- + P(1) / "�"
- )^0
-)
-
-patterns.validatedutf = validatedutf
-
-function utf.is_valid(str)
- return type(str) == "string" and lpegmatch(validatedutf,str) or false
-end
-
-if not utf.len then
-
- -- -- alternative 1: 0.77
- --
- -- local utfcharcounter = utfbom^-1 * Cs((p_utf8char/'!')^0)
- --
- -- function utf.len(str)
- -- return #lpegmatch(utfcharcounter,str or "")
- -- end
- --
- -- -- alternative 2: 1.70
- --
- -- local n = 0
- --
- -- local utfcharcounter = utfbom^-1 * (p_utf8char/function() n = n + 1 end)^0 -- slow
- --
- -- function utf.length(str)
- -- n = 0
- -- lpegmatch(utfcharcounter,str or "")
- -- return n
- -- end
- --
- -- -- alternative 3: 0.24 (native unicode.utf8.len: 0.047)
-
- -- local n = 0
- --
- -- -- local utfcharcounter = lpeg.patterns.utfbom^-1 * P ( ( Cp() * (
- -- -- patterns.utf8one ^1 * Cc(1)
- -- -- + patterns.utf8two ^1 * Cc(2)
- -- -- + patterns.utf8three^1 * Cc(3)
- -- -- + patterns.utf8four ^1 * Cc(4) ) * Cp() / function(f,d,t) n = n + (t - f)/d end
- -- -- )^0 ) -- just as many captures as below
- --
- -- -- local utfcharcounter = lpeg.patterns.utfbom^-1 * P ( (
- -- -- (Cmt(patterns.utf8one ^1,function(_,_,s) n = n + #s return true end))
- -- -- + (Cmt(patterns.utf8two ^1,function(_,_,s) n = n + #s/2 return true end))
- -- -- + (Cmt(patterns.utf8three^1,function(_,_,s) n = n + #s/3 return true end))
- -- -- + (Cmt(patterns.utf8four ^1,function(_,_,s) n = n + #s/4 return true end))
- -- -- )^0 ) -- not interesting as it creates strings but sometimes faster
- --
- -- -- The best so far:
- --
- -- local utfcharcounter = utfbom^-1 * P ( (
- -- Cp() * (patterns.utf8one )^1 * Cp() / function(f,t) n = n + t - f end
- -- + Cp() * (patterns.utf8two )^1 * Cp() / function(f,t) n = n + (t - f)/2 end
- -- + Cp() * (patterns.utf8three)^1 * Cp() / function(f,t) n = n + (t - f)/3 end
- -- + Cp() * (patterns.utf8four )^1 * Cp() / function(f,t) n = n + (t - f)/4 end
- -- )^0 )
-
- -- function utf.len(str)
- -- n = 0
- -- lpegmatch(utfcharcounter,str or "")
- -- return n
- -- end
-
- local n, f = 0, 1
-
- local utfcharcounter = patterns.utfbom^-1 * Cmt (
- Cc(1) * patterns.utf8one ^1
- + Cc(2) * patterns.utf8two ^1
- + Cc(3) * patterns.utf8three^1
- + Cc(4) * patterns.utf8four ^1,
- function(_,t,d) -- due to Cc no string captures, so faster
- n = n + (t - f)/d
- f = t
- return true
- end
- )^0
-
- function utf.len(str)
- n, f = 0, 1
- lpegmatch(utfcharcounter,str or "")
- return n
- end
-
- -- -- these are quite a bit slower:
-
- -- utfcharcounter = utfbom^-1 * (Cmt(P(1) * R("\128\191")^0, function() n = n + 1 return true end))^0 -- 50+ times slower
- -- utfcharcounter = utfbom^-1 * (Cmt(P(1), function() n = n + 1 return true end) * R("\128\191")^0)^0 -- 50- times slower
-
-end
-
-utf.length = utf.len
-
-if not utf.sub then
-
- -- inefficient as lpeg just copies ^n
-
- -- local function sub(str,start,stop)
- -- local pattern = p_utf8char^-(start-1) * C(p_utf8char^-(stop-start+1))
- -- inspect(pattern)
- -- return lpegmatch(pattern,str) or ""
- -- end
-
- -- local b, e, n, first, last = 0, 0, 0, 0, 0
- --
- -- local function slide(s,p)
- -- n = n + 1
- -- if n == first then
- -- b = p
- -- if not last then
- -- return nil
- -- end
- -- end
- -- if n == last then
- -- e = p
- -- return nil
- -- else
- -- return p
- -- end
- -- end
- --
- -- local pattern = Cmt(p_utf8char,slide)^0
- --
- -- function utf.sub(str,start,stop) -- todo: from the end
- -- if not start then
- -- return str
- -- end
- -- b, e, n, first, last = 0, 0, 0, start, stop
- -- lpegmatch(pattern,str)
- -- if not stop then
- -- return sub(str,b)
- -- else
- -- return sub(str,b,e-1)
- -- end
- -- end
-
- -- print(utf.sub("Hans Hagen is my name"))
- -- print(utf.sub("Hans Hagen is my name",5))
- -- print(utf.sub("Hans Hagen is my name",5,10))
-
- local utflength = utf.length
-
- -- also negative indices, upto 10 times slower than a c variant
-
- local b, e, n, first, last = 0, 0, 0, 0, 0
-
- local function slide_zero(s,p)
- n = n + 1
- if n >= last then
- e = p - 1
- else
- return p
- end
- end
-
- local function slide_one(s,p)
- n = n + 1
- if n == first then
- b = p
- end
- if n >= last then
- e = p - 1
- else
- return p
- end
- end
-
- local function slide_two(s,p)
- n = n + 1
- if n == first then
- b = p
- else
- return true
- end
- end
-
- local pattern_zero = Cmt(p_utf8char,slide_zero)^0
- local pattern_one = Cmt(p_utf8char,slide_one )^0
- local pattern_two = Cmt(p_utf8char,slide_two )^0
-
- function utf.sub(str,start,stop)
- if not start then
- return str
- end
- if start == 0 then
- start = 1
- end
- if not stop then
- if start < 0 then
- local l = utflength(str) -- we can inline this function if needed
- start = l + start
- else
- start = start - 1
- end
- b, n, first = 0, 0, start
- lpegmatch(pattern_two,str)
- if n >= first then
- return sub(str,b)
- else
- return ""
- end
- end
- if start < 0 or stop < 0 then
- local l = utf.length(str)
- if start < 0 then
- start = l + start
- if start <= 0 then
- start = 1
- else
- start = start + 1
- end
- end
- if stop < 0 then
- stop = l + stop
- if stop == 0 then
- stop = 1
- else
- stop = stop + 1
- end
- end
- end
- if start > stop then
- return ""
- elseif start > 1 then
- b, e, n, first, last = 0, 0, 0, start - 1, stop
- lpegmatch(pattern_one,str)
- if n >= first and e == 0 then
- e = #str
- end
- return sub(str,b,e)
- else
- b, e, n, last = 1, 0, 0, stop
- lpegmatch(pattern_zero,str)
- if e == 0 then
- e = #str
- end
- return sub(str,b,e)
- end
- end
-
- -- local n = 100000
- -- local str = string.rep("123456à áâãäå",100)
- --
- -- for i=-15,15,1 do
- -- for j=-15,15,1 do
- -- if utf.xsub(str,i,j) ~= utf.sub(str,i,j) then
- -- print("error",i,j,"l>"..utf.xsub(str,i,j),"s>"..utf.sub(str,i,j))
- -- end
- -- end
- -- if utf.xsub(str,i) ~= utf.sub(str,i) then
- -- print("error",i,"l>"..utf.xsub(str,i),"s>"..utf.sub(str,i))
- -- end
- -- end
-
- -- print(" 1, 7",utf.xsub(str, 1, 7),utf.sub(str, 1, 7))
- -- print(" 0, 7",utf.xsub(str, 0, 7),utf.sub(str, 0, 7))
- -- print(" 0, 9",utf.xsub(str, 0, 9),utf.sub(str, 0, 9))
- -- print(" 4 ",utf.xsub(str, 4 ),utf.sub(str, 4 ))
- -- print(" 0 ",utf.xsub(str, 0 ),utf.sub(str, 0 ))
- -- print(" 0, 0",utf.xsub(str, 0, 0),utf.sub(str, 0, 0))
- -- print(" 4, 4",utf.xsub(str, 4, 4),utf.sub(str, 4, 4))
- -- print(" 4, 0",utf.xsub(str, 4, 0),utf.sub(str, 4, 0))
- -- print("-3, 0",utf.xsub(str,-3, 0),utf.sub(str,-3, 0))
- -- print(" 0,-3",utf.xsub(str, 0,-3),utf.sub(str, 0,-3))
- -- print(" 5,-3",utf.xsub(str,-5,-3),utf.sub(str,-5,-3))
- -- print("-3 ",utf.xsub(str,-3 ),utf.sub(str,-3 ))
-
-end
-
--- a replacement for simple gsubs:
-
-function utf.remapper(mapping)
- local pattern = Cs((p_utf8char/mapping)^0)
- return function(str)
- if not str or str == "" then
- return ""
- else
- return lpegmatch(pattern,str)
- end
- end, pattern
-end
-
--- local remap = utf.remapper { a = 'd', b = "c", c = "b", d = "a" }
--- print(remap("abcd 1234 abcd"))
-
---
-
-function utf.replacer(t) -- no precheck, always string builder
- local r = replacer(t,false,false,true)
- return function(str)
- return lpegmatch(r,str)
- end
-end
-
-function utf.subtituter(t) -- with precheck and no building if no match
- local f = finder (t)
- local r = replacer(t,false,false,true)
- return function(str)
- local i = lpegmatch(f,str)
- if not i then
- return str
- elseif i > #str then
- return str
- else
- -- return sub(str,1,i-2) .. lpegmatch(r,str,i-1) -- slower
- return lpegmatch(r,str)
- end
- end
-end
-
--- inspect(utf.split("a b c d"))
--- inspect(utf.split("a b c d",true))
-
-local utflinesplitter = p_utfbom^-1 * lpeg.tsplitat(p_newline)
-local utfcharsplitter_ows = p_utfbom^-1 * Ct(C(p_utf8char)^0)
-local utfcharsplitter_iws = p_utfbom^-1 * Ct((p_whitespace^1 + C(p_utf8char))^0)
-local utfcharsplitter_raw = Ct(C(p_utf8char)^0)
-
-patterns.utflinesplitter = utflinesplitter
-
-function utf.splitlines(str)
- return lpegmatch(utflinesplitter,str or "")
-end
-
-function utf.split(str,ignorewhitespace) -- new
- if ignorewhitespace then
- return lpegmatch(utfcharsplitter_iws,str or "")
- else
- return lpegmatch(utfcharsplitter_ows,str or "")
- end
-end
-
-function utf.totable(str) -- keeps bom
- return lpegmatch(utfcharsplitter_raw,str)
-end
-
--- 0 EF BB BF UTF-8
--- 1 FF FE UTF-16-little-endian
--- 2 FE FF UTF-16-big-endian
--- 3 FF FE 00 00 UTF-32-little-endian
--- 4 00 00 FE FF UTF-32-big-endian
---
--- \000 fails in <= 5.0 but is valid in >=5.1 where %z is depricated
-
--- utf.name = {
--- [0] = 'utf-8',
--- [1] = 'utf-16-le',
--- [2] = 'utf-16-be',
--- [3] = 'utf-32-le',
--- [4] = 'utf-32-be'
--- }
---
--- function utf.magic(f)
--- local str = f:read(4)
--- if not str then
--- f:seek('set')
--- return 0
--- -- elseif find(str,"^%z%z\254\255") then -- depricated
--- -- elseif find(str,"^\000\000\254\255") then -- not permitted and bugged
--- elseif find(str,"\000\000\254\255",1,true) then -- seems to work okay (TH)
--- return 4
--- -- elseif find(str,"^\255\254%z%z") then -- depricated
--- -- elseif find(str,"^\255\254\000\000") then -- not permitted and bugged
--- elseif find(str,"\255\254\000\000",1,true) then -- seems to work okay (TH)
--- return 3
--- elseif find(str,"^\254\255") then
--- f:seek('set',2)
--- return 2
--- elseif find(str,"^\255\254") then
--- f:seek('set',2)
--- return 1
--- elseif find(str,"^\239\187\191") then
--- f:seek('set',3)
--- return 0
--- else
--- f:seek('set')
--- return 0
--- end
--- end
-
-function utf.magic(f) -- not used
- local str = f:read(4) or ""
- local off = lpegmatch(p_utfoffset,str)
- if off < 4 then
- f:seek('set',off)
- end
- return lpegmatch(p_utftype,str)
-end
-
-local function utf16_to_utf8_be(t)
- if type(t) == "string" then
- t = lpegmatch(utflinesplitter,t)
- end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, 0
- for left, right in bytepairs(t[i]) do
- if right then
- local now = 256*left + right
- if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
- more = 0
- r = r + 1
- result[r] = utfchar(now)
- elseif now >= 0xD800 and now <= 0xDBFF then
- more = now
- else
- r = r + 1
- result[r] = utfchar(now)
- end
- end
- end
- t[i] = concat(result,"",1,r) -- we reused tmp, hence t
- end
- return t
-end
-
-local function utf16_to_utf8_le(t)
- if type(t) == "string" then
- t = lpegmatch(utflinesplitter,t)
- end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, 0
- for left, right in bytepairs(t[i]) do
- if right then
- local now = 256*right + left
- if more > 0 then
- now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
- more = 0
- r = r + 1
- result[r] = utfchar(now)
- elseif now >= 0xD800 and now <= 0xDBFF then
- more = now
- else
- r = r + 1
- result[r] = utfchar(now)
- end
- end
- end
- t[i] = concat(result,"",1,r) -- we reused tmp, hence t
- end
- return t
-end
-
-local function utf32_to_utf8_be(t)
- if type(t) == "string" then
- t = lpegmatch(utflinesplitter,t)
- end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, -1
- for a,b in bytepairs(t[i]) do
- if a and b then
- if more < 0 then
- more = 256*256*256*a + 256*256*b
- else
- r = r + 1
- result[t] = utfchar(more + 256*a + b)
- more = -1
- end
- else
- break
- end
- end
- t[i] = concat(result,"",1,r)
- end
- return t
-end
-
-local function utf32_to_utf8_le(t)
- if type(t) == "string" then
- t = lpegmatch(utflinesplitter,t)
- end
- local result = { } -- we reuse result
- for i=1,#t do
- local r, more = 0, -1
- for a,b in bytepairs(t[i]) do
- if a and b then
- if more < 0 then
- more = 256*b + a
- else
- r = r + 1
- result[t] = utfchar(more + 256*256*256*b + 256*256*a)
- more = -1
- end
- else
- break
- end
- end
- t[i] = concat(result,"",1,r)
- end
- return t
-end
-
-utf.utf32_to_utf8_be = utf32_to_utf8_be
-utf.utf32_to_utf8_le = utf32_to_utf8_le
-utf.utf16_to_utf8_be = utf16_to_utf8_be
-utf.utf16_to_utf8_le = utf16_to_utf8_le
-
-function utf.utf8_to_utf8(t)
- return type(t) == "string" and lpegmatch(utflinesplitter,t) or t
-end
-
-function utf.utf16_to_utf8(t,endian)
- return endian and utf16_to_utf8_be(t) or utf16_to_utf8_le(t) or t
-end
-
-function utf.utf32_to_utf8(t,endian)
- return endian and utf32_to_utf8_be(t) or utf32_to_utf8_le(t) or t
-end
-
-local function little(c)
- local b = byte(c)
- if b < 0x10000 then
- return char(b%256,b/256)
- else
- b = b - 0x10000
- local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00
- return char(b1%256,b1/256,b2%256,b2/256)
- end
-end
-
-local function big(c)
- local b = byte(c)
- if b < 0x10000 then
- return char(b/256,b%256)
- else
- b = b - 0x10000
- local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00
- return char(b1/256,b1%256,b2/256,b2%256)
- end
-end
-
--- function utf.utf8_to_utf16(str,littleendian)
--- if littleendian then
--- return char(255,254) .. utfgsub(str,".",little)
--- else
--- return char(254,255) .. utfgsub(str,".",big)
--- end
--- end
-
-local _, l_remap = utf.remapper(little)
-local _, b_remap = utf.remapper(big)
-
-function utf.utf8_to_utf16(str,littleendian)
- if littleendian then
- return char(255,254) .. lpegmatch(l_remap,str)
- else
- return char(254,255) .. lpegmatch(b_remap,str)
- end
-end
-
--- function utf.tocodes(str,separator) -- can be sped up with an lpeg
--- local t, n = { }, 0
--- for u in utfvalues(str) do
--- n = n + 1
--- t[n] = format("0x%04X",u)
--- end
--- return concat(t,separator or " ")
--- end
-
-local pattern = Cs (
- (p_utf8byte / function(unicode ) return format( "0x%04X", unicode) end) *
- (p_utf8byte * Carg(1) / function(unicode,separator) return format("%s0x%04X",separator,unicode) end)^0
-)
-
-function utf.tocodes(str,separator)
- return lpegmatch(pattern,str,1,separator or " ")
-end
-
-function utf.ustring(s)
- return format("U+%05X",type(s) == "number" and s or utfbyte(s))
-end
-
-function utf.xstring(s)
- return format("0x%05X",type(s) == "number" and s or utfbyte(s))
-end
-
---
-
-local p_nany = p_utf8char / ""
-
-if utfgmatch then
-
- function utf.count(str,what)
- if type(what) == "string" then
- local n = 0
- for _ in utfgmatch(str,what) do
- n = n + 1
- end
- return n
- else -- 4 times slower but still faster than / function
- return #lpegmatch(Cs((P(what)/" " + p_nany)^0),str)
- end
- end
-
-else
-
- local cache = { }
-
- function utf.count(str,what)
- if type(what) == "string" then
- local p = cache[what]
- if not p then
- p = Cs((P(what)/" " + p_nany)^0)
- cache[p] = p
- end
- return #lpegmatch(p,str)
- else -- 4 times slower but still faster than / function
- return #lpegmatch(Cs((P(what)/" " + p_nany)^0),str)
- end
- end
-
-end
-
--- maybe also register as string.utf*
-
-
-if not utf.characters then
-
- -- New: this gmatch hack is taken from the Lua 5.2 book. It's about two times slower
- -- than the built-in string.utfcharacters.
-
- function utf.characters(str)
- return gmatch(str,".[\128-\191]*")
- end
-
- string.utfcharacters = utf.characters
-
-end
-
-if not utf.values then
-
- -- So, a logical next step is to check for the values variant. It over five times
- -- slower than the built-in string.utfvalues. I optimized it a bit for n=0,1.
-
- ----- wrap, yield, gmatch = coroutine.wrap, coroutine.yield, string.gmatch
- local find = string.find
-
- local dummy = function()
- -- we share this one
- end
-
- -- function utf.values(str)
- -- local n = #str
- -- if n == 0 then
- -- return wrap(dummy)
- -- elseif n == 1 then
- -- return wrap(function() yield(utfbyte(str)) end)
- -- else
- -- return wrap(function() for s in gmatch(str,".[\128-\191]*") do
- -- yield(utfbyte(s))
- -- end end)
- -- end
- -- end
- --
- -- faster:
-
- function utf.values(str)
- local n = #str
- if n == 0 then
- return dummy
- elseif n == 1 then
- return function() return utfbyte(str) end
- else
- local p = 1
- -- local n = #str
- return function()
- -- if p <= n then -- slower than the last find
- local b, e = find(str,".[\128-\191]*",p)
- if b then
- p = e + 1
- return utfbyte(sub(str,b,e))
- end
- -- end
- end
- end
- end
-
- -- slower:
- --
- -- local pattern = C(patterns.utf8character) * Cp()
- -- ----- pattern = patterns.utf8character/utfbyte * Cp()
- -- ----- pattern = patterns.utf8byte * Cp()
- --
- -- function utf.values(str) -- one of the cases where a find is faster than an lpeg
- -- local n = #str
- -- if n == 0 then
- -- return dummy
- -- elseif n == 1 then
- -- return function() return utfbyte(str) end
- -- else
- -- local p = 1
- -- return function()
- -- local s, e = lpegmatch(pattern,str,p)
- -- if e then
- -- p = e
- -- return utfbyte(s)
- -- -- return s
- -- end
- -- end
- -- end
- -- end
-
- string.utfvalues = utf.values
-
-end
+if not modules then modules = { } end modules ['l-unicode'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- this module will be reorganized
+
+-- todo: utf.sub replacement (used in syst-aux)
+
+-- we put these in the utf namespace:
+
+utf = utf or (unicode and unicode.utf8) or { }
+
+utf.characters = utf.characters or string.utfcharacters
+utf.values = utf.values or string.utfvalues
+
+-- string.utfvalues
+-- string.utfcharacters
+-- string.characters
+-- string.characterpairs
+-- string.bytes
+-- string.bytepairs
+
+local type = type
+local char, byte, format, sub = string.char, string.byte, string.format, string.sub
+local concat = table.concat
+local P, C, R, Cs, Ct, Cmt, Cc, Carg, Cp = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs, lpeg.Ct, lpeg.Cmt, lpeg.Cc, lpeg.Carg, lpeg.Cp
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
+
+local bytepairs = string.bytepairs
+
+local finder = lpeg.finder
+local replacer = lpeg.replacer
+
+local utfvalues = utf.values
+local utfgmatch = utf.gmatch -- not always present
+
+local p_utftype = patterns.utftype
+local p_utfoffset = patterns.utfoffset
+local p_utf8char = patterns.utf8char
+local p_utf8byte = patterns.utf8byte
+local p_utfbom = patterns.utfbom
+local p_newline = patterns.newline
+local p_whitespace = patterns.whitespace
+
+if not unicode then
+
+ unicode = { utf = utf } -- for a while
+
+end
+
+if not utf.char then
+
+ local floor, char = math.floor, string.char
+
+ function utf.char(n)
+ if n < 0x80 then
+ -- 0aaaaaaa : 0x80
+ return char(n)
+ elseif n < 0x800 then
+ -- 110bbbaa : 0xC0 : n >> 6
+ -- 10aaaaaa : 0x80 : n & 0x3F
+ return char(
+ 0xC0 + floor(n/0x40),
+ 0x80 + (n % 0x40)
+ )
+ elseif n < 0x10000 then
+ -- 1110bbbb : 0xE0 : n >> 12
+ -- 10bbbbaa : 0x80 : (n >> 6) & 0x3F
+ -- 10aaaaaa : 0x80 : n & 0x3F
+ return char(
+ 0xE0 + floor(n/0x1000),
+ 0x80 + (floor(n/0x40) % 0x40),
+ 0x80 + (n % 0x40)
+ )
+ elseif n < 0x200000 then
+ -- 11110ccc : 0xF0 : n >> 18
+ -- 10ccbbbb : 0x80 : (n >> 12) & 0x3F
+ -- 10bbbbaa : 0x80 : (n >> 6) & 0x3F
+ -- 10aaaaaa : 0x80 : n & 0x3F
+ -- dddd : ccccc - 1
+ return char(
+ 0xF0 + floor(n/0x40000),
+ 0x80 + (floor(n/0x1000) % 0x40),
+ 0x80 + (floor(n/0x40) % 0x40),
+ 0x80 + (n % 0x40)
+ )
+ else
+ return ""
+ end
+ end
+
+end
+
+if not utf.byte then
+
+ local utf8byte = patterns.utf8byte
+
+ function utf.byte(c)
+ return lpegmatch(utf8byte,c)
+ end
+
+end
+
+local utfchar, utfbyte = utf.char, utf.byte
+
+-- As we want to get rid of the (unmaintained) utf library we implement our own
+-- variants (in due time an independent module):
+
+function utf.filetype(data)
+ return data and lpegmatch(p_utftype,data) or "unknown"
+end
+
+local toentities = Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+            ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#x%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
+
+-- local utfchr = { } -- 60K -> 2.638 M extra mem but currently not called that often (on latin)
+--
+-- setmetatable(utfchr, { __index = function(t,k) local v = utfchar(k) t[k] = v return v end } )
+--
+-- collectgarbage("collect")
+-- local u = collectgarbage("count")*1024
+-- local t = os.clock()
+-- for i=1,1000 do
+-- for i=1,600 do
+-- local a = utfchr[i]
+-- end
+-- end
+-- print(os.clock()-t,collectgarbage("count")*1024-u)
+
+-- collectgarbage("collect")
+-- local t = os.clock()
+-- for i=1,1000 do
+-- for i=1,600 do
+-- local a = utfchar(i)
+-- end
+-- end
+-- print(os.clock()-t,collectgarbage("count")*1024-u)
+
+-- local byte = string.byte
+-- local utfchar = utf.char
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+-- actually one of them is already utf ... sort of useless this one
+
+-- function utf.char(n)
+-- if n < 0x80 then
+-- return char(n)
+-- elseif n < 0x800 then
+-- return char(
+-- 0xC0 + floor(n/0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- elseif n < 0x10000 then
+-- return char(
+-- 0xE0 + floor(n/0x1000),
+-- 0x80 + (floor(n/0x40) % 0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- elseif n < 0x40000 then
+-- return char(
+-- 0xF0 + floor(n/0x40000),
+-- 0x80 + floor(n/0x1000),
+-- 0x80 + (floor(n/0x40) % 0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- else
+-- -- return char(
+-- -- 0xF1 + floor(n/0x1000000),
+-- -- 0x80 + floor(n/0x40000),
+-- -- 0x80 + floor(n/0x1000),
+-- -- 0x80 + (floor(n/0x40) % 0x40),
+-- -- 0x80 + (n % 0x40)
+-- -- )
+-- return "?"
+-- end
+-- end
+--
+-- merge into:
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s) -- in string namespace
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
+
+local validatedutf = Cs (
+ (
+ patterns.utf8one
+ + patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ + P(1) / "�"
+ )^0
+)
+
+patterns.validatedutf = validatedutf
+
+function utf.is_valid(str)
+ return type(str) == "string" and lpegmatch(validatedutf,str) or false
+end
+
+if not utf.len then
+
+ -- -- alternative 1: 0.77
+ --
+ -- local utfcharcounter = utfbom^-1 * Cs((p_utf8char/'!')^0)
+ --
+ -- function utf.len(str)
+ -- return #lpegmatch(utfcharcounter,str or "")
+ -- end
+ --
+ -- -- alternative 2: 1.70
+ --
+ -- local n = 0
+ --
+ -- local utfcharcounter = utfbom^-1 * (p_utf8char/function() n = n + 1 end)^0 -- slow
+ --
+ -- function utf.length(str)
+ -- n = 0
+ -- lpegmatch(utfcharcounter,str or "")
+ -- return n
+ -- end
+ --
+ -- -- alternative 3: 0.24 (native unicode.utf8.len: 0.047)
+
+ -- local n = 0
+ --
+ -- -- local utfcharcounter = lpeg.patterns.utfbom^-1 * P ( ( Cp() * (
+ -- -- patterns.utf8one ^1 * Cc(1)
+ -- -- + patterns.utf8two ^1 * Cc(2)
+ -- -- + patterns.utf8three^1 * Cc(3)
+ -- -- + patterns.utf8four ^1 * Cc(4) ) * Cp() / function(f,d,t) n = n + (t - f)/d end
+ -- -- )^0 ) -- just as many captures as below
+ --
+ -- -- local utfcharcounter = lpeg.patterns.utfbom^-1 * P ( (
+ -- -- (Cmt(patterns.utf8one ^1,function(_,_,s) n = n + #s return true end))
+ -- -- + (Cmt(patterns.utf8two ^1,function(_,_,s) n = n + #s/2 return true end))
+ -- -- + (Cmt(patterns.utf8three^1,function(_,_,s) n = n + #s/3 return true end))
+ -- -- + (Cmt(patterns.utf8four ^1,function(_,_,s) n = n + #s/4 return true end))
+ -- -- )^0 ) -- not interesting as it creates strings but sometimes faster
+ --
+ -- -- The best so far:
+ --
+ -- local utfcharcounter = utfbom^-1 * P ( (
+ -- Cp() * (patterns.utf8one )^1 * Cp() / function(f,t) n = n + t - f end
+ -- + Cp() * (patterns.utf8two )^1 * Cp() / function(f,t) n = n + (t - f)/2 end
+ -- + Cp() * (patterns.utf8three)^1 * Cp() / function(f,t) n = n + (t - f)/3 end
+ -- + Cp() * (patterns.utf8four )^1 * Cp() / function(f,t) n = n + (t - f)/4 end
+ -- )^0 )
+
+ -- function utf.len(str)
+ -- n = 0
+ -- lpegmatch(utfcharcounter,str or "")
+ -- return n
+ -- end
+
+ local n, f = 0, 1
+
+ local utfcharcounter = patterns.utfbom^-1 * Cmt (
+ Cc(1) * patterns.utf8one ^1
+ + Cc(2) * patterns.utf8two ^1
+ + Cc(3) * patterns.utf8three^1
+ + Cc(4) * patterns.utf8four ^1,
+ function(_,t,d) -- due to Cc no string captures, so faster
+ n = n + (t - f)/d
+ f = t
+ return true
+ end
+ )^0
+
+ function utf.len(str)
+ n, f = 0, 1
+ lpegmatch(utfcharcounter,str or "")
+ return n
+ end
+
+ -- -- these are quite a bit slower:
+
+ -- utfcharcounter = utfbom^-1 * (Cmt(P(1) * R("\128\191")^0, function() n = n + 1 return true end))^0 -- 50+ times slower
+ -- utfcharcounter = utfbom^-1 * (Cmt(P(1), function() n = n + 1 return true end) * R("\128\191")^0)^0 -- 50- times slower
+
+end
+
+utf.length = utf.len
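+
+-- A quick check of the counter above: the byte length of "àbc" is 4 while the
+-- character length is 3 (illustration only).
+--
+-- print(#"àbc",utf.len("àbc")) -- 4 3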
+
+if not utf.sub then
+
+ -- inefficient as lpeg just copies ^n
+
+ -- local function sub(str,start,stop)
+ -- local pattern = p_utf8char^-(start-1) * C(p_utf8char^-(stop-start+1))
+ -- inspect(pattern)
+ -- return lpegmatch(pattern,str) or ""
+ -- end
+
+ -- local b, e, n, first, last = 0, 0, 0, 0, 0
+ --
+ -- local function slide(s,p)
+ -- n = n + 1
+ -- if n == first then
+ -- b = p
+ -- if not last then
+ -- return nil
+ -- end
+ -- end
+ -- if n == last then
+ -- e = p
+ -- return nil
+ -- else
+ -- return p
+ -- end
+ -- end
+ --
+ -- local pattern = Cmt(p_utf8char,slide)^0
+ --
+ -- function utf.sub(str,start,stop) -- todo: from the end
+ -- if not start then
+ -- return str
+ -- end
+ -- b, e, n, first, last = 0, 0, 0, start, stop
+ -- lpegmatch(pattern,str)
+ -- if not stop then
+ -- return sub(str,b)
+ -- else
+ -- return sub(str,b,e-1)
+ -- end
+ -- end
+
+ -- print(utf.sub("Hans Hagen is my name"))
+ -- print(utf.sub("Hans Hagen is my name",5))
+ -- print(utf.sub("Hans Hagen is my name",5,10))
+
+ local utflength = utf.length
+
+    -- also negative indices, up to 10 times slower than a C variant
+
+ local b, e, n, first, last = 0, 0, 0, 0, 0
+
+ local function slide_zero(s,p)
+ n = n + 1
+ if n >= last then
+ e = p - 1
+ else
+ return p
+ end
+ end
+
+ local function slide_one(s,p)
+ n = n + 1
+ if n == first then
+ b = p
+ end
+ if n >= last then
+ e = p - 1
+ else
+ return p
+ end
+ end
+
+ local function slide_two(s,p)
+ n = n + 1
+ if n == first then
+ b = p
+ else
+ return true
+ end
+ end
+
+ local pattern_zero = Cmt(p_utf8char,slide_zero)^0
+ local pattern_one = Cmt(p_utf8char,slide_one )^0
+ local pattern_two = Cmt(p_utf8char,slide_two )^0
+
+ function utf.sub(str,start,stop)
+ if not start then
+ return str
+ end
+ if start == 0 then
+ start = 1
+ end
+ if not stop then
+ if start < 0 then
+ local l = utflength(str) -- we can inline this function if needed
+ start = l + start
+ else
+ start = start - 1
+ end
+ b, n, first = 0, 0, start
+ lpegmatch(pattern_two,str)
+ if n >= first then
+ return sub(str,b)
+ else
+ return ""
+ end
+ end
+ if start < 0 or stop < 0 then
+ local l = utf.length(str)
+ if start < 0 then
+ start = l + start
+ if start <= 0 then
+ start = 1
+ else
+ start = start + 1
+ end
+ end
+ if stop < 0 then
+ stop = l + stop
+ if stop == 0 then
+ stop = 1
+ else
+ stop = stop + 1
+ end
+ end
+ end
+ if start > stop then
+ return ""
+ elseif start > 1 then
+ b, e, n, first, last = 0, 0, 0, start - 1, stop
+ lpegmatch(pattern_one,str)
+ if n >= first and e == 0 then
+ e = #str
+ end
+ return sub(str,b,e)
+ else
+ b, e, n, last = 1, 0, 0, stop
+ lpegmatch(pattern_zero,str)
+ if e == 0 then
+ e = #str
+ end
+ return sub(str,b,e)
+ end
+ end
+
+ -- local n = 100000
+ -- local str = string.rep("123456à áâãäå",100)
+ --
+ -- for i=-15,15,1 do
+ -- for j=-15,15,1 do
+ -- if utf.xsub(str,i,j) ~= utf.sub(str,i,j) then
+ -- print("error",i,j,"l>"..utf.xsub(str,i,j),"s>"..utf.sub(str,i,j))
+ -- end
+ -- end
+ -- if utf.xsub(str,i) ~= utf.sub(str,i) then
+ -- print("error",i,"l>"..utf.xsub(str,i),"s>"..utf.sub(str,i))
+ -- end
+ -- end
+
+ -- print(" 1, 7",utf.xsub(str, 1, 7),utf.sub(str, 1, 7))
+ -- print(" 0, 7",utf.xsub(str, 0, 7),utf.sub(str, 0, 7))
+ -- print(" 0, 9",utf.xsub(str, 0, 9),utf.sub(str, 0, 9))
+ -- print(" 4 ",utf.xsub(str, 4 ),utf.sub(str, 4 ))
+ -- print(" 0 ",utf.xsub(str, 0 ),utf.sub(str, 0 ))
+ -- print(" 0, 0",utf.xsub(str, 0, 0),utf.sub(str, 0, 0))
+ -- print(" 4, 4",utf.xsub(str, 4, 4),utf.sub(str, 4, 4))
+ -- print(" 4, 0",utf.xsub(str, 4, 0),utf.sub(str, 4, 0))
+ -- print("-3, 0",utf.xsub(str,-3, 0),utf.sub(str,-3, 0))
+ -- print(" 0,-3",utf.xsub(str, 0,-3),utf.sub(str, 0,-3))
+ -- print(" 5,-3",utf.xsub(str,-5,-3),utf.sub(str,-5,-3))
+ -- print("-3 ",utf.xsub(str,-3 ),utf.sub(str,-3 ))
+
+end
+
+-- a replacement for simple gsubs:
+
+function utf.remapper(mapping)
+ local pattern = Cs((p_utf8char/mapping)^0)
+ return function(str)
+ if not str or str == "" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+ end, pattern
+end
+
+-- local remap = utf.remapper { a = 'd', b = "c", c = "b", d = "a" }
+-- print(remap("abcd 1234 abcd"))
+
+--
+
+function utf.replacer(t) -- no precheck, always string builder
+ local r = replacer(t,false,false,true)
+ return function(str)
+ return lpegmatch(r,str)
+ end
+end
+
+function utf.subtituter(t) -- with precheck and no building if no match
+ local f = finder (t)
+ local r = replacer(t,false,false,true)
+ return function(str)
+ local i = lpegmatch(f,str)
+ if not i then
+ return str
+ elseif i > #str then
+ return str
+ else
+ -- return sub(str,1,i-2) .. lpegmatch(r,str,i-1) -- slower
+ return lpegmatch(r,str)
+ end
+ end
+end
+
+-- inspect(utf.split("a b c d"))
+-- inspect(utf.split("a b c d",true))
+
+local utflinesplitter = p_utfbom^-1 * lpeg.tsplitat(p_newline)
+local utfcharsplitter_ows = p_utfbom^-1 * Ct(C(p_utf8char)^0)
+local utfcharsplitter_iws = p_utfbom^-1 * Ct((p_whitespace^1 + C(p_utf8char))^0)
+local utfcharsplitter_raw = Ct(C(p_utf8char)^0)
+
+patterns.utflinesplitter = utflinesplitter
+
+function utf.splitlines(str)
+ return lpegmatch(utflinesplitter,str or "")
+end
+
+function utf.split(str,ignorewhitespace) -- new
+ if ignorewhitespace then
+ return lpegmatch(utfcharsplitter_iws,str or "")
+ else
+ return lpegmatch(utfcharsplitter_ows,str or "")
+ end
+end
+
+function utf.totable(str) -- keeps bom
+ return lpegmatch(utfcharsplitter_raw,str)
+end
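+
+-- For instance (illustration only): characters are kept intact as utf
+-- sequences, and unlike the split variants a leading bom is not skipped here.
+--
+-- inspect(utf.totable("àbc")) -- { "à", "b", "c" }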
+
+-- 0 EF BB BF UTF-8
+-- 1 FF FE UTF-16-little-endian
+-- 2 FE FF UTF-16-big-endian
+-- 3 FF FE 00 00 UTF-32-little-endian
+-- 4 00 00 FE FF UTF-32-big-endian
+--
+-- \000 fails in <= 5.0 but is valid in >=5.1 where %z is deprecated
+
+-- utf.name = {
+-- [0] = 'utf-8',
+-- [1] = 'utf-16-le',
+-- [2] = 'utf-16-be',
+-- [3] = 'utf-32-le',
+-- [4] = 'utf-32-be'
+-- }
+--
+-- function utf.magic(f)
+-- local str = f:read(4)
+-- if not str then
+-- f:seek('set')
+-- return 0
+-- -- elseif find(str,"^%z%z\254\255") then -- depricated
+-- -- elseif find(str,"^\000\000\254\255") then -- not permitted and bugged
+-- elseif find(str,"\000\000\254\255",1,true) then -- seems to work okay (TH)
+-- return 4
+-- -- elseif find(str,"^\255\254%z%z") then -- depricated
+-- -- elseif find(str,"^\255\254\000\000") then -- not permitted and bugged
+-- elseif find(str,"\255\254\000\000",1,true) then -- seems to work okay (TH)
+-- return 3
+-- elseif find(str,"^\254\255") then
+-- f:seek('set',2)
+-- return 2
+-- elseif find(str,"^\255\254") then
+-- f:seek('set',2)
+-- return 1
+-- elseif find(str,"^\239\187\191") then
+-- f:seek('set',3)
+-- return 0
+-- else
+-- f:seek('set')
+-- return 0
+-- end
+-- end
+
+function utf.magic(f) -- not used
+ local str = f:read(4) or ""
+ local off = lpegmatch(p_utfoffset,str)
+ if off < 4 then
+ f:seek('set',off)
+ end
+ return lpegmatch(p_utftype,str)
+end
+
+local function utf16_to_utf8_be(t)
+ if type(t) == "string" then
+ t = lpegmatch(utflinesplitter,t)
+ end
+ local result = { } -- we reuse result
+ for i=1,#t do
+ local r, more = 0, 0
+ for left, right in bytepairs(t[i]) do
+ if right then
+ local now = 256*left + right
+ if more > 0 then
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
+ more = 0
+ r = r + 1
+ result[r] = utfchar(now)
+ elseif now >= 0xD800 and now <= 0xDBFF then
+ more = now
+ else
+ r = r + 1
+ result[r] = utfchar(now)
+ end
+ end
+ end
+ t[i] = concat(result,"",1,r) -- we reused tmp, hence t
+ end
+ return t
+end
+
+local function utf16_to_utf8_le(t)
+ if type(t) == "string" then
+ t = lpegmatch(utflinesplitter,t)
+ end
+ local result = { } -- we reuse result
+ for i=1,#t do
+ local r, more = 0, 0
+ for left, right in bytepairs(t[i]) do
+ if right then
+ local now = 256*right + left
+ if more > 0 then
+ now = (more-0xD800)*0x400 + (now-0xDC00) + 0x10000 -- the 0x10000 smells wrong
+ more = 0
+ r = r + 1
+ result[r] = utfchar(now)
+ elseif now >= 0xD800 and now <= 0xDBFF then
+ more = now
+ else
+ r = r + 1
+ result[r] = utfchar(now)
+ end
+ end
+ end
+ t[i] = concat(result,"",1,r) -- we reused tmp, hence t
+ end
+ return t
+end
+
+local function utf32_to_utf8_be(t)
+ if type(t) == "string" then
+ t = lpegmatch(utflinesplitter,t)
+ end
+ local result = { } -- we reuse result
+ for i=1,#t do
+ local r, more = 0, -1
+ for a,b in bytepairs(t[i]) do
+ if a and b then
+ if more < 0 then
+ more = 256*256*256*a + 256*256*b
+ else
+ r = r + 1
+                    result[r] = utfchar(more + 256*a + b)
+ more = -1
+ end
+ else
+ break
+ end
+ end
+ t[i] = concat(result,"",1,r)
+ end
+ return t
+end
+
+local function utf32_to_utf8_le(t)
+ if type(t) == "string" then
+ t = lpegmatch(utflinesplitter,t)
+ end
+ local result = { } -- we reuse result
+ for i=1,#t do
+ local r, more = 0, -1
+ for a,b in bytepairs(t[i]) do
+ if a and b then
+ if more < 0 then
+ more = 256*b + a
+ else
+ r = r + 1
+                    result[r] = utfchar(more + 256*256*256*b + 256*256*a)
+ more = -1
+ end
+ else
+ break
+ end
+ end
+ t[i] = concat(result,"",1,r)
+ end
+ return t
+end
+
+utf.utf32_to_utf8_be = utf32_to_utf8_be
+utf.utf32_to_utf8_le = utf32_to_utf8_le
+utf.utf16_to_utf8_be = utf16_to_utf8_be
+utf.utf16_to_utf8_le = utf16_to_utf8_le
+
+function utf.utf8_to_utf8(t)
+ return type(t) == "string" and lpegmatch(utflinesplitter,t) or t
+end
+
+function utf.utf16_to_utf8(t,endian)
+ return endian and utf16_to_utf8_be(t) or utf16_to_utf8_le(t) or t
+end
+
+function utf.utf32_to_utf8(t,endian)
+ return endian and utf32_to_utf8_be(t) or utf32_to_utf8_le(t) or t
+end
+
+local function little(c)
+ local b = byte(c)
+ if b < 0x10000 then
+ return char(b%256,b/256)
+ else
+ b = b - 0x10000
+ local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00
+ return char(b1%256,b1/256,b2%256,b2/256)
+ end
+end
+
+local function big(c)
+ local b = byte(c)
+ if b < 0x10000 then
+ return char(b/256,b%256)
+ else
+ b = b - 0x10000
+ local b1, b2 = b/1024 + 0xD800, b%1024 + 0xDC00
+ return char(b1/256,b1%256,b2/256,b2%256)
+ end
+end
+
+-- function utf.utf8_to_utf16(str,littleendian)
+-- if littleendian then
+-- return char(255,254) .. utfgsub(str,".",little)
+-- else
+-- return char(254,255) .. utfgsub(str,".",big)
+-- end
+-- end
+
+local _, l_remap = utf.remapper(little)
+local _, b_remap = utf.remapper(big)
+
+function utf.utf8_to_utf16(str,littleendian)
+ if littleendian then
+ return char(255,254) .. lpegmatch(l_remap,str)
+ else
+ return char(254,255) .. lpegmatch(b_remap,str)
+ end
+end
+
+-- function utf.tocodes(str,separator) -- can be sped up with an lpeg
+-- local t, n = { }, 0
+-- for u in utfvalues(str) do
+-- n = n + 1
+-- t[n] = format("0x%04X",u)
+-- end
+-- return concat(t,separator or " ")
+-- end
+
+local pattern = Cs (
+ (p_utf8byte / function(unicode ) return format( "0x%04X", unicode) end) *
+ (p_utf8byte * Carg(1) / function(unicode,separator) return format("%s0x%04X",separator,unicode) end)^0
+)
+
+function utf.tocodes(str,separator)
+ return lpegmatch(pattern,str,1,separator or " ")
+end
+
+function utf.ustring(s)
+ return format("U+%05X",type(s) == "number" and s or utfbyte(s))
+end
+
+function utf.xstring(s)
+ return format("0x%05X",type(s) == "number" and s or utfbyte(s))
+end
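+
+-- A few quick examples (illustration only):
+--
+-- print(utf.tocodes("abc")) -- 0x0061 0x0062 0x0063
+-- print(utf.tocodes("abc","-")) -- 0x0061-0x0062-0x0063
+-- print(utf.ustring("é")) -- U+000E9
+-- print(utf.xstring(0xE9)) -- 0x000E9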
+
+--
+
+local p_nany = p_utf8char / ""
+
+if utfgmatch then
+
+ function utf.count(str,what)
+ if type(what) == "string" then
+ local n = 0
+ for _ in utfgmatch(str,what) do
+ n = n + 1
+ end
+ return n
+ else -- 4 times slower but still faster than / function
+ return #lpegmatch(Cs((P(what)/" " + p_nany)^0),str)
+ end
+ end
+
+else
+
+ local cache = { }
+
+ function utf.count(str,what)
+ if type(what) == "string" then
+ local p = cache[what]
+ if not p then
+ p = Cs((P(what)/" " + p_nany)^0)
+                cache[what] = p
+ end
+ return #lpegmatch(p,str)
+ else -- 4 times slower but still faster than / function
+ return #lpegmatch(Cs((P(what)/" " + p_nany)^0),str)
+ end
+ end
+
+end
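+
+-- Both branches behave the same for a string argument; a pattern can be
+-- passed as well (illustration only):
+--
+-- print(utf.count("ábác","á")) -- 2
+-- print(utf.count("ábác",lpeg.P("á"))) -- 2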
+
+-- maybe also register as string.utf*
+
+
+if not utf.characters then
+
+ -- New: this gmatch hack is taken from the Lua 5.2 book. It's about two times slower
+ -- than the built-in string.utfcharacters.
+
+    local gmatch = string.gmatch -- not localized at the top of this module
+
+    function utf.characters(str)
+ return gmatch(str,".[\128-\191]*")
+ end
+
+ string.utfcharacters = utf.characters
+
+end
+
+if not utf.values then
+
+    -- So, a logical next step is to check for the values variant. It is over five times
+ -- slower than the built-in string.utfvalues. I optimized it a bit for n=0,1.
+
+ ----- wrap, yield, gmatch = coroutine.wrap, coroutine.yield, string.gmatch
+ local find = string.find
+
+ local dummy = function()
+ -- we share this one
+ end
+
+ -- function utf.values(str)
+ -- local n = #str
+ -- if n == 0 then
+ -- return wrap(dummy)
+ -- elseif n == 1 then
+ -- return wrap(function() yield(utfbyte(str)) end)
+ -- else
+ -- return wrap(function() for s in gmatch(str,".[\128-\191]*") do
+ -- yield(utfbyte(s))
+ -- end end)
+ -- end
+ -- end
+ --
+ -- faster:
+
+ function utf.values(str)
+ local n = #str
+ if n == 0 then
+ return dummy
+ elseif n == 1 then
+ return function() return utfbyte(str) end
+ else
+ local p = 1
+ -- local n = #str
+ return function()
+ -- if p <= n then -- slower than the last find
+ local b, e = find(str,".[\128-\191]*",p)
+ if b then
+ p = e + 1
+ return utfbyte(sub(str,b,e))
+ end
+ -- end
+ end
+ end
+ end
+
+ -- slower:
+ --
+ -- local pattern = C(patterns.utf8character) * Cp()
+ -- ----- pattern = patterns.utf8character/utfbyte * Cp()
+ -- ----- pattern = patterns.utf8byte * Cp()
+ --
+ -- function utf.values(str) -- one of the cases where a find is faster than an lpeg
+ -- local n = #str
+ -- if n == 0 then
+ -- return dummy
+ -- elseif n == 1 then
+ -- return function() return utfbyte(str) end
+ -- else
+ -- local p = 1
+ -- return function()
+ -- local s, e = lpegmatch(pattern,str,p)
+ -- if e then
+ -- p = e
+ -- return utfbyte(s)
+ -- -- return s
+ -- end
+ -- end
+ -- end
+ -- end
+
+ string.utfvalues = utf.values
+
+end
diff --git a/tex/context/base/l-url.lua b/tex/context/base/l-url.lua
index 4624a0507..5cfeb252c 100644
--- a/tex/context/base/l-url.lua
+++ b/tex/context/base/l-url.lua
@@ -1,344 +1,344 @@
-if not modules then modules = { } end modules ['l-url'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local char, format, byte = string.char, string.format, string.byte
-local concat = table.concat
-local tonumber, type = tonumber, type
-local P, C, R, S, Cs, Cc, Ct, Cf, Cg, V = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.Cf, lpeg.Cg, lpeg.V
-local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replacer
-
--- from wikipedia:
---
--- foo://username:password@example.com:8042/over/there/index.dtb?type=animal;name=narwhal#nose
--- \_/ \_______________/ \_________/ \__/ \___/ \_/ \______________________/ \__/
--- | | | | | | | |
--- | userinfo hostname port | | query fragment
--- | \________________________________/\_____________|____|/
--- scheme | | | |
--- | authority path | |
--- | | |
--- | path interpretable as filename
--- | ___________|____________ |
--- / \ / \ |
--- urn:example:animal:ferret:nose interpretable as extension
-
-url = url or { }
-local url = url
-
-local tochar = function(s) return char(tonumber(s,16)) end
-
-local colon = P(":")
-local qmark = P("?")
-local hash = P("#")
-local slash = P("/")
-local percent = P("%")
-local endofstring = P(-1)
-
-local hexdigit = R("09","AF","af")
-local plus = P("+")
-local nothing = Cc("")
-local escapedchar = (percent * C(hexdigit * hexdigit)) / tochar
-local escaped = (plus / " ") + escapedchar
-
-local noslash = P("/") / ""
-
--- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
--- we also assume that when we have a scheme, we also have an authority
---
--- maybe we should already split the query (better for unescaping as = & can be part of a value
-
-local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
-local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
-local pathstr = Cs((escaped+(1- qmark-hash))^0)
------ querystr = Cs((escaped+(1- hash))^0)
-local querystr = Cs(( (1- hash))^0)
-local fragmentstr = Cs((escaped+(1- endofstring))^0)
-
-local scheme = schemestr * colon + nothing
-local authority = slash * slash * authoritystr + nothing
-local path = slash * pathstr + nothing
-local query = qmark * querystr + nothing
-local fragment = hash * fragmentstr + nothing
-
-local validurl = scheme * authority * path * query * fragment
-local parser = Ct(validurl)
-
-lpegpatterns.url = validurl
-lpegpatterns.urlsplitter = parser
-
-local escapes = { }
-
-setmetatable(escapes, { __index = function(t,k)
- local v = format("%%%02X",byte(k))
- t[k] = v
- return v
-end })
-
-local escaper = Cs((R("09","AZ","az")^1 + P(" ")/"%%20" + S("-./_")^1 + P(1) / escapes)^0) -- space happens most
-local unescaper = Cs((escapedchar + 1)^0)
-
-lpegpatterns.urlunescaped = escapedchar
-lpegpatterns.urlescaper = escaper
-lpegpatterns.urlunescaper = unescaper
-
--- todo: reconsider Ct as we can as well have five return values (saves a table)
--- so we can have two parsers, one with and one without
-
-local function split(str)
- return (type(str) == "string" and lpegmatch(parser,str)) or str
-end
-
-local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
-
-local function hasscheme(str)
- if str then
- local scheme = lpegmatch(isscheme,str) -- at least one character
- return scheme ~= "" and scheme or false
- else
- return false
- end
-end
-
---~ print(hasscheme("home:"))
---~ print(hasscheme("home://"))
-
--- todo: cache them
-
-local rootletter = R("az","AZ")
- + S("_-+")
-local separator = P("://")
-local qualified = P(".")^0 * P("/")
- + rootletter * P(":")
- + rootletter^1 * separator
- + rootletter^1 * P("/")
-local rootbased = P("/")
- + rootletter * P(":")
-
-local barswapper = replacer("|",":")
-local backslashswapper = replacer("\\","/")
-
--- queries:
-
-local equal = P("=")
-local amp = P("&")
-local key = Cs(((escapedchar+1)-equal )^0)
-local value = Cs(((escapedchar+1)-amp -endofstring)^0)
-
-local splitquery = Cf ( Ct("") * P { "sequence",
- sequence = V("pair") * (amp * V("pair"))^0,
- pair = Cg(key * equal * value),
-}, rawset)
-
--- hasher
-
-local function hashed(str) -- not yet ok (/test?test)
- if str == "" then
- return {
- scheme = "invalid",
- original = str,
- }
- end
- local s = split(str)
- local rawscheme = s[1]
- local rawquery = s[4]
- local somescheme = rawscheme ~= ""
- local somequery = rawquery ~= ""
- if not somescheme and not somequery then
- s = {
- scheme = "file",
- authority = "",
- path = str,
- query = "",
- fragment = "",
- original = str,
- noscheme = true,
- filename = str,
- }
- else -- not always a filename but handy anyway
- local authority, path, filename = s[2], s[3]
- if authority == "" then
- filename = path
- elseif path == "" then
- filename = ""
- else
- filename = authority .. "/" .. path
- end
- s = {
- scheme = rawscheme,
- authority = authority,
- path = path,
- query = lpegmatch(unescaper,rawquery), -- unescaped, but possible conflict with & and =
- queries = lpegmatch(splitquery,rawquery), -- split first and then unescaped
- fragment = s[5],
- original = str,
- noscheme = false,
- filename = filename,
- }
- end
- return s
-end
-
--- inspect(hashed("template://test"))
-
--- Here we assume:
---
--- files: /// = relative
--- files: //// = absolute (!)
-
---~ table.print(hashed("file://c:/opt/tex/texmf-local")) -- c:/opt/tex/texmf-local
---~ table.print(hashed("file://opt/tex/texmf-local" )) -- opt/tex/texmf-local
---~ table.print(hashed("file:///opt/tex/texmf-local" )) -- opt/tex/texmf-local
---~ table.print(hashed("file:////opt/tex/texmf-local" )) -- /opt/tex/texmf-local
---~ table.print(hashed("file:///./opt/tex/texmf-local" )) -- ./opt/tex/texmf-local
-
---~ table.print(hashed("c:/opt/tex/texmf-local" )) -- c:/opt/tex/texmf-local
---~ table.print(hashed("opt/tex/texmf-local" )) -- opt/tex/texmf-local
---~ table.print(hashed("/opt/tex/texmf-local" )) -- /opt/tex/texmf-local
-
-url.split = split
-url.hasscheme = hasscheme
-url.hashed = hashed
-
-function url.addscheme(str,scheme) -- no authority
- if hasscheme(str) then
- return str
- elseif not scheme then
- return "file:///" .. str
- else
- return scheme .. ":///" .. str
- end
-end
-
-function url.construct(hash) -- dodo: we need to escape !
- local fullurl, f = { }, 0
- local scheme, authority, path, query, fragment = hash.scheme, hash.authority, hash.path, hash.query, hash.fragment
- if scheme and scheme ~= "" then
- f = f + 1 ; fullurl[f] = scheme .. "://"
- end
- if authority and authority ~= "" then
- f = f + 1 ; fullurl[f] = authority
- end
- if path and path ~= "" then
- f = f + 1 ; fullurl[f] = "/" .. path
- end
- if query and query ~= "" then
- f = f + 1 ; fullurl[f] = "?".. query
- end
- if fragment and fragment ~= "" then
- f = f + 1 ; fullurl[f] = "#".. fragment
- end
- return lpegmatch(escaper,concat(fullurl))
-end
-
-local pattern = Cs(noslash * R("az","AZ") * (S(":|")/":") * noslash * P(1)^0)
-
-function url.filename(filename)
- local spec = hashed(filename)
- local path = spec.path
- return (spec.scheme == "file" and path and lpegmatch(pattern,path)) or filename
-end
-
--- print(url.filename("/c|/test"))
--- print(url.filename("/c/test"))
-
-local function escapestring(str)
- return lpegmatch(escaper,str)
-end
-
-url.escape = escapestring
-
-function url.query(str)
- if type(str) == "string" then
- return lpegmatch(splitquery,str) or ""
- else
- return str
- end
-end
-
-function url.toquery(data)
- local td = type(data)
- if td == "string" then
- return #str and escape(data) or nil -- beware of double escaping
- elseif td == "table" then
- if next(data) then
- local t = { }
- for k, v in next, data do
- t[#t+1] = format("%s=%s",k,escapestring(v))
- end
- return concat(t,"&")
- end
- else
- -- nil is a signal that no query
- end
-end
-
--- /test/ | /test | test/ | test => test
-
-local pattern = Cs(noslash^0 * (1 - noslash * P(-1))^0)
-
-function url.barepath(path)
- if not path or path == "" then
- return ""
- else
- return lpegmatch(pattern,path)
- end
-end
-
--- print(url.barepath("/test"),url.barepath("test/"),url.barepath("/test/"),url.barepath("test"))
--- print(url.barepath("/x/yz"),url.barepath("x/yz/"),url.barepath("/x/yz/"),url.barepath("x/yz"))
-
---~ print(url.filename("file:///c:/oeps.txt"))
---~ print(url.filename("c:/oeps.txt"))
---~ print(url.filename("file:///oeps.txt"))
---~ print(url.filename("file:///etc/test.txt"))
---~ print(url.filename("/oeps.txt"))
-
---~ from the spec on the web (sort of):
-
---~ local function test(str)
---~ local t = url.hashed(str)
---~ t.constructed = url.construct(t)
---~ print(table.serialize(t))
---~ end
-
---~ inspect(url.hashed("http://www.pragma-ade.com/test%20test?test=test%20test&x=123%3d45"))
---~ inspect(url.hashed("http://www.pragma-ade.com/test%20test?test=test%20test&x=123%3d45"))
-
---~ test("sys:///./colo-rgb")
-
---~ test("/data/site/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733/figuur-cow.jpg")
---~ test("file:///M:/q2p/develop/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733")
---~ test("M:/q2p/develop/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733")
---~ test("file:///q2p/develop/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733")
---~ test("/q2p/develop/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733")
-
---~ test("file:///cow%20with%20spaces")
---~ test("file:///cow%20with%20spaces.pdf")
---~ test("cow%20with%20spaces.pdf")
---~ test("some%20file")
---~ test("/etc/passwords")
---~ test("http://www.myself.com/some%20words.html")
---~ test("file:///c:/oeps.txt")
---~ test("file:///c|/oeps.txt")
---~ test("file:///etc/oeps.txt")
---~ test("file://./etc/oeps.txt")
---~ test("file:////etc/oeps.txt")
---~ test("ftp://ftp.is.co.za/rfc/rfc1808.txt")
---~ test("http://www.ietf.org/rfc/rfc2396.txt")
---~ test("ldap://[2001:db8::7]/c=GB?objectClass?one#what")
---~ test("mailto:John.Doe@example.com")
---~ test("news:comp.infosystems.www.servers.unix")
---~ test("tel:+1-816-555-1212")
---~ test("telnet://192.0.2.16:80/")
---~ test("urn:oasis:names:specification:docbook:dtd:xml:4.1.2")
---~ test("http://www.pragma-ade.com/spaced%20name")
-
---~ test("zip:///oeps/oeps.zip#bla/bla.tex")
---~ test("zip:///oeps/oeps.zip?bla/bla.tex")
-
---~ table.print(url.hashed("/test?test"))
+if not modules then modules = { } end modules ['l-url'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local char, format, byte = string.char, string.format, string.byte
+local concat = table.concat
+local tonumber, type = tonumber, type
+local P, C, R, S, Cs, Cc, Ct, Cf, Cg, V = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.Cf, lpeg.Cg, lpeg.V
+local lpegmatch, lpegpatterns, replacer = lpeg.match, lpeg.patterns, lpeg.replacer
+
+-- from wikipedia:
+--
+-- foo://username:password@example.com:8042/over/there/index.dtb?type=animal;name=narwhal#nose
+-- \_/ \_______________/ \_________/ \__/ \___/ \_/ \______________________/ \__/
+-- | | | | | | | |
+-- | userinfo hostname port | | query fragment
+-- | \________________________________/\_____________|____|/
+-- scheme | | | |
+-- | authority path | |
+-- | | |
+-- | path interpretable as filename
+-- | ___________|____________ |
+-- / \ / \ |
+-- urn:example:animal:ferret:nose interpretable as extension
+
+url = url or { }
+local url = url
+
+local tochar = function(s) return char(tonumber(s,16)) end
+
+local colon = P(":")
+local qmark = P("?")
+local hash = P("#")
+local slash = P("/")
+local percent = P("%")
+local endofstring = P(-1)
+
+local hexdigit = R("09","AF","af")
+local plus = P("+")
+local nothing = Cc("")
+local escapedchar = (percent * C(hexdigit * hexdigit)) / tochar
+local escaped = (plus / " ") + escapedchar
+
+local noslash = P("/") / ""
+
+-- we assume schemes with more than 1 character (in order to avoid problems with windows disks)
+-- we also assume that when we have a scheme, we also have an authority
+--
+-- maybe we should already split the query (better for unescaping as = and & can be part of a value)
+
+local schemestr = Cs((escaped+(1-colon-slash-qmark-hash))^2)
+local authoritystr = Cs((escaped+(1- slash-qmark-hash))^0)
+local pathstr = Cs((escaped+(1- qmark-hash))^0)
+----- querystr = Cs((escaped+(1- hash))^0)
+local querystr = Cs(( (1- hash))^0)
+local fragmentstr = Cs((escaped+(1- endofstring))^0)
+
+local scheme = schemestr * colon + nothing
+local authority = slash * slash * authoritystr + nothing
+local path = slash * pathstr + nothing
+local query = qmark * querystr + nothing
+local fragment = hash * fragmentstr + nothing
+
+local validurl = scheme * authority * path * query * fragment
+local parser = Ct(validurl)
+
+lpegpatterns.url = validurl
+lpegpatterns.urlsplitter = parser
+
+local escapes = { }
+
+setmetatable(escapes, { __index = function(t,k)
+ local v = format("%%%02X",byte(k))
+ t[k] = v
+ return v
+end })
+
+local escaper = Cs((R("09","AZ","az")^1 + P(" ")/"%%20" + S("-./_")^1 + P(1) / escapes)^0) -- space happens most
+local unescaper = Cs((escapedchar + 1)^0)
+
+lpegpatterns.urlunescaped = escapedchar
+lpegpatterns.urlescaper = escaper
+lpegpatterns.urlunescaper = unescaper
+
+-- todo: reconsider Ct as we can as well have five return values (saves a table)
+-- so we can have two parsers, one with and one without
+
+local function split(str)
+ return (type(str) == "string" and lpegmatch(parser,str)) or str
+end
+
+local isscheme = schemestr * colon * slash * slash -- this test also assumes authority
+
+local function hasscheme(str)
+ if str then
+ local scheme = lpegmatch(isscheme,str) -- at least one character
+ return scheme ~= "" and scheme or false
+ else
+ return false
+ end
+end
+
+--~ print(hasscheme("home:"))
+--~ print(hasscheme("home://"))
+
+-- todo: cache them
+
+local rootletter = R("az","AZ")
+ + S("_-+")
+local separator = P("://")
+local qualified = P(".")^0 * P("/")
+ + rootletter * P(":")
+ + rootletter^1 * separator
+ + rootletter^1 * P("/")
+local rootbased = P("/")
+ + rootletter * P(":")
+
+local barswapper = replacer("|",":")
+local backslashswapper = replacer("\\","/")
+
+-- queries:
+
+local equal = P("=")
+local amp = P("&")
+local key = Cs(((escapedchar+1)-equal )^0)
+local value = Cs(((escapedchar+1)-amp -endofstring)^0)
+
+local splitquery = Cf ( Ct("") * P { "sequence",
+ sequence = V("pair") * (amp * V("pair"))^0,
+ pair = Cg(key * equal * value),
+}, rawset)
+
+-- hasher
+
+local function hashed(str) -- not yet ok (/test?test)
+ if str == "" then
+ return {
+ scheme = "invalid",
+ original = str,
+ }
+ end
+ local s = split(str)
+ local rawscheme = s[1]
+ local rawquery = s[4]
+ local somescheme = rawscheme ~= ""
+ local somequery = rawquery ~= ""
+ if not somescheme and not somequery then
+ s = {
+ scheme = "file",
+ authority = "",
+ path = str,
+ query = "",
+ fragment = "",
+ original = str,
+ noscheme = true,
+ filename = str,
+ }
+ else -- not always a filename but handy anyway
+ local authority, path, filename = s[2], s[3]
+ if authority == "" then
+ filename = path
+ elseif path == "" then
+ filename = ""
+ else
+ filename = authority .. "/" .. path
+ end
+ s = {
+ scheme = rawscheme,
+ authority = authority,
+ path = path,
+ query = lpegmatch(unescaper,rawquery), -- unescaped, but possible conflict with & and =
+ queries = lpegmatch(splitquery,rawquery), -- split first and then unescaped
+ fragment = s[5],
+ original = str,
+ noscheme = false,
+ filename = filename,
+ }
+ end
+ return s
+end
+
+-- inspect(hashed("template://test"))
+
+-- Here we assume:
+--
+-- files: /// = relative
+-- files: //// = absolute (!)
+
+--~ table.print(hashed("file://c:/opt/tex/texmf-local")) -- c:/opt/tex/texmf-local
+--~ table.print(hashed("file://opt/tex/texmf-local" )) -- opt/tex/texmf-local
+--~ table.print(hashed("file:///opt/tex/texmf-local" )) -- opt/tex/texmf-local
+--~ table.print(hashed("file:////opt/tex/texmf-local" )) -- /opt/tex/texmf-local
+--~ table.print(hashed("file:///./opt/tex/texmf-local" )) -- ./opt/tex/texmf-local
+
+--~ table.print(hashed("c:/opt/tex/texmf-local" )) -- c:/opt/tex/texmf-local
+--~ table.print(hashed("opt/tex/texmf-local" )) -- opt/tex/texmf-local
+--~ table.print(hashed("/opt/tex/texmf-local" )) -- /opt/tex/texmf-local
+
+url.split = split
+url.hasscheme = hasscheme
+url.hashed = hashed
+
+function url.addscheme(str,scheme) -- no authority
+ if hasscheme(str) then
+ return str
+ elseif not scheme then
+ return "file:///" .. str
+ else
+ return scheme .. ":///" .. str
+ end
+end
+
+function url.construct(hash) -- todo: we need to escape !
+ local fullurl, f = { }, 0
+ local scheme, authority, path, query, fragment = hash.scheme, hash.authority, hash.path, hash.query, hash.fragment
+ if scheme and scheme ~= "" then
+ f = f + 1 ; fullurl[f] = scheme .. "://"
+ end
+ if authority and authority ~= "" then
+ f = f + 1 ; fullurl[f] = authority
+ end
+ if path and path ~= "" then
+ f = f + 1 ; fullurl[f] = "/" .. path
+ end
+ if query and query ~= "" then
+ f = f + 1 ; fullurl[f] = "?".. query
+ end
+ if fragment and fragment ~= "" then
+ f = f + 1 ; fullurl[f] = "#".. fragment
+ end
+ return lpegmatch(escaper,concat(fullurl))
+end
+
+local pattern = Cs(noslash * R("az","AZ") * (S(":|")/":") * noslash * P(1)^0)
+
+function url.filename(filename)
+ local spec = hashed(filename)
+ local path = spec.path
+ return (spec.scheme == "file" and path and lpegmatch(pattern,path)) or filename
+end
+
+-- print(url.filename("/c|/test"))
+-- print(url.filename("/c/test"))
+
+local function escapestring(str)
+ return lpegmatch(escaper,str)
+end
+
+url.escape = escapestring
+
+function url.query(str)
+ if type(str) == "string" then
+ return lpegmatch(splitquery,str) or ""
+ else
+ return str
+ end
+end
+
+function url.toquery(data)
+ local td = type(data)
+ if td == "string" then
+ return #data > 0 and escapestring(data) or nil -- beware of double escaping
+ elseif td == "table" then
+ if next(data) then
+ local t = { }
+ for k, v in next, data do
+ t[#t+1] = format("%s=%s",k,escapestring(v))
+ end
+ return concat(t,"&")
+ end
+ else
+ -- nil is a signal that no query
+ end
+end
+
+-- /test/ | /test | test/ | test => test
+
+local pattern = Cs(noslash^0 * (1 - noslash * P(-1))^0)
+
+function url.barepath(path)
+ if not path or path == "" then
+ return ""
+ else
+ return lpegmatch(pattern,path)
+ end
+end
+
+-- print(url.barepath("/test"),url.barepath("test/"),url.barepath("/test/"),url.barepath("test"))
+-- print(url.barepath("/x/yz"),url.barepath("x/yz/"),url.barepath("/x/yz/"),url.barepath("x/yz"))
+
+--~ print(url.filename("file:///c:/oeps.txt"))
+--~ print(url.filename("c:/oeps.txt"))
+--~ print(url.filename("file:///oeps.txt"))
+--~ print(url.filename("file:///etc/test.txt"))
+--~ print(url.filename("/oeps.txt"))
+
+--~ from the spec on the web (sort of):
+
+--~ local function test(str)
+--~ local t = url.hashed(str)
+--~ t.constructed = url.construct(t)
+--~ print(table.serialize(t))
+--~ end
+
+--~ inspect(url.hashed("http://www.pragma-ade.com/test%20test?test=test%20test&x=123%3d45"))
+--~ inspect(url.hashed("http://www.pragma-ade.com/test%20test?test=test%20test&x=123%3d45"))
+
+--~ test("sys:///./colo-rgb")
+
+--~ test("/data/site/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733/figuur-cow.jpg")
+--~ test("file:///M:/q2p/develop/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733")
+--~ test("M:/q2p/develop/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733")
+--~ test("file:///q2p/develop/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733")
+--~ test("/q2p/develop/output/q2p-develop/resources/ecaboperception4_res/topicresources/58313733")
+
+--~ test("file:///cow%20with%20spaces")
+--~ test("file:///cow%20with%20spaces.pdf")
+--~ test("cow%20with%20spaces.pdf")
+--~ test("some%20file")
+--~ test("/etc/passwords")
+--~ test("http://www.myself.com/some%20words.html")
+--~ test("file:///c:/oeps.txt")
+--~ test("file:///c|/oeps.txt")
+--~ test("file:///etc/oeps.txt")
+--~ test("file://./etc/oeps.txt")
+--~ test("file:////etc/oeps.txt")
+--~ test("ftp://ftp.is.co.za/rfc/rfc1808.txt")
+--~ test("http://www.ietf.org/rfc/rfc2396.txt")
+--~ test("ldap://[2001:db8::7]/c=GB?objectClass?one#what")
+--~ test("mailto:John.Doe@example.com")
+--~ test("news:comp.infosystems.www.servers.unix")
+--~ test("tel:+1-816-555-1212")
+--~ test("telnet://192.0.2.16:80/")
+--~ test("urn:oasis:names:specification:docbook:dtd:xml:4.1.2")
+--~ test("http://www.pragma-ade.com/spaced%20name")
+
+--~ test("zip:///oeps/oeps.zip#bla/bla.tex")
+--~ test("zip:///oeps/oeps.zip?bla/bla.tex")
+
+--~ table.print(url.hashed("/test?test"))
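
For reference, a small usage sketch of the url helpers above (illustrative only; the field names follow the table built by hashed in this file):

-- sketch: split a url and build a query string with the helpers above
local u = url.hashed("http://example.com/over/there?type=animal&name=narwhal#nose")
-- u.scheme    == "http"
-- u.authority == "example.com"
-- u.path      == "over/there"           -- the leading slash is consumed by the parser
-- u.queries   == { type = "animal", name = "narwhal" }
-- u.fragment  == "nose"
-- u.filename  == "example.com/over/there"
print(url.toquery { type = "animal", name = "sea unicorn" })
-- something like "type=animal&name=sea%20unicorn" (key order depends on next)
-- url.construct(u) reassembles the parts but still runs the whole string
-- through the escaper, so reserved characters like ':' and '?' get escaped
-- as well (hence the todo above)
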
diff --git a/tex/context/base/l-xml.lua b/tex/context/base/l-xml.lua
index 14e97337b..d8cc4a984 100644
--- a/tex/context/base/l-xml.lua
+++ b/tex/context/base/l-xml.lua
@@ -1,23 +1,23 @@
-if not modules then modules = { } end modules ['l-xml'] = {
- version = 1.001,
- comment = "this module is replaced by the lxml-* ones",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- We asume that the helper modules l-*.lua are loaded
--- already. But anyway if you use mtxrun to run your script
--- all is taken care of.
-
-if not trackers then
- require('trac-tra')
-end
-
-if not xml then
- require('lxml-tab')
- require('lxml-lpt')
- require('lxml-mis')
- require('lxml-aux')
- require('lxml-xml')
-end
+if not modules then modules = { } end modules ['l-xml'] = {
+ version = 1.001,
+ comment = "this module is replaced by the lxml-* ones",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- We assume that the helper modules l-*.lua are already
+-- loaded. If you use mtxrun to run your script, all of this
+-- is taken care of.
+
+if not trackers then
+ require('trac-tra')
+end
+
+if not xml then
+ require('lxml-tab')
+ require('lxml-lpt')
+ require('lxml-mis')
+ require('lxml-aux')
+ require('lxml-xml')
+end
diff --git a/tex/context/base/lang-def.lua b/tex/context/base/lang-def.lua
index c0c3981f7..274bb8090 100644
--- a/tex/context/base/lang-def.lua
+++ b/tex/context/base/lang-def.lua
@@ -1,466 +1,466 @@
-if not modules then modules = { } end modules ['lang-def'] = {
- version = 1.001,
- comment = "companion to lang-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
- -- dataonly = true, -- saves 10K
-}
-
-local rawget = rawget
-local lower = string.lower
-
-languages = languages or { }
-local languages = languages
-languages.data = languages.data or { }
-local data = languages.data
-
-local allocate = utilities.storage.allocate
-local setmetatableindex = table.setmetatableindex
-
--- The specifications are based on an analysis done by Arthur. The
--- names of tags were changed by Hans. The data is not yet used but
--- will be some day.
---
--- description
---
--- The description is only meant as an indication; for example 'no' is
--- "Norwegian, undetermined" because that's really what it is.
---
--- script
---
--- This is the 4-letter script tag according to ISO 15924, the
--- official standard.
---
--- bibliographical and terminological
---
--- Then we have *two* ISO-639 3-letter tags: one is supposed to be used
--- for "bibliographical" purposes, the other for "terminological". The
--- first one is quite special (and mostly used in American libraries),
--- and the more interesting one is the other (apparently it's that one
--- we find everywhere).
---
--- context
---
--- These are the ones used in ConteXt. Kind of numberplate ones.
---
--- opentype
---
--- This is the 3-letter OpenType language tag, obviously.
---
--- variant
---
--- This is actually the rfc4646: an extension of ISO-639 that also defines
--- codes for variants like de-1901 for "German, 1901 orthography" or zh-Hans for
--- "Chinese, simplified characters" ('Hans' is the ISO-15924 tag for
--- "HAN ideographs, Simplified" :-) As I said yesterday, I think this
--- should be the reference since it's exactly what we want: it's really
--- standard (it's a RFC) and it's more than simply languages. To my
--- knowledge this is the only system that addresses this issue.
---
--- Warning: it's not unique! Because we have two "German" languages
--- (and could, potentially, have two Chinese, etc.)
---
--- Beware: the abbreviations are lowercased, which makes it more
--- convenient to use them.
---
--- todo: add default features
-
-local specifications = allocate {
- {
- ["description"] = "Dutch",
- ["script"] = "latn",
- -- ["bibliographical"] = "nld",
- -- ["terminological"] = "nld",
- ["context"] = "nl",
- ["opentype"] = "nld",
- ["variant"] = "nl",
- },
- {
- ["description"] = "Basque",
- ["script"] = "latn",
- ["bibliographical"] = "baq",
- ["terminological"] = "eus",
- ["context"] = "ba",
- ["opentype"] = "euq",
- ["variant"] = "eu",
- },
- {
- ["description"] = "Welsh",
- ["script"] = "latn",
- ["bibliographical"] = "wel",
- ["terminological"] = "cym",
- ["context"] = "cy",
- ["opentype"] = "wel",
- ["variant"] = "cy",
- },
- {
- ["description"] = "Icelandic",
- ["script"] = "latn",
- ["bibliographical"] = "ice",
- ["terminological"] = "isl",
- ["context"] = "is",
- ["opentype"] = "isl",
- ["variant"] = "is",
- },
- {
- ["description"] = "Norwegian, undetermined",
- ["script"] = "latn",
- ["bibliographical"] = "nor",
- ["terminological"] = "nor",
- ["context"] = "no",
- ["variant"] = "no",
- },
- {
- ["description"] = "Norwegian bokmal",
- ["script"] = "latn",
- ["bibliographical"] = "nob",
- ["terminological"] = "nob",
- ["opentype"] = "nor", -- not sure!
- ["variant"] = "nb",
- },
- {
- ["description"] = "Norwegian nynorsk",
- ["script"] = "latn",
- ["bibliographical"] = "nno",
- ["terminological"] = "nno",
- ["opentype"] = "nny",
- ["variant"] = "nn",
- },
- {
- ["description"] = "Ancient Greek",
- ["script"] = "grek",
- ["bibliographical"] = "grc",
- ["terminological"] = "grc",
- ["context"] = "agr",
- ["variant"] = "grc",
- },
- {
- ["description"] = "German, 1901 orthography",
- ["script"] = "latn",
- ["terminological"] = "deu",
- ["context"] = "deo",
- ["opentype"] = "deu",
- ["variant"] = "de-1901",
- },
- {
- ["description"] = "German, 1996 orthography",
- ["script"] = "latn",
- ["bibliographical"] = "ger",
- ["terminological"] = "deu",
- ["context"] = "de",
- ["opentype"] = "deu",
- ["variant"] = "de-1996",
- },
- {
- ["description"] = "Afrikaans",
- ["script"] = "latn",
- ["bibliographical"] = "afr",
- ["terminological"] = "afr",
- ["context"] = "af",
- ["opentype"] = "afk",
- ["variant"] = "af",
- },
- {
- ["description"] = "Catalan",
- ["script"] = "latn",
- ["bibliographical"] = "cat",
- ["terminological"] = "cat",
- ["context"] = "ca",
- ["opentype"] = "cat",
- ["variant"] = "ca",
- },
- {
- ["description"] = "Czech",
- ["script"] = "latn",
- ["bibliographical"] = "cze",
- ["terminological"] = "ces",
- ["context"] = "cz",
- ["opentype"] = "csy",
- ["variant"] = "cs",
- },
- {
- ["description"] = "Greek",
- ["script"] = "grek",
- ["bibliographical"] = "gre",
- ["terminological"] = "ell",
- ["context"] = "gr",
- ["opentype"] = "ell",
- ["variant"] = "el",
- },
- {
- ["description"] = "American English",
- ["script"] = "latn",
- ["bibliographical"] = "eng",
- ["terminological"] = "eng",
- ["context"] = "us",
- ["opentype"] = "eng",
- ["variant"] = "en-US",
- },
- {
- ["description"] = "British English",
- ["script"] = "latn",
- ["bibliographical"] = "eng",
- ["terminological"] = "eng",
- ["context"] = "uk",
- ["opentype"] = "eng",
- ["variant"] = "en-UK", -- Could be en-GB as well ...
- },
- {
- ["description"] = "Spanish",
- ["script"] = "latn",
- ["bibliographical"] = "spa",
- ["terminological"] = "spa",
- ["context"] = "es",
- ["opentype"] = "esp",
- ["variant"] = "es",
- },
- {
- ["description"] = "Finnish",
- ["script"] = "latn",
- ["bibliographical"] = "fin",
- ["terminological"] = "fin",
- ["context"] = "fi",
- ["opentype"] = "fin",
- ["variant"] = "fi",
- },
- {
- ["description"] = "French",
- ["script"] = "latn",
- ["bibliographical"] = "fre",
- ["terminological"] = "fra",
- ["context"] = "fr",
- ["opentype"] = "fra",
- ["variant"] = "fr",
- },
- {
- ["description"] = "Croatian",
- ["script"] = "latn",
- ["bibliographical"] = "scr",
- ["terminological"] = "hrv",
- ["context"] = "hr",
- ["opentype"] = "hrv",
- ["variant"] = "hr",
- },
- {
- ["description"] = "Hungarian",
- ["script"] = "latn",
- ["bibliographical"] = "hun",
- ["terminological"] = "hun",
- ["context"] = "hu",
- ["opentype"] = "hun",
- ["variant"] = "hu",
- },
- {
- ["description"] = "Italian",
- ["script"] = "latn",
- ["bibliographical"] = "ita",
- ["terminological"] = "ita",
- ["context"] = "it",
- ["opentype"] = "ita",
- ["variant"] = "it",
- },
- {
- ["description"] = "Japanese",
- ["script"] = "jpan",
- ["bibliographical"] = "jpn",
- ["terminological"] = "jpn",
- ["context"] = "ja",
- ["opentype"] = "jan",
- ["variant"] = "ja",
- },
- {
- ["description"] = "Latin",
- ["script"] = "latn",
- ["bibliographical"] = "lat",
- ["terminological"] = "lat",
- ["context"] = "la",
- ["opentype"] = "lat",
- ["variant"] = "la",
- },
- {
- ["description"] = "Portuguese",
- ["script"] = "latn",
- ["bibliographical"] = "por",
- ["terminological"] = "por",
- ["context"] = "pt",
- ["opentype"] = "ptg",
- ["variant"] = "pt",
- },
- {
- ["description"] = "Polish",
- ["script"] = "latn",
- ["bibliographical"] = "pol",
- ["terminological"] = "pol",
- ["context"] = "pl",
- ["opentype"] = "plk",
- ["variant"] = "pl",
- },
- {
- ["description"] = "Romanian",
- ["script"] = "latn",
- ["bibliographical"] = "rum",
- ["terminological"] = "ron",
- ["context"] = "ro",
- ["opentype"] = "rom",
- ["variant"] = "ro",
- },
- {
- ["description"] = "Russian",
- ["script"] = "cyrl",
- ["bibliographical"] = "rus",
- ["terminological"] = "rus",
- ["context"] = "ru",
- ["opentype"] = "rus",
- ["variant"] = "ru",
- },
- {
- ["description"] = "Slovak",
- ["script"] = "latn",
- ["bibliographical"] = "slo",
- ["terminological"] = "slk",
- ["context"] = "sk",
- ["opentype"] = "sky",
- ["variant"] = "sk",
- },
- {
- ["description"] = "Slovenian",
- ["script"] = "latn",
- ["bibliographical"] = "slv",
- ["terminological"] = "slv",
- ["context"] = "sl",
- ["opentype"] = "slv",
- ["variant"] = "sl",
- },
- {
- ["description"] = "Swedish",
- ["script"] = "latn",
- ["bibliographical"] = "swe",
- ["terminological"] = "swe",
- ["context"] = "sv",
- ["opentype"] = "sve",
- ["variant"] = "sv",
- },
- {
- ["description"] = "Thai",
- ["script"] = "thai",
- -- ["bibliographical"] = "",
- -- ["terminological"] = "",
- ["context"] = "th",
- ["opentype"] = "tha",
- -- ["variant"] = "",
- },
- {
- ["description"] = "Turkish",
- ["script"] = "latn",
- ["bibliographical"] = "tur",
- ["terminological"] = "tur",
- ["context"] = "tr",
- ["opentype"] = "trk",
- ["variant"] = "tr",
- },
- {
- ["description"] = "Vietnamese",
- ["script"] = "latn",
- ["bibliographical"] = "vie",
- ["terminological"] = "vie",
- ["context"] = "vn",
- ["opentype"] = "vit",
- ["variant"] = "vi",
- },
- {
- ["description"] = "Chinese, simplified",
- ["script"] = "hans",
- ["opentypescript"] = "hani",
- ["bibliographical"] = "chi",
- ["terminological"] = "zho",
- ["context"] = "cn",
- ["opentype"] = "zhs",
- ["variant"] = "zh-hans",
- },
-}
-
-data.specifications = specifications
-
-local variants = { } data.variants = variants
-local contexts = { } data.contexts = contexts
-local records = { } data.records = records
-local scripts = { } data.scripts = scripts
-local opentypes = { } data.opentypes = opentypes
-local opentypescripts = { } data.opentypescripts = opentypescripts
-
-for k=1,#specifications do
- local specification = specifications[k]
- local variant = specification.variant
- if variant then
- variants[lower(variant)] = specification
- end
- local opentype = specification.opentype
- if opentype then
- opentypes[lower(opentype)] = specification
- end
- local script = specification.script
- if script then
- scripts[lower(script)] = specification
- end
- local opentypescript = specification.opentypescript
- if opentypescript then
- opentypescripts[lower(opentypescript)] = specification
- end
- local context = context
- if context then
- if type(context) == "table" then
- for k=1,#context do
- contexts[context[k]] = specification
- end
- else
- contexts[context] = specification
- end
- end
-end
-
-local defaultvariant = variants["en-us"]
-
-local function get(k,key)
- local v = rawget(variants,k) or rawget(opentypes,k) or rawget(contexts,k)
- return v and v[key]
-end
-
-setmetatableindex(variants, function(t,k)
- k = lower(k)
- local v = get(k,"language") or defaultvariant.language
- t[k] = v
- return v
-end)
-
-setmetatableindex(opentypes, function(t,k)
- k = lower(k)
- local v = get(k,"opentype") or "dflt"
- t[k] = v
- return v
-end)
-
-setmetatableindex(opentypescripts, function(t,k)
- k = lower(k)
- local v = get(k,"opentypescript") or get(k,"script") or defaultvariant.opentypescript or defaultvariant.script
- t[k] = v
- return v
-end)
-
-setmetatableindex(contexts, function(t,k)
- k = lower(str)
- local v = get(k,"context") or defaultvariant.context
- v = type(v) == "table" and v[1] or v
- t[k] = v
- return v
-end)
-
-setmetatableindex(records, function(t,k) -- how useful is this one?
- k = lower(k)
- local v = get(k) or defaultvariant
- t[k] = v
- return v
-end)
-
--- print(opentypes.nl,opentypescripts.nl)
--- print(opentypes.de,opentypescripts.de)
+if not modules then modules = { } end modules ['lang-def'] = {
+ version = 1.001,
+ comment = "companion to lang-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+ -- dataonly = true, -- saves 10K
+}
+
+local rawget = rawget
+local lower = string.lower
+
+languages = languages or { }
+local languages = languages
+languages.data = languages.data or { }
+local data = languages.data
+
+local allocate = utilities.storage.allocate
+local setmetatableindex = table.setmetatableindex
+
+-- The specifications are based on an analysis done by Arthur. The
+-- names of tags were changed by Hans. The data is not yet used but
+-- will be some day.
+--
+-- description
+--
+-- The description is only meant as an indication; for example 'no' is
+-- "Norwegian, undetermined" because that's really what it is.
+--
+-- script
+--
+-- This is the 4-letter script tag according to ISO 15924, the
+-- official standard.
+--
+-- bibliographical and terminological
+--
+-- Then we have *two* ISO-639 3-letter tags: one is supposed to be used
+-- for "bibliographical" purposes, the other for "terminological". The
+-- first one is quite special (and mostly used in American libraries),
+-- and the more interesting one is the other (apparently it's that one
+-- we find everywhere).
+--
+-- context
+--
+-- These are the ones used in ConTeXt. Kind of numberplate ones.
+--
+-- opentype
+--
+-- This is the 3-letter OpenType language tag, obviously.
+--
+-- variant
+--
+-- This is actually RFC 4646: an extension of ISO-639 that also defines
+-- codes for variants like de-1901 for "German, 1901 orthography" or zh-Hans for
+-- "Chinese, simplified characters" ('Hans' is the ISO-15924 tag for
+-- "HAN ideographs, Simplified" :-) As I said yesterday, I think this
+-- should be the reference since it's exactly what we want: it's really
+-- standard (it's an RFC) and it's more than simply languages. To my
+-- knowledge this is the only system that addresses this issue.
+--
+-- Warning: it's not unique! Because we have two "German" languages
+-- (and could, potentially, have two Chinese, etc.)
+--
+-- Beware: the abbreviations are lowercased, which makes it more
+-- convenient to use them.
+--
+-- todo: add default features
+
+local specifications = allocate {
+ {
+ ["description"] = "Dutch",
+ ["script"] = "latn",
+ -- ["bibliographical"] = "nld",
+ -- ["terminological"] = "nld",
+ ["context"] = "nl",
+ ["opentype"] = "nld",
+ ["variant"] = "nl",
+ },
+ {
+ ["description"] = "Basque",
+ ["script"] = "latn",
+ ["bibliographical"] = "baq",
+ ["terminological"] = "eus",
+ ["context"] = "ba",
+ ["opentype"] = "euq",
+ ["variant"] = "eu",
+ },
+ {
+ ["description"] = "Welsh",
+ ["script"] = "latn",
+ ["bibliographical"] = "wel",
+ ["terminological"] = "cym",
+ ["context"] = "cy",
+ ["opentype"] = "wel",
+ ["variant"] = "cy",
+ },
+ {
+ ["description"] = "Icelandic",
+ ["script"] = "latn",
+ ["bibliographical"] = "ice",
+ ["terminological"] = "isl",
+ ["context"] = "is",
+ ["opentype"] = "isl",
+ ["variant"] = "is",
+ },
+ {
+ ["description"] = "Norwegian, undetermined",
+ ["script"] = "latn",
+ ["bibliographical"] = "nor",
+ ["terminological"] = "nor",
+ ["context"] = "no",
+ ["variant"] = "no",
+ },
+ {
+ ["description"] = "Norwegian bokmal",
+ ["script"] = "latn",
+ ["bibliographical"] = "nob",
+ ["terminological"] = "nob",
+ ["opentype"] = "nor", -- not sure!
+ ["variant"] = "nb",
+ },
+ {
+ ["description"] = "Norwegian nynorsk",
+ ["script"] = "latn",
+ ["bibliographical"] = "nno",
+ ["terminological"] = "nno",
+ ["opentype"] = "nny",
+ ["variant"] = "nn",
+ },
+ {
+ ["description"] = "Ancient Greek",
+ ["script"] = "grek",
+ ["bibliographical"] = "grc",
+ ["terminological"] = "grc",
+ ["context"] = "agr",
+ ["variant"] = "grc",
+ },
+ {
+ ["description"] = "German, 1901 orthography",
+ ["script"] = "latn",
+ ["terminological"] = "deu",
+ ["context"] = "deo",
+ ["opentype"] = "deu",
+ ["variant"] = "de-1901",
+ },
+ {
+ ["description"] = "German, 1996 orthography",
+ ["script"] = "latn",
+ ["bibliographical"] = "ger",
+ ["terminological"] = "deu",
+ ["context"] = "de",
+ ["opentype"] = "deu",
+ ["variant"] = "de-1996",
+ },
+ {
+ ["description"] = "Afrikaans",
+ ["script"] = "latn",
+ ["bibliographical"] = "afr",
+ ["terminological"] = "afr",
+ ["context"] = "af",
+ ["opentype"] = "afk",
+ ["variant"] = "af",
+ },
+ {
+ ["description"] = "Catalan",
+ ["script"] = "latn",
+ ["bibliographical"] = "cat",
+ ["terminological"] = "cat",
+ ["context"] = "ca",
+ ["opentype"] = "cat",
+ ["variant"] = "ca",
+ },
+ {
+ ["description"] = "Czech",
+ ["script"] = "latn",
+ ["bibliographical"] = "cze",
+ ["terminological"] = "ces",
+ ["context"] = "cz",
+ ["opentype"] = "csy",
+ ["variant"] = "cs",
+ },
+ {
+ ["description"] = "Greek",
+ ["script"] = "grek",
+ ["bibliographical"] = "gre",
+ ["terminological"] = "ell",
+ ["context"] = "gr",
+ ["opentype"] = "ell",
+ ["variant"] = "el",
+ },
+ {
+ ["description"] = "American English",
+ ["script"] = "latn",
+ ["bibliographical"] = "eng",
+ ["terminological"] = "eng",
+ ["context"] = "us",
+ ["opentype"] = "eng",
+ ["variant"] = "en-US",
+ },
+ {
+ ["description"] = "British English",
+ ["script"] = "latn",
+ ["bibliographical"] = "eng",
+ ["terminological"] = "eng",
+ ["context"] = "uk",
+ ["opentype"] = "eng",
+ ["variant"] = "en-UK", -- Could be en-GB as well ...
+ },
+ {
+ ["description"] = "Spanish",
+ ["script"] = "latn",
+ ["bibliographical"] = "spa",
+ ["terminological"] = "spa",
+ ["context"] = "es",
+ ["opentype"] = "esp",
+ ["variant"] = "es",
+ },
+ {
+ ["description"] = "Finnish",
+ ["script"] = "latn",
+ ["bibliographical"] = "fin",
+ ["terminological"] = "fin",
+ ["context"] = "fi",
+ ["opentype"] = "fin",
+ ["variant"] = "fi",
+ },
+ {
+ ["description"] = "French",
+ ["script"] = "latn",
+ ["bibliographical"] = "fre",
+ ["terminological"] = "fra",
+ ["context"] = "fr",
+ ["opentype"] = "fra",
+ ["variant"] = "fr",
+ },
+ {
+ ["description"] = "Croatian",
+ ["script"] = "latn",
+ ["bibliographical"] = "scr",
+ ["terminological"] = "hrv",
+ ["context"] = "hr",
+ ["opentype"] = "hrv",
+ ["variant"] = "hr",
+ },
+ {
+ ["description"] = "Hungarian",
+ ["script"] = "latn",
+ ["bibliographical"] = "hun",
+ ["terminological"] = "hun",
+ ["context"] = "hu",
+ ["opentype"] = "hun",
+ ["variant"] = "hu",
+ },
+ {
+ ["description"] = "Italian",
+ ["script"] = "latn",
+ ["bibliographical"] = "ita",
+ ["terminological"] = "ita",
+ ["context"] = "it",
+ ["opentype"] = "ita",
+ ["variant"] = "it",
+ },
+ {
+ ["description"] = "Japanese",
+ ["script"] = "jpan",
+ ["bibliographical"] = "jpn",
+ ["terminological"] = "jpn",
+ ["context"] = "ja",
+ ["opentype"] = "jan",
+ ["variant"] = "ja",
+ },
+ {
+ ["description"] = "Latin",
+ ["script"] = "latn",
+ ["bibliographical"] = "lat",
+ ["terminological"] = "lat",
+ ["context"] = "la",
+ ["opentype"] = "lat",
+ ["variant"] = "la",
+ },
+ {
+ ["description"] = "Portuguese",
+ ["script"] = "latn",
+ ["bibliographical"] = "por",
+ ["terminological"] = "por",
+ ["context"] = "pt",
+ ["opentype"] = "ptg",
+ ["variant"] = "pt",
+ },
+ {
+ ["description"] = "Polish",
+ ["script"] = "latn",
+ ["bibliographical"] = "pol",
+ ["terminological"] = "pol",
+ ["context"] = "pl",
+ ["opentype"] = "plk",
+ ["variant"] = "pl",
+ },
+ {
+ ["description"] = "Romanian",
+ ["script"] = "latn",
+ ["bibliographical"] = "rum",
+ ["terminological"] = "ron",
+ ["context"] = "ro",
+ ["opentype"] = "rom",
+ ["variant"] = "ro",
+ },
+ {
+ ["description"] = "Russian",
+ ["script"] = "cyrl",
+ ["bibliographical"] = "rus",
+ ["terminological"] = "rus",
+ ["context"] = "ru",
+ ["opentype"] = "rus",
+ ["variant"] = "ru",
+ },
+ {
+ ["description"] = "Slovak",
+ ["script"] = "latn",
+ ["bibliographical"] = "slo",
+ ["terminological"] = "slk",
+ ["context"] = "sk",
+ ["opentype"] = "sky",
+ ["variant"] = "sk",
+ },
+ {
+ ["description"] = "Slovenian",
+ ["script"] = "latn",
+ ["bibliographical"] = "slv",
+ ["terminological"] = "slv",
+ ["context"] = "sl",
+ ["opentype"] = "slv",
+ ["variant"] = "sl",
+ },
+ {
+ ["description"] = "Swedish",
+ ["script"] = "latn",
+ ["bibliographical"] = "swe",
+ ["terminological"] = "swe",
+ ["context"] = "sv",
+ ["opentype"] = "sve",
+ ["variant"] = "sv",
+ },
+ {
+ ["description"] = "Thai",
+ ["script"] = "thai",
+ -- ["bibliographical"] = "",
+ -- ["terminological"] = "",
+ ["context"] = "th",
+ ["opentype"] = "tha",
+ -- ["variant"] = "",
+ },
+ {
+ ["description"] = "Turkish",
+ ["script"] = "latn",
+ ["bibliographical"] = "tur",
+ ["terminological"] = "tur",
+ ["context"] = "tr",
+ ["opentype"] = "trk",
+ ["variant"] = "tr",
+ },
+ {
+ ["description"] = "Vietnamese",
+ ["script"] = "latn",
+ ["bibliographical"] = "vie",
+ ["terminological"] = "vie",
+ ["context"] = "vn",
+ ["opentype"] = "vit",
+ ["variant"] = "vi",
+ },
+ {
+ ["description"] = "Chinese, simplified",
+ ["script"] = "hans",
+ ["opentypescript"] = "hani",
+ ["bibliographical"] = "chi",
+ ["terminological"] = "zho",
+ ["context"] = "cn",
+ ["opentype"] = "zhs",
+ ["variant"] = "zh-hans",
+ },
+}
+
+data.specifications = specifications
+
+local variants = { } data.variants = variants
+local contexts = { } data.contexts = contexts
+local records = { } data.records = records
+local scripts = { } data.scripts = scripts
+local opentypes = { } data.opentypes = opentypes
+local opentypescripts = { } data.opentypescripts = opentypescripts
+
+for k=1,#specifications do
+ local specification = specifications[k]
+ local variant = specification.variant
+ if variant then
+ variants[lower(variant)] = specification
+ end
+ local opentype = specification.opentype
+ if opentype then
+ opentypes[lower(opentype)] = specification
+ end
+ local script = specification.script
+ if script then
+ scripts[lower(script)] = specification
+ end
+ local opentypescript = specification.opentypescript
+ if opentypescript then
+ opentypescripts[lower(opentypescript)] = specification
+ end
+ local context = specification.context
+ if context then
+ if type(context) == "table" then
+ for k=1,#context do
+ contexts[context[k]] = specification
+ end
+ else
+ contexts[context] = specification
+ end
+ end
+end
+
+local defaultvariant = variants["en-us"]
+
+local function get(k,key)
+ local v = rawget(variants,k) or rawget(opentypes,k) or rawget(contexts,k)
+ return v and v[key]
+end
+
+setmetatableindex(variants, function(t,k)
+ k = lower(k)
+ local v = get(k,"language") or defaultvariant.language
+ t[k] = v
+ return v
+end)
+
+setmetatableindex(opentypes, function(t,k)
+ k = lower(k)
+ local v = get(k,"opentype") or "dflt"
+ t[k] = v
+ return v
+end)
+
+setmetatableindex(opentypescripts, function(t,k)
+ k = lower(k)
+ local v = get(k,"opentypescript") or get(k,"script") or defaultvariant.opentypescript or defaultvariant.script
+ t[k] = v
+ return v
+end)
+
+setmetatableindex(contexts, function(t,k)
+ k = lower(k)
+ local v = get(k,"context") or defaultvariant.context
+ v = type(v) == "table" and v[1] or v
+ t[k] = v
+ return v
+end)
+
+setmetatableindex(records, function(t,k) -- how useful is this one?
+ k = lower(k)
+ local v = get(k) or defaultvariant
+ t[k] = v
+ return v
+end)
+
+-- print(opentypes.nl,opentypescripts.nl)
+-- print(opentypes.de,opentypescripts.de)
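
A few illustrative lookups against the tables defined above (a sketch, not part of the patched file): raw keys come straight from the specifications, while misses are resolved by the __index handlers.

local d = languages.data
print(d.variants["nl"].description)   -- "Dutch": a direct hit in the variants table
print(d.opentypescripts["zh-hans"])   -- "hani": picked up from the Chinese entry via its variant key
print(d.opentypes["unknown-tag"])     -- "dflt": the fallback for a tag that is not registered
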
diff --git a/tex/context/base/lang-frq-de.lua b/tex/context/base/lang-frq-de.lua
index 3733f39f9..4e54db2c8 100644
--- a/tex/context/base/lang-frq-de.lua
+++ b/tex/context/base/lang-frq-de.lua
@@ -1,12 +1,12 @@
-return {
- language = "de",
- source = "http://www.blankenburg.de/gat/pages/fach/info/analyse2.htm",
- frequencies = {
- [0x0061] = 6.47, [0x0062] = 1.93, [0x0063] = 2.68, [0x0064] = 4.83, [0x0065] = 17.48,
- [0x0066] = 1.65, [0x0067] = 3.06, [0x0068] = 4.23, [0x0069] = 7.73, [0x006A] = 0.27,
- [0x006B] = 1.46, [0x006C] = 3.49, [0x006D] = 2.58, [0x006E] = 9.84, [0x006F] = 2.98,
- [0x0070] = 0.96, [0x0071] = 0.02, [0x0072] = 7.54, [0x0073] = 6.83, [0x0074] = 6.13,
- [0x0075] = 4.17, [0x0076] = 0.94, [0x0077] = 1.48, [0x0078] = 0.04, [0x0079] = 0.08,
- [0x007A] = 1.14,
- }
-}
+return {
+ language = "de",
+ source = "http://www.blankenburg.de/gat/pages/fach/info/analyse2.htm",
+ frequencies = {
+ [0x0061] = 6.47, [0x0062] = 1.93, [0x0063] = 2.68, [0x0064] = 4.83, [0x0065] = 17.48,
+ [0x0066] = 1.65, [0x0067] = 3.06, [0x0068] = 4.23, [0x0069] = 7.73, [0x006A] = 0.27,
+ [0x006B] = 1.46, [0x006C] = 3.49, [0x006D] = 2.58, [0x006E] = 9.84, [0x006F] = 2.98,
+ [0x0070] = 0.96, [0x0071] = 0.02, [0x0072] = 7.54, [0x0073] = 6.83, [0x0074] = 6.13,
+ [0x0075] = 4.17, [0x0076] = 0.94, [0x0077] = 1.48, [0x0078] = 0.04, [0x0079] = 0.08,
+ [0x007A] = 1.14,
+ }
+}
diff --git a/tex/context/base/lang-frq-en.lua b/tex/context/base/lang-frq-en.lua
index 9e18d7166..ee122c9da 100644
--- a/tex/context/base/lang-frq-en.lua
+++ b/tex/context/base/lang-frq-en.lua
@@ -1,26 +1,26 @@
--- return {
--- language = "en",
--- source = "http://caislab.icu.ac.kr/course/2001/spring/ice605/down/010306.pdf",
--- frequencies = {
--- [0x0061] = 8.2, [0x0062] = 1.5, [0x0063] = 2.8, [0x0064] = 4.3, [0x0065] = 12.7,
--- [0x0066] = 2.2, [0x0067] = 2.0, [0x0068] = 6.1, [0x0069] = 7.0, [0x006A] = 0.2,
--- [0x006B] = 0.8, [0x006C] = 4.0, [0x006D] = 2.4, [0x006E] = 6.7, [0x006F] = 7.5,
--- [0x0070] = 1.9, [0x0071] = 0.1, [0x0072] = 6.0, [0x0073] = 6.3, [0x0074] = 9.1,
--- [0x0075] = 2.8, [0x0076] = 1.0, [0x0077] = 2.3, [0x0078] = 0.1, [0x0079] = 2.0,
--- [0x007A] = 0.1,
--- }
--- }
-
-return {
- language = "en",
- source = "http://www.blankenburg.de/gat/pages/fach/info/analyse2.htm",
- frequencies = {
- [0x0061] = 8.04, [0x0062] = 1.54, [0x0063] = 3.06, [0x0064] = 3.99, [0x0065] = 12.51,
- [0x0066] = 2.30, [0x0067] = 1.96, [0x0068] = 5.49, [0x0069] = 7.26, [0x006A] = 0.16,
- [0x006B] = 0.67, [0x006C] = 4.14, [0x006D] = 2.53, [0x006E] = 7.09, [0x006F] = 7.60,
- [0x0070] = 2.00, [0x0071] = 0.11, [0x0072] = 6.12, [0x0073] = 6.54, [0x0074] = 9.25,
- [0x0075] = 2.71, [0x0076] = 0.99, [0x0077] = 1.92, [0x0078] = 0.19, [0x0079] = 1.73,
- [0x007A] = 0.09,
- }
-}
-
+-- return {
+-- language = "en",
+-- source = "http://caislab.icu.ac.kr/course/2001/spring/ice605/down/010306.pdf",
+-- frequencies = {
+-- [0x0061] = 8.2, [0x0062] = 1.5, [0x0063] = 2.8, [0x0064] = 4.3, [0x0065] = 12.7,
+-- [0x0066] = 2.2, [0x0067] = 2.0, [0x0068] = 6.1, [0x0069] = 7.0, [0x006A] = 0.2,
+-- [0x006B] = 0.8, [0x006C] = 4.0, [0x006D] = 2.4, [0x006E] = 6.7, [0x006F] = 7.5,
+-- [0x0070] = 1.9, [0x0071] = 0.1, [0x0072] = 6.0, [0x0073] = 6.3, [0x0074] = 9.1,
+-- [0x0075] = 2.8, [0x0076] = 1.0, [0x0077] = 2.3, [0x0078] = 0.1, [0x0079] = 2.0,
+-- [0x007A] = 0.1,
+-- }
+-- }
+
+return {
+ language = "en",
+ source = "http://www.blankenburg.de/gat/pages/fach/info/analyse2.htm",
+ frequencies = {
+ [0x0061] = 8.04, [0x0062] = 1.54, [0x0063] = 3.06, [0x0064] = 3.99, [0x0065] = 12.51,
+ [0x0066] = 2.30, [0x0067] = 1.96, [0x0068] = 5.49, [0x0069] = 7.26, [0x006A] = 0.16,
+ [0x006B] = 0.67, [0x006C] = 4.14, [0x006D] = 2.53, [0x006E] = 7.09, [0x006F] = 7.60,
+ [0x0070] = 2.00, [0x0071] = 0.11, [0x0072] = 6.12, [0x0073] = 6.54, [0x0074] = 9.25,
+ [0x0075] = 2.71, [0x0076] = 0.99, [0x0077] = 1.92, [0x0078] = 0.19, [0x0079] = 1.73,
+ [0x007A] = 0.09,
+ }
+}
+
diff --git a/tex/context/base/lang-frq-nl.lua b/tex/context/base/lang-frq-nl.lua
index 7b640b779..fa4851e63 100644
--- a/tex/context/base/lang-frq-nl.lua
+++ b/tex/context/base/lang-frq-nl.lua
@@ -1,12 +1,12 @@
-return {
- language = "nl",
- source = "http://www.onzetaal.nl/advies/letterfreq.html",
- frequencies = {
- [0x0061] = 7.47, [0x0062] = 1.58, [0x0063] = 1.24, [0x0064] = 5.93, [0x0065] = 18.91,
- [0x0066] = 0.81, [0x0067] = 3.40, [0x0068] = 2.38, [0x0069] = 6.50, [0x006A] = 1.46,
- [0x006B] = 2.25, [0x006C] = 3.57, [0x006D] = 2.21, [0x006E] = 10.03, [0x006F] = 6.06,
- [0x0070] = 1.57, [0x0071] = 0.009, [0x0072] = 6.41, [0x0073] = 3.73, [0x0074] = 6.79,
- [0x0075] = 1.99, [0x0076] = 2.85, [0x0077] = 1.52, [0x0078] = 0.04, [0x0079] = 0.035,
- [0x007A] = 1.39,
- }
-}
+return {
+ language = "nl",
+ source = "http://www.onzetaal.nl/advies/letterfreq.html",
+ frequencies = {
+ [0x0061] = 7.47, [0x0062] = 1.58, [0x0063] = 1.24, [0x0064] = 5.93, [0x0065] = 18.91,
+ [0x0066] = 0.81, [0x0067] = 3.40, [0x0068] = 2.38, [0x0069] = 6.50, [0x006A] = 1.46,
+ [0x006B] = 2.25, [0x006C] = 3.57, [0x006D] = 2.21, [0x006E] = 10.03, [0x006F] = 6.06,
+ [0x0070] = 1.57, [0x0071] = 0.009, [0x0072] = 6.41, [0x0073] = 3.73, [0x0074] = 6.79,
+ [0x0075] = 1.99, [0x0076] = 2.85, [0x0077] = 1.52, [0x0078] = 0.04, [0x0079] = 0.035,
+ [0x007A] = 1.39,
+ }
+}
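
As a quick illustration of how such a frequency table can be used (a sketch only; the dofile path is an assumption and loading would normally go through the resolver), here the letters of the English table are ranked by frequency:

-- sketch: list the five most frequent letters in the English table above;
-- the code points are plain ASCII, so string.char is enough here
local frq = dofile("lang-frq-en.lua") -- assumed to be on the current path
local sorted = { }
for codepoint, percentage in pairs(frq.frequencies) do
    sorted[#sorted+1] = { codepoint = codepoint, percentage = percentage }
end
table.sort(sorted, function(a,b) return a.percentage > b.percentage end)
for i=1,5 do
    local entry = sorted[i]
    print(string.format("%s %5.2f%%", string.char(entry.codepoint), entry.percentage))
end
-- prints e, t, a, o and i for the table above
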
diff --git a/tex/context/base/lang-ini.lua b/tex/context/base/lang-ini.lua
index 4ae7656d3..b5bdfd894 100644
--- a/tex/context/base/lang-ini.lua
+++ b/tex/context/base/lang-ini.lua
@@ -1,355 +1,376 @@
-if not modules then modules = { } end modules ['lang-ini'] = {
- version = 1.001,
- comment = "companion to lang-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- needs a cleanup (share locals)
--- discard language when redefined
-
--- 002D : hyphen-minus (ascii)
--- 2010 : hyphen
--- 2011 : nonbreakable hyphen
--- 2013 : endash (compound hyphen)
-
---~ lang:hyphenation(string) string = lang:hyphenation() lang:clear_hyphenation()
-
-local type, tonumber = type, tonumber
-local utfbyte = utf.byte
-local format, gsub = string.format, string.gsub
-local concat, sortedkeys, sortedpairs = table.concat, table.sortedkeys, table.sortedpairs
-local lpegmatch = lpeg.match
-
-local settings_to_array = utilities.parsers.settings_to_array
-
-local trace_patterns = false trackers.register("languages.patterns", function(v) trace_patterns = v end)
-
-local report_initialization = logs.reporter("languages","initialization")
-
-local prehyphenchar = lang.prehyphenchar -- global per language
-local posthyphenchar = lang.posthyphenchar -- global per language
-local lefthyphenmin = lang.lefthyphenmin
-local righthyphenmin = lang.righthyphenmin
-
-local lang = lang
-lang.exceptions = lang.hyphenation
-local new_langage = lang.new
-
-languages = languages or {}
-local languages = languages
-
-languages.version = 1.010
-
-languages.registered = languages.registered or { }
-local registered = languages.registered
-
-languages.associated = languages.associated or { }
-local associated = languages.associated
-
-languages.numbers = languages.numbers or { }
-local numbers = languages.numbers
-
-languages.data = languages.data or { }
-local data = languages.data
-
-storage.register("languages/numbers", numbers, "languages.numbers")
-storage.register("languages/registered",registered,"languages.registered")
-storage.register("languages/associated",associated,"languages.associated")
-storage.register("languages/data", data, "languages.data")
-
-local nofloaded = 0
-
-local function resolve(tag)
- local data, instance = registered[tag], nil
- if data then
- instance = data.instance
- if not instance then
- instance = new_langage(data.number)
- data.instance = instance
- end
- end
- return data, instance
-end
-
-local function tolang(what) -- returns lang object
- local tag = numbers[what]
- local data = tag and registered[tag] or registered[what]
- if data then
- local instance = data.lang
- if not instance then
- instance = new_langage(data.number)
- data.instance = instance
- end
- return instance
- end
-end
-
--- languages.tolang = tolang
-
--- patterns=en
--- patterns=en,de
-
-local function loaddefinitions(tag,specification)
- statistics.starttiming(languages)
- local data, instance = resolve(tag)
- local definitions = settings_to_array(specification.patterns or "")
- if #definitions > 0 then
- if trace_patterns then
- report_initialization("pattern specification for language %a: %s",tag,specification.patterns)
- end
- local dataused, ok = data.used, false
- for i=1,#definitions do
- local definition = definitions[i]
- if definition == "" then
- -- error
- elseif definition == "reset" then -- interfaces.variables.reset
- if trace_patterns then
- report_initialization("clearing patterns for language %a",tag)
- end
- instance:clear_patterns()
- elseif not dataused[definition] then
- dataused[definition] = definition
- local filename = "lang-" .. definition .. ".lua"
- local fullname = resolvers.findfile(filename) or ""
- if fullname ~= "" then
- if trace_patterns then
- report_initialization("loading definition %a for language %a from %a",definition,tag,fullname)
- end
- local defs = dofile(fullname) -- use regular loader instead
- if defs then -- todo: version test
- ok, nofloaded = true, nofloaded + 1
- instance:patterns (defs.patterns and defs.patterns .data or "")
- instance:hyphenation(defs.exceptions and defs.exceptions.data or "")
- else
- report_initialization("invalid definition %a for language %a in %a",definition,tag,filename)
- end
- elseif trace_patterns then
- report_initialization("invalid definition %a for language %a in %a",definition,tag,filename)
- end
- elseif trace_patterns then
- report_initialization("definition %a for language %a already loaded",definition,tag)
- end
- end
- return ok
- elseif trace_patterns then
- report_initialization("no definitions for language %a",tag)
- end
- statistics.stoptiming(languages)
-end
-
-storage.shared.noflanguages = storage.shared.noflanguages or 0
-
-local noflanguages = storage.shared.noflanguages
-
-function languages.define(tag,parent)
- noflanguages = noflanguages + 1
- if trace_patterns then
- report_initialization("assigning number %a to %a",noflanguages,tag)
- end
- numbers[noflanguages] = tag
- registered[tag] = {
- tag = tag,
- parent = parent or "",
- patterns = "",
- loaded = false,
- used = { },
- dirty = true,
- number = noflanguages,
- instance = nil, -- luatex data structure
- synonyms = { },
- }
- storage.shared.noflanguages = noflanguages
-end
-
-function languages.setsynonym(synonym,tag) -- convenience function
- local l = registered[tag]
- if l then
- l.synonyms[synonym] = true -- maybe some day more info
- end
-end
-
-function languages.installed(separator)
- return concat(sortedkeys(registered),separator or ",")
-end
-
-function languages.current(n)
- return numbers[n and tonumber(n) or tex.language]
-end
-
-function languages.associate(tag,script,language) -- not yet used
- associated[tag] = { script, language }
-end
-
-function languages.association(tag) -- not yet used
- if type(tag) == "number" then
- tag = numbers[tag]
- end
- local lat = tag and associated[tag]
- if lat then
- return lat[1], lat[2]
- end
-end
-
-function languages.loadable(tag,defaultlanguage) -- hack
- local l = registered[tag] -- no synonyms
- if l and resolvers.findfile("lang-"..l.patterns..".lua") then
- return true
- else
- return false
- end
-end
-
--- a bit messy, we will do all language setting in lua as we can now assign
--- and 'patterns' will go away here.
-
-function languages.unload(tag)
- local l = registered[tag]
- if l then
- l.dirty = true
- end
-end
-
-if environment.initex then
-
- function languages.getnumber()
- return 0
- end
-
-else
-
- function languages.getnumber(tag,default,patterns)
- local l = registered[tag]
- if l then
- if l.dirty then
- if trace_patterns then
- report_initialization("checking patterns for %a with default %a",tag,default)
- end
- -- patterns is already resolved to parent patterns if applicable
- if patterns and patterns ~= "" then
- if l.patterns ~= patterns then
- l.patterns = patterns
- if trace_patterns then
- report_initialization("loading patterns for %a using specification %a",tag,patterns)
- end
- loaddefinitions(tag,l)
- else
- -- unchanged
- end
- elseif l.patterns == "" then
- l.patterns = tag
- if trace_patterns then
- report_initialization("loading patterns for %a using tag",tag)
- end
- local ok = loaddefinitions(tag,l)
- if not ok and tag ~= default then
- l.patterns = default
- if trace_patterns then
- report_initialization("loading patterns for %a using default",tag)
- end
- loaddefinitions(tag,l)
- end
- end
- l.loaded = true
- l.dirty = false
- end
- return l.number
- else
- return 0
- end
- end
-
-end
-
--- not that usefull, global values
-
-function languages.prehyphenchar (what) return prehyphenchar (tolang(what)) end
-function languages.posthyphenchar(what) return posthyphenchar(tolang(what)) end
-function languages.lefthyphenmin (what) return lefthyphenmin (tolang(what)) end
-function languages.righthyphenmin(what) return righthyphenmin(tolang(what)) end
-
--- e['implementer']= 'imple{m}{-}{-}menter'
--- e['manual'] = 'man{}{}{}'
--- e['as'] = 'a-s'
--- e['user-friendly'] = 'user=friend-ly'
--- e['exceptionally-friendly'] = 'excep-tionally=friend-ly'
-
-function languages.loadwords(tag,filename)
- local data, instance = resolve(tag)
- if data then
- statistics.starttiming(languages)
- instance:hyphenation(io.loaddata(filename) or "")
- statistics.stoptiming(languages)
- end
-end
-
-function languages.setexceptions(tag,str)
- local data, instance = resolve(tag)
- if data then
- instance:hyphenation(string.strip(str)) -- we need to strip leading spaces
- end
-end
-
-function languages.hyphenate(tag,str)
- -- todo: does this still work?
- local data, instance = resolve(tag)
- if data then
- return instance:hyphenate(str)
- else
- return str
- end
-end
-
--- hyphenation.define ("zerolanguage")
--- hyphenation.loadpatterns ("zerolanguage") -- else bug
--- hyphenation.loadexceptions("zerolanguage") -- else bug
-
-languages.logger = languages.logger or { }
-
-function languages.logger.report()
- local result, r = { }, 0
- for tag, l in sortedpairs(registered) do
- if l.loaded then
- r = r + 1
- result[r] = format("%s:%s:%s",tag,l.parent,l.number)
- end
- end
- return r > 0 and concat(result," ") or "none"
-end
-
--- must happen at the tex end .. will use lang-def.lua
-
-languages.associate('en','latn','eng')
-languages.associate('uk','latn','eng')
-languages.associate('nl','latn','nld')
-languages.associate('de','latn','deu')
-languages.associate('fr','latn','fra')
-
-statistics.register("loaded patterns", function()
- local result = languages.logger.report()
- if result ~= "none" then
- return result
- end
-end)
-
-statistics.register("language load time", function()
- return statistics.elapsedseconds(languages, format(", nofpatterns: %s",nofloaded))
-end)
-
--- interface
-
-local getnumber = languages.getnumber
-
-function commands.languagenumber(tag,default,patterns)
- context(getnumber(tag,default,patterns))
-end
-
-function commands.installedlanguages(separator)
- context(languages.installed(separator))
-end
-
-commands.definelanguage = languages.define
-commands.setlanguagesynonym = languages.setsynonym
-commands.unloadlanguage = languages.unload
-commands.setlanguageexceptions = languages.setexceptions
+if not modules then modules = { } end modules ['lang-ini'] = {
+ version = 1.001,
+ comment = "companion to lang-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- needs a cleanup (share locals)
+-- discard language when redefined
+
+-- 002D : hyphen-minus (ascii)
+-- 2010 : hyphen
+-- 2011 : nonbreakable hyphen
+-- 2013 : endash (compound hyphen)
+
+--~ lang:hyphenation(string) string = lang:hyphenation() lang:clear_hyphenation()
+
+local type, tonumber = type, tonumber
+local utfbyte = utf.byte
+local format, gsub = string.format, string.gsub
+local concat, sortedkeys, sortedpairs = table.concat, table.sortedkeys, table.sortedpairs
+local lpegmatch = lpeg.match
+
+local settings_to_array = utilities.parsers.settings_to_array
+
+local trace_patterns = false trackers.register("languages.patterns", function(v) trace_patterns = v end)
+
+local report_initialization = logs.reporter("languages","initialization")
+
+local prehyphenchar = lang.prehyphenchar -- global per language
+local posthyphenchar = lang.posthyphenchar -- global per language
+local lefthyphenmin = lang.lefthyphenmin
+local righthyphenmin = lang.righthyphenmin
+
+local lang = lang
+lang.exceptions = lang.hyphenation
+local new_langage = lang.new
+
+languages = languages or {}
+local languages = languages
+
+languages.version = 1.010
+
+languages.registered = languages.registered or { }
+local registered = languages.registered
+
+languages.associated = languages.associated or { }
+local associated = languages.associated
+
+languages.numbers = languages.numbers or { }
+local numbers = languages.numbers
+
+languages.data = languages.data or { }
+local data = languages.data
+
+storage.register("languages/numbers", numbers, "languages.numbers")
+storage.register("languages/registered",registered,"languages.registered")
+storage.register("languages/associated",associated,"languages.associated")
+storage.register("languages/data", data, "languages.data")
+
+local nofloaded = 0
+
+local function resolve(tag)
+ local data, instance = registered[tag], nil
+ if data then
+ instance = data.instance
+ if not instance then
+ instance = new_langage(data.number)
+ data.instance = instance
+ end
+ end
+ return data, instance
+end
+
+local function tolang(what) -- returns lang object
+ local tag = numbers[what]
+ local data = tag and registered[tag] or registered[what]
+ if data then
+        local instance = data.instance
+ if not instance then
+ instance = new_langage(data.number)
+ data.instance = instance
+ end
+ return instance
+ end
+end
+
+-- languages.tolang = tolang
+
+-- patterns=en
+-- patterns=en,de
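+--
+-- a hedged sketch (not taken from the sources): such a specification is split
+-- on commas by loaddefinitions below, and each entry "xx" is resolved to a
+-- file "lang-xx.lua" via resolvers.findfile, for instance:
+--
+-- languages.define("nl-mixed")                 -- hypothetical tag, for illustration only
+-- languages.getnumber("nl-mixed","en","nl,en") -- loads lang-nl.lua and lang-en.lua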
+
+local function validdata(dataset,what,tag)
+ if dataset then
+ local data = dataset.data
+ if not data or data == "" then
+ return nil
+ elseif dataset.compression == "zlib" then
+ data = zlib.decompress(data)
+ if dataset.length and dataset.length ~= #data then
+                report_initialization("compression error in %a for language %a",what,tag)
+ end
+ return data
+ else
+ return data
+ end
+ end
+end
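+
+-- an assumed sketch of what such a lang-*.lua file returns, derived from the
+-- accesses in validdata above (not a formal specification):
+--
+-- return {
+--     patterns = {
+--         data        = ".ach4 .ad4der .af1t", -- space separated patterns
+--         compression = "zlib",                -- optional
+--         length      = 12345,                 -- uncompressed size when compressed
+--     },
+--     exceptions = {
+--         data = "as-so-ciate as-so-ciates",
+--     },
+-- }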
+
+local function loaddefinitions(tag,specification)
+ statistics.starttiming(languages)
+ local data, instance = resolve(tag)
+ local definitions = settings_to_array(specification.patterns or "")
+ if #definitions > 0 then
+ if trace_patterns then
+ report_initialization("pattern specification for language %a: %s",tag,specification.patterns)
+ end
+ local dataused, ok = data.used, false
+ for i=1,#definitions do
+ local definition = definitions[i]
+ if definition == "" then
+ -- error
+ elseif definition == "reset" then -- interfaces.variables.reset
+ if trace_patterns then
+ report_initialization("clearing patterns for language %a",tag)
+ end
+ instance:clear_patterns()
+ elseif not dataused[definition] then
+ dataused[definition] = definition
+ local filename = "lang-" .. definition .. ".lua"
+ local fullname = resolvers.findfile(filename) or ""
+ if fullname ~= "" then
+ if trace_patterns then
+ report_initialization("loading definition %a for language %a from %a",definition,tag,fullname)
+ end
+ local defs = dofile(fullname) -- use regular loader instead
+ if defs then -- todo: version test
+ ok, nofloaded = true, nofloaded + 1
+ -- instance:patterns (defs.patterns and defs.patterns .data or "")
+ -- instance:hyphenation(defs.exceptions and defs.exceptions.data or "")
+ instance:patterns (validdata(defs.patterns, "patterns", tag) or "")
+ instance:hyphenation(validdata(defs.exceptions,"exceptions",tag) or "")
+ else
+ report_initialization("invalid definition %a for language %a in %a",definition,tag,filename)
+ end
+ elseif trace_patterns then
+                    report_initialization("definition %a for language %a not found in %a",definition,tag,filename)
+ end
+ elseif trace_patterns then
+ report_initialization("definition %a for language %a already loaded",definition,tag)
+ end
+ end
+        statistics.stoptiming(languages)
+        return ok
+ elseif trace_patterns then
+ report_initialization("no definitions for language %a",tag)
+ end
+ statistics.stoptiming(languages)
+end
+
+storage.shared.noflanguages = storage.shared.noflanguages or 0
+
+local noflanguages = storage.shared.noflanguages
+
+function languages.define(tag,parent)
+ noflanguages = noflanguages + 1
+ if trace_patterns then
+ report_initialization("assigning number %a to %a",noflanguages,tag)
+ end
+ numbers[noflanguages] = tag
+ registered[tag] = {
+ tag = tag,
+ parent = parent or "",
+ patterns = "",
+ loaded = false,
+ used = { },
+ dirty = true,
+ number = noflanguages,
+ instance = nil, -- luatex data structure
+ synonyms = { },
+ }
+ storage.shared.noflanguages = noflanguages
+end
+
+function languages.setsynonym(synonym,tag) -- convenience function
+ local l = registered[tag]
+ if l then
+ l.synonyms[synonym] = true -- maybe some day more info
+ end
+end
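+
+-- a usage sketch, with illustrative tags:
+--
+-- languages.define("deo","de")            -- hypothetical: register "deo" with parent "de"
+-- languages.setsynonym("germanold","deo")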
+
+function languages.installed(separator)
+ return concat(sortedkeys(registered),separator or ",")
+end
+
+function languages.current(n)
+ return numbers[n and tonumber(n) or tex.language]
+end
+
+function languages.associate(tag,script,language) -- not yet used
+ associated[tag] = { script, language }
+end
+
+function languages.association(tag) -- not yet used
+ if type(tag) == "number" then
+ tag = numbers[tag]
+ end
+ local lat = tag and associated[tag]
+ if lat then
+ return lat[1], lat[2]
+ end
+end
+
+function languages.loadable(tag,defaultlanguage) -- hack
+ local l = registered[tag] -- no synonyms
+ if l and resolvers.findfile("lang-"..l.patterns..".lua") then
+ return true
+ else
+ return false
+ end
+end
+
+-- a bit messy, we will do all language setting in lua as we can now assign
+-- and 'patterns' will go away here.
+
+function languages.unload(tag)
+ local l = registered[tag]
+ if l then
+ l.dirty = true
+ end
+end
+
+if environment.initex then
+
+ function languages.getnumber()
+ return 0
+ end
+
+else
+
+ function languages.getnumber(tag,default,patterns)
+ local l = registered[tag]
+ if l then
+ if l.dirty then
+ if trace_patterns then
+ report_initialization("checking patterns for %a with default %a",tag,default)
+ end
+ -- patterns is already resolved to parent patterns if applicable
+ if patterns and patterns ~= "" then
+ if l.patterns ~= patterns then
+ l.patterns = patterns
+ if trace_patterns then
+ report_initialization("loading patterns for %a using specification %a",tag,patterns)
+ end
+ loaddefinitions(tag,l)
+ else
+ -- unchanged
+ end
+ elseif l.patterns == "" then
+ l.patterns = tag
+ if trace_patterns then
+ report_initialization("loading patterns for %a using tag",tag)
+ end
+ local ok = loaddefinitions(tag,l)
+ if not ok and tag ~= default then
+ l.patterns = default
+ if trace_patterns then
+ report_initialization("loading patterns for %a using default",tag)
+ end
+ loaddefinitions(tag,l)
+ end
+ end
+ l.loaded = true
+ l.dirty = false
+ end
+ return l.number
+ else
+ return 0
+ end
+ end
+
+end
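+
+-- a minimal sketch of how the resolver above is typically used, assuming the
+-- tag was registered earlier with languages.define (tag and pattern set are
+-- just examples); the first call loads the pattern files, later calls return
+-- the cached number:
+--
+-- local n = languages.getnumber("nl","en","nl")
+-- tex.language = n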
+
+-- not that useful, global values
+
+function languages.prehyphenchar (what) return prehyphenchar (tolang(what)) end
+function languages.posthyphenchar(what) return posthyphenchar(tolang(what)) end
+function languages.lefthyphenmin (what) return lefthyphenmin (tolang(what)) end
+function languages.righthyphenmin(what) return righthyphenmin(tolang(what)) end
+
+-- e['implementer']= 'imple{m}{-}{-}menter'
+-- e['manual'] = 'man{}{}{}'
+-- e['as'] = 'a-s'
+-- e['user-friendly'] = 'user=friend-ly'
+-- e['exceptionally-friendly'] = 'excep-tionally=friend-ly'
+
+function languages.loadwords(tag,filename)
+ local data, instance = resolve(tag)
+ if data then
+ statistics.starttiming(languages)
+ instance:hyphenation(io.loaddata(filename) or "")
+ statistics.stoptiming(languages)
+ end
+end
+
+function languages.setexceptions(tag,str)
+ local data, instance = resolve(tag)
+ if data then
+ instance:hyphenation(string.strip(str)) -- we need to strip leading spaces
+ end
+end
+
+function languages.hyphenate(tag,str)
+ -- todo: does this still work?
+ local data, instance = resolve(tag)
+ if data then
+ return instance:hyphenate(str)
+ else
+ return str
+ end
+end
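+
+-- a rough usage sketch (file name and words are made up); note that loadwords
+-- feeds the file content to the exception mechanism:
+--
+-- languages.loadwords("en","myproject-words.txt")
+-- languages.setexceptions("en","ta-ble ta-bles")
+-- languages.hyphenate("en","tables") -- see the todo above: possibly no longer supported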
+
+-- hyphenation.define ("zerolanguage")
+-- hyphenation.loadpatterns ("zerolanguage") -- else bug
+-- hyphenation.loadexceptions("zerolanguage") -- else bug
+
+languages.logger = languages.logger or { }
+
+function languages.logger.report()
+ local result, r = { }, 0
+ for tag, l in sortedpairs(registered) do
+ if l.loaded then
+ r = r + 1
+ result[r] = format("%s:%s:%s",tag,l.parent,l.number)
+ end
+ end
+ return r > 0 and concat(result," ") or "none"
+end
+
+-- must happen at the tex end .. will use lang-def.lua
+
+languages.associate('en','latn','eng')
+languages.associate('uk','latn','eng')
+languages.associate('nl','latn','nld')
+languages.associate('de','latn','deu')
+languages.associate('fr','latn','fra')
+
+statistics.register("loaded patterns", function()
+ local result = languages.logger.report()
+ if result ~= "none" then
+-- return result
+ return format("%s, load time: %s",result,statistics.elapsedtime(languages))
+ end
+end)
+
+-- statistics.register("language load time", function()
+-- -- often zero so we can merge that in the above
+-- return statistics.elapsedseconds(languages, format(", nofpatterns: %s",nofloaded))
+-- end)
+
+-- interface
+
+local getnumber = languages.getnumber
+
+function commands.languagenumber(tag,default,patterns)
+ context(getnumber(tag,default,patterns))
+end
+
+function commands.installedlanguages(separator)
+ context(languages.installed(separator))
+end
+
+commands.definelanguage = languages.define
+commands.setlanguagesynonym = languages.setsynonym
+commands.unloadlanguage = languages.unload
+commands.setlanguageexceptions = languages.setexceptions
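+
+-- a hedged sketch of a direct call into this interface (tags are examples);
+-- the resolved number ends up in the TeX input stream via context():
+--
+-- commands.languagenumber("nl","en","nl")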
diff --git a/tex/context/base/lang-lab.lua b/tex/context/base/lang-lab.lua
index 91c258418..c83cd8bc8 100644
--- a/tex/context/base/lang-lab.lua
+++ b/tex/context/base/lang-lab.lua
@@ -1,142 +1,142 @@
-if not modules then modules = { } end modules ['lang-lab'] = {
- version = 1.001,
- comment = "companion to lang-lab.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format, find = string.format, string.find
-local next, rawget, type = next, rawget, type
-local lpegmatch = lpeg.match
-local formatters = string.formatters
-
-local prtcatcodes = catcodes.numbers.prtcatcodes -- todo: use different method
-
-local trace_labels = false trackers.register("languages.labels", function(v) trace_labels = v end)
-local report_labels = logs.reporter("languages","labels")
-
-languages.labels = languages.labels or { }
-local labels = languages.labels
-
-local variables = interfaces.variables
-local settings_to_array = utilities.parsers.settings_to_array
-
-local splitter = lpeg.splitat(":")
-
-local function split(tag)
- return lpegmatch(splitter,tag)
-end
-
-labels.split = split
-
-local contextsprint = context.sprint
-
-local function definelanguagelabels(data,class,tag,rawtag)
- for language, text in next, data.labels do
- if text == "" then
- -- skip
- elseif type(text) == "table" then
- contextsprint(prtcatcodes,"\\setlabeltextpair{",class,"}{",language,"}{",tag,"}{",text[1],"}{",text[2],"}")
- if trace_labels then
- report_labels("language %a, defining label %a as %a and %a",language,rawtag,text[1],text[2])
- end
- else
- contextsprint(prtcatcodes,"\\setlabeltextpair{",class,"}{",language,"}{",tag,"}{",text,"}{}")
- if trace_labels then
- report_labels("language %a, defining label %a as %a",language,rawtag,text)
- end
- end
- end
-end
-
-function labels.define(class,name,prefixed)
- local list = languages.data.labels[name]
- if list then
- report_labels("defining label set %a",name)
- for tag, data in next, list do
- if data.hidden then
- -- skip
- elseif prefixed then
- local first, second = lpegmatch(splitter,tag)
- if second then
- if rawget(variables,first) then
- if rawget(variables,second) then
- definelanguagelabels(data,class,formatters["\\v!%s:\\v!%s"](first,second),tag)
- else
- definelanguagelabels(data,class,formatters["\\v!%s:%s"](first,second),tag)
- end
- elseif rawget(variables,second) then
- definelanguagelabels(data,class,formatters["%s:\\v!%s"](first,second),tag)
- else
- definelanguagelabels(data,class,formatters["%s:%s"](first,second),tag)
- end
- elseif rawget(variables,rawtag) then
- definelanguagelabels(data,class,formatters["\\v!%s"](tag),tag)
- else
- definelanguagelabels(data,class,tag,tag)
- end
- else
- definelanguagelabels(data,class,tag,tag)
- end
- end
- else
- report_labels("unknown label set %a",name)
- end
-end
-
--- function labels.check()
--- for category, list in next, languages.data.labels do
--- for tag, specification in next, list do
--- for language, text in next, specification.labels do
--- if type(text) == "string" and find(text,",") then
--- report_labels("warning: label with comma found, category %a, language %a, tag %a, text %a",
--- category, language, tag, text)
--- end
--- end
--- end
--- end
--- end
---
--- labels.check()
-
--- interface
-
-commands.definelabels = labels.define
-
--- function commands.setstrippedtextprefix(str)
--- context(string.strip(str))
--- end
-
--- list : { "a", "b", "c" }
--- separator : ", "
--- last : " and "
-
--- text : "a,b,c"
--- separators : "{, },{ and }"
-
-function commands.concatcommalist(settings) -- it's too easy to forget that this one is there
- local list = settings.list or settings_to_array(settings.text or "")
- local size = #list
- local command = settings.command and context[settings.command] or context
- if size > 1 then
- local separator, last = " ", " "
- if settings.separators then
- local set = settings_to_array(settings.separators)
- separator = set[1] or settings.separator or separator
- last = set[2] or settings.last or last
- else
- separator = settings.separator or separator
- last = settings.last or last
- end
- command(list[1])
- for i=2,size-1 do
- context(separator)
- command(list[i])
- end
- context(last)
- end
- if size > 0 then
- command(list[size])
- end
-end
+if not modules then modules = { } end modules ['lang-lab'] = {
+ version = 1.001,
+ comment = "companion to lang-lab.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format, find = string.format, string.find
+local next, rawget, type = next, rawget, type
+local lpegmatch = lpeg.match
+local formatters = string.formatters
+
+local prtcatcodes = catcodes.numbers.prtcatcodes -- todo: use different method
+
+local trace_labels = false trackers.register("languages.labels", function(v) trace_labels = v end)
+local report_labels = logs.reporter("languages","labels")
+
+languages.labels = languages.labels or { }
+local labels = languages.labels
+
+local variables = interfaces.variables
+local settings_to_array = utilities.parsers.settings_to_array
+
+local splitter = lpeg.splitat(":")
+
+local function split(tag)
+ return lpegmatch(splitter,tag)
+end
+
+labels.split = split
+
+local contextsprint = context.sprint
+
+local function definelanguagelabels(data,class,tag,rawtag)
+ for language, text in next, data.labels do
+ if text == "" then
+ -- skip
+ elseif type(text) == "table" then
+ contextsprint(prtcatcodes,"\\setlabeltextpair{",class,"}{",language,"}{",tag,"}{",text[1],"}{",text[2],"}")
+ if trace_labels then
+ report_labels("language %a, defining label %a as %a and %a",language,rawtag,text[1],text[2])
+ end
+ else
+ contextsprint(prtcatcodes,"\\setlabeltextpair{",class,"}{",language,"}{",tag,"}{",text,"}{}")
+ if trace_labels then
+ report_labels("language %a, defining label %a as %a",language,rawtag,text)
+ end
+ end
+ end
+end
+
+function labels.define(class,name,prefixed)
+ local list = languages.data.labels[name]
+ if list then
+ report_labels("defining label set %a",name)
+ for tag, data in next, list do
+ if data.hidden then
+ -- skip
+ elseif prefixed then
+ local first, second = lpegmatch(splitter,tag)
+ if second then
+ if rawget(variables,first) then
+ if rawget(variables,second) then
+ definelanguagelabels(data,class,formatters["\\v!%s:\\v!%s"](first,second),tag)
+ else
+ definelanguagelabels(data,class,formatters["\\v!%s:%s"](first,second),tag)
+ end
+ elseif rawget(variables,second) then
+ definelanguagelabels(data,class,formatters["%s:\\v!%s"](first,second),tag)
+ else
+ definelanguagelabels(data,class,formatters["%s:%s"](first,second),tag)
+ end
+                elseif rawget(variables,tag) then
+ definelanguagelabels(data,class,formatters["\\v!%s"](tag),tag)
+ else
+ definelanguagelabels(data,class,tag,tag)
+ end
+ else
+ definelanguagelabels(data,class,tag,tag)
+ end
+ end
+ else
+ report_labels("unknown label set %a",name)
+ end
+end
+
+-- function labels.check()
+-- for category, list in next, languages.data.labels do
+-- for tag, specification in next, list do
+-- for language, text in next, specification.labels do
+-- if type(text) == "string" and find(text,",") then
+-- report_labels("warning: label with comma found, category %a, language %a, tag %a, text %a",
+-- category, language, tag, text)
+-- end
+-- end
+-- end
+-- end
+-- end
+--
+-- labels.check()
+
+-- interface
+
+commands.definelabels = labels.define
+
+-- function commands.setstrippedtextprefix(str)
+-- context(string.strip(str))
+-- end
+
+-- list : { "a", "b", "c" }
+-- separator : ", "
+-- last : " and "
+
+-- text : "a,b,c"
+-- separators : "{, },{ and }"
+
+function commands.concatcommalist(settings) -- it's too easy to forget that this one is there
+ local list = settings.list or settings_to_array(settings.text or "")
+ local size = #list
+ local command = settings.command and context[settings.command] or context
+ if size > 1 then
+ local separator, last = " ", " "
+ if settings.separators then
+ local set = settings_to_array(settings.separators)
+ separator = set[1] or settings.separator or separator
+ last = set[2] or settings.last or last
+ else
+ separator = settings.separator or separator
+ last = settings.last or last
+ end
+ command(list[1])
+ for i=2,size-1 do
+ context(separator)
+ command(list[i])
+ end
+ context(last)
+ end
+ if size > 0 then
+ command(list[size])
+ end
+end
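+
+-- a hedged example of the settings table consumed above (values are
+-- illustrative); either 'list' or a comma separated 'text' can be given:
+--
+-- commands.concatcommalist {
+--     text       = "alpha,beta,gamma",
+--     separators = "{, },{ and }",
+-- }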
diff --git a/tex/context/base/lang-url.lua b/tex/context/base/lang-url.lua
index 35381e672..86733c876 100644
--- a/tex/context/base/lang-url.lua
+++ b/tex/context/base/lang-url.lua
@@ -1,113 +1,113 @@
-if not modules then modules = { } end modules ['lang-url'] = {
- version = 1.001,
- comment = "companion to lang-url.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local utfcharacters, utfvalues, utfbyte, utfchar = utf.characters, utf.values, utf.byte, utf.char
-
-context = context
-
-commands = commands or { }
-local commands = commands
-
---[[
-
-Hyphenating URLs is somewhat tricky and a matter of taste. I did
-consider using a dedicated hyphenation pattern or dealing with it by node
-parsing, but the following solution suits as well. After all, we're mostly
-dealing with characters.
-]]--
-
-commands.hyphenatedurl = commands.hyphenatedurl or { }
-local hyphenatedurl = commands.hyphenatedurl
-
-local characters = utilities.storage.allocate {
- ["!"] = 1,
- ["\""] = 1,
- ["#"] = 1,
- ["$"] = 1,
- ["%"] = 1,
- ["&"] = 1,
- ["("] = 1,
- ["*"] = 1,
- ["+"] = 1,
- [","] = 1,
- ["-"] = 1,
- ["."] = 1,
- ["/"] = 1,
- [":"] = 1,
- [";"] = 1,
- ["<"] = 1,
- ["="] = 1,
- [">"] = 1,
- ["?"] = 1,
- ["@"] = 1,
- ["["] = 1,
- ["\\"] = 1,
- ["^"] = 1,
- ["_"] = 1,
- ["`"] = 1,
- ["{"] = 1,
- ["|"] = 1,
- ["~"] = 1,
-
- ["'"] = 2,
- [")"] = 2,
- ["]"] = 2,
- ["}"] = 2,
-}
-
-local mapping = utilities.storage.allocate {
- -- [utfchar(0xA0)] = "~", -- nbsp (catch)
-}
-
-hyphenatedurl.characters = characters
-hyphenatedurl.mapping = mapping
-hyphenatedurl.lefthyphenmin = 2
-hyphenatedurl.righthyphenmin = 3
-hyphenatedurl.discretionary = nil
-
--- more fun is to write nodes .. maybe it's nicer to do this
--- in an attribute handler anyway
-
-local function action(hyphenatedurl,str,left,right,disc)
- local n = 0
- local b = math.max( left or hyphenatedurl.lefthyphenmin, 2)
- local e = math.min(#str-(right or hyphenatedurl.righthyphenmin)+2,#str)
- local d = disc or hyphenatedurl.discretionary
- for s in utfcharacters(str) do
- n = n + 1
- s = mapping[s] or s
- if n > 1 then
- context.s() -- can be option
- end
- if s == d then
- context.d(utfbyte(s))
- else
- local c = characters[s]
- if not c or n<=b or n>=e then
- context.n(utfbyte(s))
- elseif c == 1 then
- context.b(utfbyte(s))
- elseif c == 2 then
- context.a(utfbyte(s))
- end
- end
- end
-end
-
--- hyphenatedurl.action = function(_,...) action(...) end -- sort of obsolete
-
-table.setmetatablecall(hyphenatedurl,action) -- watch out: a caller
-
--- todo, no interface in mkiv yet
-
-function hyphenatedurl.setcharacters(str,value) -- 1, 2 == before, after
- for s in utfcharacters(str) do
- characters[s] = value or 1
- end
-end
-
--- .hyphenatedurl.setcharacters("')]}",2)
+if not modules then modules = { } end modules ['lang-url'] = {
+ version = 1.001,
+ comment = "companion to lang-url.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local utfcharacters, utfvalues, utfbyte, utfchar = utf.characters, utf.values, utf.byte, utf.char
+
+context = context
+
+commands = commands or { }
+local commands = commands
+
+--[[
+
+Hyphenating URLs is somewhat tricky and a matter of taste. I did
+consider using a dedicated hyphenation pattern or dealing with it by node
+parsing, but the following solution suits as well. After all, we're mostly
+dealing with characters.
+]]--
+
+commands.hyphenatedurl = commands.hyphenatedurl or { }
+local hyphenatedurl = commands.hyphenatedurl
+
+local characters = utilities.storage.allocate {
+ ["!"] = 1,
+ ["\""] = 1,
+ ["#"] = 1,
+ ["$"] = 1,
+ ["%"] = 1,
+ ["&"] = 1,
+ ["("] = 1,
+ ["*"] = 1,
+ ["+"] = 1,
+ [","] = 1,
+ ["-"] = 1,
+ ["."] = 1,
+ ["/"] = 1,
+ [":"] = 1,
+ [";"] = 1,
+ ["<"] = 1,
+ ["="] = 1,
+ [">"] = 1,
+ ["?"] = 1,
+ ["@"] = 1,
+ ["["] = 1,
+ ["\\"] = 1,
+ ["^"] = 1,
+ ["_"] = 1,
+ ["`"] = 1,
+ ["{"] = 1,
+ ["|"] = 1,
+ ["~"] = 1,
+
+ ["'"] = 2,
+ [")"] = 2,
+ ["]"] = 2,
+ ["}"] = 2,
+}
+
+local mapping = utilities.storage.allocate {
+ -- [utfchar(0xA0)] = "~", -- nbsp (catch)
+}
+
+hyphenatedurl.characters = characters
+hyphenatedurl.mapping = mapping
+hyphenatedurl.lefthyphenmin = 2
+hyphenatedurl.righthyphenmin = 3
+hyphenatedurl.discretionary = nil
+
+-- more fun is to write nodes .. maybe it's nicer to do this
+-- in an attribute handler anyway
+
+local function action(hyphenatedurl,str,left,right,disc)
+ local n = 0
+ local b = math.max( left or hyphenatedurl.lefthyphenmin, 2)
+ local e = math.min(#str-(right or hyphenatedurl.righthyphenmin)+2,#str)
+ local d = disc or hyphenatedurl.discretionary
+ for s in utfcharacters(str) do
+ n = n + 1
+ s = mapping[s] or s
+ if n > 1 then
+ context.s() -- can be option
+ end
+ if s == d then
+ context.d(utfbyte(s))
+ else
+ local c = characters[s]
+ if not c or n<=b or n>=e then
+ context.n(utfbyte(s))
+ elseif c == 1 then
+ context.b(utfbyte(s))
+ elseif c == 2 then
+ context.a(utfbyte(s))
+ end
+ end
+ end
+end
+
+-- hyphenatedurl.action = function(_,...) action(...) end -- sort of obsolete
+
+table.setmetatablecall(hyphenatedurl,action) -- watch out: a caller
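+
+-- thanks to the call metamethod the table itself acts as the breaker; a hedged
+-- sketch with a made up url and explicit left/right values:
+--
+-- hyphenatedurl("http://www.example.org/a/rather/long/path.html",2,3)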
+
+-- todo, no interface in mkiv yet
+
+function hyphenatedurl.setcharacters(str,value) -- 1, 2 == before, after
+ for s in utfcharacters(str) do
+ characters[s] = value or 1
+ end
+end
+
+-- hyphenatedurl.setcharacters("')]}",2)
diff --git a/tex/context/base/lang-wrd.lua b/tex/context/base/lang-wrd.lua
index 06a2311a6..6a9b39fdf 100644
--- a/tex/context/base/lang-wrd.lua
+++ b/tex/context/base/lang-wrd.lua
@@ -1,353 +1,353 @@
-if not modules then modules = { } end modules ['lang-wrd'] = {
- version = 1.001,
- comment = "companion to lang-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local lower = string.lower
-local utfchar = utf.char
-local concat = table.concat
-local lpegmatch = lpeg.match
-local P, S, Cs = lpeg.P, lpeg.S, lpeg.Cs
-
-local report_words = logs.reporter("languages","words")
-
-local nodes, node, languages = nodes, node, languages
-
-languages.words = languages.words or { }
-local words = languages.words
-
-words.data = words.data or { }
-words.enables = false
-words.threshold = 4
-
-local numbers = languages.numbers
-local registered = languages.registered
-
-local traverse_nodes = node.traverse
-local wordsdata = words.data
-local chardata = characters.data
-local tasks = nodes.tasks
-
-local unsetvalue = attributes.unsetvalue
-
-local nodecodes = nodes.nodecodes
-local kerncodes = nodes.kerncodes
-
-local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
-local kern_code = nodecodes.kern
-
-local kerning_code = kerncodes.kerning
-local lowerchar = characters.lower
-
-local a_color = attributes.private('color')
-local colist = attributes.list[a_color]
-
-local is_letter = characters.is_letter -- maybe is_character as variant
-
-local spacing = S(" \n\r\t")
-local markup = S("-=")
-local lbrace = P("{")
-local rbrace = P("}")
-local disc = (lbrace * (1-rbrace)^0 * rbrace)^1 -- or just 3 times, time this
-local word = Cs((markup/"" + disc/"" + (1-spacing))^1)
-
-local loaded = { } -- we share lists
-
-function words.load(tag,filename)
- local fullname = resolvers.findfile(filename,'other text file') or ""
- if fullname ~= "" then
- report_words("loading word file %a",fullname)
- statistics.starttiming(languages)
- local list = loaded[fullname]
- if not list then
- list = wordsdata[tag] or { }
- local parser = (spacing + word/function(s) list[s] = true end)^0
- lpegmatch(parser,io.loaddata(fullname) or "")
- loaded[fullname] = list
- end
- wordsdata[tag] = list
- statistics.stoptiming(languages)
- else
- report_words("missing word file %a",filename)
- end
-end
-
-function words.found(id, str)
- local tag = languages.numbers[id]
- if tag then
- local data = wordsdata[tag]
- if data then
- if data[str] then
- return 1
- elseif data[lower(str)] then
- return 2
- end
- end
- end
-end
-
--- The following code is an adaption of experimental code for hyphenating and
--- spell checking.
-
--- there is an n=1 problem somewhere in nested boxes
-
-local function mark_words(head,whenfound) -- can be optimized and shared
- local current, language, done = head, nil, nil, 0, false
- local str, s, nds, n = { }, 0, { }, 0 -- n could also be a table, saves calls
- local function action()
- if s > 0 then
- local word = concat(str,"",1,s)
- local mark = whenfound(language,word)
- if mark then
- done = true
- for i=1,n do
- mark(nds[i])
- end
- end
- end
- n, s = 0, 0
- end
- while current do
- local id = current.id
- if id == glyph_code then
- local a = current.lang
- if a then
- if a ~= language then
- if s > 0 then
- action()
- end
- language = a
- end
- elseif s > 0 then
- action()
- language = a
- end
- local components = current.components
- if components then
- n = n + 1
- nds[n] = current
- for g in traverse_nodes(components) do
- s = s + 1
- str[s] = utfchar(g.char)
- end
- else
- local code = current.char
- local data = chardata[code]
- if is_letter[data.category] then
- n = n + 1
- nds[n] = current
- s = s + 1
- str[s] = utfchar(code)
- elseif s > 0 then
- action()
- end
- end
- elseif id == disc_code then -- take the replace
- if n > 0 then
- n = n + 1
- nds[n] = current
- end
- elseif id == kern_code and current.subtype == kerning_code and s > 0 then
- -- ok
- elseif s > 0 then
- action()
- end
- current = current.next
- end
- if s > 0 then
- action()
- end
- return head, done
-end
-
-local methods = { }
-words.methods = methods
-
-local enablers = { }
-words.enablers = enablers
-
-local wordmethod = 1
-local enabled = false
-
-function words.check(head)
- if enabled then
- return methods[wordmethod](head)
- else
- return head, false
- end
-end
-
-function words.enable(settings)
- local method = settings.method
- wordmethod = method and tonumber(method) or wordmethod or 1
- local e = enablers[wordmethod]
- if e then e(settings) end
- tasks.enableaction("processors","languages.words.check")
- enabled = true
-end
-
-function words.disable()
- enabled = false
-end
-
--- colors
-
-local cache = { } -- can also be done with method 1 -- frozen colors once used
-
-table.setmetatableindex(cache, function(t,k) -- k == language, numbers[k] == tag
- local c
- if type(k) == "string" then
- c = colist[k]
- elseif k < 0 then
- c = colist["word:unset"]
- else
- c = colist["word:" .. (numbers[k] or "unset")] or colist["word:unknown"]
- end
- local v = c and function(n) n[a_color] = c end or false
- t[k] = v
- return v
-end)
-
--- method 1
-
-local function sweep(language,str)
- if #str < words.threshold then
- return false
- elseif words.found(language,str) then -- can become a local wordsfound
- return cache["word:yes"] -- maybe variables.yes
- else
- return cache["word:no"]
- end
-end
-
-methods[1] = function(head)
- for n in traverse_nodes(head) do
- n[a_color] = unsetvalue -- hm, not that selective (reset color)
- end
- return mark_words(head,sweep)
-end
-
--- method 2
-
-local dumpname = nil
-local dumpthem = false
-local listname = "document"
-
-local category = { }
-local categories = { }
-
-setmetatable(categories, {
- __index = function(t,k)
- local languages = { }
- setmetatable(languages, {
- __index = function(t,k)
- local r = registered[k]
- local v = {
- number = language,
- parent = r and r.parent or nil,
- patterns = r and r.patterns or nil,
- tag = r and r.tag or nil,
- list = { },
- total = 0,
- unique = 0,
- }
- t[k] = v
- return v
- end
- } )
- local v = {
- languages = languages,
- total = 0,
- }
- t[k] = v
- return v
- end
-} )
-
-local collected = {
- total = 0,
- version = 1.000,
- categories = categories,
-}
-
-enablers[2] = function(settings)
- local name = settings.list
- listname = name and name ~= "" and name or "document"
- category = collected.categories[listname]
-end
-
-local function sweep(language,str)
- if #str >= words.threshold then
- str = lowerchar(str)
- local words = category.languages[numbers[language] or "unset"]
- local list = words.list
- local ls = list[str]
- if ls then
- list[str] = ls + 1
- else
- list[str] = 1
- words.unique = words.unique + 1
- end
- collected.total = collected.total + 1
- category.total = category.total + 1
- words.total = words.total + 1
- end
-end
-
-methods[2] = function(head)
- dumpthem = true
- return mark_words(head,sweep)
-end
-
-local function dumpusedwords()
- if dumpthem then
- collected.threshold = words.threshold
- dumpname = dumpname or file.addsuffix(tex.jobname,"words")
- report_words("saving list of used words in %a",dumpname)
- io.savedata(dumpname,table.serialize(collected,true))
- -- table.tofile(dumpname,list,true)
- end
-end
-
-directives.register("languages.words.dump", function(v)
- dumpname = type(v) == "string" and v ~= "" and v
-end)
-
-luatex.registerstopactions(dumpusedwords)
-
--- method 3
-
-local function sweep(language,str)
- return cache[language]
-end
-
-methods[3] = function(head)
- for n in traverse_nodes(head) do
- n[a_color] = unsetvalue
- end
- return mark_words(head,sweep)
-end
-
--- for the moment we hook it into the attribute handler
-
---~ languagehacks = { }
-
---~ function languagehacks.process(namespace,attribute,head)
---~ return languages.check(head)
---~ end
-
---~ chars.plugins[chars.plugins+1] = {
---~ name = "language",
---~ namespace = languagehacks,
---~ processor = languagehacks.process
---~ }
-
--- interface
-
-commands.enablespellchecking = words.enable
-commands.disablespellchecking = words.disable
-commands.loadspellchecklist = words.load
+if not modules then modules = { } end modules ['lang-wrd'] = {
+ version = 1.001,
+ comment = "companion to lang-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local lower = string.lower
+local utfchar = utf.char
+local concat = table.concat
+local lpegmatch = lpeg.match
+local P, S, Cs = lpeg.P, lpeg.S, lpeg.Cs
+
+local report_words = logs.reporter("languages","words")
+
+local nodes, node, languages = nodes, node, languages
+
+languages.words = languages.words or { }
+local words = languages.words
+
+words.data = words.data or { }
+words.enables = false
+words.threshold = 4
+
+local numbers = languages.numbers
+local registered = languages.registered
+
+local traverse_nodes = node.traverse
+local wordsdata = words.data
+local chardata = characters.data
+local tasks = nodes.tasks
+
+local unsetvalue = attributes.unsetvalue
+
+local nodecodes = nodes.nodecodes
+local kerncodes = nodes.kerncodes
+
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local kern_code = nodecodes.kern
+
+local kerning_code = kerncodes.kerning
+local lowerchar = characters.lower
+
+local a_color = attributes.private('color')
+local colist = attributes.list[a_color]
+
+local is_letter = characters.is_letter -- maybe is_character as variant
+
+local spacing = S(" \n\r\t")
+local markup = S("-=")
+local lbrace = P("{")
+local rbrace = P("}")
+local disc = (lbrace * (1-rbrace)^0 * rbrace)^1 -- or just 3 times, time this
+local word = Cs((markup/"" + disc/"" + (1-spacing))^1)
+
+local loaded = { } -- we share lists
+
+function words.load(tag,filename)
+ local fullname = resolvers.findfile(filename,'other text file') or ""
+ if fullname ~= "" then
+ report_words("loading word file %a",fullname)
+ statistics.starttiming(languages)
+ local list = loaded[fullname]
+ if not list then
+ list = wordsdata[tag] or { }
+ local parser = (spacing + word/function(s) list[s] = true end)^0
+ lpegmatch(parser,io.loaddata(fullname) or "")
+ loaded[fullname] = list
+ end
+ wordsdata[tag] = list
+ statistics.stoptiming(languages)
+ else
+ report_words("missing word file %a",filename)
+ end
+end
+
+function words.found(id, str)
+ local tag = languages.numbers[id]
+ if tag then
+ local data = wordsdata[tag]
+ if data then
+ if data[str] then
+ return 1
+ elseif data[lower(str)] then
+ return 2
+ end
+ end
+ end
+end
+
+-- The following code is an adaptation of experimental code for hyphenating and
+-- spell checking.
+
+-- there is an n=1 problem somewhere in nested boxes
+
+local function mark_words(head,whenfound) -- can be optimized and shared
+    local current, language, done = head, nil, false
+ local str, s, nds, n = { }, 0, { }, 0 -- n could also be a table, saves calls
+ local function action()
+ if s > 0 then
+ local word = concat(str,"",1,s)
+ local mark = whenfound(language,word)
+ if mark then
+ done = true
+ for i=1,n do
+ mark(nds[i])
+ end
+ end
+ end
+ n, s = 0, 0
+ end
+ while current do
+ local id = current.id
+ if id == glyph_code then
+ local a = current.lang
+ if a then
+ if a ~= language then
+ if s > 0 then
+ action()
+ end
+ language = a
+ end
+ elseif s > 0 then
+ action()
+ language = a
+ end
+ local components = current.components
+ if components then
+ n = n + 1
+ nds[n] = current
+ for g in traverse_nodes(components) do
+ s = s + 1
+ str[s] = utfchar(g.char)
+ end
+ else
+ local code = current.char
+ local data = chardata[code]
+ if is_letter[data.category] then
+ n = n + 1
+ nds[n] = current
+ s = s + 1
+ str[s] = utfchar(code)
+ elseif s > 0 then
+ action()
+ end
+ end
+ elseif id == disc_code then -- take the replace
+ if n > 0 then
+ n = n + 1
+ nds[n] = current
+ end
+ elseif id == kern_code and current.subtype == kerning_code and s > 0 then
+ -- ok
+ elseif s > 0 then
+ action()
+ end
+ current = current.next
+ end
+ if s > 0 then
+ action()
+ end
+ return head, done
+end
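+
+-- the 'whenfound' callback receives (language,word) and returns either false
+-- or a function that marks every node of that word; a hedged sketch of a
+-- custom sweep in the style of the ones below:
+--
+-- local function sweep(language,word)
+--     return #word > 10 and function(n) n[a_color] = colist["word:unknown"] end or false
+-- end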
+
+local methods = { }
+words.methods = methods
+
+local enablers = { }
+words.enablers = enablers
+
+local wordmethod = 1
+local enabled = false
+
+function words.check(head)
+ if enabled then
+ return methods[wordmethod](head)
+ else
+ return head, false
+ end
+end
+
+function words.enable(settings)
+ local method = settings.method
+ wordmethod = method and tonumber(method) or wordmethod or 1
+ local e = enablers[wordmethod]
+ if e then e(settings) end
+ tasks.enableaction("processors","languages.words.check")
+ enabled = true
+end
+
+function words.disable()
+ enabled = false
+end
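+
+-- a usage sketch (settings are illustrative): method 1 colors known versus
+-- unknown words, method 2 collects used words and dumps them at the end of
+-- the run, method 3 colors by language:
+--
+-- languages.words.load("en","myproject-wordlist.txt") -- hypothetical file
+-- languages.words.enable { method = 1 }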
+
+-- colors
+
+local cache = { } -- can also be done with method 1 -- frozen colors once used
+
+table.setmetatableindex(cache, function(t,k) -- k == language, numbers[k] == tag
+ local c
+ if type(k) == "string" then
+ c = colist[k]
+ elseif k < 0 then
+ c = colist["word:unset"]
+ else
+ c = colist["word:" .. (numbers[k] or "unset")] or colist["word:unknown"]
+ end
+ local v = c and function(n) n[a_color] = c end or false
+ t[k] = v
+ return v
+end)
+
+-- method 1
+
+local function sweep(language,str)
+ if #str < words.threshold then
+ return false
+ elseif words.found(language,str) then -- can become a local wordsfound
+ return cache["word:yes"] -- maybe variables.yes
+ else
+ return cache["word:no"]
+ end
+end
+
+methods[1] = function(head)
+ for n in traverse_nodes(head) do
+ n[a_color] = unsetvalue -- hm, not that selective (reset color)
+ end
+ return mark_words(head,sweep)
+end
+
+-- method 2
+
+local dumpname = nil
+local dumpthem = false
+local listname = "document"
+
+local category = { }
+local categories = { }
+
+setmetatable(categories, {
+ __index = function(t,k)
+ local languages = { }
+ setmetatable(languages, {
+ __index = function(t,k)
+ local r = registered[k]
+ local v = {
+                    number   = r and r.number or nil,
+ parent = r and r.parent or nil,
+ patterns = r and r.patterns or nil,
+ tag = r and r.tag or nil,
+ list = { },
+ total = 0,
+ unique = 0,
+ }
+ t[k] = v
+ return v
+ end
+ } )
+ local v = {
+ languages = languages,
+ total = 0,
+ }
+ t[k] = v
+ return v
+ end
+} )
+
+local collected = {
+ total = 0,
+ version = 1.000,
+ categories = categories,
+}
+
+enablers[2] = function(settings)
+ local name = settings.list
+ listname = name and name ~= "" and name or "document"
+ category = collected.categories[listname]
+end
+
+local function sweep(language,str)
+ if #str >= words.threshold then
+ str = lowerchar(str)
+ local words = category.languages[numbers[language] or "unset"]
+ local list = words.list
+ local ls = list[str]
+ if ls then
+ list[str] = ls + 1
+ else
+ list[str] = 1
+ words.unique = words.unique + 1
+ end
+ collected.total = collected.total + 1
+ category.total = category.total + 1
+ words.total = words.total + 1
+ end
+end
+
+methods[2] = function(head)
+ dumpthem = true
+ return mark_words(head,sweep)
+end
+
+local function dumpusedwords()
+ if dumpthem then
+ collected.threshold = words.threshold
+ dumpname = dumpname or file.addsuffix(tex.jobname,"words")
+ report_words("saving list of used words in %a",dumpname)
+ io.savedata(dumpname,table.serialize(collected,true))
+ -- table.tofile(dumpname,list,true)
+ end
+end
+
+directives.register("languages.words.dump", function(v)
+ dumpname = type(v) == "string" and v ~= "" and v
+end)
+
+luatex.registerstopactions(dumpusedwords)
+
+-- method 3
+
+local function sweep(language,str)
+ return cache[language]
+end
+
+methods[3] = function(head)
+ for n in traverse_nodes(head) do
+ n[a_color] = unsetvalue
+ end
+ return mark_words(head,sweep)
+end
+
+-- for the moment we hook it into the attribute handler
+
+--~ languagehacks = { }
+
+--~ function languagehacks.process(namespace,attribute,head)
+--~ return languages.check(head)
+--~ end
+
+--~ chars.plugins[chars.plugins+1] = {
+--~ name = "language",
+--~ namespace = languagehacks,
+--~ processor = languagehacks.process
+--~ }
+
+-- interface
+
+commands.enablespellchecking = words.enable
+commands.disablespellchecking = words.disable
+commands.loadspellchecklist = words.load
diff --git a/tex/context/base/layo-ini.lua b/tex/context/base/layo-ini.lua
index 56ced2c0b..cc483aa3b 100644
--- a/tex/context/base/layo-ini.lua
+++ b/tex/context/base/layo-ini.lua
@@ -1,61 +1,61 @@
-if not modules then modules = { } end modules ['layo-ini'] = {
- version = 1.001,
- comment = "companion to layo-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- We need to share information between the TeX and Lua end
--- about the typographical model. This happens here.
---
--- Code might move.
-
--- conditionals.layoutisdoublesided
--- conditionals.layoutissinglesided
--- texcount.pagenoshift
--- texcount.realpageno
-
-local texcount = tex.count
-local conditionals = tex.conditionals
-
-layouts = {
- status = { },
-}
-
-local status = layouts.status
-
-function status.leftorrightpagection(left,right)
- if left == nil then
- left, right = false, true
- end
- if not conditionals.layoutisdoublesided then
- return left, right
- elseif conditionals.layoutissinglesided then
- return left, right
- elseif texcount.pagenoshift % 2 == 0 then
- if texcount.realpageno % 2 == 0 then
- return right, left
- else
- return left, right
- end
- else
- if texcount.realpageno % 2 == 0 then
- return left, right
- else
- return right, left
- end
- end
-end
-
-function status.isleftpage()
- if not conditionals.layoutisdoublesided then
- return false
- elseif conditionals.layoutissinglesided then
- return false
- elseif texcount.pagenoshift % 2 == 0 then
- return texcount.realpageno % 2 == 0
- else
- return not texcount.realpageno % 2 == 0
- end
-end
+if not modules then modules = { } end modules ['layo-ini'] = {
+ version = 1.001,
+ comment = "companion to layo-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- We need to share information between the TeX and Lua end
+-- about the typographical model. This happens here.
+--
+-- Code might move.
+
+-- conditionals.layoutisdoublesided
+-- conditionals.layoutissinglesided
+-- texcount.pagenoshift
+-- texcount.realpageno
+
+local texcount = tex.count
+local conditionals = tex.conditionals
+
+layouts = {
+ status = { },
+}
+
+local status = layouts.status
+
+function status.leftorrightpagection(left,right)
+ if left == nil then
+ left, right = false, true
+ end
+ if not conditionals.layoutisdoublesided then
+ return left, right
+ elseif conditionals.layoutissinglesided then
+ return left, right
+ elseif texcount.pagenoshift % 2 == 0 then
+ if texcount.realpageno % 2 == 0 then
+ return right, left
+ else
+ return left, right
+ end
+ else
+ if texcount.realpageno % 2 == 0 then
+ return left, right
+ else
+ return right, left
+ end
+ end
+end
+
+function status.isleftpage()
+ if not conditionals.layoutisdoublesided then
+ return false
+ elseif conditionals.layoutissinglesided then
+ return false
+ elseif texcount.pagenoshift % 2 == 0 then
+ return texcount.realpageno % 2 == 0
+ else
+        return texcount.realpageno % 2 ~= 0
+ end
+end
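+
+-- a hedged illustration of the parity logic above, assuming a doublesided
+-- layout and an even pagenoshift: even real page numbers end up on the left,
+--
+-- layouts.status.isleftpage() -- false on real page 1, true on real page 2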
diff --git a/tex/context/base/lpdf-ano.lua b/tex/context/base/lpdf-ano.lua
index adfea3812..ee9cb851b 100644
--- a/tex/context/base/lpdf-ano.lua
+++ b/tex/context/base/lpdf-ano.lua
@@ -1,753 +1,753 @@
-if not modules then modules = { } end modules ['lpdf-ano'] = {
- version = 1.001,
- comment = "companion to lpdf-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- when using rotation: \disabledirectives[refences.sharelinks] (maybe flag links)
-
--- todo: /AA << WC << ... >> >> : WillClose actions etc
-
-local next, tostring = next, tostring
-local rep, format = string.rep, string.format
-local texcount = tex.count
-local lpegmatch = lpeg.match
-local formatters = string.formatters
-
-local backends, lpdf = backends, lpdf
-
-local trace_references = false trackers.register("references.references", function(v) trace_references = v end)
-local trace_destinations = false trackers.register("references.destinations", function(v) trace_destinations = v end)
-local trace_bookmarks = false trackers.register("references.bookmarks", function(v) trace_bookmarks = v end)
-
-local report_reference = logs.reporter("backend","references")
-local report_destination = logs.reporter("backend","destinations")
-local report_bookmark = logs.reporter("backend","bookmarks")
-
-local variables = interfaces.variables
-local constants = interfaces.constants
-
-local settings_to_array = utilities.parsers.settings_to_array
-
-local nodeinjections = backends.pdf.nodeinjections
-local codeinjections = backends.pdf.codeinjections
-local registrations = backends.pdf.registrations
-
-local javascriptcode = interactions.javascripts.code
-
-local references = structures.references
-local bookmarks = structures.bookmarks
-
-local runners = references.runners
-local specials = references.specials
-local handlers = references.handlers
-local executers = references.executers
-local getinnermethod = references.getinnermethod
-
-local nodepool = nodes.pool
-
-local pdfannotation_node = nodepool.pdfannotation
-local pdfdestination_node = nodepool.pdfdestination
-local latelua_node = nodepool.latelua
-
-local pdfdictionary = lpdf.dictionary
-local pdfarray = lpdf.array
-local pdfreference = lpdf.reference
-local pdfunicode = lpdf.unicode
-local pdfconstant = lpdf.constant
-local pdfflushobject = lpdf.flushobject
-local pdfshareobjectreference = lpdf.shareobjectreference
-local pdfreserveobject = lpdf.reserveobject
-local pdfpagereference = lpdf.pagereference
-local pdfdelayedobject = lpdf.delayedobject
-local pdfregisterannotation = lpdf.registerannotation
-
--- todo: 3dview
-
-local pdf_annot = pdfconstant("Annot")
-local pdf_uri = pdfconstant("URI")
-local pdf_gotor = pdfconstant("GoToR")
-local pdf_goto = pdfconstant("GoTo")
-local pdf_launch = pdfconstant("Launch")
-local pdf_javascript = pdfconstant("JavaScript")
-local pdf_link = pdfconstant("Link")
-local pdf_n = pdfconstant("N")
-local pdf_t = pdfconstant("T")
-local pdf_fit = pdfconstant("Fit")
-local pdf_named = pdfconstant("Named")
-
-local pdf_border = pdfarray { 0, 0, 0 }
-
-local cache = { }
-
-local function pagedestination(n) -- only cache fit
- if n > 0 then
- local pd = cache[n]
- if not pd then
- local a = pdfarray {
- pdfreference(pdfpagereference(n)),
- pdf_fit,
- }
- pd = pdfshareobjectreference(a)
- cache[n] = pd
- end
- return pd
- end
-end
-
-lpdf.pagedestination = pagedestination
-
-local defaultdestination = pdfarray { 0, pdf_fit }
-
-local function link(url,filename,destination,page,actions)
- if filename and filename ~= "" then
- if file.basename(filename) == tex.jobname then
- return false
- else
- filename = file.addsuffix(filename,"pdf")
- end
- end
- if url and url ~= "" then
- if filename and filename ~= "" then
- if destination and destination ~= "" then
- url = file.join(url,filename).."#"..destination
- else
- url = file.join(url,filename)
- end
- end
- return pdfdictionary {
- S = pdf_uri,
- URI = url,
- }
- elseif filename and filename ~= "" then
- -- no page ?
- if destination == "" then
- destination = nil
- end
- if not destination and page then
- destination = pdfarray { page - 1, pdf_fit }
- end
- return pdfdictionary {
- S = pdf_gotor, -- can also be pdf_launch
- F = filename,
- D = destination or defaultdestination, -- D is mandate
- NewWindow = (actions.newwindow and true) or nil,
- }
- elseif destination and destination ~= "" then
- return pdfdictionary { -- can be cached
- S = pdf_goto,
- D = destination,
- }
- else
- local p = tonumber(page)
- if p and p > 0 then
- return pdfdictionary { -- can be cached
- S = pdf_goto,
- D = pdfarray {
- pdfreference(pdfpagereference(p)),
- pdf_fit,
- }
- }
- elseif trace_references then
- report_reference("invalid page reference %a",page)
- end
- end
- return false
-end
-
-lpdf.link = link
-
-function lpdf.launch(program,parameters)
- if program and program ~= "" then
- local d = pdfdictionary {
- S = pdf_launch,
- F = program,
- D = ".",
- }
- if parameters and parameters ~= "" then
- d.P = parameters
- end
- return d
- end
-end
-
-function lpdf.javascript(name,arguments)
- local script = javascriptcode(name,arguments) -- make into object (hash)
- if script then
- return pdfdictionary {
- S = pdf_javascript,
- JS = script,
- }
- end
-end
-
-local function pdfaction(actions)
- local nofactions = #actions
- if nofactions > 0 then
- local a = actions[1]
- local action = runners[a.kind]
- if action then
- action = action(a,actions)
- end
- if action then
- local first = action
- for i=2,nofactions do
- local a = actions[i]
- local what = runners[a.kind]
- if what then
- what = what(a,actions)
- end
- if what then
- action.Next = what
- action = what
- else
- -- error
- return nil
- end
- end
- return first, actions.n
- end
- end
-end
-
-lpdf.action = pdfaction
-
-function codeinjections.prerollreference(actions) -- share can become option
- if actions then
- local main, n = pdfaction(actions)
- if main then
- main = pdfdictionary {
- Subtype = pdf_link,
- Border = pdf_border,
- H = (not actions.highlight and pdf_n) or nil,
- A = pdfshareobjectreference(main),
- F = 4, -- print (mandate in pdf/a)
- }
- return main("A"), n
- end
- end
-end
-
-local function use_normal_annotations()
-
- local function reference(width,height,depth,prerolled) -- keep this one
- if prerolled then
- if trace_references then
- report_reference("width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
- end
- return pdfannotation_node(width,height,depth,prerolled)
- end
- end
-
- local function finishreference()
- end
-
- return reference, finishreference
-
-end
-
--- eventually we can do this for special refs only
-
-local hashed, nofunique, nofused = { }, 0, 0
-
-local f_annot = formatters["<< /Type /Annot %s /Rect [%0.3f %0.3f %0.3f %0.3f] >>"]
-local f_bpnf = formatters["_bpnf_(%s,%s,%s,'%s')"]
-
-local function use_shared_annotations()
-
- local factor = number.dimenfactors.bp
-
- local function finishreference(width,height,depth,prerolled) -- %0.2f looks okay enough (no scaling anyway)
- local h, v = pdf.h, pdf.v
- local llx, lly = h*factor, (v - depth)*factor
- local urx, ury = (h + width)*factor, (v + height)*factor
- local annot = f_annot(prerolled,llx,lly,urx,ury)
- local n = hashed[annot]
- if not n then
- n = pdfdelayedobject(annot)
- hashed[annot] = n
- nofunique = nofunique + 1
- end
- nofused = nofused + 1
- pdfregisterannotation(n)
- end
-
- _bpnf_ = finishreference
-
- local function reference(width,height,depth,prerolled)
- if prerolled then
- if trace_references then
- report_reference("width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
- end
- local luacode = f_bpnf(width,height,depth,prerolled)
- return latelua_node(luacode)
- end
- end
-
- statistics.register("pdf annotations", function()
- if nofused > 0 then
- return format("%s embedded, %s unique",nofused,nofunique)
- else
- return nil
- end
- end)
-
-
- return reference, finishreference
-
-end
-
-local lln = latelua_node() if node.has_field(lln,'string') then
-
- directives.register("refences.sharelinks", function(v)
- if v then
- nodeinjections.reference, codeinjections.finishreference = use_shared_annotations()
- else
- nodeinjections.reference, codeinjections.finishreference = use_normal_annotations()
- end
- end)
-
- nodeinjections.reference, codeinjections.finishreference = use_shared_annotations()
-
-else
-
- nodeinjections.reference, codeinjections.finishreference = use_normal_annotations()
-
-end node.free(lln)
-
--- -- -- --
--- -- -- --
-
-local done = { } -- prevent messages
-
-function nodeinjections.destination(width,height,depth,name,view)
- if not done[name] then
- done[name] = true
- if trace_destinations then
- report_destination("width %p, height %p, depth %p, name %a, view %a",width,height,depth,name,view)
- end
- return pdfdestination_node(width,height,depth,name,view) -- can be begin/end node
- end
-end
-
--- runners and specials
-
-runners["inner"] = function(var,actions)
- if getinnermethod() == "names" then
- local vi = var.i
- if vi then
- local vir = vi.references
- if vir then
- local internal = vir.internal
- if internal then
- var.inner = "aut:" .. internal
- end
- end
- end
- else
- var.inner = nil
- end
- local prefix = var.p
- local inner = var.inner
- if inner and prefix and prefix ~= "" then
- inner = prefix .. ":" .. inner -- might not always be ok
- end
- return link(nil,nil,inner,var.r,actions)
-end
-
-runners["inner with arguments"] = function(var,actions)
- report_reference("todo: inner with arguments")
- return false
-end
-
-runners["outer"] = function(var,actions)
- local file, url = references.checkedfileorurl(var.outer,var.outer)
- return link(url,file,var.arguments,nil,actions)
-end
-
-runners["outer with inner"] = function(var,actions)
- local file = references.checkedfile(var.outer) -- was var.f but fails ... why
- return link(nil,file,var.inner,var.r,actions)
-end
-
-runners["special outer with operation"] = function(var,actions)
- local handler = specials[var.special]
- return handler and handler(var,actions)
-end
-
-runners["special outer"] = function(var,actions)
- report_reference("todo: special outer")
- return false
-end
-
-runners["special"] = function(var,actions)
- local handler = specials[var.special]
- return handler and handler(var,actions)
-end
-
-runners["outer with inner with arguments"] = function(var,actions)
- report_reference("todo: outer with inner with arguments")
- return false
-end
-
-runners["outer with special and operation and arguments"] = function(var,actions)
- report_reference("todo: outer with special and operation and arguments")
- return false
-end
-
-runners["outer with special"] = function(var,actions)
- report_reference("todo: outer with special")
- return false
-end
-
-runners["outer with special and operation"] = function(var,actions)
- report_reference("todo: outer with special and operation")
- return false
-end
-
-runners["special operation"] = runners["special"]
-runners["special operation with arguments"] = runners["special"]
-
-function specials.internal(var,actions) -- better resolve in strc-ref
- local i = tonumber(var.operation)
- local v = i and references.internals[i]
- if not v then
- -- error
- report_reference("no internal reference %a",i)
- elseif getinnermethod() == "names" then
- -- named
- return link(nil,nil,"aut:"..i,v.references.realpage,actions)
- else
- -- page
- return link(nil,nil,nil,v.references.realpage,actions)
- end
-end
-
--- realpage already resolved
-
-specials.i = specials.internal
-
-local pages = references.pages
-
-function specials.page(var,actions)
- local file = var.f
- if file then
- file = references.checkedfile(file)
- return link(nil,file,nil,var.operation,actions)
- else
- local p = var.r
- if not p then -- todo: call special from reference code
- p = pages[var.operation]
- if type(p) == "function" then -- double
- p = p()
- else
- p = references.realpageofpage(tonumber(p))
- end
- -- if p then
- -- var.r = p
- -- end
- end
- return link(nil,nil,nil,p or var.operation,actions)
- end
-end
-
-function specials.realpage(var,actions)
- local file = var.f
- if file then
- file = references.checkedfile(file)
- return link(nil,file,nil,var.operation,actions)
- else
- return link(nil,nil,nil,var.operation,actions)
- end
-end
-
-function specials.userpage(var,actions)
- local file = var.f
- if file then
- file = references.checkedfile(file)
- return link(nil,file,nil,var.operation,actions)
- else
- local p = var.r
- if not p then -- todo: call special from reference code
- p = var.operation
- if p then -- no function and special check here. only numbers
- p = references.realpageofpage(tonumber(p))
- end
- -- if p then
- -- var.r = p
- -- end
- end
- return link(nil,nil,nil,p or var.operation,actions)
- end
-end
-
-function specials.deltapage(var,actions)
- local p = tonumber(var.operation)
- if p then
- p = references.checkedrealpage(p + texcount.realpageno)
- return link(nil,nil,nil,p,actions)
- end
-end
-
--- sections
-
--- function specials.section(var,actions)
--- local sectionname = var.operation
--- local destination = var.arguments
--- local internal = structures.sections.internalreference(sectionname,destination)
--- if internal then
--- var.special = "internal"
--- var.operation = internal
--- var.arguments = nil
--- specials.internal(var,actions)
--- end
--- end
-
-specials.section = specials.internal -- specials.section just need to have a value as it's checked
-
--- todo, do this in references namespace ordered instead (this is an experiment)
-
-local splitter = lpeg.splitat(":")
-
-function specials.order(var,actions) -- references.specials !
- local operation = var.operation
- if operation then
- local kind, name, n = lpegmatch(splitter,operation)
- local order = structures.lists.ordered[kind]
- order = order and order[name]
-        local v = order and order[tonumber(n)]
- local r = v and v.references.realpage
- if r then
- var.operation = r -- brrr, but test anyway
- return specials.page(var,actions)
- end
- end
-end
-
-function specials.url(var,actions)
- local url = references.checkedurl(var.operation)
- return link(url,nil,var.arguments,nil,actions)
-end
-
-function specials.file(var,actions)
- local file = references.checkedfile(var.operation)
- return link(nil,file,var.arguments,nil,actions)
-end
-
-function specials.fileorurl(var,actions)
- local file, url = references.checkedfileorurl(var.operation,var.operation)
- return link(url,file,var.arguments,nil,actions)
-end
-
-function specials.program(var,content)
- local program = references.checkedprogram(var.operation)
- return lpdf.launch(program,var.arguments)
-end
-
-function specials.javascript(var)
- return lpdf.javascript(var.operation,var.arguments)
-end
-
-specials.JS = specials.javascript
-
-executers.importform = pdfdictionary { S = pdf_named, N = pdfconstant("AcroForm:ImportFDF") }
-executers.exportform = pdfdictionary { S = pdf_named, N = pdfconstant("AcroForm:ExportFDF") }
-executers.first = pdfdictionary { S = pdf_named, N = pdfconstant("FirstPage") }
-executers.previous = pdfdictionary { S = pdf_named, N = pdfconstant("PrevPage") }
-executers.next = pdfdictionary { S = pdf_named, N = pdfconstant("NextPage") }
-executers.last = pdfdictionary { S = pdf_named, N = pdfconstant("LastPage") }
-executers.backward = pdfdictionary { S = pdf_named, N = pdfconstant("GoBack") }
-executers.forward = pdfdictionary { S = pdf_named, N = pdfconstant("GoForward") }
-executers.print = pdfdictionary { S = pdf_named, N = pdfconstant("Print") }
-executers.exit = pdfdictionary { S = pdf_named, N = pdfconstant("Quit") }
-executers.close = pdfdictionary { S = pdf_named, N = pdfconstant("Close") }
-executers.save = pdfdictionary { S = pdf_named, N = pdfconstant("Save") }
-executers.savenamed = pdfdictionary { S = pdf_named, N = pdfconstant("SaveAs") }
-executers.opennamed = pdfdictionary { S = pdf_named, N = pdfconstant("Open") }
-executers.help = pdfdictionary { S = pdf_named, N = pdfconstant("HelpUserGuide") }
-executers.toggle = pdfdictionary { S = pdf_named, N = pdfconstant("FullScreen") }
-executers.search = pdfdictionary { S = pdf_named, N = pdfconstant("Find") }
-executers.searchagain = pdfdictionary { S = pdf_named, N = pdfconstant("FindAgain") }
-executers.gotopage = pdfdictionary { S = pdf_named, N = pdfconstant("GoToPage") }
-executers.query = pdfdictionary { S = pdf_named, N = pdfconstant("AcroSrch:Query") }
-executers.queryagain = pdfdictionary { S = pdf_named, N = pdfconstant("AcroSrch:NextHit") }
-executers.fitwidth = pdfdictionary { S = pdf_named, N = pdfconstant("FitWidth") }
-executers.fitheight = pdfdictionary { S = pdf_named, N = pdfconstant("FitHeight") }
-
-local function fieldset(arguments)
- -- [\dogetfieldset{#1}]
- return nil
-end
-
-function executers.resetform(arguments)
- arguments = (type(arguments) == "table" and arguments) or settings_to_array(arguments)
- return pdfdictionary {
- S = pdfconstant("ResetForm"),
- Field = fieldset(arguments[1])
- }
-end
-
-local formmethod = "post" -- "get" "post"
-local formformat = "xml" -- "xml" "html" "fdf"
-
--- bit 3 = html bit 6 = xml bit 4 = get
-
-local flags = {
- get = {
- html = 12, fdf = 8, xml = 40,
- },
- post = {
- html = 4, fdf = 0, xml = 32,
- }
-}
-
-function executers.submitform(arguments)
- arguments = (type(arguments) == "table" and arguments) or settings_to_array(arguments)
- local flag = flags[formmethod] or flags.post
- flag = (flag and (flag[formformat] or flag.xml)) or 32 -- default: post, xml
- return pdfdictionary {
- S = pdfconstant("SubmitForm"),
- F = arguments[1],
- Field = fieldset(arguments[2]),
- Flags = flag,
- -- \PDFsubmitfiller
- }
-end
-
-local pdf_hide = pdfconstant("Hide")
-
-function executers.hide(arguments)
- return pdfdictionary {
- S = pdf_hide,
- H = true,
- T = arguments,
- }
-end
-
-function executers.show(arguments)
- return pdfdictionary {
- S = pdf_hide,
- H = false,
- T = arguments,
- }
-end
-
-local pdf_movie = pdfconstant("Movie")
-local pdf_start = pdfconstant("Start")
-local pdf_stop = pdfconstant("Stop")
-local pdf_resume = pdfconstant("Resume")
-local pdf_pause = pdfconstant("Pause")
-
-local function movie_or_sound(operation,arguments)
- arguments = (type(arguments) == "table" and arguments) or settings_to_array(arguments)
- return pdfdictionary {
- S = pdf_movie,
- T = format("movie %s",arguments[1] or "noname"),
- Operation = operation,
- }
-end
-
-function executers.startmovie (arguments) return movie_or_sound(pdf_start ,arguments) end
-function executers.stopmovie (arguments) return movie_or_sound(pdf_stop ,arguments) end
-function executers.resumemovie(arguments) return movie_or_sound(pdf_resume,arguments) end
-function executers.pausemovie (arguments) return movie_or_sound(pdf_pause ,arguments) end
-
-function executers.startsound (arguments) return movie_or_sound(pdf_start ,arguments) end
-function executers.stopsound (arguments) return movie_or_sound(pdf_stop ,arguments) end
-function executers.resumesound(arguments) return movie_or_sound(pdf_resume,arguments) end
-function executers.pausesound (arguments) return movie_or_sound(pdf_pause ,arguments) end
-
-function specials.action(var)
- local operation = var.operation
-    if operation and operation ~= "" then
- local e = executers[operation]
- if type(e) == "table" then
- return e
- elseif type(e) == "function" then
- return e(var.arguments)
- end
- end
-end
-
---~ entry.A = pdfdictionary {
---~ S = pdf_goto,
---~ D = ....
---~ }
-
-local function build(levels,start,parent,method)
- local startlevel = levels[start][1]
- local i, n = start, 0
- local child, entry, m, prev, first, last, f, l
- while i and i <= #levels do
- local li = levels[i]
- local level, title, reference, open = li[1], li[2], li[3], li[4]
- if level < startlevel then
- pdfflushobject(child,entry)
- return i, n, first, last
- elseif level == startlevel then
- if trace_bookmarks then
- report_bookmark("%3i %w%s %s",reference.realpage,(level-1)*2,(open and "+") or "-",title)
- end
- local prev = child
- child = pdfreserveobject()
- if entry then
- entry.Next = child and pdfreference(child)
- pdfflushobject(prev,entry)
- end
- entry = pdfdictionary {
- Title = pdfunicode(title),
- Parent = parent,
- Prev = prev and pdfreference(prev),
- }
- if method == "internal" then
- entry.Dest = "aut:" .. reference.internal
- else -- if method == "page" then
- entry.Dest = pagedestination(reference.realpage)
- end
- if not first then first, last = child, child end
- prev = child
- last = prev
- n = n + 1
- i = i + 1
- elseif i < #levels and level > startlevel then
- i, m, f, l = build(levels,i,pdfreference(child),method)
- entry.Count = (open and m) or -m
- if m > 0 then
- entry.First, entry.Last = pdfreference(f), pdfreference(l)
- end
- else
- -- missing intermediate level but ok
- i, m, f, l = build(levels,i,pdfreference(child),method)
- entry.Count = (open and m) or -m
- if m > 0 then
- entry.First, entry.Last = pdfreference(f), pdfreference(l)
- end
- pdfflushobject(child,entry)
- return i, n, first, last
- end
- end
- pdfflushobject(child,entry)
- return nil, n, first, last
-end
-
-function codeinjections.addbookmarks(levels,method)
- if #levels > 0 then
- structures.bookmarks.flatten(levels) -- dirty trick for lack of structure
- local parent = pdfreserveobject()
- local _, m, first, last = build(levels,1,pdfreference(parent),method or "internal")
- local dict = pdfdictionary {
- Type = pdfconstant("Outlines"),
- First = pdfreference(first),
- Last = pdfreference(last),
- Count = m,
- }
- pdfflushobject(parent,dict)
- lpdf.addtocatalog("Outlines",lpdf.reference(parent))
- end
-end
-
--- this could also be hooked into the frontend finalizer
-
-lpdf.registerdocumentfinalizer(function() bookmarks.place() end,1,"bookmarks")
+if not modules then modules = { } end modules ['lpdf-ano'] = {
+ version = 1.001,
+ comment = "companion to lpdf-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- when using rotation: \disabledirectives[refences.sharelinks] (maybe flag links)
+
+-- todo: /AA << WC << ... >> >> : WillClose actions etc
+
+local next, tostring = next, tostring
+local rep, format = string.rep, string.format
+local texcount = tex.count
+local lpegmatch = lpeg.match
+local formatters = string.formatters
+
+local backends, lpdf = backends, lpdf
+
+local trace_references = false trackers.register("references.references", function(v) trace_references = v end)
+local trace_destinations = false trackers.register("references.destinations", function(v) trace_destinations = v end)
+local trace_bookmarks = false trackers.register("references.bookmarks", function(v) trace_bookmarks = v end)
+
+local report_reference = logs.reporter("backend","references")
+local report_destination = logs.reporter("backend","destinations")
+local report_bookmark = logs.reporter("backend","bookmarks")
+
+local variables = interfaces.variables
+local constants = interfaces.constants
+
+local settings_to_array = utilities.parsers.settings_to_array
+
+local nodeinjections = backends.pdf.nodeinjections
+local codeinjections = backends.pdf.codeinjections
+local registrations = backends.pdf.registrations
+
+local javascriptcode = interactions.javascripts.code
+
+local references = structures.references
+local bookmarks = structures.bookmarks
+
+local runners = references.runners
+local specials = references.specials
+local handlers = references.handlers
+local executers = references.executers
+local getinnermethod = references.getinnermethod
+
+local nodepool = nodes.pool
+
+local pdfannotation_node = nodepool.pdfannotation
+local pdfdestination_node = nodepool.pdfdestination
+local latelua_node = nodepool.latelua
+
+local pdfdictionary = lpdf.dictionary
+local pdfarray = lpdf.array
+local pdfreference = lpdf.reference
+local pdfunicode = lpdf.unicode
+local pdfconstant = lpdf.constant
+local pdfflushobject = lpdf.flushobject
+local pdfshareobjectreference = lpdf.shareobjectreference
+local pdfreserveobject = lpdf.reserveobject
+local pdfpagereference = lpdf.pagereference
+local pdfdelayedobject = lpdf.delayedobject
+local pdfregisterannotation = lpdf.registerannotation
+
+-- todo: 3dview
+
+local pdf_annot = pdfconstant("Annot")
+local pdf_uri = pdfconstant("URI")
+local pdf_gotor = pdfconstant("GoToR")
+local pdf_goto = pdfconstant("GoTo")
+local pdf_launch = pdfconstant("Launch")
+local pdf_javascript = pdfconstant("JavaScript")
+local pdf_link = pdfconstant("Link")
+local pdf_n = pdfconstant("N")
+local pdf_t = pdfconstant("T")
+local pdf_fit = pdfconstant("Fit")
+local pdf_named = pdfconstant("Named")
+
+local pdf_border = pdfarray { 0, 0, 0 }
+
+local cache = { }
+
+local function pagedestination(n) -- only cache fit
+ if n > 0 then
+ local pd = cache[n]
+ if not pd then
+ local a = pdfarray {
+ pdfreference(pdfpagereference(n)),
+ pdf_fit,
+ }
+ pd = pdfshareobjectreference(a)
+ cache[n] = pd
+ end
+ return pd
+ end
+end
+
+lpdf.pagedestination = pagedestination
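+
+-- A hedged illustration of what pagedestination gives back: for page n it
+-- builds a /Fit destination array, turns it into a shared object reference and
+-- caches it, so repeated links to the same page cost one object only. The
+-- object numbers are of course hypothetical.
+--
+-- local d = pagedestination(3) -- builds [ <ref to page 3> /Fit ] and shares it
+-- local e = pagedestination(3) -- returns the cached reference, so d == e
+-- -- serialized roughly as: 12 0 obj [ 5 0 R /Fit ] endobj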
+
+local defaultdestination = pdfarray { 0, pdf_fit }
+
+local function link(url,filename,destination,page,actions)
+ if filename and filename ~= "" then
+ if file.basename(filename) == tex.jobname then
+ return false
+ else
+ filename = file.addsuffix(filename,"pdf")
+ end
+ end
+ if url and url ~= "" then
+ if filename and filename ~= "" then
+ if destination and destination ~= "" then
+ url = file.join(url,filename).."#"..destination
+ else
+ url = file.join(url,filename)
+ end
+ end
+ return pdfdictionary {
+ S = pdf_uri,
+ URI = url,
+ }
+ elseif filename and filename ~= "" then
+ -- no page ?
+ if destination == "" then
+ destination = nil
+ end
+ if not destination and page then
+ destination = pdfarray { page - 1, pdf_fit }
+ end
+ return pdfdictionary {
+ S = pdf_gotor, -- can also be pdf_launch
+ F = filename,
+            D = destination or defaultdestination, -- D is mandatory
+ NewWindow = (actions.newwindow and true) or nil,
+ }
+ elseif destination and destination ~= "" then
+ return pdfdictionary { -- can be cached
+ S = pdf_goto,
+ D = destination,
+ }
+ else
+ local p = tonumber(page)
+ if p and p > 0 then
+ return pdfdictionary { -- can be cached
+ S = pdf_goto,
+ D = pdfarray {
+ pdfreference(pdfpagereference(p)),
+ pdf_fit,
+ }
+ }
+ elseif trace_references then
+ report_reference("invalid page reference %a",page)
+ end
+ end
+ return false
+end
+
+lpdf.link = link
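+
+-- A hedged sketch of the four cases that link() distinguishes; the names and
+-- numbers are made up, the actions table is assumed to carry the optional
+-- newwindow flag used above, and the filename is assumed to differ from the
+-- current jobname (otherwise false is returned).
+--
+-- link("http://www.pragma-ade.com",nil,nil,nil,{ }) -- /S /URI   /URI (url)
+-- link(nil,"manual",nil,2,{ newwindow = true })     -- /S /GoToR /F (manual.pdf) /D [ 1 /Fit ] /NewWindow true
+-- link(nil,nil,"aut:123",nil,{ })                   -- /S /GoTo  /D (aut:123)
+-- link(nil,nil,nil,4,{ })                           -- /S /GoTo  /D [ <ref to page 4> /Fit ]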
+
+function lpdf.launch(program,parameters)
+ if program and program ~= "" then
+ local d = pdfdictionary {
+ S = pdf_launch,
+ F = program,
+ D = ".",
+ }
+ if parameters and parameters ~= "" then
+ d.P = parameters
+ end
+ return d
+ end
+end
+
+function lpdf.javascript(name,arguments)
+ local script = javascriptcode(name,arguments) -- make into object (hash)
+ if script then
+ return pdfdictionary {
+ S = pdf_javascript,
+ JS = script,
+ }
+ end
+end
+
+local function pdfaction(actions)
+ local nofactions = #actions
+ if nofactions > 0 then
+ local a = actions[1]
+ local action = runners[a.kind]
+ if action then
+ action = action(a,actions)
+ end
+ if action then
+ local first = action
+ for i=2,nofactions do
+ local a = actions[i]
+ local what = runners[a.kind]
+ if what then
+ what = what(a,actions)
+ end
+ if what then
+ action.Next = what
+ action = what
+ else
+ -- error
+ return nil
+ end
+ end
+ return first, actions.n
+ end
+ end
+end
+
+lpdf.action = pdfaction
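+
+-- pdfaction resolves each entry of the actions list with its runner and chains
+-- the results with /Next, the first one becoming the head that ends up in the
+-- annotation. A hedged sketch with a hypothetical list roughly as strc-ref
+-- would hand it over ("somescript" stands for a previously registered
+-- javascript snippet):
+--
+-- local actions = { n = 2,
+--     { kind = "special", special = "url",        operation = "http://www.pragma-ade.com" },
+--     { kind = "special", special = "javascript", operation = "somescript" },
+-- }
+-- local first, n = pdfaction(actions)
+-- -- first --> << /S /URI /URI (...) /Next << /S /JavaScript /JS (...) >> >>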
+
+function codeinjections.prerollreference(actions) -- share can become option
+ if actions then
+ local main, n = pdfaction(actions)
+ if main then
+ main = pdfdictionary {
+ Subtype = pdf_link,
+ Border = pdf_border,
+ H = (not actions.highlight and pdf_n) or nil,
+ A = pdfshareobjectreference(main),
+                F = 4, -- print (mandatory in pdf/a)
+ }
+ return main("A"), n
+ end
+ end
+end
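+
+-- The prerolled value returned above is the serialized key/value part of the
+-- link dictionary; f_annot below (and the annotation whatsit) wrap it in the
+-- actual << /Type /Annot ... /Rect ... >> object. Roughly it looks like the
+-- following, with a hypothetical object number for the shared action:
+--
+-- /Subtype /Link /Border [ 0 0 0 ] /H /N /A 12 0 R /F 4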
+
+local function use_normal_annotations()
+
+ local function reference(width,height,depth,prerolled) -- keep this one
+ if prerolled then
+ if trace_references then
+ report_reference("width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
+ end
+ return pdfannotation_node(width,height,depth,prerolled)
+ end
+ end
+
+ local function finishreference()
+ end
+
+ return reference, finishreference
+
+end
+
+-- eventually we can do this for special refs only
+
+local hashed, nofunique, nofused = { }, 0, 0
+
+local f_annot = formatters["<< /Type /Annot %s /Rect [%0.3f %0.3f %0.3f %0.3f] >>"]
+local f_bpnf = formatters["_bpnf_(%s,%s,%s,'%s')"]
+
+local function use_shared_annotations()
+
+ local factor = number.dimenfactors.bp
+
+ local function finishreference(width,height,depth,prerolled) -- %0.2f looks okay enough (no scaling anyway)
+ local h, v = pdf.h, pdf.v
+ local llx, lly = h*factor, (v - depth)*factor
+ local urx, ury = (h + width)*factor, (v + height)*factor
+ local annot = f_annot(prerolled,llx,lly,urx,ury)
+ local n = hashed[annot]
+ if not n then
+ n = pdfdelayedobject(annot)
+ hashed[annot] = n
+ nofunique = nofunique + 1
+ end
+ nofused = nofused + 1
+ pdfregisterannotation(n)
+ end
+
+ _bpnf_ = finishreference
+
+ local function reference(width,height,depth,prerolled)
+ if prerolled then
+ if trace_references then
+ report_reference("width %p, height %p, depth %p, prerolled %a",width,height,depth,prerolled)
+ end
+ local luacode = f_bpnf(width,height,depth,prerolled)
+ return latelua_node(luacode)
+ end
+ end
+
+ statistics.register("pdf annotations", function()
+ if nofused > 0 then
+ return format("%s embedded, %s unique",nofused,nofunique)
+ else
+ return nil
+ end
+ end)
+
+
+ return reference, finishreference
+
+end
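+
+-- In the shared variant the rectangle is computed at shipout time via a late
+-- lua call (hence the global _bpnf_) and the complete annotation string is
+-- hashed, so links with the same action and the same geometry end up as one
+-- indirect object that is registered several times: ten identical links give
+-- nofused = 10 and nofunique = 1. A hedged sketch of a generated call, with
+-- made-up dimensions in scaled points and a made-up object number:
+--
+-- _bpnf_(236102,655360,0,'/Subtype /Link /Border [ 0 0 0 ] /A 12 0 R /F 4')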
+
+local lln = latelua_node() if node.has_field(lln,'string') then
+
+ directives.register("refences.sharelinks", function(v)
+ if v then
+ nodeinjections.reference, codeinjections.finishreference = use_shared_annotations()
+ else
+ nodeinjections.reference, codeinjections.finishreference = use_normal_annotations()
+ end
+ end)
+
+ nodeinjections.reference, codeinjections.finishreference = use_shared_annotations()
+
+else
+
+ nodeinjections.reference, codeinjections.finishreference = use_normal_annotations()
+
+end node.free(lln)
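+
+-- The block above is a small capability probe: a latelua node is created and,
+-- when it has a 'string' field, the shared variant is usable and becomes the
+-- default, switchable at run time with the directive registered above (note
+-- that it is registered under the name "refences.sharelinks"):
+--
+-- \enabledirectives [refences.sharelinks]
+-- \disabledirectives[refences.sharelinks]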
+
+-- -- -- --
+-- -- -- --
+
+local done = { } -- prevent messages
+
+function nodeinjections.destination(width,height,depth,name,view)
+ if not done[name] then
+ done[name] = true
+ if trace_destinations then
+ report_destination("width %p, height %p, depth %p, name %a, view %a",width,height,depth,name,view)
+ end
+ return pdfdestination_node(width,height,depth,name,view) -- can be begin/end node
+ end
+end
+
+-- runners and specials
+
+runners["inner"] = function(var,actions)
+ if getinnermethod() == "names" then
+ local vi = var.i
+ if vi then
+ local vir = vi.references
+ if vir then
+ local internal = vir.internal
+ if internal then
+ var.inner = "aut:" .. internal
+ end
+ end
+ end
+ else
+ var.inner = nil
+ end
+ local prefix = var.p
+ local inner = var.inner
+ if inner and prefix and prefix ~= "" then
+ inner = prefix .. ":" .. inner -- might not always be ok
+ end
+ return link(nil,nil,inner,var.r,actions)
+end
+
+runners["inner with arguments"] = function(var,actions)
+ report_reference("todo: inner with arguments")
+ return false
+end
+
+runners["outer"] = function(var,actions)
+ local file, url = references.checkedfileorurl(var.outer,var.outer)
+ return link(url,file,var.arguments,nil,actions)
+end
+
+runners["outer with inner"] = function(var,actions)
+ local file = references.checkedfile(var.outer) -- was var.f but fails ... why
+ return link(nil,file,var.inner,var.r,actions)
+end
+
+runners["special outer with operation"] = function(var,actions)
+ local handler = specials[var.special]
+ return handler and handler(var,actions)
+end
+
+runners["special outer"] = function(var,actions)
+ report_reference("todo: special outer")
+ return false
+end
+
+runners["special"] = function(var,actions)
+ local handler = specials[var.special]
+ return handler and handler(var,actions)
+end
+
+runners["outer with inner with arguments"] = function(var,actions)
+ report_reference("todo: outer with inner with arguments")
+ return false
+end
+
+runners["outer with special and operation and arguments"] = function(var,actions)
+ report_reference("todo: outer with special and operation and arguments")
+ return false
+end
+
+runners["outer with special"] = function(var,actions)
+ report_reference("todo: outer with special")
+ return false
+end
+
+runners["outer with special and operation"] = function(var,actions)
+ report_reference("todo: outer with special and operation")
+ return false
+end
+
+runners["special operation"] = runners["special"]
+runners["special operation with arguments"] = runners["special"]
+
+function specials.internal(var,actions) -- better resolve in strc-ref
+ local i = tonumber(var.operation)
+ local v = i and references.internals[i]
+ if not v then
+ -- error
+ report_reference("no internal reference %a",i)
+ elseif getinnermethod() == "names" then
+ -- named
+ return link(nil,nil,"aut:"..i,v.references.realpage,actions)
+ else
+ -- page
+ return link(nil,nil,nil,v.references.realpage,actions)
+ end
+end
+
+-- realpage already resolved
+
+specials.i = specials.internal
+
+local pages = references.pages
+
+function specials.page(var,actions)
+ local file = var.f
+ if file then
+ file = references.checkedfile(file)
+ return link(nil,file,nil,var.operation,actions)
+ else
+ local p = var.r
+ if not p then -- todo: call special from reference code
+ p = pages[var.operation]
+ if type(p) == "function" then -- double
+ p = p()
+ else
+ p = references.realpageofpage(tonumber(p))
+ end
+ -- if p then
+ -- var.r = p
+ -- end
+ end
+ return link(nil,nil,nil,p or var.operation,actions)
+ end
+end
+
+function specials.realpage(var,actions)
+ local file = var.f
+ if file then
+ file = references.checkedfile(file)
+ return link(nil,file,nil,var.operation,actions)
+ else
+ return link(nil,nil,nil,var.operation,actions)
+ end
+end
+
+function specials.userpage(var,actions)
+ local file = var.f
+ if file then
+ file = references.checkedfile(file)
+ return link(nil,file,nil,var.operation,actions)
+ else
+ local p = var.r
+ if not p then -- todo: call special from reference code
+ p = var.operation
+ if p then -- no function and special check here. only numbers
+ p = references.realpageofpage(tonumber(p))
+ end
+ -- if p then
+ -- var.r = p
+ -- end
+ end
+ return link(nil,nil,nil,p or var.operation,actions)
+ end
+end
+
+function specials.deltapage(var,actions)
+ local p = tonumber(var.operation)
+ if p then
+ p = references.checkedrealpage(p + texcount.realpageno)
+ return link(nil,nil,nil,p,actions)
+ end
+end
+
+-- sections
+
+-- function specials.section(var,actions)
+-- local sectionname = var.operation
+-- local destination = var.arguments
+-- local internal = structures.sections.internalreference(sectionname,destination)
+-- if internal then
+-- var.special = "internal"
+-- var.operation = internal
+-- var.arguments = nil
+-- specials.internal(var,actions)
+-- end
+-- end
+
+specials.section = specials.internal -- specials.section just need to have a value as it's checked
+
+-- todo, do this in references namespace ordered instead (this is an experiment)
+
+local splitter = lpeg.splitat(":")
+
+function specials.order(var,actions) -- references.specials !
+ local operation = var.operation
+ if operation then
+ local kind, name, n = lpegmatch(splitter,operation)
+ local order = structures.lists.ordered[kind]
+ order = order and order[name]
+        local v = order and order[tonumber(n)]
+ local r = v and v.references.realpage
+ if r then
+ var.operation = r -- brrr, but test anyway
+ return specials.page(var,actions)
+ end
+ end
+end
+
+function specials.url(var,actions)
+ local url = references.checkedurl(var.operation)
+ return link(url,nil,var.arguments,nil,actions)
+end
+
+function specials.file(var,actions)
+ local file = references.checkedfile(var.operation)
+ return link(nil,file,var.arguments,nil,actions)
+end
+
+function specials.fileorurl(var,actions)
+ local file, url = references.checkedfileorurl(var.operation,var.operation)
+ return link(url,file,var.arguments,nil,actions)
+end
+
+function specials.program(var,content)
+ local program = references.checkedprogram(var.operation)
+ return lpdf.launch(program,var.arguments)
+end
+
+function specials.javascript(var)
+ return lpdf.javascript(var.operation,var.arguments)
+end
+
+specials.JS = specials.javascript
+
+executers.importform = pdfdictionary { S = pdf_named, N = pdfconstant("AcroForm:ImportFDF") }
+executers.exportform = pdfdictionary { S = pdf_named, N = pdfconstant("AcroForm:ExportFDF") }
+executers.first = pdfdictionary { S = pdf_named, N = pdfconstant("FirstPage") }
+executers.previous = pdfdictionary { S = pdf_named, N = pdfconstant("PrevPage") }
+executers.next = pdfdictionary { S = pdf_named, N = pdfconstant("NextPage") }
+executers.last = pdfdictionary { S = pdf_named, N = pdfconstant("LastPage") }
+executers.backward = pdfdictionary { S = pdf_named, N = pdfconstant("GoBack") }
+executers.forward = pdfdictionary { S = pdf_named, N = pdfconstant("GoForward") }
+executers.print = pdfdictionary { S = pdf_named, N = pdfconstant("Print") }
+executers.exit = pdfdictionary { S = pdf_named, N = pdfconstant("Quit") }
+executers.close = pdfdictionary { S = pdf_named, N = pdfconstant("Close") }
+executers.save = pdfdictionary { S = pdf_named, N = pdfconstant("Save") }
+executers.savenamed = pdfdictionary { S = pdf_named, N = pdfconstant("SaveAs") }
+executers.opennamed = pdfdictionary { S = pdf_named, N = pdfconstant("Open") }
+executers.help = pdfdictionary { S = pdf_named, N = pdfconstant("HelpUserGuide") }
+executers.toggle = pdfdictionary { S = pdf_named, N = pdfconstant("FullScreen") }
+executers.search = pdfdictionary { S = pdf_named, N = pdfconstant("Find") }
+executers.searchagain = pdfdictionary { S = pdf_named, N = pdfconstant("FindAgain") }
+executers.gotopage = pdfdictionary { S = pdf_named, N = pdfconstant("GoToPage") }
+executers.query = pdfdictionary { S = pdf_named, N = pdfconstant("AcroSrch:Query") }
+executers.queryagain = pdfdictionary { S = pdf_named, N = pdfconstant("AcroSrch:NextHit") }
+executers.fitwidth = pdfdictionary { S = pdf_named, N = pdfconstant("FitWidth") }
+executers.fitheight = pdfdictionary { S = pdf_named, N = pdfconstant("FitHeight") }
+
+local function fieldset(arguments)
+ -- [\dogetfieldset{#1}]
+ return nil
+end
+
+function executers.resetform(arguments)
+ arguments = (type(arguments) == "table" and arguments) or settings_to_array(arguments)
+ return pdfdictionary {
+ S = pdfconstant("ResetForm"),
+ Field = fieldset(arguments[1])
+ }
+end
+
+local formmethod = "post" -- "get" "post"
+local formformat = "xml" -- "xml" "html" "fdf"
+
+-- bit 3 = html bit 6 = xml bit 4 = get
+
+local flags = {
+ get = {
+ html = 12, fdf = 8, xml = 40,
+ },
+ post = {
+ html = 4, fdf = 0, xml = 32,
+ }
+}
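+
+-- These values are just the sums of the SubmitForm flag bits mentioned in the
+-- comment above: bit 3 (value 4) selects html, bit 4 (value 8) selects the get
+-- method and bit 6 (value 32) selects xml, so for instance:
+--
+--   get  + html : 4 + 8  = 12
+--   get  + xml  : 32 + 8 = 40
+--   post + html : 4      =  4
+--   post + xml  : 32     = 32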
+
+function executers.submitform(arguments)
+ arguments = (type(arguments) == "table" and arguments) or settings_to_array(arguments)
+ local flag = flags[formmethod] or flags.post
+ flag = (flag and (flag[formformat] or flag.xml)) or 32 -- default: post, xml
+ return pdfdictionary {
+ S = pdfconstant("SubmitForm"),
+ F = arguments[1],
+ Field = fieldset(arguments[2]),
+ Flags = flag,
+ -- \PDFsubmitfiller
+ }
+end
+
+local pdf_hide = pdfconstant("Hide")
+
+function executers.hide(arguments)
+ return pdfdictionary {
+ S = pdf_hide,
+ H = true,
+ T = arguments,
+ }
+end
+
+function executers.show(arguments)
+ return pdfdictionary {
+ S = pdf_hide,
+ H = false,
+ T = arguments,
+ }
+end
+
+local pdf_movie = pdfconstant("Movie")
+local pdf_start = pdfconstant("Start")
+local pdf_stop = pdfconstant("Stop")
+local pdf_resume = pdfconstant("Resume")
+local pdf_pause = pdfconstant("Pause")
+
+local function movie_or_sound(operation,arguments)
+ arguments = (type(arguments) == "table" and arguments) or settings_to_array(arguments)
+ return pdfdictionary {
+ S = pdf_movie,
+ T = format("movie %s",arguments[1] or "noname"),
+ Operation = operation,
+ }
+end
+
+function executers.startmovie (arguments) return movie_or_sound(pdf_start ,arguments) end
+function executers.stopmovie (arguments) return movie_or_sound(pdf_stop ,arguments) end
+function executers.resumemovie(arguments) return movie_or_sound(pdf_resume,arguments) end
+function executers.pausemovie (arguments) return movie_or_sound(pdf_pause ,arguments) end
+
+function executers.startsound (arguments) return movie_or_sound(pdf_start ,arguments) end
+function executers.stopsound (arguments) return movie_or_sound(pdf_stop ,arguments) end
+function executers.resumesound(arguments) return movie_or_sound(pdf_resume,arguments) end
+function executers.pausesound (arguments) return movie_or_sound(pdf_pause ,arguments) end
+
+function specials.action(var)
+ local operation = var.operation
+    if operation and operation ~= "" then
+ local e = executers[operation]
+ if type(e) == "table" then
+ return e
+ elseif type(e) == "function" then
+ return e(var.arguments)
+ end
+ end
+end
+
+--~ entry.A = pdfdictionary {
+--~ S = pdf_goto,
+--~ D = ....
+--~ }
+
+local function build(levels,start,parent,method)
+ local startlevel = levels[start][1]
+ local i, n = start, 0
+ local child, entry, m, prev, first, last, f, l
+ while i and i <= #levels do
+ local li = levels[i]
+ local level, title, reference, open = li[1], li[2], li[3], li[4]
+ if level < startlevel then
+ pdfflushobject(child,entry)
+ return i, n, first, last
+ elseif level == startlevel then
+ if trace_bookmarks then
+ report_bookmark("%3i %w%s %s",reference.realpage,(level-1)*2,(open and "+") or "-",title)
+ end
+ local prev = child
+ child = pdfreserveobject()
+ if entry then
+ entry.Next = child and pdfreference(child)
+ pdfflushobject(prev,entry)
+ end
+ entry = pdfdictionary {
+ Title = pdfunicode(title),
+ Parent = parent,
+ Prev = prev and pdfreference(prev),
+ }
+ if method == "internal" then
+ entry.Dest = "aut:" .. reference.internal
+ else -- if method == "page" then
+ entry.Dest = pagedestination(reference.realpage)
+ end
+ if not first then first, last = child, child end
+ prev = child
+ last = prev
+ n = n + 1
+ i = i + 1
+ elseif i < #levels and level > startlevel then
+ i, m, f, l = build(levels,i,pdfreference(child),method)
+ entry.Count = (open and m) or -m
+ if m > 0 then
+ entry.First, entry.Last = pdfreference(f), pdfreference(l)
+ end
+ else
+ -- missing intermediate level but ok
+ i, m, f, l = build(levels,i,pdfreference(child),method)
+ entry.Count = (open and m) or -m
+ if m > 0 then
+ entry.First, entry.Last = pdfreference(f), pdfreference(l)
+ end
+ pdfflushobject(child,entry)
+ return i, n, first, last
+ end
+ end
+ pdfflushobject(child,entry)
+ return nil, n, first, last
+end
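+
+-- build() walks the flattened list recursively: entries on the same level are
+-- chained with /Prev and /Next, a deeper level becomes a child list hooked in
+-- via /First and /Last, and /Count is negated for closed entries. A hedged
+-- sketch of the expected input (titles and numbers are made up), normally
+-- produced by the bookmark mechanism itself:
+--
+-- local levels = {
+--     { 1, "Introduction", { realpage = 1, internal = 101 }, true  },
+--     { 2, "History",      { realpage = 2, internal = 102 }, false },
+--     { 1, "Usage",        { realpage = 5, internal = 103 }, true  },
+-- }
+-- codeinjections.addbookmarks(levels,"internal")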
+
+function codeinjections.addbookmarks(levels,method)
+ if #levels > 0 then
+ structures.bookmarks.flatten(levels) -- dirty trick for lack of structure
+ local parent = pdfreserveobject()
+ local _, m, first, last = build(levels,1,pdfreference(parent),method or "internal")
+ local dict = pdfdictionary {
+ Type = pdfconstant("Outlines"),
+ First = pdfreference(first),
+ Last = pdfreference(last),
+ Count = m,
+ }
+ pdfflushobject(parent,dict)
+ lpdf.addtocatalog("Outlines",lpdf.reference(parent))
+ end
+end
+
+-- this could also be hooked into the frontend finalizer
+
+lpdf.registerdocumentfinalizer(function() bookmarks.place() end,1,"bookmarks")
diff --git a/tex/context/base/lpdf-enc.lua b/tex/context/base/lpdf-enc.lua
index 090fb15cd..6dd286191 100644
--- a/tex/context/base/lpdf-enc.lua
+++ b/tex/context/base/lpdf-enc.lua
@@ -1,157 +1,157 @@
-if not modules then modules = { } end modules ['lpdf-enc'] = {
- version = 1.001,
- comment = "companion to lpdf-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- delayed loading
-
-local pdfconstant = lpdf.constant
-
-return lpdf.dictionary {
- Type = pdfconstant("Encoding"),
- Differences = lpdf.array {
- 24,
- pdfconstant("breve"),
- pdfconstant("caron"),
- pdfconstant("circumflex"),
- pdfconstant("dotaccent"),
- pdfconstant("hungarumlaut"),
- pdfconstant("ogonek"),
- pdfconstant("ring"),
- pdfconstant("tilde"),
- 39,
- pdfconstant("quotesingle"),
- 96,
- pdfconstant("grave"),
- 128,
- pdfconstant("bullet"),
- pdfconstant("dagger"),
- pdfconstant("daggerdbl"),
- pdfconstant("ellipsis"),
- pdfconstant("emdash"),
- pdfconstant("endash"),
- pdfconstant("florin"),
- pdfconstant("fraction"),
- pdfconstant("guilsinglleft"),
- pdfconstant("guilsinglright"),
- pdfconstant("minus"),
- pdfconstant("perthousand"),
- pdfconstant("quotedblbase"),
- pdfconstant("quotedblleft"),
- pdfconstant("quotedblright"),
- pdfconstant("quoteleft"),
- pdfconstant("quoteright"),
- pdfconstant("quotesinglbase"),
- pdfconstant("trademark"),
- pdfconstant("fi"),
- pdfconstant("fl"),
- pdfconstant("Lslash"),
- pdfconstant("OE"),
- pdfconstant("Scaron"),
- pdfconstant("Ydieresis"),
- pdfconstant("Zcaron"),
- pdfconstant("dotlessi"),
- pdfconstant("lslash"),
- pdfconstant("oe"),
- pdfconstant("scaron"),
- pdfconstant("zcaron"),
- 160,
- pdfconstant("Euro"),
- 164,
- pdfconstant("currency"),
- 166,
- pdfconstant("brokenbar"),
- 168,
- pdfconstant("dieresis"),
- pdfconstant("copyright"),
- pdfconstant("ordfeminine"),
- 172,
- pdfconstant("logicalnot"),
- pdfconstant(".notdef"),
- pdfconstant("registered"),
- pdfconstant("macron"),
- pdfconstant("degree"),
- pdfconstant("plusminus"),
- pdfconstant("twosuperior"),
- pdfconstant("threesuperior"),
- pdfconstant("acute"),
- pdfconstant("mu"),
- 183,
- pdfconstant("periodcentered"),
- pdfconstant("cedilla"),
- pdfconstant("onesuperior"),
- pdfconstant("ordmasculine"),
- 188,
- pdfconstant("onequarter"),
- pdfconstant("onehalf"),
- pdfconstant("threequarters"),
- 192,
- pdfconstant("Agrave"),
- pdfconstant("Aacute"),
- pdfconstant("Acircumflex"),
- pdfconstant("Atilde"),
- pdfconstant("Adieresis"),
- pdfconstant("Aring"),
- pdfconstant("AE"),
- pdfconstant("Ccedilla"),
- pdfconstant("Egrave"),
- pdfconstant("Eacute"),
- pdfconstant("Ecircumflex"),
- pdfconstant("Edieresis"),
- pdfconstant("Igrave"),
- pdfconstant("Iacute"),
- pdfconstant("Icircumflex"),
- pdfconstant("Idieresis"),
- pdfconstant("Eth"),
- pdfconstant("Ntilde"),
- pdfconstant("Ograve"),
- pdfconstant("Oacute"),
- pdfconstant("Ocircumflex"),
- pdfconstant("Otilde"),
- pdfconstant("Odieresis"),
- pdfconstant("multiply"),
- pdfconstant("Oslash"),
- pdfconstant("Ugrave"),
- pdfconstant("Uacute"),
- pdfconstant("Ucircumflex"),
- pdfconstant("Udieresis"),
- pdfconstant("Yacute"),
- pdfconstant("Thorn"),
- pdfconstant("germandbls"),
- pdfconstant("agrave"),
- pdfconstant("aacute"),
- pdfconstant("acircumflex"),
- pdfconstant("atilde"),
- pdfconstant("adieresis"),
- pdfconstant("aring"),
- pdfconstant("ae"),
- pdfconstant("ccedilla"),
- pdfconstant("egrave"),
- pdfconstant("eacute"),
- pdfconstant("ecircumflex"),
- pdfconstant("edieresis"),
- pdfconstant("igrave"),
- pdfconstant("iacute"),
- pdfconstant("icircumflex"),
- pdfconstant("idieresis"),
- pdfconstant("eth"),
- pdfconstant("ntilde"),
- pdfconstant("ograve"),
- pdfconstant("oacute"),
- pdfconstant("ocircumflex"),
- pdfconstant("otilde"),
- pdfconstant("odieresis"),
- pdfconstant("divide"),
- pdfconstant("oslash"),
- pdfconstant("ugrave"),
- pdfconstant("uacute"),
- pdfconstant("ucircumflex"),
- pdfconstant("udieresis"),
- pdfconstant("yacute"),
- pdfconstant("thorn"),
- pdfconstant("ydieresis"),
- },
-}
+if not modules then modules = { } end modules ['lpdf-enc'] = {
+ version = 1.001,
+ comment = "companion to lpdf-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- delayed loading
+
+local pdfconstant = lpdf.constant
+
+return lpdf.dictionary {
+ Type = pdfconstant("Encoding"),
+ Differences = lpdf.array {
+ 24,
+ pdfconstant("breve"),
+ pdfconstant("caron"),
+ pdfconstant("circumflex"),
+ pdfconstant("dotaccent"),
+ pdfconstant("hungarumlaut"),
+ pdfconstant("ogonek"),
+ pdfconstant("ring"),
+ pdfconstant("tilde"),
+ 39,
+ pdfconstant("quotesingle"),
+ 96,
+ pdfconstant("grave"),
+ 128,
+ pdfconstant("bullet"),
+ pdfconstant("dagger"),
+ pdfconstant("daggerdbl"),
+ pdfconstant("ellipsis"),
+ pdfconstant("emdash"),
+ pdfconstant("endash"),
+ pdfconstant("florin"),
+ pdfconstant("fraction"),
+ pdfconstant("guilsinglleft"),
+ pdfconstant("guilsinglright"),
+ pdfconstant("minus"),
+ pdfconstant("perthousand"),
+ pdfconstant("quotedblbase"),
+ pdfconstant("quotedblleft"),
+ pdfconstant("quotedblright"),
+ pdfconstant("quoteleft"),
+ pdfconstant("quoteright"),
+ pdfconstant("quotesinglbase"),
+ pdfconstant("trademark"),
+ pdfconstant("fi"),
+ pdfconstant("fl"),
+ pdfconstant("Lslash"),
+ pdfconstant("OE"),
+ pdfconstant("Scaron"),
+ pdfconstant("Ydieresis"),
+ pdfconstant("Zcaron"),
+ pdfconstant("dotlessi"),
+ pdfconstant("lslash"),
+ pdfconstant("oe"),
+ pdfconstant("scaron"),
+ pdfconstant("zcaron"),
+ 160,
+ pdfconstant("Euro"),
+ 164,
+ pdfconstant("currency"),
+ 166,
+ pdfconstant("brokenbar"),
+ 168,
+ pdfconstant("dieresis"),
+ pdfconstant("copyright"),
+ pdfconstant("ordfeminine"),
+ 172,
+ pdfconstant("logicalnot"),
+ pdfconstant(".notdef"),
+ pdfconstant("registered"),
+ pdfconstant("macron"),
+ pdfconstant("degree"),
+ pdfconstant("plusminus"),
+ pdfconstant("twosuperior"),
+ pdfconstant("threesuperior"),
+ pdfconstant("acute"),
+ pdfconstant("mu"),
+ 183,
+ pdfconstant("periodcentered"),
+ pdfconstant("cedilla"),
+ pdfconstant("onesuperior"),
+ pdfconstant("ordmasculine"),
+ 188,
+ pdfconstant("onequarter"),
+ pdfconstant("onehalf"),
+ pdfconstant("threequarters"),
+ 192,
+ pdfconstant("Agrave"),
+ pdfconstant("Aacute"),
+ pdfconstant("Acircumflex"),
+ pdfconstant("Atilde"),
+ pdfconstant("Adieresis"),
+ pdfconstant("Aring"),
+ pdfconstant("AE"),
+ pdfconstant("Ccedilla"),
+ pdfconstant("Egrave"),
+ pdfconstant("Eacute"),
+ pdfconstant("Ecircumflex"),
+ pdfconstant("Edieresis"),
+ pdfconstant("Igrave"),
+ pdfconstant("Iacute"),
+ pdfconstant("Icircumflex"),
+ pdfconstant("Idieresis"),
+ pdfconstant("Eth"),
+ pdfconstant("Ntilde"),
+ pdfconstant("Ograve"),
+ pdfconstant("Oacute"),
+ pdfconstant("Ocircumflex"),
+ pdfconstant("Otilde"),
+ pdfconstant("Odieresis"),
+ pdfconstant("multiply"),
+ pdfconstant("Oslash"),
+ pdfconstant("Ugrave"),
+ pdfconstant("Uacute"),
+ pdfconstant("Ucircumflex"),
+ pdfconstant("Udieresis"),
+ pdfconstant("Yacute"),
+ pdfconstant("Thorn"),
+ pdfconstant("germandbls"),
+ pdfconstant("agrave"),
+ pdfconstant("aacute"),
+ pdfconstant("acircumflex"),
+ pdfconstant("atilde"),
+ pdfconstant("adieresis"),
+ pdfconstant("aring"),
+ pdfconstant("ae"),
+ pdfconstant("ccedilla"),
+ pdfconstant("egrave"),
+ pdfconstant("eacute"),
+ pdfconstant("ecircumflex"),
+ pdfconstant("edieresis"),
+ pdfconstant("igrave"),
+ pdfconstant("iacute"),
+ pdfconstant("icircumflex"),
+ pdfconstant("idieresis"),
+ pdfconstant("eth"),
+ pdfconstant("ntilde"),
+ pdfconstant("ograve"),
+ pdfconstant("oacute"),
+ pdfconstant("ocircumflex"),
+ pdfconstant("otilde"),
+ pdfconstant("odieresis"),
+ pdfconstant("divide"),
+ pdfconstant("oslash"),
+ pdfconstant("ugrave"),
+ pdfconstant("uacute"),
+ pdfconstant("ucircumflex"),
+ pdfconstant("udieresis"),
+ pdfconstant("yacute"),
+ pdfconstant("thorn"),
+ pdfconstant("ydieresis"),
+ },
+}
diff --git a/tex/context/base/lpdf-epa.lua b/tex/context/base/lpdf-epa.lua
index 034e6d7e2..8d00c8c26 100644
--- a/tex/context/base/lpdf-epa.lua
+++ b/tex/context/base/lpdf-epa.lua
@@ -1,226 +1,226 @@
-if not modules then modules = { } end modules ['lpdf-epa'] = {
- version = 1.001,
- comment = "companion to lpdf-epa.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This is a rather experimental feature and the code will probably
--- change.
-
-local type, tonumber = type, tonumber
-local format, gsub = string.format, string.gsub
-local formatters = string.formatters
-
------ lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-
-local trace_links = false trackers.register("figures.links", function(v) trace_links = v end)
-
-local report_link = logs.reporter("backend","merging")
-
-local backends, lpdf = backends, lpdf
-
-local variables = interfaces.variables
-local codeinjections = backends.pdf.codeinjections
------ urlescaper = lpegpatterns.urlescaper
------ utftohigh = lpegpatterns.utftohigh
-local escapetex = characters.filters.utf.private.escape
-
-local layerspec = { -- predefining saves time
- "epdflinks"
-}
-
-local function makenamespace(filename)
- return format("lpdf-epa-%s-",file.removesuffix(file.basename(filename)))
-end
-
-local function add_link(x,y,w,h,destination,what)
- x = x .. "bp"
- y = y .. "bp"
- w = w .. "bp"
- h = h .. "bp"
- if trace_links then
- report_link("destination %a, type %a, dx %s, dy %s, wd %s, ht %s",destination,what,x,y,w,h)
- end
- local locationspec = { -- predefining saves time
- x = x,
- y = y,
- preset = "leftbottom",
- }
- local buttonspec = {
- width = w,
- height = h,
- offset = variables.overlay,
- frame = trace_links and variables.on or variables.off,
- }
- context.setlayer (
- layerspec,
- locationspec,
- function() context.button ( buttonspec, "", { destination } ) end
- -- context.nested.button(buttonspec, "", { destination }) -- time this
- )
-end
-
-local function link_goto(x,y,w,h,document,annotation,pagedata,namespace)
- local a = annotation.A
- if a then
- local destination = a.D -- [ 18 0 R /Fit ]
- local what = "page"
- if type(destination) == "string" then
- local destinations = document.destinations
- local wanted = destinations[destination]
- destination = wanted and wanted.D
- if destination then what = "named" end
- end
- local pagedata = destination and destination[1]
- if pagedata then
- local destinationpage = pagedata.number
- if destinationpage then
- add_link(x,y,w,h,namespace .. destinationpage,what)
- end
- end
- end
-end
-
-local function link_uri(x,y,w,h,document,annotation)
- local url = annotation.A.URI
- if url then
- -- url = lpegmatch(urlescaper,url)
- -- url = lpegmatch(utftohigh,url)
- url = escapetex(url)
- add_link(x,y,w,h,formatters["url(%s)"](url),"url")
- end
-end
-
-local function link_file(x,y,w,h,document,annotation)
- local a = annotation.A
- if a then
- local filename = a.F
- if filename then
- filename = escapetex(filename)
- local destination = a.D
- if not destination then
- add_link(x,y,w,h,formatters["file(%s)"](filename),"file")
- elseif type(destination) == "string" then
- add_link(x,y,w,h,formatters["%s::%s"](filename,destination),"file (named)")
- else
- destination = destination[1] -- array
- if tonumber(destination) then
- add_link(x,y,w,h,formatters["%s::page(%s)"](filename,destination),"file (page)")
- else
- add_link(x,y,w,h,formatters["file(%s)"](filename),"file")
- end
- end
- end
- end
-end
-
-function codeinjections.mergereferences(specification)
- if figures and not specification then
- specification = figures and figures.current()
- specification = specification and specification.status
- end
- if specification then
- local fullname = specification.fullname
- local document = lpdf.epdf.load(fullname)
- if document then
- local pagenumber = specification.page or 1
-            local xscale = specification.xscale or 1
- local yscale = specification.yscale or 1
- local size = specification.size or "crop" -- todo
- local pagedata = document.pages[pagenumber]
- local annotations = pagedata and pagedata.Annots
- if annotations and annotations.n > 0 then
- local namespace = format("lpdf-epa-%s-",file.removesuffix(file.basename(fullname)))
- local reference = namespace .. pagenumber
- local mediabox = pagedata.MediaBox
- local llx, lly, urx, ury = mediabox[1], mediabox[2], mediabox[3], mediabox[4]
- local width, height = xscale * (urx - llx), yscale * (ury - lly) -- \\overlaywidth, \\overlayheight
- context.definelayer( { "epdflinks" }, { height = height.."bp" , width = width.."bp" })
- for i=1,annotations.n do
- local annotation = annotations[i]
- if annotation then
- local subtype = annotation.Subtype
- local rectangle = annotation.Rect
- local a_llx, a_lly, a_urx, a_ury = rectangle[1], rectangle[2], rectangle[3], rectangle[4]
- local x, y = xscale * (a_llx - llx), yscale * (a_lly - lly)
- local w, h = xscale * (a_urx - a_llx), yscale * (a_ury - a_lly)
- if subtype == "Link" then
- local a = annotation.A
- if a then
- local linktype = a.S
- if linktype == "GoTo" then
- link_goto(x,y,w,h,document,annotation,pagedata,namespace)
- elseif linktype == "GoToR" then
- link_file(x,y,w,h,document,annotation)
- elseif linktype == "URI" then
- link_uri(x,y,w,h,document,annotation)
- elseif trace_links then
- report_link("unsupported link annotation %a",linktype)
- end
- else
-                                report_link("missing link annotation")
- end
- elseif trace_links then
- report_link("unsupported annotation %a",subtype)
- end
- elseif trace_links then
- report_link("broken annotation, index %a",i)
- end
- end
- context.flushlayer { "epdflinks" }
- -- context("\\gdef\\figurereference{%s}",reference) -- global
- context.setgvalue("figurereference",reference) -- global
- if trace_links then
- report_link("setting figure reference to %a",reference)
- end
- specification.reference = reference
- return namespace
- end
- end
- end
- return ""-- no namespace, empty, not nil
-end
-
-function codeinjections.mergeviewerlayers(specification)
- -- todo: parse included page for layers
- if true then
- return
- end
- if not specification then
- specification = figures and figures.current()
- specification = specification and specification.status
- end
- if specification then
- local fullname = specification.fullname
- local document = lpdf.epdf.load(fullname)
- if document then
- local namespace = format("lpdf:epa:%s:",file.removesuffix(file.basename(fullname)))
- local layers = document.layers
- if layers then
- for i=1,layers.n do
- local layer = layers[i]
- if layer then
- local tag = namespace .. gsub(layer," ",":")
- local title = tag
- if trace_links then
- report_link("using layer %a",tag)
- end
- attributes.viewerlayers.define { -- also does some cleaning
- tag = tag, -- todo: #3A or so
- title = title,
- visible = variables.start,
- editable = variables.yes,
- printable = variables.yes,
- }
- codeinjections.useviewerlayer(tag)
- elseif trace_links then
- report_link("broken layer, index %a",i)
- end
- end
- end
- end
- end
-end
-
+if not modules then modules = { } end modules ['lpdf-epa'] = {
+ version = 1.001,
+ comment = "companion to lpdf-epa.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This is a rather experimental feature and the code will probably
+-- change.
+
+local type, tonumber = type, tonumber
+local format, gsub = string.format, string.gsub
+local formatters = string.formatters
+
+----- lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+
+local trace_links = false trackers.register("figures.links", function(v) trace_links = v end)
+
+local report_link = logs.reporter("backend","merging")
+
+local backends, lpdf = backends, lpdf
+
+local variables = interfaces.variables
+local codeinjections = backends.pdf.codeinjections
+----- urlescaper = lpegpatterns.urlescaper
+----- utftohigh = lpegpatterns.utftohigh
+local escapetex = characters.filters.utf.private.escape
+
+local layerspec = { -- predefining saves time
+ "epdflinks"
+}
+
+local function makenamespace(filename)
+ return format("lpdf-epa-%s-",file.removesuffix(file.basename(filename)))
+end
+
+local function add_link(x,y,w,h,destination,what)
+ x = x .. "bp"
+ y = y .. "bp"
+ w = w .. "bp"
+ h = h .. "bp"
+ if trace_links then
+ report_link("destination %a, type %a, dx %s, dy %s, wd %s, ht %s",destination,what,x,y,w,h)
+ end
+ local locationspec = { -- predefining saves time
+ x = x,
+ y = y,
+ preset = "leftbottom",
+ }
+ local buttonspec = {
+ width = w,
+ height = h,
+ offset = variables.overlay,
+ frame = trace_links and variables.on or variables.off,
+ }
+ context.setlayer (
+ layerspec,
+ locationspec,
+ function() context.button ( buttonspec, "", { destination } ) end
+ -- context.nested.button(buttonspec, "", { destination }) -- time this
+ )
+end
+
+local function link_goto(x,y,w,h,document,annotation,pagedata,namespace)
+ local a = annotation.A
+ if a then
+ local destination = a.D -- [ 18 0 R /Fit ]
+ local what = "page"
+ if type(destination) == "string" then
+ local destinations = document.destinations
+ local wanted = destinations[destination]
+ destination = wanted and wanted.D
+ if destination then what = "named" end
+ end
+ local pagedata = destination and destination[1]
+ if pagedata then
+ local destinationpage = pagedata.number
+ if destinationpage then
+ add_link(x,y,w,h,namespace .. destinationpage,what)
+ end
+ end
+ end
+end
+
+local function link_uri(x,y,w,h,document,annotation)
+ local url = annotation.A.URI
+ if url then
+ -- url = lpegmatch(urlescaper,url)
+ -- url = lpegmatch(utftohigh,url)
+ url = escapetex(url)
+ add_link(x,y,w,h,formatters["url(%s)"](url),"url")
+ end
+end
+
+local function link_file(x,y,w,h,document,annotation)
+ local a = annotation.A
+ if a then
+ local filename = a.F
+ if filename then
+ filename = escapetex(filename)
+ local destination = a.D
+ if not destination then
+ add_link(x,y,w,h,formatters["file(%s)"](filename),"file")
+ elseif type(destination) == "string" then
+ add_link(x,y,w,h,formatters["%s::%s"](filename,destination),"file (named)")
+ else
+ destination = destination[1] -- array
+ if tonumber(destination) then
+ add_link(x,y,w,h,formatters["%s::page(%s)"](filename,destination),"file (page)")
+ else
+ add_link(x,y,w,h,formatters["file(%s)"](filename),"file")
+ end
+ end
+ end
+ end
+end
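+
+-- The three helpers above turn pdf link annotations into context reference
+-- strings that end up as button destinations; hedged examples of what they
+-- produce (filenames and destinations are made up):
+--
+-- link_uri  : url(http://www.pragma-ade.com)
+-- link_file : file(manual.pdf), manual.pdf::intro, manual.pdf::page(3)
+-- link_goto : lpdf-epa-somefile-3 (the namespace followed by the target page)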
+
+function codeinjections.mergereferences(specification)
+ if figures and not specification then
+ specification = figures and figures.current()
+ specification = specification and specification.status
+ end
+ if specification then
+ local fullname = specification.fullname
+ local document = lpdf.epdf.load(fullname)
+ if document then
+ local pagenumber = specification.page or 1
+            local xscale = specification.xscale or 1
+ local yscale = specification.yscale or 1
+ local size = specification.size or "crop" -- todo
+ local pagedata = document.pages[pagenumber]
+ local annotations = pagedata and pagedata.Annots
+ if annotations and annotations.n > 0 then
+ local namespace = format("lpdf-epa-%s-",file.removesuffix(file.basename(fullname)))
+ local reference = namespace .. pagenumber
+ local mediabox = pagedata.MediaBox
+ local llx, lly, urx, ury = mediabox[1], mediabox[2], mediabox[3], mediabox[4]
+ local width, height = xscale * (urx - llx), yscale * (ury - lly) -- \\overlaywidth, \\overlayheight
+ context.definelayer( { "epdflinks" }, { height = height.."bp" , width = width.."bp" })
+ for i=1,annotations.n do
+ local annotation = annotations[i]
+ if annotation then
+ local subtype = annotation.Subtype
+ local rectangle = annotation.Rect
+ local a_llx, a_lly, a_urx, a_ury = rectangle[1], rectangle[2], rectangle[3], rectangle[4]
+ local x, y = xscale * (a_llx - llx), yscale * (a_lly - lly)
+ local w, h = xscale * (a_urx - a_llx), yscale * (a_ury - a_lly)
+ if subtype == "Link" then
+ local a = annotation.A
+ if a then
+ local linktype = a.S
+ if linktype == "GoTo" then
+ link_goto(x,y,w,h,document,annotation,pagedata,namespace)
+ elseif linktype == "GoToR" then
+ link_file(x,y,w,h,document,annotation)
+ elseif linktype == "URI" then
+ link_uri(x,y,w,h,document,annotation)
+ elseif trace_links then
+ report_link("unsupported link annotation %a",linktype)
+ end
+ else
+                                report_link("missing link annotation")
+ end
+ elseif trace_links then
+ report_link("unsupported annotation %a",subtype)
+ end
+ elseif trace_links then
+ report_link("broken annotation, index %a",i)
+ end
+ end
+ context.flushlayer { "epdflinks" }
+ -- context("\\gdef\\figurereference{%s}",reference) -- global
+ context.setgvalue("figurereference",reference) -- global
+ if trace_links then
+ report_link("setting figure reference to %a",reference)
+ end
+ specification.reference = reference
+ return namespace
+ end
+ end
+ end
+ return ""-- no namespace, empty, not nil
+end
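+
+-- A hedged usage sketch: when an included figure has interaction enabled, the
+-- graphic inclusion code calls mergereferences with the current figure
+-- specification, the annotations of the embedded page are overlayed as context
+-- buttons and the returned namespace prefixes the per-page references. The
+-- file name below is hypothetical.
+--
+-- \setupexternalfigures[interaction=yes]
+-- \externalfigure[somefile.pdf][page=3]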
+
+function codeinjections.mergeviewerlayers(specification)
+ -- todo: parse included page for layers
+ if true then
+ return
+ end
+ if not specification then
+ specification = figures and figures.current()
+ specification = specification and specification.status
+ end
+ if specification then
+ local fullname = specification.fullname
+ local document = lpdf.epdf.load(fullname)
+ if document then
+ local namespace = format("lpdf:epa:%s:",file.removesuffix(file.basename(fullname)))
+ local layers = document.layers
+ if layers then
+ for i=1,layers.n do
+ local layer = layers[i]
+ if layer then
+ local tag = namespace .. gsub(layer," ",":")
+ local title = tag
+ if trace_links then
+ report_link("using layer %a",tag)
+ end
+ attributes.viewerlayers.define { -- also does some cleaning
+ tag = tag, -- todo: #3A or so
+ title = title,
+ visible = variables.start,
+ editable = variables.yes,
+ printable = variables.yes,
+ }
+ codeinjections.useviewerlayer(tag)
+ elseif trace_links then
+ report_link("broken layer, index %a",i)
+ end
+ end
+ end
+ end
+ end
+end
+
diff --git a/tex/context/base/lpdf-epd.lua b/tex/context/base/lpdf-epd.lua
index b9f8cfc7c..4bf98edcc 100644
--- a/tex/context/base/lpdf-epd.lua
+++ b/tex/context/base/lpdf-epd.lua
@@ -1,351 +1,351 @@
-if not modules then modules = { } end modules ['lpdf-epd'] = {
- version = 1.001,
- comment = "companion to lpdf-epa.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This is an experimental layer around the epdf library. The reason for
--- this layer is that I want to be independent of the library (which
--- implements a selection of what a file provides) and also because I
--- want an interface closer to Lua's table model while the API stays
--- close to the original xpdf library. Of course, after prototyping a
--- solution, we can optimize it using the low level epdf accessors.
-
--- It will be handy when we have a __length and __next that can trigger
--- the resolve; till then we will provide .n as #.
-
--- As there can be references to the parent we cannot expand a tree. I
--- played with some expansion variants but it does not pay off.
-
--- Maybe we need a close().
--- We cannot access all destinations in one run.
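-
--- A hedged sketch of the lazy access this wrapper aims at: subtables resolve
--- on first use through their metatable, so a loaded document can be walked
--- like a plain Lua table (the file name is hypothetical):
---
--- local document = lpdf.epdf.load("somefile.pdf")
--- if document then
---     local firstpage = document.pages[1]
---     local mediabox  = firstpage.MediaBox -- [ llx lly urx ury ], resolved on demand
--- end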
-
-local setmetatable, rawset, rawget, tostring, tonumber = setmetatable, rawset, rawget, tostring, tonumber
-local lower, match, char, find, sub = string.lower, string.match, string.char, string.find, string.sub
-local concat = table.concat
-local toutf = string.toutf
-
-local report_epdf = logs.reporter("epdf")
-
--- a bit of protection
-
-local limited = false
-
-directives.register("system.inputmode", function(v)
- if not limited then
- local i_limiter = io.i_limiter(v)
- if i_limiter then
- epdf.open = i_limiter.protect(epdf.open)
- limited = true
- end
- end
-end)
-
---
-
-function epdf.type(o)
- local t = lower(match(tostring(o),"[^ :]+"))
- return t or "?"
-end
-
-lpdf = lpdf or { }
-local lpdf = lpdf
-
-lpdf.epdf = { }
-
-local checked_access
-
-local function prepare(document,d,t,n,k)
- for i=1,n do
- local v = d:getVal(i)
- local r = d:getValNF(i)
- if r:getTypeName() == "ref" then
- r = r:getRef().num
- local c = document.cache[r]
- if c then
- --
- else
- c = checked_access[v:getTypeName()](v,document,r)
- if c then
- document.cache[r] = c
- document.xrefs[c] = r
- end
- end
- t[d:getKey(i)] = c
- else
- t[d:getKey(i)] = checked_access[v:getTypeName()](v,document)
- end
- end
- getmetatable(t).__index = nil
- return t[k]
-end
-
-local function some_dictionary(d,document,r)
- local n = d and d:getLength() or 0
- if n > 0 then
- local t = { }
- setmetatable(t, { __index = function(t,k) return prepare(document,d,t,n,k) end } )
- return t
- end
-end
-
-local done = { }
-
-local function prepare(document,a,t,n,k)
- for i=1,n do
- local v = a:get(i)
- local r = a:getNF(i)
- if v:getTypeName() == "null" then
- -- TH: weird, but appears possible
- elseif r:getTypeName() == "ref" then
- r = r:getRef().num
- local c = document.cache[r]
- if c then
- --
- else
- c = checked_access[v:getTypeName()](v,document,r)
- document.cache[r] = c
- document.xrefs[c] = r
- end
- t[i] = c
- else
- t[i] = checked_access[v:getTypeName()](v,document)
- end
- end
- getmetatable(t).__index = nil
- return t[k]
-end
-
-local function some_array(a,document,r)
- local n = a and a:getLength() or 0
- if n > 0 then
- local t = { n = n }
- setmetatable(t, { __index = function(t,k) return prepare(document,a,t,n,k) end } )
- return t
- end
-end
-
-local function streamaccess(s,_,what)
- if not what or what == "all" or what == "*all" then
- local t, n = { }, 0
- s:streamReset()
- while true do
- local c = s:streamGetChar()
- if c < 0 then
- break
- else
- n = n + 1
- t[n] = char(c)
- end
- end
- return concat(t)
- end
-end
-
-local function some_stream(d,document,r)
- if d then
- d:streamReset()
- local s = some_dictionary(d:streamGetDict(),document,r)
- getmetatable(s).__call = function(...) return streamaccess(d,...) end
- return s
- end
-end
-
--- we need epdf.getBool
-
-checked_access = {
- dictionary = function(d,document,r)
- return some_dictionary(d:getDict(),document,r)
- end,
- array = function(a,document,r)
- return some_array(a:getArray(),document,r)
- end,
- stream = function(v,document,r)
- return some_stream(v,document,r)
- end,
- real = function(v)
- return v:getReal()
- end,
- integer = function(v)
- return v:getNum()
- end,
- string = function(v)
- return toutf(v:getString())
- end,
- boolean = function(v)
- return v:getBool()
- end,
- name = function(v)
- return v:getName()
- end,
- ref = function(v)
- return v:getRef()
- end,
- null = function()
- return nil
- end,
-}
-
--- checked_access.real = epdf.real
--- checked_access.integer = epdf.integer
--- checked_access.string = epdf.string
--- checked_access.boolean = epdf.boolean
--- checked_access.name = epdf.name
--- checked_access.ref = epdf.ref
-
-local function getnames(document,n,target) -- direct
- if n then
- local Names = n.Names
- if Names then
- if not target then
- target = { }
- end
- for i=1,Names.n,2 do
- target[Names[i]] = Names[i+1]
- end
- else
- local Kids = n.Kids
- if Kids then
- for i=1,Kids.n do
- target = getnames(document,Kids[i],target)
- end
- end
- end
- return target
- end
-end
-
-local function getkids(document,n,target) -- direct
- if n then
- local Kids = n.Kids
- if Kids then
- for i=1,Kids.n do
- target = getkids(document,Kids[i],target)
- end
- elseif target then
- target[#target+1] = n
- else
- target = { n }
- end
- return target
- end
-end
-
--- /OCProperties <<
--- /OCGs [ 15 0 R 17 0 R 19 0 R 21 0 R 23 0 R 25 0 R 27 0 R ]
--- /D <<
--- /Order [ 15 0 R 17 0 R 19 0 R 21 0 R 23 0 R 25 0 R 27 0 R ]
--- /ON [ 15 0 R 17 0 R 19 0 R 21 0 R 23 0 R 25 0 R 27 0 R ]
--- /OFF [ ]
--- >>
--- >>
-
-local function getlayers(document)
- local properties = document.Catalog.OCProperties
- if properties then
- local layers = properties.OCGs
- if layers then
- local t = { }
- local n = layers.n
- for i=1,n do
- local layer = layers[i]
---~ print(document.xrefs[layer])
- t[i] = layer.Name
- end
- t.n = n
- return t
- end
- end
-end
-
-local function getpages(document)
- local data = document.data
- local xrefs = document.xrefs
- local cache = document.cache
- local cata = data:getCatalog()
- local xref = data:getXRef()
- local pages = { }
- local nofpages = cata:getNumPages()
- for pagenumber=1,nofpages do
- local pagereference = cata:getPageRef(pagenumber).num
- local pagedata = some_dictionary(xref:fetch(pagereference,0):getDict(),document,pagereference)
- if pagedata then
- pagedata.number = pagenumber
- pages[pagenumber] = pagedata
- xrefs[pagedata] = pagereference
- cache[pagereference] = pagedata
- else
- report_epdf("missing pagedata at slot %i",i)
- end
- end
- pages.n = nofpages
- return pages
-end
-
--- loader
-
-local function delayed(document,tag,f)
- local t = { }
- setmetatable(t, { __index = function(t,k)
- local result = f()
- if result then
- document[tag] = result
- return result[k]
- end
- end } )
- return t
-end
-
-local loaded = { }
-
-function lpdf.epdf.load(filename)
- local document = loaded[filename]
- if not document then
- statistics.starttiming(lpdf.epdf)
- local data = epdf.open(filename) -- maybe resolvers.find_file
- if data then
- document = {
- filename = filename,
- cache = { },
- xrefs = { },
- data = data,
- }
- local Catalog = some_dictionary(data:getXRef():getCatalog():getDict(),document)
- local Info = some_dictionary(data:getXRef():getDocInfo():getDict(),document)
- document.Catalog = Catalog
- document.Info = Info
- -- document.catalog = Catalog
- -- a few handy helper tables
- document.pages = delayed(document,"pages", function() return getpages(document) end)
- document.destinations = delayed(document,"destinations", function() return getnames(document,Catalog.Names and Catalog.Names.Dests) end)
- document.javascripts = delayed(document,"javascripts", function() return getnames(document,Catalog.Names and Catalog.Names.JS) end)
- document.widgets = delayed(document,"widgets", function() return getnames(document,Catalog.Names and Catalog.Names.AcroForm) end)
- document.embeddedfiles = delayed(document,"embeddedfiles",function() return getnames(document,Catalog.Names and Catalog.Names.EmbeddedFiles) end)
- document.layers = delayed(document,"layers", function() return getlayers(document) end)
- else
- document = false
- end
- loaded[filename] = document
- statistics.stoptiming(lpdf.epdf)
- -- print(statistics.elapsedtime(lpdf.epdf))
- end
- return document
-end
-
--- for k, v in next, expand(t) do
-
-function lpdf.epdf.expand(t)
- if type(t) == "table" then
- local dummy = t.dummy
- end
- return t
-end
-
--- helpers
-
--- function lpdf.epdf.getdestinationpage(document,name)
--- local destination = document.data:findDest(name)
--- return destination and destination.number
--- end
+if not modules then modules = { } end modules ['lpdf-epd'] = {
+ version = 1.001,
+ comment = "companion to lpdf-epa.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This is an experimental layer around the epdf library. The reason for
+-- this layer is that I want to be independent of the library (which
+-- implements a selection of what a file provides) and also because I
+-- want an interface closer to Lua's table model while the API stays
+-- close to the original xpdf library. Of course, after prototyping a
+-- solution, we can optimize it using the low level epdf accessors.
+
+-- It would be handy to have a __length and a __next that can trigger the
+-- resolve; till then we provide .n instead of #.
+
+-- As there can be references to the parent we cannot expand a tree. I
+-- played with some expansion variants but it does not pay off.
+
+-- Maybe we need a close().
+-- We cannot access all destinations in one run.
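+
+-- A minimal access sketch (the file name is hypothetical; entries resolve
+-- lazily via __index):
+--
+-- local document = lpdf.epdf.load("somefile.pdf")
+-- if document then
+--     local info = document.Info
+--     print(info and info.Title)   -- may be absent
+--     print(document.pages.n)      -- .n plays the role of #
+-- end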
+
+local setmetatable, rawset, rawget, tostring, tonumber = setmetatable, rawset, rawget, tostring, tonumber
+local lower, match, char, find, sub = string.lower, string.match, string.char, string.find, string.sub
+local concat = table.concat
+local toutf = string.toutf
+
+local report_epdf = logs.reporter("epdf")
+
+-- a bit of protection
+
+local limited = false
+
+directives.register("system.inputmode", function(v)
+ if not limited then
+ local i_limiter = io.i_limiter(v)
+ if i_limiter then
+ epdf.open = i_limiter.protect(epdf.open)
+ limited = true
+ end
+ end
+end)
+
+--
+
+function epdf.type(o)
+ local t = lower(match(tostring(o),"[^ :]+"))
+ return t or "?"
+end
+
+lpdf = lpdf or { }
+local lpdf = lpdf
+
+lpdf.epdf = { }
+
+local checked_access
+
+local function prepare(document,d,t,n,k)
+ for i=1,n do
+ local v = d:getVal(i)
+ local r = d:getValNF(i)
+ if r:getTypeName() == "ref" then
+ r = r:getRef().num
+ local c = document.cache[r]
+ if c then
+ --
+ else
+ c = checked_access[v:getTypeName()](v,document,r)
+ if c then
+ document.cache[r] = c
+ document.xrefs[c] = r
+ end
+ end
+ t[d:getKey(i)] = c
+ else
+ t[d:getKey(i)] = checked_access[v:getTypeName()](v,document)
+ end
+ end
+ getmetatable(t).__index = nil
+ return t[k]
+end
+
+local function some_dictionary(d,document,r)
+ local n = d and d:getLength() or 0
+ if n > 0 then
+ local t = { }
+ setmetatable(t, { __index = function(t,k) return prepare(document,d,t,n,k) end } )
+ return t
+ end
+end
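+
+-- The pattern used here, in isolation: a proxy table whose metatable fills in
+-- all entries on the first lookup and then removes itself (a sketch, not part
+-- of the module):
+--
+-- local function lazytable(fill)
+--     local t = { }
+--     setmetatable(t, { __index = function(t,k)
+--         fill(t)                       -- resolve every key once
+--         getmetatable(t).__index = nil -- later lookups are plain
+--         return rawget(t,k)
+--     end } )
+--     return t
+-- end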
+
+local done = { }
+
+local function prepare(document,a,t,n,k)
+ for i=1,n do
+ local v = a:get(i)
+ local r = a:getNF(i)
+ if v:getTypeName() == "null" then
+ -- TH: weird, but appears possible
+ elseif r:getTypeName() == "ref" then
+ r = r:getRef().num
+ local c = document.cache[r]
+ if c then
+ --
+ else
+ c = checked_access[v:getTypeName()](v,document,r)
+ document.cache[r] = c
+ document.xrefs[c] = r
+ end
+ t[i] = c
+ else
+ t[i] = checked_access[v:getTypeName()](v,document)
+ end
+ end
+ getmetatable(t).__index = nil
+ return t[k]
+end
+
+local function some_array(a,document,r)
+ local n = a and a:getLength() or 0
+ if n > 0 then
+ local t = { n = n }
+ setmetatable(t, { __index = function(t,k) return prepare(document,a,t,n,k) end } )
+ return t
+ end
+end
+
+local function streamaccess(s,_,what)
+ if not what or what == "all" or what == "*all" then
+ local t, n = { }, 0
+ s:streamReset()
+ while true do
+ local c = s:streamGetChar()
+ if c < 0 then
+ break
+ else
+ n = n + 1
+ t[n] = char(c)
+ end
+ end
+ return concat(t)
+ end
+end
+
+local function some_stream(d,document,r)
+ if d then
+ d:streamReset()
+ local s = some_dictionary(d:streamGetDict(),document,r)
+ getmetatable(s).__call = function(...) return streamaccess(d,...) end
+ return s
+ end
+end
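+
+-- A stream therefore behaves as its dictionary but can also be called to get
+-- the raw bytes, e.g. (sketch, assuming the first page has a direct /Contents
+-- stream): local content = document.pages[1].Contents()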
+
+-- we need epdf.getBool
+
+checked_access = {
+ dictionary = function(d,document,r)
+ return some_dictionary(d:getDict(),document,r)
+ end,
+ array = function(a,document,r)
+ return some_array(a:getArray(),document,r)
+ end,
+ stream = function(v,document,r)
+ return some_stream(v,document,r)
+ end,
+ real = function(v)
+ return v:getReal()
+ end,
+ integer = function(v)
+ return v:getNum()
+ end,
+ string = function(v)
+ return toutf(v:getString())
+ end,
+ boolean = function(v)
+ return v:getBool()
+ end,
+ name = function(v)
+ return v:getName()
+ end,
+ ref = function(v)
+ return v:getRef()
+ end,
+ null = function()
+ return nil
+ end,
+}
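+
+-- So dictionaries, arrays and streams come back as (lazy) tables, names and
+-- strings as lua strings, numbers as lua numbers, booleans as booleans and
+-- null as nil; only refs keep their low level representation.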
+
+-- checked_access.real = epdf.real
+-- checked_access.integer = epdf.integer
+-- checked_access.string = epdf.string
+-- checked_access.boolean = epdf.boolean
+-- checked_access.name = epdf.name
+-- checked_access.ref = epdf.ref
+
+local function getnames(document,n,target) -- direct
+ if n then
+ local Names = n.Names
+ if Names then
+ if not target then
+ target = { }
+ end
+ for i=1,Names.n,2 do
+ target[Names[i]] = Names[i+1]
+ end
+ else
+ local Kids = n.Kids
+ if Kids then
+ for i=1,Kids.n do
+ target = getnames(document,Kids[i],target)
+ end
+ end
+ end
+ return target
+ end
+end
+
+local function getkids(document,n,target) -- direct
+ if n then
+ local Kids = n.Kids
+ if Kids then
+ for i=1,Kids.n do
+ target = getkids(document,Kids[i],target)
+ end
+ elseif target then
+ target[#target+1] = n
+ else
+ target = { n }
+ end
+ return target
+ end
+end
+
+-- /OCProperties <<
+-- /OCGs [ 15 0 R 17 0 R 19 0 R 21 0 R 23 0 R 25 0 R 27 0 R ]
+-- /D <<
+-- /Order [ 15 0 R 17 0 R 19 0 R 21 0 R 23 0 R 25 0 R 27 0 R ]
+-- /ON [ 15 0 R 17 0 R 19 0 R 21 0 R 23 0 R 25 0 R 27 0 R ]
+-- /OFF [ ]
+-- >>
+-- >>
+
+local function getlayers(document)
+ local properties = document.Catalog.OCProperties
+ if properties then
+ local layers = properties.OCGs
+ if layers then
+ local t = { }
+ local n = layers.n
+ for i=1,n do
+ local layer = layers[i]
+--~ print(document.xrefs[layer])
+ t[i] = layer.Name
+ end
+ t.n = n
+ return t
+ end
+ end
+end
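+
+-- The result is a plain list of /Name entries, e.g. (sketch):
+--
+-- { "background", "annotations", n = 2 }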
+
+local function getpages(document)
+ local data = document.data
+ local xrefs = document.xrefs
+ local cache = document.cache
+ local cata = data:getCatalog()
+ local xref = data:getXRef()
+ local pages = { }
+ local nofpages = cata:getNumPages()
+ for pagenumber=1,nofpages do
+ local pagereference = cata:getPageRef(pagenumber).num
+ local pagedata = some_dictionary(xref:fetch(pagereference,0):getDict(),document,pagereference)
+ if pagedata then
+ pagedata.number = pagenumber
+ pages[pagenumber] = pagedata
+ xrefs[pagedata] = pagereference
+ cache[pagereference] = pagedata
+ else
+ report_epdf("missing pagedata at slot %i",i)
+ end
+ end
+ pages.n = nofpages
+ return pages
+end
+
+-- loader
+
+local function delayed(document,tag,f)
+ local t = { }
+ setmetatable(t, { __index = function(t,k)
+ local result = f()
+ if result then
+ document[tag] = result
+ return result[k]
+ end
+ end } )
+ return t
+end
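+
+-- So a delayed entry is a placeholder: the first lookup runs the collector and
+-- replaces document[tag] with the real table, e.g. (sketch):
+--
+-- local n = document.pages.n -- first access runs getpages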
+
+local loaded = { }
+
+function lpdf.epdf.load(filename)
+ local document = loaded[filename]
+ if not document then
+ statistics.starttiming(lpdf.epdf)
+ local data = epdf.open(filename) -- maybe resolvers.find_file
+ if data then
+ document = {
+ filename = filename,
+ cache = { },
+ xrefs = { },
+ data = data,
+ }
+ local Catalog = some_dictionary(data:getXRef():getCatalog():getDict(),document)
+ local Info = some_dictionary(data:getXRef():getDocInfo():getDict(),document)
+ document.Catalog = Catalog
+ document.Info = Info
+ -- document.catalog = Catalog
+ -- a few handy helper tables
+ document.pages = delayed(document,"pages", function() return getpages(document) end)
+ document.destinations = delayed(document,"destinations", function() return getnames(document,Catalog.Names and Catalog.Names.Dests) end)
+ document.javascripts = delayed(document,"javascripts", function() return getnames(document,Catalog.Names and Catalog.Names.JS) end)
+ document.widgets = delayed(document,"widgets", function() return getnames(document,Catalog.Names and Catalog.Names.AcroForm) end)
+ document.embeddedfiles = delayed(document,"embeddedfiles",function() return getnames(document,Catalog.Names and Catalog.Names.EmbeddedFiles) end)
+ document.layers = delayed(document,"layers", function() return getlayers(document) end)
+ else
+ document = false
+ end
+ loaded[filename] = document
+ statistics.stoptiming(lpdf.epdf)
+ -- print(statistics.elapsedtime(lpdf.epdf))
+ end
+ return document
+end
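+
+-- Typical usage (a sketch; "whatever.pdf" is just an illustrative name):
+--
+-- local document = lpdf.epdf.load("whatever.pdf")
+-- if document then
+--     local pages = document.pages
+--     for i=1,pages.n do
+--         local page = pages[i]
+--         -- page.MediaBox, page.Annots and so on resolve on demand
+--     end
+--     local layers = document.layers
+--     if layers then
+--         for i=1,layers.n do
+--             print(layers[i])
+--         end
+--     end
+-- end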
+
+-- for k, v in next, expand(t) do
+
+function lpdf.epdf.expand(t)
+ if type(t) == "table" then
+ local dummy = t.dummy
+ end
+ return t
+end
+
+-- helpers
+
+-- function lpdf.epdf.getdestinationpage(document,name)
+-- local destination = document.data:findDest(name)
+-- return destination and destination.number
+-- end
diff --git a/tex/context/base/lpdf-fld.lua b/tex/context/base/lpdf-fld.lua
index a9b9fd72d..0a15bb850 100644
--- a/tex/context/base/lpdf-fld.lua
+++ b/tex/context/base/lpdf-fld.lua
@@ -1,1305 +1,1305 @@
-if not modules then modules = { } end modules ['lpdf-fld'] = {
- version = 1.001,
- comment = "companion to lpdf-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- The problem with widgets is that so far each version of acrobat
--- has some rendering problem. I tried to keep up with this but
--- it makes no sense to do so as one cannot rely on the viewer
--- not changing. Especially Btn fields are tricky as their appearences
--- need to be synchronized in the case of children but e.g. acrobat
--- 10 does not retain the state and forces a check symbol. If you
--- make a file in acrobat then it has MK entries that seem to overload
--- the already present appearance streams (they're probably only meant for
--- printing) as it looks like the viewer has some fallback on (auto
--- generated) MK behaviour built in. So ... hard to test. Unfortunately
--- not even the default appearance is generated. This will probably be
--- solved at some point.
---
--- Also, for some reason the viewer does not always show custom appearances
--- when fields are being rolled over or clicked upon, and circles or checks
--- pop up when you don't expect them. I fear that this kind of instability
--- eventually will kill pdf forms. After all, the manual says: "individual
--- annotation handlers may ignore this entry and provide their own appearances"
--- and one might wonder what 'individual' means here, but effectively this
--- renders the whole concept of appearances useless.
---
--- Okay, here is one observation. A pdf file contains objects and one might
--- consider each one to be a static entity when read in. However, acrobat
--- starts rendering and seems to manipulate (appearance streams) of objects
--- in place (this is visible when the file is saved again). And, combined
--- with some other caching and hashing, this might give side effects for
--- shared objects. So, it seems that for some cases one can best be not too
--- clever and not share but duplicate information. Of course this defeats the
--- whole purpose of these objects. Of course I can be wrong.
---
--- A rarther weird side effect of the viewer is that the highlighting of fields
--- obscures values, unless you uses one of the BS variants, and this makes
--- custum appearances rather useless as there is no way to control this apart
--- from changing the viewer preferences. It could of course be a bug but it would
--- be nice if the highlighting was at least transparent. I have no clue why the
--- built in shapes work ok (some xform based appearances are generated) while
--- equally valid other xforms fail. It looks like acrobat appearances come on
--- top (being refered to in the MK) while custom ones are behind the highlight
--- rectangle. One can disable the "Show border hover color for fields" option
--- in the preferences. If you load java-imp-rhh this side effect gets disabled
--- and you get what you expect (it took me a while to figure out this hack).
---
--- When highlighting is enabled, those default symbols flash up, so it looks
--- like we have some inteference between this setting and custom appearances.
---
--- Anyhow, the NeedAppearances is really needed in order to get a rendering
--- for printing especially when highlighting (those colorfull foregrounds) is
--- on.
-
-local gmatch, lower, format = string.gmatch, string.lower, string.format
-local lpegmatch = lpeg.match
-local utfchar = utf.char
-local bpfactor, todimen = number.dimenfactors.bp, string.todimen
-
-local trace_fields = false trackers.register("backends.fields", function(v) trace_fields = v end)
-
-local report_fields = logs.reporter("backend","fields")
-
-local backends, lpdf = backends, lpdf
-
-local variables = interfaces.variables
-local context = context
-
-local references = structures.references
-local settings_to_array = utilities.parsers.settings_to_array
-
-local pdfbackend = backends.pdf
-
-local nodeinjections = pdfbackend.nodeinjections
-local codeinjections = pdfbackend.codeinjections
-local registrations = pdfbackend.registrations
-
-local registeredsymbol = codeinjections.registeredsymbol
-
-local pdfstream = lpdf.stream
-local pdfdictionary = lpdf.dictionary
-local pdfarray = lpdf.array
-local pdfreference = lpdf.reference
-local pdfunicode = lpdf.unicode
-local pdfstring = lpdf.string
-local pdfconstant = lpdf.constant
-local pdftoeight = lpdf.toeight
-local pdfflushobject = lpdf.flushobject
-local pdfshareobjectreference = lpdf.shareobjectreference
-local pdfshareobject = lpdf.shareobject
-local pdfreserveobject = lpdf.reserveobject
-local pdfreserveannotation = lpdf.reserveannotation
-local pdfaction = lpdf.action
-
-local hpack_node = node.hpack
-
-local nodepool = nodes.pool
-
-local pdfannotation_node = nodepool.pdfannotation
-
-local submitoutputformat = 0 -- 0=unknown 1=HTML 2=FDF 3=XML => not yet used, needs to be checked
-
-local pdf_widget = pdfconstant("Widget")
-local pdf_tx = pdfconstant("Tx")
-local pdf_ch = pdfconstant("Ch")
-local pdf_btn = pdfconstant("Btn")
------ pdf_yes = pdfconstant("Yes")
-local pdf_off = pdfconstant("Off")
-local pdf_p = pdfconstant("P") -- None Invert Outline Push
-local pdf_n = pdfconstant("N") -- None Invert Outline Push
---
-local pdf_no_rect = pdfarray { 0, 0, 0, 0 }
-
-local splitter = lpeg.splitat("=>")
-
-local formats = {
- html = 1, fdf = 2, xml = 3,
-}
-
-function codeinjections.setformsmethod(name)
- submitoutputformat = formats[lower(name)] or formats.xml
-end
-
-local flag = { -- /Ff
- ReadOnly = 1, -- 1
- Required = 2, -- 2
- NoExport = 4, -- 3
- MultiLine = 4096, -- 13
- Password = 8192, -- 14
- NoToggleToOff = 16384, -- 15
- Radio = 32768, -- 16
- PushButton = 65536, -- 17
- PopUp = 131072, -- 18
- Edit = 262144, -- 19
- Sort = 524288, -- 20
- FileSelect = 1048576, -- 21
- DoNotSpellCheck = 4194304, -- 23
- DoNotScroll = 8388608, -- 24
- Comb = 16777216, -- 25
- RichText = 33554432, -- 26
- RadiosInUnison = 33554432, -- 26
- CommitOnSelChange = 67108864, -- 27
-}
-
-local plus = { -- /F
- Invisible = 1, -- 1
- Hidden = 2, -- 2
- Printable = 4, -- 3
- Print = 4, -- 3
- NoZoom = 8, -- 4
- NoRotate = 16, -- 5
- NoView = 32, -- 6
- ReadOnly = 64, -- 7
- Locked = 128, -- 8
- ToggleNoView = 256, -- 9
- LockedContents = 512, -- 10,
- AutoView = 256, -- 288 (6+9)
-}
-
--- todo: check what is interfaced
-
-flag.readonly = flag.ReadOnly
-flag.required = flag.Required
-flag.protected = flag.Password
-flag.sorted = flag.Sort
-flag.unavailable = flag.NoExport
-flag.nocheck = flag.DoNotSpellCheck
-flag.fixed = flag.DoNotScroll
-flag.file = flag.FileSelect
-
-plus.hidden = plus.Hidden
-plus.printable = plus.Printable
-plus.auto = plus.AutoView
-
--- some day .. lpeg with function or table
-
-local function fieldflag(specification) -- /Ff
- local o, n = specification.option, 0
- if o and o ~= "" then
- for f in gmatch(o,"[^, ]+") do
- n = n + (flag[f] or 0)
- end
- end
- return n
-end
-
-local function fieldplus(specification) -- /F
- local o, n = specification.option, 0
- if o and o ~= "" then
- for p in gmatch(o,"[^, ]+") do
- n = n + (plus[p] or 0)
- end
- end
--- n = n + 4
- return n
-end
-
-local function checked(what)
- local set, bug = references.identify("",what)
- if not bug and #set > 0 then
- local r, n = pdfaction(set)
- return pdfshareobjectreference(r)
- end
-end
-
-local function fieldactions(specification) -- share actions
- local d, a = { }, nil
- a = specification.mousedown
- or specification.clickin if a and a ~= "" then d.D = checked(a) end
- a = specification.mouseup
- or specification.clickout if a and a ~= "" then d.U = checked(a) end
- a = specification.regionin if a and a ~= "" then d.E = checked(a) end -- Enter
- a = specification.regionout if a and a ~= "" then d.X = checked(a) end -- eXit
- a = specification.afterkey if a and a ~= "" then d.K = checked(a) end
- a = specification.format if a and a ~= "" then d.F = checked(a) end
- a = specification.validate if a and a ~= "" then d.V = checked(a) end
- a = specification.calculate if a and a ~= "" then d.C = checked(a) end
- a = specification.focusin if a and a ~= "" then d.Fo = checked(a) end
- a = specification.focusout if a and a ~= "" then d.Bl = checked(a) end
- a = specification.openpage if a and a ~= "" then d.PO = checked(a) end
- a = specification.closepage if a and a ~= "" then d.PC = checked(a) end
- -- a = specification.visiblepage if a and a ~= "" then d.PV = checked(a) end
- -- a = specification.invisiblepage if a and a ~= "" then d.PI = checked(a) end
- return next(d) and pdfdictionary(d)
-end
-
--- fonts and color
-
-local pdfdocencodingvector, pdfdocencodingcapsule
-
--- The pdf doc encoding vector is needed in order to
--- trigger propper unicode. Interesting is that when
--- a glyph is not in the vector, it is still visible
--- as it is taken from some other font. Messy.
-
--- To be checked: only when text/line fields.
-
-local function checkpdfdocencoding()
- report_fields("adding pdfdoc encoding vector")
- local encoding = dofile(resolvers.findfile("lpdf-enc.lua")) -- no checking, fatal if not present
- pdfdocencodingvector = pdfreference(pdfflushobject(encoding))
- local capsule = pdfdictionary {
- PDFDocEncoding = pdfdocencodingvector
- }
- pdfdocencodingcapsule = pdfreference(pdfflushobject(capsule))
- checkpdfdocencoding = function() end
-end
-
-local fontnames = {
- rm = {
- tf = "Times-Roman",
- bf = "Times-Bold",
- it = "Times-Italic",
- sl = "Times-Italic",
- bi = "Times-BoldItalic",
- bs = "Times-BoldItalic",
- },
- ss = {
- tf = "Helvetica",
- bf = "Helvetica-Bold",
- it = "Helvetica-Oblique",
- sl = "Helvetica-Oblique",
- bi = "Helvetica-BoldOblique",
- bs = "Helvetica-BoldOblique",
- },
- tt = {
- tf = "Courier",
- bf = "Courier-Bold",
- it = "Courier-Oblique",
- sl = "Courier-Oblique",
- bi = "Courier-BoldOblique",
- bs = "Courier-BoldOblique",
- },
- symbol = {
- dingbats = "ZapfDingbats",
- }
-}
-
-local usedfonts = { }
-
-local function fieldsurrounding(specification)
- local fontsize = specification.fontsize or "12pt"
- local fontstyle = specification.fontstyle or "rm"
- local fontalternative = specification.fontalternative or "tf"
- local colorvalue = specification.colorvalue
- local s = fontnames[fontstyle]
- if not s then
- fontstyle, s = "rm", fontnames.rm
- end
- local a = s[fontalternative]
- if not a then
- alternative, a = "tf", s.tf
- end
- local tag = fontstyle .. fontalternative
- fontsize = todimen(fontsize)
- fontsize = fontsize and (bpfactor * fontsize) or 12
- fontraise = 0.1 * fontsize -- todo: figure out what the natural one is and compensate for strutdp
- local fontcode = format("%0.4f Tf %0.4f Ts",fontsize,fontraise)
- -- we could test for colorvalue being 1 (black) and omit it then
- local colorcode = lpdf.color(3,colorvalue) -- we force an rgb color space
- if trace_fields then
- report_fields("using font, style %a, alternative %a, size %p, tag %a, code %a",fontstyle,fontalternative,fontsize,tag,fontcode)
- report_fields("using color, value %a, code %a",colorvalue,colorcode)
- end
- local stream = pdfstream {
- pdfconstant(tag),
- format("%s %s",fontcode,colorcode)
- }
- usedfonts[tag] = a -- the name
- -- move up with "x.y Ts"
- return tostring(stream)
-end
-
-local function registerfonts()
- if next(usedfonts) then
- checkpdfdocencoding() -- already done
- local d = pdfdictionary()
- local pdffonttype, pdffontsubtype = pdfconstant("Font"), pdfconstant("Type1")
- for tag, name in next, usedfonts do
- local f = pdfdictionary {
- Type = pdffonttype,
- Subtype = pdffontsubtype,
- Name = pdfconstant(tag),
- BaseFont = pdfconstant(name),
- Encoding = pdfdocencodingvector,
- }
- d[tag] = pdfreference(pdfflushobject(f))
- end
- return d
- end
-end
-
--- symbols
-
-local function fieldappearances(specification)
- -- todo: caching
- local values = specification.values
- local default = specification.default -- todo
- if not values then
- -- error
- return
- end
- local v = settings_to_array(values)
- local n, r, d
- if #v == 1 then
- n, r, d = v[1], v[1], v[1]
- elseif #v == 2 then
- n, r, d = v[1], v[1], v[2]
- else
- n, r, d = v[1], v[2], v[3]
- end
- local appearance = pdfdictionary {
- N = registeredsymbol(n), R = registeredsymbol(r), D = registeredsymbol(d),
- }
- return pdfshareobjectreference(appearance)
--- return pdfreference(pdfflushobject(appearance))
-end
-
-local YesorOn = "Yes" -- somehow On is not always working out well any longer (why o why this change)
-
--- beware ... maybe we should have unique /Yes1 ... we will probably
--- change this one too.
---
--- TODO: the same as radio .. play safe and use different names.
-
-local function fieldstates_check(specification,forceyes,values,default,yesdefault)
- -- we don't use Opt here (too messy for radio buttons)
- local values, default = values or specification.values, default or specification.default
- if not values or values == "" then
- -- error
- return
- end
- local v = settings_to_array(values)
- local yes, off, yesn, yesr, yesd, offn, offr, offd
- if #v == 1 then
- yes, off = v[1], v[1]
- else
- yes, off = v[1], v[2]
- end
- local yesshown, yesvalue = lpegmatch(splitter,yes)
- if not (yesshown and yesvalue) then
- yesshown = yes, yes
- end
- yes = settings_to_array(yesshown)
- local offshown, offvalue = lpegmatch(splitter,off)
- if not (offshown and offvalue) then
- offshown = off, off
- end
- off = settings_to_array(offshown)
- if #yes == 1 then
- yesn, yesr, yesd = yes[1], yes[1], yes[1]
- elseif #yes == 2 then
- yesn, yesr, yesd = yes[1], yes[1], yes[2]
- else
- yesn, yesr, yesd = yes[1], yes[2], yes[3]
- end
- if #off == 1 then
- offn, offr, offd = off[1], off[1], off[1]
- elseif #off == 2 then
- offn, offr, offd = off[1], off[1], off[2]
- else
- offn, offr, offd = off[1], off[2], off[3]
- end
- if not yesvalue then
- yesvalue = yesdefault or yesn
- end
- if not offvalue then
- offvalue = offn
- end
- if forceyes == true then
- forceyes = YesorOn -- spec likes Yes more but we've used On for ages now
- else
- -- false or string
- end
- if default == yesn then
- default = pdfconstant(forceyes or yesn)
- else
- default = pdf_off
- end
- local appearance
- if false then -- needs testing
- appearance = pdfdictionary { -- maybe also cache components
- N = pdfshareobjectreference(pdfdictionary { [forceyes or yesn] = registeredsymbol(yesn), Off = registeredsymbol(offn) }),
- R = pdfshareobjectreference(pdfdictionary { [forceyes or yesr] = registeredsymbol(yesr), Off = registeredsymbol(offr) }),
- D = pdfshareobjectreference(pdfdictionary { [forceyes or yesd] = registeredsymbol(yesd), Off = registeredsymbol(offd) }),
- }
- else
- appearance = pdfdictionary { -- maybe also cache components
- N = pdfdictionary { [forceyes or yesn] = registeredsymbol(yesn), Off = registeredsymbol(offn) },
- R = pdfdictionary { [forceyes or yesr] = registeredsymbol(yesr), Off = registeredsymbol(offr) },
- D = pdfdictionary { [forceyes or yesd] = registeredsymbol(yesd), Off = registeredsymbol(offd) }
- }
- end
- local appearanceref = pdfshareobjectreference(appearance)
- -- local appearanceref = pdfreference(pdfflushobject(appearance))
- return appearanceref, default, yesvalue
-end
-
--- It looks like there is always a (MK related) symbol used and that
--- the appearances are only used as ornaments behind a symbol. So,
--- contrary to what we did when widgets showed up, we now limit
--- ourself to more dumb definitions. Especially when highlighting is
--- enabled weird interferences happen. So, we play safe (some nice code
--- has been removed that worked well till recently).
-
-local function fieldstates_radio(specification,name,parent)
- local values = values or specification.values
- local default = default or parent.default -- specification.default
- if not values or values == "" then
- -- error
- return
- end
- local v = settings_to_array(values)
- local yes, off, yesn, yesr, yesd, offn, offr, offd
- if #v == 1 then
- yes, off = v[1], v[1]
- else
- yes, off = v[1], v[2]
- end
- -- yes keys might be the same in the three appearances within a field
- -- but can best be different among fields ... don't ask why
- local yessymbols, yesvalue = lpegmatch(splitter,yes) -- n,r,d=>x
- if not (yessymbols and yesvalue) then
- yessymbols = yes
- end
- if not yesvalue then
- yesvalue = name
- end
- yessymbols = settings_to_array(yessymbols)
- if #yessymbols == 1 then
- yesn = yessymbols[1]
- yesr = yesn
- yesd = yesr
- elseif #yessymbols == 2 then
- yesn = yessymbols[1]
- yesr = yessymbols[2]
- yesd = yesr
- else
- yesn = yessymbols[1]
- yesr = yessymbols[2]
- yesd = yessymbols[3]
- end
- -- we don't care about names, as all will be /Off
- local offsymbols = lpegmatch(splitter,off) or off
- offsymbols = settings_to_array(offsymbols)
- if #offsymbols == 1 then
- offn = offsymbols[1]
- offr = offn
- offd = offr
- elseif #offsymbols == 2 then
- offn = offsymbols[1]
- offr = offsymbols[2]
- offd = offr
- else
- offn = offsymbols[1]
- offr = offsymbols[2]
- offd = offsymbols[3]
- end
- if default == name then
- default = pdfconstant(name)
- else
- default = pdf_off
- end
- --
- local appearance
- if false then -- needs testing
- appearance = pdfdictionary { -- maybe also cache components
- N = pdfshareobjectreference(pdfdictionary { [name] = registeredsymbol(yesn), Off = registeredsymbol(offn) }),
- R = pdfshareobjectreference(pdfdictionary { [name] = registeredsymbol(yesr), Off = registeredsymbol(offr) }),
- D = pdfshareobjectreference(pdfdictionary { [name] = registeredsymbol(yesd), Off = registeredsymbol(offd) }),
- }
- else
- appearance = pdfdictionary { -- maybe also cache components
- N = pdfdictionary { [name] = registeredsymbol(yesn), Off = registeredsymbol(offn) },
- R = pdfdictionary { [name] = registeredsymbol(yesr), Off = registeredsymbol(offr) },
- D = pdfdictionary { [name] = registeredsymbol(yesd), Off = registeredsymbol(offd) }
- }
- end
- local appearanceref = pdfshareobjectreference(appearance) -- pdfreference(pdfflushobject(appearance))
- return appearanceref, default, yesvalue
-end
-
-local function fielddefault(field)
- local default = field.default
- if not default or default == "" then
- local values = settings_to_array(field.values)
- default = values[1]
- end
- if not default or default == "" then
- return pdf_off
- else
- return pdfconstant(default)
- end
-end
-
-local function fieldoptions(specification)
- local values = specification.values
- local default = specification.default
- if values then
- local v = settings_to_array(values)
- for i=1,#v do
- local vi = v[i]
- local shown, value = lpegmatch(splitter,vi)
- if shown and value then
- v[i] = pdfarray { pdfunicode(value), shown }
- else
- v[i] = pdfunicode(v[i])
- end
- end
- return pdfarray(v)
- end
-end
-
-local mapping = {
- -- acrobat compliant (messy, probably some pdfdoc encoding interference here)
- check = "4", -- 0x34
- circle = "l", -- 0x6C
- cross = "8", -- 0x38
- diamond = "u", -- 0x75
- square = "n", -- 0x6E
- star = "H", -- 0x48
-}
-
-local function todingbat(n)
- if n and n ~= "" then
- return mapping[n] or ""
- end
-end
-
--- local zero_bc = pdfarray { 0, 0, 0 }
--- local zero_bg = pdfarray { 1, 1, 1 }
-
-local function fieldrendering(specification)
- local bvalue = tonumber(specification.backgroundcolorvalue)
- local fvalue = tonumber(specification.framecolorvalue)
- local svalue = specification.fontsymbol
- if bvalue or fvalue or (svalue and svalue ~= "") then
- return pdfdictionary {
- BG = bvalue and pdfarray { lpdf.colorvalues(3,bvalue) } or nil, -- or zero_bg,
- BC = fvalue and pdfarray { lpdf.colorvalues(3,fvalue) } or nil, -- or zero_bc,
- CA = svalue and pdfstring (svalue) or nil,
- }
- end
-end
-
--- layers
-
-local function fieldlayer(specification) -- we can move this in line
- local layer = specification.layer
- return (layer and lpdf.layerreference(layer)) or nil
-end
-
--- defining
-
-local fields, radios, clones, fieldsets, calculationset = { }, { }, { }, { }, nil
-
-local xfdftemplate = [[
-
-
-
-
-
-%s
-
-
-]]
-
-function codeinjections.exportformdata(name)
- local result = { }
- for k, v in table.sortedhash(fields) do
- result[#result+1] = format(" %s",v.name or k,v.default or "")
- end
- local base = file.basename(tex.jobname)
- local xfdf = format(xfdftemplate,base,table.concat(result,"\n"))
- if not name or name == "" then
- name = base
- end
- io.savedata(file.addsuffix(name,"xfdf"),xfdf)
-end
-
-function codeinjections.definefieldset(tag,list)
- fieldsets[tag] = list
-end
-
-function codeinjections.getfieldset(tag)
- return fieldsets[tag]
-end
-
-local function fieldsetlist(tag)
- if tag then
- local ft = fieldsets[tag]
- if ft then
- local a = pdfarray()
- for name in gmatch(list,"[^, ]+") do
- local f = field[name]
- if f and f.pobj then
- a[#a+1] = pdfreference(f.pobj)
- end
- end
- return a
- end
- end
-end
-
-function codeinjections.setfieldcalculationset(tag)
- calculationset = tag
-end
-
-local function predefinesymbols(specification)
- local values = specification.values
- if values then
- local symbols = settings_to_array(values)
- for i=1,#symbols do
- local symbol = symbols[i]
- local a, b = lpegmatch(splitter,symbol)
- codeinjections.presetsymbol(a or symbol)
- end
- end
-end
-
-function codeinjections.getdefaultfieldvalue(name)
- local f = fields[name]
- if f then
- local values = f.values
- local default = f.default
- if not default or default == "" then
- local symbols = settings_to_array(values)
- local symbol = symbols[1]
- if symbol then
- local a, b = lpegmatch(splitter,symbol) -- splits at =>
- default = a or symbol
- end
- end
- return default
- end
-end
-
-function codeinjections.definefield(specification)
- local n = specification.name
- local f = fields[n]
- if not f then
- local fieldtype = specification.type
- if not fieldtype then
- if trace_fields then
- report_fields("invalid definition for %a, unknown type",n)
- end
- elseif fieldtype == "radio" then
- local values = specification.values
- if values and values ~= "" then
- values = settings_to_array(values)
- for v=1,#values do
- radios[values[v]] = { parent = n }
- end
- fields[n] = specification
- if trace_fields then
- report_fields("defining %a as type %a",n,"radio")
- end
- elseif trace_fields then
- report_fields("invalid definition of radio %a, missing values",n)
- end
- elseif fieldtype == "sub" then
- -- not in main field list !
- local radio = radios[n]
- if radio then
- -- merge specification
- for key, value in next, specification do
- radio[key] = value
- end
- if trace_fields then
- local p = radios[n] and radios[n].parent
- report_fields("defining %a as type sub of radio %a",n,p)
- end
- elseif trace_fields then
- report_fields("invalid definition of radio sub %a, no parent given",n)
- end
- predefinesymbols(specification)
- elseif fieldtype == "text" or fieldtype == "line" then
- fields[n] = specification
- if trace_fields then
- report_fields("defining %a as type %a",n,fieldtype)
- end
- if specification.values ~= "" and specification.default == "" then
- specification.default, specification.values = specification.values, nil
- end
- else
- fields[n] = specification
- if trace_fields then
- report_fields("defining %a as type %a",n,fieldtype)
- end
- predefinesymbols(specification)
- end
- elseif trace_fields then
- report_fields("invalid definition for %a, already defined",n)
- end
-end
-
-function codeinjections.clonefield(specification) -- obsolete
- local p, c, v = specification.parent, specification.children, specification.alternative
- if not p or not c then
- if trace_fields then
- report_fields("invalid clone, children %a, parent %a, alternative %a",c,p,v)
- end
- return
- end
- local x = fields[p] or radios[p]
- if not x then
- if trace_fields then
- report_fields("invalid clone, unknown parent %a",p)
- end
- return
- end
- for n in gmatch(c,"[^, ]+") do
- local f, r, c = fields[n], radios[n], clones[n]
- if f or r or c then
- if trace_fields then
- report_fields("already cloned, child %a, parent %a, alternative %a",n,p,v)
- end
- else
- if trace_fields then
- report_fields("cloning, child %a, parent %a, alternative %a",n,p,v)
- end
- clones[n] = specification
- predefinesymbols(specification)
- end
- end
-end
-
-function codeinjections.getfieldcategory(name)
- local f = fields[name] or radios[name] or clones[name]
- if f then
- local g = f.category
- if not g or g == "" then
- local v, p, t = f.alternative, f.parent, f.type
- if v == "clone" or v == "copy" then
- f = fields[p] or radios[p]
- g = f and f.category
- elseif t == "sub" then
- f = fields[p]
- g = f and f.category
- end
- end
- return g
- end
-end
-
---
-
-function codeinjections.validfieldcategory(name)
- return fields[name] or radios[name] or clones[name]
-end
-
-function codeinjections.validfieldset(name)
- return fieldsets[tag]
-end
-
-function codeinjections.validfield(name)
- return fields[name]
-end
-
---
-
-local alignments = {
- flushleft = 0, right = 0,
- center = 1, middle = 1,
- flushright = 2, left = 2,
-}
-
-local function fieldalignment(specification)
- return alignments[specification.align] or 0
-end
-
-local function enhance(specification,option)
- local so = specification.option
- if so and so ~= "" then
- specification.option = so .. "," .. option
- else
- specification.option = option
- end
- return specification
-end
-
--- finish
-
-local collected = pdfarray()
-local forceencoding = false
-
-local function finishfields()
- local sometext = forceencoding
- for name, field in next, fields do
- local kids = field.kids
- if kids then
- pdfflushobject(field.kidsnum,kids)
- end
- local opt = field.opt
- if opt then
- pdfflushobject(field.optnum,opt)
- end
- local type = field.type
- if not sometext and (type == "text" or type == "line") then
- sometext = true
- end
- end
- for name, field in next, radios do
- local kids = field.kids
- if kids then
- pdfflushobject(field.kidsnum,kids)
- end
- local opt = field.opt
- if opt then
- pdfflushobject(field.optnum,opt)
- end
- end
- if #collected > 0 then
- local acroform = pdfdictionary {
- NeedAppearances = true,
- Fields = pdfreference(pdfflushobject(collected)),
- CO = fieldsetlist(calculationset),
- }
- if sometext then
- checkpdfdocencoding()
- usedfonts.tttf = fontnames.tt.tf
- acroform.DA = "/tttf 12 Tf 0 g"
- acroform.DR = pdfdictionary {
- Font = registerfonts(),
- Encoding = pdfdocencodingcapsule,
- }
- end
- lpdf.addtocatalog("AcroForm",pdfreference(pdfflushobject(acroform)))
- end
-end
-
-lpdf.registerdocumentfinalizer(finishfields,"form fields")
-
-local methods = { }
-
-function nodeinjections.typesetfield(name,specification)
- local field = fields[name] or radios[name] or clones[name]
- if not field then
- report_fields( "unknown child %a",name)
- -- unknown field
- return
- end
- local alternative, parent = field.alternative, field.parent
- if alternative == "copy" or alternative == "clone" then -- only in clones
- field = fields[parent] or radios[parent]
- end
- local method = methods[field.type]
- if method then
- return method(name,specification,alternative)
- else
- report_fields( "unknown method %a for child %a",field.type,name)
- end
-end
-
-local function save_parent(field,specification,d,hasopt)
- local kidsnum = pdfreserveobject()
- d.Kids = pdfreference(kidsnum)
- field.kidsnum = kidsnum
- field.kids = pdfarray()
- if hasopt then
- local optnum = pdfreserveobject()
- d.Opt = pdfreference(optnum)
- field.optnum = optnum
- field.opt = pdfarray()
- end
- local pnum = pdfflushobject(d)
- field.pobj = pnum
- collected[#collected+1] = pdfreference(pnum)
-end
-
-local function save_kid(field,specification,d,optname)
- local kn = pdfreserveannotation()
- field.kids[#field.kids+1] = pdfreference(kn)
- if optname then
- local opt = field.opt
- if opt then
- opt[#opt+1] = optname
- end
- end
- local width, height, depth = specification.width or 0, specification.height or 0, specification.depth
- local box = hpack_node(pdfannotation_node(width,height,depth,d(),kn))
- box.width, box.height, box.depth = width, height, depth -- redundant
- return box
-end
-
-local function makelineparent(field,specification)
- local text = pdfunicode(field.default)
- local length = tonumber(specification.length or 0) or 0
- local d = pdfdictionary {
- Subtype = pdf_widget,
- T = pdfunicode(specification.title),
- F = fieldplus(specification),
- Ff = fieldflag(specification),
- OC = fieldlayer(specification),
- DA = fieldsurrounding(specification),
- AA = fieldactions(specification),
- FT = pdf_tx,
- Q = fieldalignment(specification),
- MaxLen = length == 0 and 1000 or length,
- DV = text,
- V = text,
- }
- save_parent(field,specification,d)
-end
-
-local function makelinechild(name,specification)
- local field, parent = clones[name], nil
- if field then
- parent = fields[field.parent]
- if not parent.pobj then
- if trace_fields then
- report_fields("forcing parent text %a",parent.name)
- end
- makelineparent(parent,specification)
- end
- else
- parent = fields[name]
- field = parent
- if not parent.pobj then
- if trace_fields then
- report_fields("using parent text %a",name)
- end
- makelineparent(parent,specification)
- end
- end
- if trace_fields then
- report_fields("using child text %a",name)
- end
- local d = pdfdictionary {
- Subtype = pdf_widget,
- Parent = pdfreference(parent.pobj),
- F = fieldplus(specification),
- OC = fieldlayer(specification),
- DA = fieldsurrounding(specification),
- AA = fieldactions(specification),
- MK = fieldrendering(specification),
- Q = fieldalignment(specification),
- }
- return save_kid(parent,specification,d)
-end
-
-function methods.line(name,specification)
- return makelinechild(name,specification)
-end
-
-function methods.text(name,specification)
- return makelinechild(name,enhance(specification,"MultiLine"))
-end
-
-local function makechoiceparent(field,specification)
- local d = pdfdictionary {
- Subtype = pdf_widget,
- T = pdfunicode(specification.title),
- F = fieldplus(specification),
- Ff = fieldflag(specification),
- OC = fieldlayer(specification),
- AA = fieldactions(specification),
- FT = pdf_ch,
- Opt = fieldoptions(field), -- todo
- }
- save_parent(field,specification,d)
-end
-
-local function makechoicechild(name,specification)
- local field, parent = clones[name], nil
- if field then
- parent = fields[field.parent]
- if not parent.pobj then
- if trace_fields then
- report_fields("forcing parent choice %a",parent.name)
- end
- makechoiceparent(parent,specification,extras)
- end
- else
- parent = fields[name]
- field = parent
- if not parent.pobj then
- if trace_fields then
- report_fields("using parent choice %a",name)
- end
- makechoiceparent(parent,specification,extras)
- end
- end
- if trace_fields then
- report_fields("using child choice %a",name)
- end
- local d = pdfdictionary {
- Subtype = pdf_widget,
- Parent = pdfreference(parent.pobj),
- F = fieldplus(specification),
- OC = fieldlayer(specification),
- AA = fieldactions(specification),
- }
- return save_kid(parent,specification,d) -- do opt here
-end
-
-function methods.choice(name,specification)
- return makechoicechild(name,specification)
-end
-
-function methods.popup(name,specification)
- return makechoicechild(name,enhance(specification,"PopUp"))
-end
-
-function methods.combo(name,specification)
- return makechoicechild(name,enhance(specification,"PopUp,Edit"))
-end
-
-local function makecheckparent(field,specification)
- local d = pdfdictionary {
- T = pdfunicode(specification.title), -- todo: when tracing use a string
- F = fieldplus(specification),
- Ff = fieldflag(specification),
- OC = fieldlayer(specification),
- AA = fieldactions(specification),
- FT = pdf_btn,
- V = fielddefault(field),
- }
- save_parent(field,specification,d,true)
-end
-
-local function makecheckchild(name,specification)
- local field, parent = clones[name], nil
- if field then
- parent = fields[field.parent]
- if not parent.pobj then
- if trace_fields then
- report_fields("forcing parent check %a",parent.name)
- end
- makecheckparent(parent,specification,extras)
- end
- else
- parent = fields[name]
- field = parent
- if not parent.pobj then
- if trace_fields then
- report_fields("using parent check %a",name)
- end
- makecheckparent(parent,specification,extras)
- end
- end
- if trace_fields then
- report_fields("using child check %a",name)
- end
- local d = pdfdictionary {
- Subtype = pdf_widget,
- Parent = pdfreference(parent.pobj),
- F = fieldplus(specification),
- OC = fieldlayer(specification),
- AA = fieldactions(specification),
- H = pdf_n,
- }
- local fontsymbol = specification.fontsymbol
- if fontsymbol and fontsymbol ~= "" then
- specification.fontsymbol = todingbat(fontsymbol)
- specification.fontstyle = "symbol"
- specification.fontalternative = "dingbats"
- d.DA = fieldsurrounding(specification)
- d.MK = fieldrendering(specification)
- return save_kid(parent,specification,d)
- else
- local appearance, default, value = fieldstates_check(field,true)
- d.AS = default
- d.AP = appearance
- return save_kid(parent,specification,d,value)
- end
-end
-
-function methods.check(name,specification)
- return makecheckchild(name,specification)
-end
-
-local function makepushparent(field,specification) -- check if we can share with the previous
- local d = pdfdictionary {
- Subtype = pdf_widget,
- T = pdfunicode(specification.title),
- F = fieldplus(specification),
- Ff = fieldflag(specification),
- OC = fieldlayer(specification),
- AA = fieldactions(specification),
- FT = pdf_btn,
- AP = fieldappearances(field),
- H = pdf_p,
- }
- save_parent(field,specification,d)
-end
-
-local function makepushchild(name,specification)
- local field, parent = clones[name], nil
- if field then
- parent = fields[field.parent]
- if not parent.pobj then
- if trace_fields then
- report_fields("forcing parent push %a",parent.name)
- end
- makepushparent(parent,specification)
- end
- else
- parent = fields[name]
- field = parent
- if not parent.pobj then
- if trace_fields then
- report_fields("using parent push %a",name)
- end
- makepushparent(parent,specification)
- end
- end
- if trace_fields then
- report_fields("using child push %a",name)
- end
- local fontsymbol = specification.fontsymbol
- local d = pdfdictionary {
- Subtype = pdf_widget,
- Parent = pdfreference(field.pobj),
- F = fieldplus(specification),
- OC = fieldlayer(specification),
- AA = fieldactions(specification),
- H = pdf_p,
- }
- if fontsymbol and fontsymbol ~= "" then
- specification.fontsymbol = todingbat(fontsymbol)
- specification.fontstyle = "symbol"
- specification.fontalternative = "dingbats"
- d.DA = fieldsurrounding(specification)
- d.MK = fieldrendering(specification)
- else
- d.AP = fieldappearances(field)
- end
- return save_kid(parent,specification,d)
-end
-
-function methods.push(name,specification)
- return makepushchild(name,enhance(specification,"PushButton"))
-end
-
-local function makeradioparent(field,specification)
--- specification = enhance(specification,"Radio,RadiosInUnison")
- specification = enhance(specification,"Radio,RadiosInUnison,Print,NoToggleToOff")
--- specification = enhance(specification,"Radio,Print,NoToggleToOff")
- local d = pdfdictionary {
- T = field.name,
- FT = pdf_btn,
--- F = fieldplus(specification),
- Ff = fieldflag(specification),
--- H = pdf_n,
- V = fielddefault(field),
- }
- save_parent(field,specification,d,true)
-end
-
--- local function makeradiochild(name,specification)
--- local field, parent = clones[name], nil
--- if field then
--- field = radios[field.parent]
--- parent = fields[field.parent]
--- if not parent.pobj then
--- if trace_fields then
--- report_fields("forcing parent radio %a",parent.name)
--- end
--- makeradioparent(parent,parent)
--- end
--- else
--- field = radios[name]
--- if not field then
--- report_fields("there is some problem with field %a",name)
--- return nil
--- end
--- parent = fields[field.parent]
--- if not parent.pobj then
--- if trace_fields then
--- report_fields("using parent radio %a",name)
--- end
--- makeradioparent(parent,parent)
--- end
--- end
--- if trace_fields then
--- report_fields("using child radio %a with values %a and default %a",name,field.values,field.default)
--- end
--- local fontsymbol = specification.fontsymbol
--- fontsymbol="star"
--- local d = pdfdictionary {
--- Subtype = pdf_widget,
--- Parent = pdfreference(parent.pobj),
--- F = fieldplus(specification),
--- OC = fieldlayer(specification),
--- AA = fieldactions(specification),
--- H = pdf_n,
--- }
--- if fontsymbol and fontsymbol ~= "" then
--- local appearance, default, value = fieldstates_radio(field,true,false,false,name) -- false is also ok
--- specification.fontsymbol = todingbat(fontsymbol)
--- specification.fontstyle = "symbol"
--- specification.fontalternative = "dingbats"
--- d.DA = fieldsurrounding(specification)
--- d.MK = fieldrendering(specification)
--- d.AS = pdfconstant(value) -- default -- mandate when AP but confuses viewers
--- d.AP = appearance
--- return save_kid(parent,specification,d,value)
--- -- return save_kid(parent,specification,d,name)
--- else
--- -- local appearance, default, value = fieldstates_radio(field,true) -- false is also ok
--- local appearance, default, value = fieldstates_radio(field,true,false,false,name) -- false is also ok
--- d.AS = default -- mandate when AP but confuses viewers
--- d.AP = appearance
--- return save_kid(parent,specification,d,value)
--- end
--- end
-
-local function makeradiochild(name,specification)
- local field, parent = clones[name], nil
- if field then
- field = radios[field.parent]
- parent = fields[field.parent]
- if not parent.pobj then
- if trace_fields then
- report_fields("forcing parent radio %a",parent.name)
- end
- makeradioparent(parent,parent)
- end
- else
- field = radios[name]
- if not field then
- report_fields("there is some problem with field %a",name)
- return nil
- end
- parent = fields[field.parent]
- if not parent.pobj then
- if trace_fields then
- report_fields("using parent radio %a",name)
- end
- makeradioparent(parent,parent)
- end
- end
- if trace_fields then
- report_fields("using child radio %a with values %a and default %a",name,field.values,field.default)
- end
- local fontsymbol = specification.fontsymbol
- -- fontsymbol = "circle"
- local d = pdfdictionary {
- Subtype = pdf_widget,
- Parent = pdfreference(parent.pobj),
- F = fieldplus(specification),
- OC = fieldlayer(specification),
- AA = fieldactions(specification),
- H = pdf_n,
- }
- if fontsymbol and fontsymbol ~= "" then
- specification.fontsymbol = todingbat(fontsymbol)
- specification.fontstyle = "symbol"
- specification.fontalternative = "dingbats"
- d.DA = fieldsurrounding(specification)
- d.MK = fieldrendering(specification)
- end
- local appearance, default, value = fieldstates_radio(field,name,fields[field.parent])
- d.AP = appearance
- d.AS = default -- /Whatever
- return save_kid(parent,specification,d,value)
-end
-
-function methods.sub(name,specification)
- return makeradiochild(name,enhance(specification,"Radio,RadiosInUnison"))
-end
+if not modules then modules = { } end modules ['lpdf-fld'] = {
+ version = 1.001,
+ comment = "companion to lpdf-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- The problem with widgets is that so far each version of acrobat
+-- has some rendering problem. I tried to keep up with this but
+-- it makes no sense to do so as one cannot rely on the viewer
+-- not changing. Especially Btn fields are tricky as their appearances
+-- need to be synchronized in the case of children but e.g. acrobat
+-- 10 does not retain the state and forces a check symbol. If you
+-- make a file in acrobat then it has MK entries that seem to overload
+-- the already present appearance streams (they're probably only meant for
+-- printing) as it looks like the viewer has some fallback on (auto
+-- generated) MK behaviour built in. So ... hard to test. Unfortunately
+-- not even the default appearance is generated. This will probably be
+-- solved at some point.
+--
+-- Also, for some reason the viewer does not always show custom appearances
+-- when fields are being rolled over or clicked upon, and circles or checks
+-- pop up when you don't expect them. I fear that this kind of instability
+-- eventually will kill pdf forms. After all, the manual says: "individual
+-- annotation handlers may ignore this entry and provide their own appearances"
+-- and one might wonder what 'individual' means here, but effectively this
+-- renders the whole concept of appearances useless.
+--
+-- Okay, here is one observation. A pdf file contains objects and one might
+-- consider each one to be a static entity when read in. However, acrobat
+-- starts rendering and seems to manipulate (appearance streams) of objects
+-- in place (this is visible when the file is saved again). And, combined
+-- with some other caching and hashing, this might give side effects for
+-- shared objects. So, it seems that in some cases one had best not be too
+-- clever and duplicate rather than share information. Of course this defeats the
+-- whole purpose of these objects. Of course I can be wrong.
+--
+-- A rather weird side effect of the viewer is that the highlighting of fields
+-- obscures values, unless you use one of the BS variants, and this makes
+-- custom appearances rather useless as there is no way to control this apart
+-- from changing the viewer preferences. It could of course be a bug but it would
+-- be nice if the highlighting was at least transparent. I have no clue why the
+-- built in shapes work ok (some xform based appearances are generated) while
+-- equally valid other xforms fail. It looks like acrobat appearances come on
+-- top (being refered to in the MK) while custom ones are behind the highlight
+-- rectangle. One can disable the "Show border hover color for fields" option
+-- in the preferences. If you load java-imp-rhh this side effect gets disabled
+-- and you get what you expect (it took me a while to figure out this hack).
+--
+-- When highlighting is enabled, those default symbols flash up, so it looks
+-- like we have some interference between this setting and custom appearances.
+--
+-- Anyhow, NeedAppearances is really needed in order to get a rendering
+-- for printing, especially when highlighting (those colorful foregrounds) is
+-- on.
+
+local gmatch, lower, format = string.gmatch, string.lower, string.format
+local lpegmatch = lpeg.match
+local utfchar = utf.char
+local bpfactor, todimen = number.dimenfactors.bp, string.todimen
+
+local trace_fields = false trackers.register("backends.fields", function(v) trace_fields = v end)
+
+local report_fields = logs.reporter("backend","fields")
+
+local backends, lpdf = backends, lpdf
+
+local variables = interfaces.variables
+local context = context
+
+local references = structures.references
+local settings_to_array = utilities.parsers.settings_to_array
+
+local pdfbackend = backends.pdf
+
+local nodeinjections = pdfbackend.nodeinjections
+local codeinjections = pdfbackend.codeinjections
+local registrations = pdfbackend.registrations
+
+local registeredsymbol = codeinjections.registeredsymbol
+
+local pdfstream = lpdf.stream
+local pdfdictionary = lpdf.dictionary
+local pdfarray = lpdf.array
+local pdfreference = lpdf.reference
+local pdfunicode = lpdf.unicode
+local pdfstring = lpdf.string
+local pdfconstant = lpdf.constant
+local pdftoeight = lpdf.toeight
+local pdfflushobject = lpdf.flushobject
+local pdfshareobjectreference = lpdf.shareobjectreference
+local pdfshareobject = lpdf.shareobject
+local pdfreserveobject = lpdf.reserveobject
+local pdfreserveannotation = lpdf.reserveannotation
+local pdfaction = lpdf.action
+
+local hpack_node = node.hpack
+
+local nodepool = nodes.pool
+
+local pdfannotation_node = nodepool.pdfannotation
+
+local submitoutputformat = 0 -- 0=unknown 1=HTML 2=FDF 3=XML => not yet used, needs to be checked
+
+local pdf_widget = pdfconstant("Widget")
+local pdf_tx = pdfconstant("Tx")
+local pdf_ch = pdfconstant("Ch")
+local pdf_btn = pdfconstant("Btn")
+----- pdf_yes = pdfconstant("Yes")
+local pdf_off = pdfconstant("Off")
+local pdf_p = pdfconstant("P") -- None Invert Outline Push
+local pdf_n = pdfconstant("N") -- None Invert Outline Push
+--
+local pdf_no_rect = pdfarray { 0, 0, 0, 0 }
+
+local splitter = lpeg.splitat("=>")
+
+local formats = {
+ html = 1, fdf = 2, xml = 3,
+}
+
+function codeinjections.setformsmethod(name)
+ submitoutputformat = formats[lower(name)] or formats.xml
+end
+
+local flag = { -- /Ff
+ ReadOnly = 1, -- 1
+ Required = 2, -- 2
+ NoExport = 4, -- 3
+ MultiLine = 4096, -- 13
+ Password = 8192, -- 14
+ NoToggleToOff = 16384, -- 15
+ Radio = 32768, -- 16
+ PushButton = 65536, -- 17
+ PopUp = 131072, -- 18
+ Edit = 262144, -- 19
+ Sort = 524288, -- 20
+ FileSelect = 1048576, -- 21
+ DoNotSpellCheck = 4194304, -- 23
+ DoNotScroll = 8388608, -- 24
+ Comb = 16777216, -- 25
+ RichText = 33554432, -- 26
+ RadiosInUnison = 33554432, -- 26
+ CommitOnSelChange = 67108864, -- 27
+}
+
+local plus = { -- /F
+ Invisible = 1, -- 1
+ Hidden = 2, -- 2
+ Printable = 4, -- 3
+ Print = 4, -- 3
+ NoZoom = 8, -- 4
+ NoRotate = 16, -- 5
+ NoView = 32, -- 6
+ ReadOnly = 64, -- 7
+ Locked = 128, -- 8
+ ToggleNoView = 256, -- 9
+ LockedContents = 512, -- 10,
+ AutoView = 256, -- 288 (6+9)
+}
+
+-- todo: check what is interfaced
+
+flag.readonly = flag.ReadOnly
+flag.required = flag.Required
+flag.protected = flag.Password
+flag.sorted = flag.Sort
+flag.unavailable = flag.NoExport
+flag.nocheck = flag.DoNotSpellCheck
+flag.fixed = flag.DoNotScroll
+flag.file = flag.FileSelect
+
+plus.hidden = plus.Hidden
+plus.printable = plus.Printable
+plus.auto = plus.AutoView
+
+-- some day .. lpeg with function or table
+
+local function fieldflag(specification) -- /Ff
+ local o, n = specification.option, 0
+ if o and o ~= "" then
+ for f in gmatch(o,"[^, ]+") do
+ n = n + (flag[f] or 0)
+ end
+ end
+ return n
+end
+
+local function fieldplus(specification) -- /F
+ local o, n = specification.option, 0
+ if o and o ~= "" then
+ for p in gmatch(o,"[^, ]+") do
+ n = n + (plus[p] or 0)
+ end
+ end
+-- n = n + 4
+ return n
+end
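+
+-- A quick illustration (not part of the original code; the numbers follow from
+-- the tables above): option strings are comma separated flag names whose bit
+-- values simply get added up, and unknown names contribute 0.
+--
+-- print(fieldflag { option = "ReadOnly,Required" }) -- 1 + 2 = 3 for /Ff
+-- print(fieldplus { option = "Hidden,Print" }) -- 2 + 4 = 6 for /F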
+
+local function checked(what)
+ local set, bug = references.identify("",what)
+ if not bug and #set > 0 then
+ local r, n = pdfaction(set)
+ return pdfshareobjectreference(r)
+ end
+end
+
+local function fieldactions(specification) -- share actions
+ local d, a = { }, nil
+ a = specification.mousedown
+ or specification.clickin if a and a ~= "" then d.D = checked(a) end
+ a = specification.mouseup
+ or specification.clickout if a and a ~= "" then d.U = checked(a) end
+ a = specification.regionin if a and a ~= "" then d.E = checked(a) end -- Enter
+ a = specification.regionout if a and a ~= "" then d.X = checked(a) end -- eXit
+ a = specification.afterkey if a and a ~= "" then d.K = checked(a) end
+ a = specification.format if a and a ~= "" then d.F = checked(a) end
+ a = specification.validate if a and a ~= "" then d.V = checked(a) end
+ a = specification.calculate if a and a ~= "" then d.C = checked(a) end
+ a = specification.focusin if a and a ~= "" then d.Fo = checked(a) end
+ a = specification.focusout if a and a ~= "" then d.Bl = checked(a) end
+ a = specification.openpage if a and a ~= "" then d.PO = checked(a) end
+ a = specification.closepage if a and a ~= "" then d.PC = checked(a) end
+ -- a = specification.visiblepage if a and a ~= "" then d.PV = checked(a) end
+ -- a = specification.invisiblepage if a and a ~= "" then d.PI = checked(a) end
+ return next(d) and pdfdictionary(d)
+end
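+
+-- For illustration only (not in the original source, and the reference names
+-- here are made up): something like
+--
+-- fieldactions { clickin = "StartSomething", openpage = "CheckSomething" }
+--
+-- ends up as an additional-actions dictionary of the form << /D n 0 R /PO m 0 R >>,
+-- where the references point at shared action objects that checked() builds
+-- via references.identify and lpdf.action.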
+
+-- fonts and color
+
+local pdfdocencodingvector, pdfdocencodingcapsule
+
+-- The pdf doc encoding vector is needed in order to
+-- trigger proper unicode. Interestingly, when
+-- a glyph is not in the vector, it is still visible
+-- as it is taken from some other font. Messy.
+
+-- To be checked: only when text/line fields.
+
+local function checkpdfdocencoding()
+ report_fields("adding pdfdoc encoding vector")
+ local encoding = dofile(resolvers.findfile("lpdf-enc.lua")) -- no checking, fatal if not present
+ pdfdocencodingvector = pdfreference(pdfflushobject(encoding))
+ local capsule = pdfdictionary {
+ PDFDocEncoding = pdfdocencodingvector
+ }
+ pdfdocencodingcapsule = pdfreference(pdfflushobject(capsule))
+ checkpdfdocencoding = function() end
+end
+
+local fontnames = {
+ rm = {
+ tf = "Times-Roman",
+ bf = "Times-Bold",
+ it = "Times-Italic",
+ sl = "Times-Italic",
+ bi = "Times-BoldItalic",
+ bs = "Times-BoldItalic",
+ },
+ ss = {
+ tf = "Helvetica",
+ bf = "Helvetica-Bold",
+ it = "Helvetica-Oblique",
+ sl = "Helvetica-Oblique",
+ bi = "Helvetica-BoldOblique",
+ bs = "Helvetica-BoldOblique",
+ },
+ tt = {
+ tf = "Courier",
+ bf = "Courier-Bold",
+ it = "Courier-Oblique",
+ sl = "Courier-Oblique",
+ bi = "Courier-BoldOblique",
+ bs = "Courier-BoldOblique",
+ },
+ symbol = {
+ dingbats = "ZapfDingbats",
+ }
+}
+
+local usedfonts = { }
+
+local function fieldsurrounding(specification)
+ local fontsize = specification.fontsize or "12pt"
+ local fontstyle = specification.fontstyle or "rm"
+ local fontalternative = specification.fontalternative or "tf"
+ local colorvalue = specification.colorvalue
+ local s = fontnames[fontstyle]
+ if not s then
+ fontstyle, s = "rm", fontnames.rm
+ end
+ local a = s[fontalternative]
+ if not a then
+ fontalternative, a = "tf", s.tf
+ end
+ local tag = fontstyle .. fontalternative
+ fontsize = todimen(fontsize)
+ fontsize = fontsize and (bpfactor * fontsize) or 12
+ local fontraise = 0.1 * fontsize -- todo: figure out what the natural one is and compensate for strutdp
+ local fontcode = format("%0.4f Tf %0.4f Ts",fontsize,fontraise)
+ -- we could test for colorvalue being 1 (black) and omit it then
+ local colorcode = lpdf.color(3,colorvalue) -- we force an rgb color space
+ if trace_fields then
+ report_fields("using font, style %a, alternative %a, size %p, tag %a, code %a",fontstyle,fontalternative,fontsize,tag,fontcode)
+ report_fields("using color, value %a, code %a",colorvalue,colorcode)
+ end
+ local stream = pdfstream {
+ pdfconstant(tag),
+ format("%s %s",fontcode,colorcode)
+ }
+ usedfonts[tag] = a -- the name
+ -- move up with "x.y Ts"
+ return tostring(stream)
+end
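+
+-- A sketch of the result (not part of the original code; the style, alternative
+-- and size are example inputs): for fontstyle "ss", fontalternative "bf" and
+-- fontsize "10pt" the returned stream boils down to something like
+--
+-- /ssbf 9.9626 Tf 0.9963 Ts <rgb color operators>
+--
+-- and the tag "ssbf" is remembered in usedfonts so that registerfonts() can
+-- later add a matching /Helvetica-Bold entry to the /DR font resources.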
+
+local function registerfonts()
+ if next(usedfonts) then
+ checkpdfdocencoding() -- already done
+ local d = pdfdictionary()
+ local pdffonttype, pdffontsubtype = pdfconstant("Font"), pdfconstant("Type1")
+ for tag, name in next, usedfonts do
+ local f = pdfdictionary {
+ Type = pdffonttype,
+ Subtype = pdffontsubtype,
+ Name = pdfconstant(tag),
+ BaseFont = pdfconstant(name),
+ Encoding = pdfdocencodingvector,
+ }
+ d[tag] = pdfreference(pdfflushobject(f))
+ end
+ return d
+ end
+end
+
+-- symbols
+
+local function fieldappearances(specification)
+ -- todo: caching
+ local values = specification.values
+ local default = specification.default -- todo
+ if not values then
+ -- error
+ return
+ end
+ local v = settings_to_array(values)
+ local n, r, d
+ if #v == 1 then
+ n, r, d = v[1], v[1], v[1]
+ elseif #v == 2 then
+ n, r, d = v[1], v[1], v[2]
+ else
+ n, r, d = v[1], v[2], v[3]
+ end
+ local appearance = pdfdictionary {
+ N = registeredsymbol(n), R = registeredsymbol(r), D = registeredsymbol(d),
+ }
+ return pdfshareobjectreference(appearance)
+-- return pdfreference(pdfflushobject(appearance))
+end
+
+local YesorOn = "Yes" -- somehow On is not always working out well any longer (why o why this change)
+
+-- beware ... maybe we should have unique /Yes1 ... we will probably
+-- change this one too.
+--
+-- TODO: the same as radio .. play safe and use different names.
+
+local function fieldstates_check(specification,forceyes,values,default,yesdefault)
+ -- we don't use Opt here (too messy for radio buttons)
+ local values, default = values or specification.values, default or specification.default
+ if not values or values == "" then
+ -- error
+ return
+ end
+ local v = settings_to_array(values)
+ local yes, off, yesn, yesr, yesd, offn, offr, offd
+ if #v == 1 then
+ yes, off = v[1], v[1]
+ else
+ yes, off = v[1], v[2]
+ end
+ local yesshown, yesvalue = lpegmatch(splitter,yes)
+ if not (yesshown and yesvalue) then
+ yesshown = yes
+ end
+ yes = settings_to_array(yesshown)
+ local offshown, offvalue = lpegmatch(splitter,off)
+ if not (offshown and offvalue) then
+ offshown = off
+ end
+ off = settings_to_array(offshown)
+ if #yes == 1 then
+ yesn, yesr, yesd = yes[1], yes[1], yes[1]
+ elseif #yes == 2 then
+ yesn, yesr, yesd = yes[1], yes[1], yes[2]
+ else
+ yesn, yesr, yesd = yes[1], yes[2], yes[3]
+ end
+ if #off == 1 then
+ offn, offr, offd = off[1], off[1], off[1]
+ elseif #off == 2 then
+ offn, offr, offd = off[1], off[1], off[2]
+ else
+ offn, offr, offd = off[1], off[2], off[3]
+ end
+ if not yesvalue then
+ yesvalue = yesdefault or yesn
+ end
+ if not offvalue then
+ offvalue = offn
+ end
+ if forceyes == true then
+ forceyes = YesorOn -- spec likes Yes more but we've used On for ages now
+ else
+ -- false or string
+ end
+ if default == yesn then
+ default = pdfconstant(forceyes or yesn)
+ else
+ default = pdf_off
+ end
+ local appearance
+ if false then -- needs testing
+ appearance = pdfdictionary { -- maybe also cache components
+ N = pdfshareobjectreference(pdfdictionary { [forceyes or yesn] = registeredsymbol(yesn), Off = registeredsymbol(offn) }),
+ R = pdfshareobjectreference(pdfdictionary { [forceyes or yesr] = registeredsymbol(yesr), Off = registeredsymbol(offr) }),
+ D = pdfshareobjectreference(pdfdictionary { [forceyes or yesd] = registeredsymbol(yesd), Off = registeredsymbol(offd) }),
+ }
+ else
+ appearance = pdfdictionary { -- maybe also cache components
+ N = pdfdictionary { [forceyes or yesn] = registeredsymbol(yesn), Off = registeredsymbol(offn) },
+ R = pdfdictionary { [forceyes or yesr] = registeredsymbol(yesr), Off = registeredsymbol(offr) },
+ D = pdfdictionary { [forceyes or yesd] = registeredsymbol(yesd), Off = registeredsymbol(offd) }
+ }
+ end
+ local appearanceref = pdfshareobjectreference(appearance)
+ -- local appearanceref = pdfreference(pdfflushobject(appearance))
+ return appearanceref, default, yesvalue
+end
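+
+-- Illustration, not in the original source: a values string like "yes=>Y,off"
+-- splits into { "yes=>Y", "off" }, so the shown symbol name is "yes", the
+-- exported value becomes "Y" and "off" serves the off state; when only one
+-- symbol is given it is reused for the normal, rollover and down (N, R, D)
+-- appearances.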
+
+-- It looks like there is always a (MK related) symbol used and that
+-- the appearances are only used as ornaments behind a symbol. So,
+-- contrary to what we did when widgets showed up, we now limit
+-- ourselves to dumber definitions. Especially when highlighting is
+-- enabled, weird interference happens. So, we play safe (some nice code
+-- that worked well until recently has been removed).
+
+local function fieldstates_radio(specification,name,parent)
+ local values = specification.values
+ local default = parent.default -- rather than specification.default
+ if not values or values == "" then
+ -- error
+ return
+ end
+ local v = settings_to_array(values)
+ local yes, off, yesn, yesr, yesd, offn, offr, offd
+ if #v == 1 then
+ yes, off = v[1], v[1]
+ else
+ yes, off = v[1], v[2]
+ end
+ -- yes keys might be the same in the three appearances within a field
+ -- but are best kept different among fields ... don't ask why
+ local yessymbols, yesvalue = lpegmatch(splitter,yes) -- n,r,d=>x
+ if not (yessymbols and yesvalue) then
+ yessymbols = yes
+ end
+ if not yesvalue then
+ yesvalue = name
+ end
+ yessymbols = settings_to_array(yessymbols)
+ if #yessymbols == 1 then
+ yesn = yessymbols[1]
+ yesr = yesn
+ yesd = yesr
+ elseif #yessymbols == 2 then
+ yesn = yessymbols[1]
+ yesr = yessymbols[2]
+ yesd = yesr
+ else
+ yesn = yessymbols[1]
+ yesr = yessymbols[2]
+ yesd = yessymbols[3]
+ end
+ -- we don't care about names, as all will be /Off
+ local offsymbols = lpegmatch(splitter,off) or off
+ offsymbols = settings_to_array(offsymbols)
+ if #offsymbols == 1 then
+ offn = offsymbols[1]
+ offr = offn
+ offd = offr
+ elseif #offsymbols == 2 then
+ offn = offsymbols[1]
+ offr = offsymbols[2]
+ offd = offr
+ else
+ offn = offsymbols[1]
+ offr = offsymbols[2]
+ offd = offsymbols[3]
+ end
+ if default == name then
+ default = pdfconstant(name)
+ else
+ default = pdf_off
+ end
+ --
+ local appearance
+ if false then -- needs testing
+ appearance = pdfdictionary { -- maybe also cache components
+ N = pdfshareobjectreference(pdfdictionary { [name] = registeredsymbol(yesn), Off = registeredsymbol(offn) }),
+ R = pdfshareobjectreference(pdfdictionary { [name] = registeredsymbol(yesr), Off = registeredsymbol(offr) }),
+ D = pdfshareobjectreference(pdfdictionary { [name] = registeredsymbol(yesd), Off = registeredsymbol(offd) }),
+ }
+ else
+ appearance = pdfdictionary { -- maybe also cache components
+ N = pdfdictionary { [name] = registeredsymbol(yesn), Off = registeredsymbol(offn) },
+ R = pdfdictionary { [name] = registeredsymbol(yesr), Off = registeredsymbol(offr) },
+ D = pdfdictionary { [name] = registeredsymbol(yesd), Off = registeredsymbol(offd) }
+ }
+ end
+ local appearanceref = pdfshareobjectreference(appearance) -- pdfreference(pdfflushobject(appearance))
+ return appearanceref, default, yesvalue
+end
+
+local function fielddefault(field)
+ local default = field.default
+ if not default or default == "" then
+ local values = settings_to_array(field.values)
+ default = values[1]
+ end
+ if not default or default == "" then
+ return pdf_off
+ else
+ return pdfconstant(default)
+ end
+end
+
+local function fieldoptions(specification)
+ local values = specification.values
+ local default = specification.default
+ if values then
+ local v = settings_to_array(values)
+ for i=1,#v do
+ local vi = v[i]
+ local shown, value = lpegmatch(splitter,vi)
+ if shown and value then
+ v[i] = pdfarray { pdfunicode(value), shown }
+ else
+ v[i] = pdfunicode(v[i])
+ end
+ end
+ return pdfarray(v)
+ end
+end
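+
+-- Example for illustration (not in the original code): with values "one=>1,two"
+-- the first /Opt entry becomes a two element array holding the unicode string
+-- for "1" plus the shown text "one", while a plain entry like "two" is passed
+-- along as a single unicode string.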
+
+local mapping = {
+ -- acrobat compliant (messy, probably some pdfdoc encoding interference here)
+ check = "4", -- 0x34
+ circle = "l", -- 0x6C
+ cross = "8", -- 0x38
+ diamond = "u", -- 0x75
+ square = "n", -- 0x6E
+ star = "H", -- 0x48
+}
+
+local function todingbat(n)
+ if n and n ~= "" then
+ return mapping[n] or ""
+ end
+end
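+
+-- So, purely as an example (not in the original code), todingbat("check")
+-- returns "4", the slot that acrobat uses for a check mark in ZapfDingbats,
+-- while an unknown name maps to the empty string.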
+
+-- local zero_bc = pdfarray { 0, 0, 0 }
+-- local zero_bg = pdfarray { 1, 1, 1 }
+
+local function fieldrendering(specification)
+ local bvalue = tonumber(specification.backgroundcolorvalue)
+ local fvalue = tonumber(specification.framecolorvalue)
+ local svalue = specification.fontsymbol
+ if bvalue or fvalue or (svalue and svalue ~= "") then
+ return pdfdictionary {
+ BG = bvalue and pdfarray { lpdf.colorvalues(3,bvalue) } or nil, -- or zero_bg,
+ BC = fvalue and pdfarray { lpdf.colorvalues(3,fvalue) } or nil, -- or zero_bc,
+ CA = svalue and pdfstring (svalue) or nil,
+ }
+ end
+end
+
+-- layers
+
+local function fieldlayer(specification) -- we can move this in line
+ local layer = specification.layer
+ return (layer and lpdf.layerreference(layer)) or nil
+end
+
+-- defining
+
+local fields, radios, clones, fieldsets, calculationset = { }, { }, { }, { }, nil
+
+local xfdftemplate = [[
+<?xml version='1.0' encoding='UTF-8'?>
+
+<xfdf xmlns='http://ns.adobe.com/xfdf/' xml:space='preserve'>
+  <f href='%s.pdf'/>
+  <fields>
+%s
+  </fields>
+</xfdf>
+]]
+
+function codeinjections.exportformdata(name)
+ local result = { }
+ for k, v in table.sortedhash(fields) do
+ result[#result+1] = format("    <field name='%s'><value>%s</value></field>",v.name or k,v.default or "")
+ end
+ local base = file.basename(tex.jobname)
+ local xfdf = format(xfdftemplate,base,table.concat(result,"\n"))
+ if not name or name == "" then
+ name = base
+ end
+ io.savedata(file.addsuffix(name,"xfdf"),xfdf)
+end
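+
+-- Sketch of the output (not in the original code; the job name, field name and
+-- value are made up): for a job "demo" with one field "email" defaulting to
+-- "nobody@example" the saved demo.xfdf essentially contains
+--
+-- <f href='demo.pdf'/>
+-- <fields>
+-- <field name='email'><value>nobody@example</value></field>
+-- </fields>
+--
+-- wrapped in the xfdf envelope defined above.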
+
+function codeinjections.definefieldset(tag,list)
+ fieldsets[tag] = list
+end
+
+function codeinjections.getfieldset(tag)
+ return fieldsets[tag]
+end
+
+local function fieldsetlist(tag)
+ if tag then
+ local ft = fieldsets[tag]
+ if ft then
+ local a = pdfarray()
+ for name in gmatch(ft,"[^, ]+") do -- ft is the comma separated list stored for this set
+ local f = fields[name]
+ if f and f.pobj then
+ a[#a+1] = pdfreference(f.pobj)
+ end
+ end
+ return a
+ end
+ end
+end
+
+function codeinjections.setfieldcalculationset(tag)
+ calculationset = tag
+end
+
+local function predefinesymbols(specification)
+ local values = specification.values
+ if values then
+ local symbols = settings_to_array(values)
+ for i=1,#symbols do
+ local symbol = symbols[i]
+ local a, b = lpegmatch(splitter,symbol)
+ codeinjections.presetsymbol(a or symbol)
+ end
+ end
+end
+
+function codeinjections.getdefaultfieldvalue(name)
+ local f = fields[name]
+ if f then
+ local values = f.values
+ local default = f.default
+ if not default or default == "" then
+ local symbols = settings_to_array(values)
+ local symbol = symbols[1]
+ if symbol then
+ local a, b = lpegmatch(splitter,symbol) -- splits at =>
+ default = a or symbol
+ end
+ end
+ return default
+ end
+end
+
+function codeinjections.definefield(specification)
+ local n = specification.name
+ local f = fields[n]
+ if not f then
+ local fieldtype = specification.type
+ if not fieldtype then
+ if trace_fields then
+ report_fields("invalid definition for %a, unknown type",n)
+ end
+ elseif fieldtype == "radio" then
+ local values = specification.values
+ if values and values ~= "" then
+ values = settings_to_array(values)
+ for v=1,#values do
+ radios[values[v]] = { parent = n }
+ end
+ fields[n] = specification
+ if trace_fields then
+ report_fields("defining %a as type %a",n,"radio")
+ end
+ elseif trace_fields then
+ report_fields("invalid definition of radio %a, missing values",n)
+ end
+ elseif fieldtype == "sub" then
+ -- not in main field list !
+ local radio = radios[n]
+ if radio then
+ -- merge specification
+ for key, value in next, specification do
+ radio[key] = value
+ end
+ if trace_fields then
+ local p = radios[n] and radios[n].parent
+ report_fields("defining %a as type sub of radio %a",n,p)
+ end
+ elseif trace_fields then
+ report_fields("invalid definition of radio sub %a, no parent given",n)
+ end
+ predefinesymbols(specification)
+ elseif fieldtype == "text" or fieldtype == "line" then
+ fields[n] = specification
+ if trace_fields then
+ report_fields("defining %a as type %a",n,fieldtype)
+ end
+ if specification.values ~= "" and specification.default == "" then
+ specification.default, specification.values = specification.values, nil
+ end
+ else
+ fields[n] = specification
+ if trace_fields then
+ report_fields("defining %a as type %a",n,fieldtype)
+ end
+ predefinesymbols(specification)
+ end
+ elseif trace_fields then
+ report_fields("invalid definition for %a, already defined",n)
+ end
+end
+
+function codeinjections.clonefield(specification) -- obsolete
+ local p, c, v = specification.parent, specification.children, specification.alternative
+ if not p or not c then
+ if trace_fields then
+ report_fields("invalid clone, children %a, parent %a, alternative %a",c,p,v)
+ end
+ return
+ end
+ local x = fields[p] or radios[p]
+ if not x then
+ if trace_fields then
+ report_fields("invalid clone, unknown parent %a",p)
+ end
+ return
+ end
+ for n in gmatch(c,"[^, ]+") do
+ local f, r, c = fields[n], radios[n], clones[n]
+ if f or r or c then
+ if trace_fields then
+ report_fields("already cloned, child %a, parent %a, alternative %a",n,p,v)
+ end
+ else
+ if trace_fields then
+ report_fields("cloning, child %a, parent %a, alternative %a",n,p,v)
+ end
+ clones[n] = specification
+ predefinesymbols(specification)
+ end
+ end
+end
+
+function codeinjections.getfieldcategory(name)
+ local f = fields[name] or radios[name] or clones[name]
+ if f then
+ local g = f.category
+ if not g or g == "" then
+ local v, p, t = f.alternative, f.parent, f.type
+ if v == "clone" or v == "copy" then
+ f = fields[p] or radios[p]
+ g = f and f.category
+ elseif t == "sub" then
+ f = fields[p]
+ g = f and f.category
+ end
+ end
+ return g
+ end
+end
+
+--
+
+function codeinjections.validfieldcategory(name)
+ return fields[name] or radios[name] or clones[name]
+end
+
+function codeinjections.validfieldset(name)
+ return fieldsets[name]
+end
+
+function codeinjections.validfield(name)
+ return fields[name]
+end
+
+--
+
+local alignments = {
+ flushleft = 0, right = 0,
+ center = 1, middle = 1,
+ flushright = 2, left = 2,
+}
+
+local function fieldalignment(specification)
+ return alignments[specification.align] or 0
+end
+
+local function enhance(specification,option)
+ local so = specification.option
+ if so and so ~= "" then
+ specification.option = so .. "," .. option
+ else
+ specification.option = option
+ end
+ return specification
+end
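+
+-- Not in the original source, just an illustration: enhance appends to the
+-- comma separated option string, so
+--
+-- enhance({ option = "ReadOnly" },"MultiLine").option -- "ReadOnly,MultiLine"
+-- enhance({ option = "" },"PopUp,Edit").option -- "PopUp,Edit"
+--
+-- which is how methods.text, methods.popup and methods.combo add their flags
+-- before handing the specification to the child makers below.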
+
+-- finish
+
+local collected = pdfarray()
+local forceencoding = false
+
+local function finishfields()
+ local sometext = forceencoding
+ for name, field in next, fields do
+ local kids = field.kids
+ if kids then
+ pdfflushobject(field.kidsnum,kids)
+ end
+ local opt = field.opt
+ if opt then
+ pdfflushobject(field.optnum,opt)
+ end
+ local type = field.type
+ if not sometext and (type == "text" or type == "line") then
+ sometext = true
+ end
+ end
+ for name, field in next, radios do
+ local kids = field.kids
+ if kids then
+ pdfflushobject(field.kidsnum,kids)
+ end
+ local opt = field.opt
+ if opt then
+ pdfflushobject(field.optnum,opt)
+ end
+ end
+ if #collected > 0 then
+ local acroform = pdfdictionary {
+ NeedAppearances = true,
+ Fields = pdfreference(pdfflushobject(collected)),
+ CO = fieldsetlist(calculationset),
+ }
+ if sometext then
+ checkpdfdocencoding()
+ usedfonts.tttf = fontnames.tt.tf
+ acroform.DA = "/tttf 12 Tf 0 g"
+ acroform.DR = pdfdictionary {
+ Font = registerfonts(),
+ Encoding = pdfdocencodingcapsule,
+ }
+ end
+ lpdf.addtocatalog("AcroForm",pdfreference(pdfflushobject(acroform)))
+ end
+end
+
+lpdf.registerdocumentfinalizer(finishfields,"form fields")
+
+local methods = { }
+
+function nodeinjections.typesetfield(name,specification)
+ local field = fields[name] or radios[name] or clones[name]
+ if not field then
+ report_fields( "unknown child %a",name)
+ -- unknown field
+ return
+ end
+ local alternative, parent = field.alternative, field.parent
+ if alternative == "copy" or alternative == "clone" then -- only in clones
+ field = fields[parent] or radios[parent]
+ end
+ local method = methods[field.type]
+ if method then
+ return method(name,specification,alternative)
+ else
+ report_fields( "unknown method %a for child %a",field.type,name)
+ end
+end
+
+local function save_parent(field,specification,d,hasopt)
+ local kidsnum = pdfreserveobject()
+ d.Kids = pdfreference(kidsnum)
+ field.kidsnum = kidsnum
+ field.kids = pdfarray()
+ if hasopt then
+ local optnum = pdfreserveobject()
+ d.Opt = pdfreference(optnum)
+ field.optnum = optnum
+ field.opt = pdfarray()
+ end
+ local pnum = pdfflushobject(d)
+ field.pobj = pnum
+ collected[#collected+1] = pdfreference(pnum)
+end
+
+local function save_kid(field,specification,d,optname)
+ local kn = pdfreserveannotation()
+ field.kids[#field.kids+1] = pdfreference(kn)
+ if optname then
+ local opt = field.opt
+ if opt then
+ opt[#opt+1] = optname
+ end
+ end
+ local width, height, depth = specification.width or 0, specification.height or 0, specification.depth
+ local box = hpack_node(pdfannotation_node(width,height,depth,d(),kn))
+ box.width, box.height, box.depth = width, height, depth -- redundant
+ return box
+end
+
+local function makelineparent(field,specification)
+ local text = pdfunicode(field.default)
+ local length = tonumber(specification.length or 0) or 0
+ local d = pdfdictionary {
+ Subtype = pdf_widget,
+ T = pdfunicode(specification.title),
+ F = fieldplus(specification),
+ Ff = fieldflag(specification),
+ OC = fieldlayer(specification),
+ DA = fieldsurrounding(specification),
+ AA = fieldactions(specification),
+ FT = pdf_tx,
+ Q = fieldalignment(specification),
+ MaxLen = length == 0 and 1000 or length,
+ DV = text,
+ V = text,
+ }
+ save_parent(field,specification,d)
+end
+
+local function makelinechild(name,specification)
+ local field, parent = clones[name], nil
+ if field then
+ parent = fields[field.parent]
+ if not parent.pobj then
+ if trace_fields then
+ report_fields("forcing parent text %a",parent.name)
+ end
+ makelineparent(parent,specification)
+ end
+ else
+ parent = fields[name]
+ field = parent
+ if not parent.pobj then
+ if trace_fields then
+ report_fields("using parent text %a",name)
+ end
+ makelineparent(parent,specification)
+ end
+ end
+ if trace_fields then
+ report_fields("using child text %a",name)
+ end
+ local d = pdfdictionary {
+ Subtype = pdf_widget,
+ Parent = pdfreference(parent.pobj),
+ F = fieldplus(specification),
+ OC = fieldlayer(specification),
+ DA = fieldsurrounding(specification),
+ AA = fieldactions(specification),
+ MK = fieldrendering(specification),
+ Q = fieldalignment(specification),
+ }
+ return save_kid(parent,specification,d)
+end
+
+function methods.line(name,specification)
+ return makelinechild(name,specification)
+end
+
+function methods.text(name,specification)
+ return makelinechild(name,enhance(specification,"MultiLine"))
+end
+
+local function makechoiceparent(field,specification)
+ local d = pdfdictionary {
+ Subtype = pdf_widget,
+ T = pdfunicode(specification.title),
+ F = fieldplus(specification),
+ Ff = fieldflag(specification),
+ OC = fieldlayer(specification),
+ AA = fieldactions(specification),
+ FT = pdf_ch,
+ Opt = fieldoptions(field), -- todo
+ }
+ save_parent(field,specification,d)
+end
+
+local function makechoicechild(name,specification)
+ local field, parent = clones[name], nil
+ if field then
+ parent = fields[field.parent]
+ if not parent.pobj then
+ if trace_fields then
+ report_fields("forcing parent choice %a",parent.name)
+ end
+ makechoiceparent(parent,specification)
+ end
+ else
+ parent = fields[name]
+ field = parent
+ if not parent.pobj then
+ if trace_fields then
+ report_fields("using parent choice %a",name)
+ end
+ makechoiceparent(parent,specification)
+ end
+ end
+ if trace_fields then
+ report_fields("using child choice %a",name)
+ end
+ local d = pdfdictionary {
+ Subtype = pdf_widget,
+ Parent = pdfreference(parent.pobj),
+ F = fieldplus(specification),
+ OC = fieldlayer(specification),
+ AA = fieldactions(specification),
+ }
+ return save_kid(parent,specification,d) -- do opt here
+end
+
+function methods.choice(name,specification)
+ return makechoicechild(name,specification)
+end
+
+function methods.popup(name,specification)
+ return makechoicechild(name,enhance(specification,"PopUp"))
+end
+
+function methods.combo(name,specification)
+ return makechoicechild(name,enhance(specification,"PopUp,Edit"))
+end
+
+local function makecheckparent(field,specification)
+ local d = pdfdictionary {
+ T = pdfunicode(specification.title), -- todo: when tracing use a string
+ F = fieldplus(specification),
+ Ff = fieldflag(specification),
+ OC = fieldlayer(specification),
+ AA = fieldactions(specification),
+ FT = pdf_btn,
+ V = fielddefault(field),
+ }
+ save_parent(field,specification,d,true)
+end
+
+local function makecheckchild(name,specification)
+ local field, parent = clones[name], nil
+ if field then
+ parent = fields[field.parent]
+ if not parent.pobj then
+ if trace_fields then
+ report_fields("forcing parent check %a",parent.name)
+ end
+ makecheckparent(parent,specification)
+ end
+ else
+ parent = fields[name]
+ field = parent
+ if not parent.pobj then
+ if trace_fields then
+ report_fields("using parent check %a",name)
+ end
+ makecheckparent(parent,specification)
+ end
+ end
+ if trace_fields then
+ report_fields("using child check %a",name)
+ end
+ local d = pdfdictionary {
+ Subtype = pdf_widget,
+ Parent = pdfreference(parent.pobj),
+ F = fieldplus(specification),
+ OC = fieldlayer(specification),
+ AA = fieldactions(specification),
+ H = pdf_n,
+ }
+ local fontsymbol = specification.fontsymbol
+ if fontsymbol and fontsymbol ~= "" then
+ specification.fontsymbol = todingbat(fontsymbol)
+ specification.fontstyle = "symbol"
+ specification.fontalternative = "dingbats"
+ d.DA = fieldsurrounding(specification)
+ d.MK = fieldrendering(specification)
+ return save_kid(parent,specification,d)
+ else
+ local appearance, default, value = fieldstates_check(field,true)
+ d.AS = default
+ d.AP = appearance
+ return save_kid(parent,specification,d,value)
+ end
+end
+
+function methods.check(name,specification)
+ return makecheckchild(name,specification)
+end
+
+local function makepushparent(field,specification) -- check if we can share with the previous
+ local d = pdfdictionary {
+ Subtype = pdf_widget,
+ T = pdfunicode(specification.title),
+ F = fieldplus(specification),
+ Ff = fieldflag(specification),
+ OC = fieldlayer(specification),
+ AA = fieldactions(specification),
+ FT = pdf_btn,
+ AP = fieldappearances(field),
+ H = pdf_p,
+ }
+ save_parent(field,specification,d)
+end
+
+local function makepushchild(name,specification)
+ local field, parent = clones[name], nil
+ if field then
+ parent = fields[field.parent]
+ if not parent.pobj then
+ if trace_fields then
+ report_fields("forcing parent push %a",parent.name)
+ end
+ makepushparent(parent,specification)
+ end
+ else
+ parent = fields[name]
+ field = parent
+ if not parent.pobj then
+ if trace_fields then
+ report_fields("using parent push %a",name)
+ end
+ makepushparent(parent,specification)
+ end
+ end
+ if trace_fields then
+ report_fields("using child push %a",name)
+ end
+ local fontsymbol = specification.fontsymbol
+ local d = pdfdictionary {
+ Subtype = pdf_widget,
+ Parent = pdfreference(parent.pobj),
+ F = fieldplus(specification),
+ OC = fieldlayer(specification),
+ AA = fieldactions(specification),
+ H = pdf_p,
+ }
+ if fontsymbol and fontsymbol ~= "" then
+ specification.fontsymbol = todingbat(fontsymbol)
+ specification.fontstyle = "symbol"
+ specification.fontalternative = "dingbats"
+ d.DA = fieldsurrounding(specification)
+ d.MK = fieldrendering(specification)
+ else
+ d.AP = fieldappearances(field)
+ end
+ return save_kid(parent,specification,d)
+end
+
+function methods.push(name,specification)
+ return makepushchild(name,enhance(specification,"PushButton"))
+end
+
+local function makeradioparent(field,specification)
+-- specification = enhance(specification,"Radio,RadiosInUnison")
+ specification = enhance(specification,"Radio,RadiosInUnison,Print,NoToggleToOff")
+-- specification = enhance(specification,"Radio,Print,NoToggleToOff")
+ local d = pdfdictionary {
+ T = field.name,
+ FT = pdf_btn,
+-- F = fieldplus(specification),
+ Ff = fieldflag(specification),
+-- H = pdf_n,
+ V = fielddefault(field),
+ }
+ save_parent(field,specification,d,true)
+end
+
+-- local function makeradiochild(name,specification)
+-- local field, parent = clones[name], nil
+-- if field then
+-- field = radios[field.parent]
+-- parent = fields[field.parent]
+-- if not parent.pobj then
+-- if trace_fields then
+-- report_fields("forcing parent radio %a",parent.name)
+-- end
+-- makeradioparent(parent,parent)
+-- end
+-- else
+-- field = radios[name]
+-- if not field then
+-- report_fields("there is some problem with field %a",name)
+-- return nil
+-- end
+-- parent = fields[field.parent]
+-- if not parent.pobj then
+-- if trace_fields then
+-- report_fields("using parent radio %a",name)
+-- end
+-- makeradioparent(parent,parent)
+-- end
+-- end
+-- if trace_fields then
+-- report_fields("using child radio %a with values %a and default %a",name,field.values,field.default)
+-- end
+-- local fontsymbol = specification.fontsymbol
+-- fontsymbol="star"
+-- local d = pdfdictionary {
+-- Subtype = pdf_widget,
+-- Parent = pdfreference(parent.pobj),
+-- F = fieldplus(specification),
+-- OC = fieldlayer(specification),
+-- AA = fieldactions(specification),
+-- H = pdf_n,
+-- }
+-- if fontsymbol and fontsymbol ~= "" then
+-- local appearance, default, value = fieldstates_radio(field,true,false,false,name) -- false is also ok
+-- specification.fontsymbol = todingbat(fontsymbol)
+-- specification.fontstyle = "symbol"
+-- specification.fontalternative = "dingbats"
+-- d.DA = fieldsurrounding(specification)
+-- d.MK = fieldrendering(specification)
+-- d.AS = pdfconstant(value) -- default -- mandate when AP but confuses viewers
+-- d.AP = appearance
+-- return save_kid(parent,specification,d,value)
+-- -- return save_kid(parent,specification,d,name)
+-- else
+-- -- local appearance, default, value = fieldstates_radio(field,true) -- false is also ok
+-- local appearance, default, value = fieldstates_radio(field,true,false,false,name) -- false is also ok
+-- d.AS = default -- mandate when AP but confuses viewers
+-- d.AP = appearance
+-- return save_kid(parent,specification,d,value)
+-- end
+-- end
+
+local function makeradiochild(name,specification)
+ local field, parent = clones[name], nil
+ if field then
+ field = radios[field.parent]
+ parent = fields[field.parent]
+ if not parent.pobj then
+ if trace_fields then
+ report_fields("forcing parent radio %a",parent.name)
+ end
+ makeradioparent(parent,parent)
+ end
+ else
+ field = radios[name]
+ if not field then
+ report_fields("there is some problem with field %a",name)
+ return nil
+ end
+ parent = fields[field.parent]
+ if not parent.pobj then
+ if trace_fields then
+ report_fields("using parent radio %a",name)
+ end
+ makeradioparent(parent,parent)
+ end
+ end
+ if trace_fields then
+ report_fields("using child radio %a with values %a and default %a",name,field.values,field.default)
+ end
+ local fontsymbol = specification.fontsymbol
+ -- fontsymbol = "circle"
+ local d = pdfdictionary {
+ Subtype = pdf_widget,
+ Parent = pdfreference(parent.pobj),
+ F = fieldplus(specification),
+ OC = fieldlayer(specification),
+ AA = fieldactions(specification),
+ H = pdf_n,
+ }
+ if fontsymbol and fontsymbol ~= "" then
+ specification.fontsymbol = todingbat(fontsymbol)
+ specification.fontstyle = "symbol"
+ specification.fontalternative = "dingbats"
+ d.DA = fieldsurrounding(specification)
+ d.MK = fieldrendering(specification)
+ end
+ local appearance, default, value = fieldstates_radio(field,name,fields[field.parent])
+ d.AP = appearance
+ d.AS = default -- /Whatever
+ return save_kid(parent,specification,d,value)
+end
+
+function methods.sub(name,specification)
+ return makeradiochild(name,enhance(specification,"Radio,RadiosInUnison"))
+end
diff --git a/tex/context/base/lpdf-grp.lua b/tex/context/base/lpdf-grp.lua
index fed5e6a46..a255658ed 100644
--- a/tex/context/base/lpdf-grp.lua
+++ b/tex/context/base/lpdf-grp.lua
@@ -1,244 +1,244 @@
-if not modules then modules = { } end modules ['lpdf-grp'] = {
- version = 1.001,
- comment = "companion to lpdf-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format, gsub = string.format, string.gsub
-local concat = table.concat
-local round = math.round
-
-local backends, lpdf = backends, lpdf
-
-local nodeinjections = backends.pdf.nodeinjections
-
-local colors = attributes.colors
-local basepoints = number.dimenfactors["bp"]
-local inches = number.dimenfactors["in"]
-
-local nodeinjections = backends.pdf.nodeinjections
-local codeinjections = backends.pdf.codeinjections
-local registrations = backends.pdf.registrations
-
-local pdfdictionary = lpdf.dictionary
-local pdfarray = lpdf.array
-local pdfconstant = lpdf.constant
-local pdfboolean = lpdf.boolean
-local pdfreference = lpdf.reference
-local pdfflushobject = lpdf.flushobject
-
--- can also be done indirectly:
---
--- 12 : << /AntiAlias false /ColorSpace 8 0 R /Coords [ 0.0 0.0 1.0 0.0 ] /Domain [ 0.0 1.0 ] /Extend [ true true ] /Function 22 0 R /ShadingType 2 >>
--- 22 : << /Bounds [ ] /Domain [ 0.0 1.0 ] /Encode [ 0.0 1.0 ] /FunctionType 3 /Functions [ 31 0 R ] >>
--- 31 : << /C0 [ 1.0 0.0 ] /C1 [ 0.0 1.0 ] /Domain [ 0.0 1.0 ] /FunctionType 2 /N 1.0 >>
-
-local function shade(stype,name,domain,color_a,color_b,n,colorspace,coordinates,separation)
- local f = pdfdictionary {
- FunctionType = 2,
- Domain = pdfarray(domain), -- domain is actually a string
- C0 = pdfarray(color_a),
- C1 = pdfarray(color_b),
- N = tonumber(n),
- }
- separation = separation and registrations.getspotcolorreference(separation)
- local s = pdfdictionary {
- ShadingType = stype,
- ColorSpace = separation and pdfreference(separation) or pdfconstant(colorspace),
- Function = pdfreference(pdfflushobject(f)),
- Coords = pdfarray(coordinates),
- Extend = pdfarray { true, true },
- AntiAlias = pdfboolean(true),
- }
- lpdf.adddocumentshade(name,pdfreference(pdfflushobject(s)))
-end
-
-function lpdf.circularshade(name,domain,color_a,color_b,n,colorspace,coordinates,separation)
- shade(3,name,domain,color_a,color_b,n,colorspace,coordinates,separation)
-end
-
-function lpdf.linearshade(name,domain,color_a,color_b,n,colorspace,coordinates,separation)
- shade(2,name,domain,color_a,color_b,n,colorspace,coordinates,separation)
-end
-
--- inline bitmaps but xform'd
---
--- we could derive the colorspace if we strip the data
--- and divide by x*y
-
-local template = "q BI %s ID %s > EI Q"
-local factor = 72/300
-
-function nodeinjections.injectbitmap(t)
- -- encoding is ascii hex, no checking here
- local xresolution, yresolution = t.xresolution or 0, t.yresolution or 0
- if xresolution == 0 or yresolution == 0 then
- return -- fatal error
- end
- local colorspace = t.colorspace
- if colorspace ~= "rgb" and colorspace ~= "cmyk" and colorspace ~= "gray" then
- -- not that efficient but ok
- local d = gsub(t.data,"[^0-9a-f]","")
- local b = math.round(#d / (xresolution * yresolution))
- if b == 2 then
- colorspace = "gray"
- elseif b == 6 then
- colorspace = "rgb"
- elseif b == 8 then
- colorspace = "cmyk"
- end
- end
- colorspace = lpdf.colorspaceconstants[colorspace]
- if not colorspace then
- return -- fatal error
- end
- local d = pdfdictionary {
- W = xresolution,
- H = yresolution,
- CS = colorspace,
- BPC = 8,
- F = pdfconstant("AHx"),
---~ CS = nil,
---~ BPC = 1,
---~ IM = true,
- }
- -- for some reasons it only works well if we take a 1bp boundingbox
- local urx, ury = 1/basepoints, 1/basepoints
- -- urx = (xresolution/300)/basepoints
- -- ury = (yresolution/300)/basepoints
- local width, height = t.width or 0, t.height or 0
- if width == 0 and height == 0 then
- width = factor * xresolution / basepoints
- height = factor * yresolution / basepoints
- elseif width == 0 then
- width = height * xresolution / yresolution
- elseif height == 0 then
- height = width * yresolution / xresolution
- end
- local image = img.new {
- stream = format(template,d(),t.data),
- width = width,
- height = height,
- bbox = { 0, 0, urx, ury },
- }
- return img.node(image)
-end
-
--- general graphic helpers
-
-function codeinjections.setfigurealternative(data,figure)
- local request = data.request
- local display = request.display
- if display and display ~= "" then
- local nested = figures.push {
- name = display,
- page = request.page,
- size = request.size,
- prefix = request.prefix,
- cache = request.cache,
- width = request.width,
- height = request.height,
- }
- figures.identify()
- local displayfigure = figures.check()
- if displayfigure then
- -- figure.aform = true
- img.immediatewrite(figure)
- local a = pdfarray {
- pdfdictionary {
- Image = pdfreference(figure.objnum),
- DefaultForPrinting = true,
- }
- }
- local d = pdfdictionary {
- Alternates = pdfreference(pdfflushobject(a)),
- }
- displayfigure.attr = d()
- figures.pop()
- return displayfigure, nested
- else
- figures.pop()
- end
- end
-end
-
-function codeinjections.getpreviewfigure(request)
- local figure = figures.initialize(request)
- if not figure then
- return
- end
- figure = figures.identify(figure)
- if not (figure and figure.status and figure.status.fullname) then
- return
- end
- figure = figures.check(figure)
- if not (figure and figure.status and figure.status.fullname) then
- return
- end
- local image = figure.status.private
- if image then
- img.immediatewrite(image)
- end
- return figure
-end
-
-function codeinjections.setfiguremask(data,figure) -- mark
- local request = data.request
- local mask = request.mask
- if mask and mask ~= "" then
- figures.push {
- name = mask,
- page = request.page,
- size = request.size,
- prefix = request.prefix,
- cache = request.cache,
- width = request.width,
- height = request.height,
- }
- figures.identify()
- local maskfigure = figures.check()
- if maskfigure then
- local image = maskfigure.status.private
- if image then
- img.immediatewrite(image)
- local d = pdfdictionary {
- Interpolate = false,
- SMask = pdfreference(image.objnum),
- }
- figure.attr = d()
- end
- end
- figures.pop()
- end
-end
-
--- temp hack
-
-local factor = number.dimenfactors.bp
-
-function img.package(image) -- see lpdf-u3d **
- local boundingbox = image.bbox
- local imagetag = "Im" .. image.index
- local resources = pdfdictionary {
- ProcSet = pdfarray {
- pdfconstant("PDF"),
- pdfconstant("ImageC")
- },
- Resources = pdfdictionary {
- XObject = pdfdictionary {
- [imagetag] = pdfreference(image.objnum)
- }
- }
- }
- local width = boundingbox[3]
- local height = boundingbox[4]
- local xform = img.scan {
- attr = resources(),
- stream = format("%f 0 0 %f 0 0 cm /%s Do",width,height,imagetag),
- bbox = { 0, 0, width/factor, height/factor },
- }
- img.immediatewrite(xform)
- return xform
-end
+if not modules then modules = { } end modules ['lpdf-grp'] = {
+ version = 1.001,
+ comment = "companion to lpdf-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format, gsub = string.format, string.gsub
+local concat = table.concat
+local round = math.round
+
+local backends, lpdf = backends, lpdf
+
+local nodeinjections = backends.pdf.nodeinjections
+
+local colors = attributes.colors
+local basepoints = number.dimenfactors["bp"]
+local inches = number.dimenfactors["in"]
+
+local nodeinjections = backends.pdf.nodeinjections
+local codeinjections = backends.pdf.codeinjections
+local registrations = backends.pdf.registrations
+
+local pdfdictionary = lpdf.dictionary
+local pdfarray = lpdf.array
+local pdfconstant = lpdf.constant
+local pdfboolean = lpdf.boolean
+local pdfreference = lpdf.reference
+local pdfflushobject = lpdf.flushobject
+
+-- can also be done indirectly:
+--
+-- 12 : << /AntiAlias false /ColorSpace 8 0 R /Coords [ 0.0 0.0 1.0 0.0 ] /Domain [ 0.0 1.0 ] /Extend [ true true ] /Function 22 0 R /ShadingType 2 >>
+-- 22 : << /Bounds [ ] /Domain [ 0.0 1.0 ] /Encode [ 0.0 1.0 ] /FunctionType 3 /Functions [ 31 0 R ] >>
+-- 31 : << /C0 [ 1.0 0.0 ] /C1 [ 0.0 1.0 ] /Domain [ 0.0 1.0 ] /FunctionType 2 /N 1.0 >>
+
+local function shade(stype,name,domain,color_a,color_b,n,colorspace,coordinates,separation)
+ local f = pdfdictionary {
+ FunctionType = 2,
+ Domain = pdfarray(domain), -- domain is actually a string
+ C0 = pdfarray(color_a),
+ C1 = pdfarray(color_b),
+ N = tonumber(n),
+ }
+ separation = separation and registrations.getspotcolorreference(separation)
+ local s = pdfdictionary {
+ ShadingType = stype,
+ ColorSpace = separation and pdfreference(separation) or pdfconstant(colorspace),
+ Function = pdfreference(pdfflushobject(f)),
+ Coords = pdfarray(coordinates),
+ Extend = pdfarray { true, true },
+ AntiAlias = pdfboolean(true),
+ }
+ lpdf.adddocumentshade(name,pdfreference(pdfflushobject(s)))
+end
+
+function lpdf.circularshade(name,domain,color_a,color_b,n,colorspace,coordinates,separation)
+ shade(3,name,domain,color_a,color_b,n,colorspace,coordinates,separation)
+end
+
+function lpdf.linearshade(name,domain,color_a,color_b,n,colorspace,coordinates,separation)
+ shade(2,name,domain,color_a,color_b,n,colorspace,coordinates,separation)
+end
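+
+-- For the record (not in the original comments): ShadingType 2 is an axial
+-- (linear) shading and ShadingType 3 a radial one, which is why linearshade
+-- passes 2 and circularshade passes 3 to the shared shade() helper above.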
+
+-- inline bitmaps but xform'd
+--
+-- we could derive the colorspace if we strip the data
+-- and divide by x*y
+
+local template = "q BI %s ID %s > EI Q"
+local factor = 72/300
+
+function nodeinjections.injectbitmap(t)
+ -- encoding is ascii hex, no checking here
+ local xresolution, yresolution = t.xresolution or 0, t.yresolution or 0
+ if xresolution == 0 or yresolution == 0 then
+ return -- fatal error
+ end
+ local colorspace = t.colorspace
+ if colorspace ~= "rgb" and colorspace ~= "cmyk" and colorspace ~= "gray" then
+ -- not that efficient but ok
+ local d = gsub(t.data,"[^0-9a-f]","")
+ local b = math.round(#d / (xresolution * yresolution))
+ if b == 2 then
+ colorspace = "gray"
+ elseif b == 6 then
+ colorspace = "rgb"
+ elseif b == 8 then
+ colorspace = "cmyk"
+ end
+ end
+ colorspace = lpdf.colorspaceconstants[colorspace]
+ if not colorspace then
+ return -- fatal error
+ end
+ local d = pdfdictionary {
+ W = xresolution,
+ H = yresolution,
+ CS = colorspace,
+ BPC = 8,
+ F = pdfconstant("AHx"),
+--~ CS = nil,
+--~ BPC = 1,
+--~ IM = true,
+ }
+ -- for some reason it only works well if we take a 1bp boundingbox
+ local urx, ury = 1/basepoints, 1/basepoints
+ -- urx = (xresolution/300)/basepoints
+ -- ury = (yresolution/300)/basepoints
+ local width, height = t.width or 0, t.height or 0
+ if width == 0 and height == 0 then
+ width = factor * xresolution / basepoints
+ height = factor * yresolution / basepoints
+ elseif width == 0 then
+ width = height * xresolution / yresolution
+ elseif height == 0 then
+ height = width * yresolution / xresolution
+ end
+ local image = img.new {
+ stream = format(template,d(),t.data),
+ width = width,
+ height = height,
+ bbox = { 0, 0, urx, ury },
+ }
+ return img.node(image)
+end
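+
+-- A small worked example (not part of the original code): ascii hex data of a
+-- 2x2 gray bitmap has 2*2*2 = 8 hex characters, so #d divided by x*y is 2 and
+-- "gray" is picked; rgb and cmyk pixels take 6 and 8 hex characters, matching
+-- the checks above.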
+
+-- general graphic helpers
+
+function codeinjections.setfigurealternative(data,figure)
+ local request = data.request
+ local display = request.display
+ if display and display ~= "" then
+ local nested = figures.push {
+ name = display,
+ page = request.page,
+ size = request.size,
+ prefix = request.prefix,
+ cache = request.cache,
+ width = request.width,
+ height = request.height,
+ }
+ figures.identify()
+ local displayfigure = figures.check()
+ if displayfigure then
+ -- figure.aform = true
+ img.immediatewrite(figure)
+ local a = pdfarray {
+ pdfdictionary {
+ Image = pdfreference(figure.objnum),
+ DefaultForPrinting = true,
+ }
+ }
+ local d = pdfdictionary {
+ Alternates = pdfreference(pdfflushobject(a)),
+ }
+ displayfigure.attr = d()
+ figures.pop()
+ return displayfigure, nested
+ else
+ figures.pop()
+ end
+ end
+end
+
+function codeinjections.getpreviewfigure(request)
+ local figure = figures.initialize(request)
+ if not figure then
+ return
+ end
+ figure = figures.identify(figure)
+ if not (figure and figure.status and figure.status.fullname) then
+ return
+ end
+ figure = figures.check(figure)
+ if not (figure and figure.status and figure.status.fullname) then
+ return
+ end
+ local image = figure.status.private
+ if image then
+ img.immediatewrite(image)
+ end
+ return figure
+end
+
+function codeinjections.setfiguremask(data,figure) -- mark
+ local request = data.request
+ local mask = request.mask
+ if mask and mask ~= "" then
+ figures.push {
+ name = mask,
+ page = request.page,
+ size = request.size,
+ prefix = request.prefix,
+ cache = request.cache,
+ width = request.width,
+ height = request.height,
+ }
+ figures.identify()
+ local maskfigure = figures.check()
+ if maskfigure then
+ local image = maskfigure.status.private
+ if image then
+ img.immediatewrite(image)
+ local d = pdfdictionary {
+ Interpolate = false,
+ SMask = pdfreference(image.objnum),
+ }
+ figure.attr = d()
+ end
+ end
+ figures.pop()
+ end
+end
+
+-- temp hack
+
+local factor = number.dimenfactors.bp
+
+function img.package(image) -- see lpdf-u3d **
+ local boundingbox = image.bbox
+ local imagetag = "Im" .. image.index
+ local resources = pdfdictionary {
+ ProcSet = pdfarray {
+ pdfconstant("PDF"),
+ pdfconstant("ImageC")
+ },
+ Resources = pdfdictionary {
+ XObject = pdfdictionary {
+ [imagetag] = pdfreference(image.objnum)
+ }
+ }
+ }
+ local width = boundingbox[3]
+ local height = boundingbox[4]
+ local xform = img.scan {
+ attr = resources(),
+ stream = format("%f 0 0 %f 0 0 cm /%s Do",width,height,imagetag),
+ bbox = { 0, 0, width/factor, height/factor },
+ }
+ img.immediatewrite(xform)
+ return xform
+end
diff --git a/tex/context/base/lpdf-ini.lua b/tex/context/base/lpdf-ini.lua
index cd601f21f..77ccd85fc 100644
--- a/tex/context/base/lpdf-ini.lua
+++ b/tex/context/base/lpdf-ini.lua
@@ -1,822 +1,822 @@
-if not modules then modules = { } end modules ['lpdf-ini'] = {
- version = 1.001,
- comment = "companion to lpdf-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local setmetatable, getmetatable, type, next, tostring, tonumber, rawset = setmetatable, getmetatable, type, next, tostring, tonumber, rawset
-local char, byte, format, gsub, concat, match, sub, gmatch = string.char, string.byte, string.format, string.gsub, table.concat, string.match, string.sub, string.gmatch
-local utfchar, utfvalues = utf.char, utf.values
-local sind, cosd = math.sind, math.cosd
-local lpegmatch, P, C, R, S, Cc, Cs = lpeg.match, lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cc, lpeg.Cs
-local formatters = string.formatters
-
-local pdfreserveobject = pdf.reserveobj
-local pdfimmediateobject = pdf.immediateobj
-local pdfdeferredobject = pdf.obj
-local pdfreferenceobject = pdf.refobj
-
-local trace_finalizers = false trackers.register("backend.finalizers", function(v) trace_finalizers = v end)
-local trace_resources = false trackers.register("backend.resources", function(v) trace_resources = v end)
-local trace_objects = false trackers.register("backend.objects", function(v) trace_objects = v end)
-local trace_detail = false trackers.register("backend.detail", function(v) trace_detail = v end)
-
-local report_objects = logs.reporter("backend","objects")
-local report_finalizing = logs.reporter("backend","finalizing")
-
-local backends = backends
-
-backends.pdf = backends.pdf or {
- comment = "backend for directly generating pdf output",
- nodeinjections = { },
- codeinjections = { },
- registrations = { },
- tables = { },
-}
-
-lpdf = lpdf or { }
-local lpdf = lpdf
-
-local function tosixteen(str) -- an lpeg might be faster (no table)
- if not str or str == "" then
- return "" -- not () as we want an indication that it's unicode
- else
- local r, n = { ""
- return concat(r)
- end
-end
-
-lpdf.tosixteen = tosixteen
-
--- lpeg is some 5 times faster than gsub (in test) on escaping
-
--- local escapes = {
--- ["\\"] = "\\\\",
--- ["/"] = "\\/", ["#"] = "\\#",
--- ["<"] = "\\<", [">"] = "\\>",
--- ["["] = "\\[", ["]"] = "\\]",
--- ["("] = "\\(", [")"] = "\\)",
--- }
---
--- local escaped = Cs(Cc("(") * (S("\\/#<>[]()")/escapes + P(1))^0 * Cc(")"))
---
--- local function toeight(str)
--- if not str or str == "" then
--- return "()"
--- else
--- return lpegmatch(escaped,str)
--- end
--- end
---
--- -- no need for escaping .. just use unicode instead
-
--- \0 \t \n \r \f ( ) [ ] { } / %
-
-local function toeight(str)
- return "(" .. str .. ")"
-end
-
-lpdf.toeight = toeight
-
---~ local escaped = lpeg.Cs((lpeg.S("\0\t\n\r\f ()[]{}/%")/function(s) return format("#%02X",byte(s)) end + lpeg.P(1))^0)
-
---~ local function cleaned(str)
---~ return (str and str ~= "" and lpegmatch(escaped,str)) or ""
---~ end
-
---~ lpdf.cleaned = cleaned -- not public yet
-
-local function merge_t(a,b)
- local t = { }
- for k,v in next, a do t[k] = v end
- for k,v in next, b do t[k] = v end
- return setmetatable(t,getmetatable(a))
-end
-
-local f_key_value = formatters["/%s %s"]
-local f_key_dictionary = formatters["/%s << % t >>"]
-local f_dictionary = formatters["<< % t >>"]
-local f_key_array = formatters["/%s [ % t ]"]
-local f_array = formatters["[ % t ]"]
-
-local tostring_a, tostring_d
-
-tostring_d = function(t,contentonly,key)
- if not next(t) then
- if contentonly then
- return ""
- else
- return "<< >>"
- end
- else
- local r, rn = { }, 0
- for k, v in next, t do
- rn = rn + 1
- local tv = type(v)
- if tv == "string" then
- r[rn] = f_key_value(k,toeight(v))
- elseif tv == "unicode" then
- r[rn] = f_key_value(k,tosixteen(v))
- elseif tv == "table" then
- local mv = getmetatable(v)
- if mv and mv.__lpdftype then
- r[rn] = f_key_value(k,tostring(v))
- elseif v[1] then
- r[rn] = f_key_value(k,tostring_a(v))
- else
- r[rn] = f_key_value(k,tostring_d(v))
- end
- else
- r[rn] = f_key_value(k,tostring(v))
- end
- end
- if contentonly then
- return concat(r," ")
- elseif key then
- return f_key_dictionary(key,r)
- else
- return f_dictionary(r)
- end
- end
-end
-
-tostring_a = function(t,contentonly,key)
- local tn = #t
- if tn == 0 then
- if contentonly then
- return ""
- else
- return "[ ]"
- end
- else
- local r = { }
- for k=1,tn do
- local v = t[k]
- local tv = type(v)
- if tv == "string" then
- r[k] = toeight(v)
- elseif tv == "unicode" then
- r[k] = tosixteen(v)
- elseif tv == "table" then
- local mv = getmetatable(v)
- local mt = mv and mv.__lpdftype
- if mt then
- r[k] = tostring(v)
- elseif v[1] then
- r[k] = tostring_a(v)
- else
- r[k] = tostring_d(v)
- end
- else
- r[k] = tostring(v)
- end
- end
- if contentonly then
- return concat(r, " ")
- elseif key then
- return f_key_array(key,r)
- else
- return f_array(r)
- end
- end
-end
-
-local tostring_x = function(t) return concat(t, " ") end
-local tostring_s = function(t) return toeight(t[1]) end
-local tostring_u = function(t) return tosixteen(t[1]) end
-local tostring_n = function(t) return tostring(t[1]) end -- tostring not needed
-local tostring_c = function(t) return t[1] end -- already prefixed (hashed)
-local tostring_z = function() return "null" end
-local tostring_t = function() return "true" end
-local tostring_f = function() return "false" end
-local tostring_r = function(t) local n = t[1] return n and n > 0 and (n .. " 0 R") or "NULL" end
-
-local tostring_v = function(t)
- local s = t[1]
- if type(s) == "table" then
- return concat(s,"")
- else
- return s
- end
-end
-
-local function value_x(t) return t end -- the call is experimental
-local function value_s(t,key) return t[1] end -- the call is experimental
-local function value_u(t,key) return t[1] end -- the call is experimental
-local function value_n(t,key) return t[1] end -- the call is experimental
-local function value_c(t) return sub(t[1],2) end -- the call is experimental
-local function value_d(t) return tostring_d(t,true) end -- the call is experimental
-local function value_a(t) return tostring_a(t,true) end -- the call is experimental
-local function value_z() return nil end -- the call is experimental
-local function value_t(t) return t.value or true end -- the call is experimental
-local function value_f(t) return t.value or false end -- the call is experimental
-local function value_r() return t[1] or 0 end -- the call is experimental -- NULL
-local function value_v() return t[1] end -- the call is experimental
-
-local function add_x(t,k,v) rawset(t,k,tostring(v)) end
-
-local mt_x = { __lpdftype = "stream", __tostring = tostring_x, __call = value_x, __newindex = add_x }
-local mt_d = { __lpdftype = "dictionary", __tostring = tostring_d, __call = value_d }
-local mt_a = { __lpdftype = "array", __tostring = tostring_a, __call = value_a }
-local mt_u = { __lpdftype = "unicode", __tostring = tostring_u, __call = value_u }
-local mt_s = { __lpdftype = "string", __tostring = tostring_s, __call = value_s }
-local mt_n = { __lpdftype = "number", __tostring = tostring_n, __call = value_n }
-local mt_c = { __lpdftype = "constant", __tostring = tostring_c, __call = value_c }
-local mt_z = { __lpdftype = "null", __tostring = tostring_z, __call = value_z }
-local mt_t = { __lpdftype = "true", __tostring = tostring_t, __call = value_t }
-local mt_f = { __lpdftype = "false", __tostring = tostring_f, __call = value_f }
-local mt_r = { __lpdftype = "reference", __tostring = tostring_r, __call = value_r }
-local mt_v = { __lpdftype = "verbose", __tostring = tostring_v, __call = value_v }
-
-local function pdfstream(t) -- we need to add attributes
- if t then
- for i=1,#t do
- t[i] = tostring(t[i])
- end
- end
- return setmetatable(t or { },mt_x)
-end
-
-local function pdfdictionary(t)
- return setmetatable(t or { },mt_d)
-end
-
-local function pdfarray(t)
- if type(t) == "string" then
- return setmetatable({ t },mt_a)
- else
- return setmetatable(t or { },mt_a)
- end
-end
-
-local function pdfstring(str,default)
- return setmetatable({ str or default or "" },mt_s)
-end
-
-local function pdfunicode(str,default)
- return setmetatable({ str or default or "" },mt_u)
-end
-
-local cache = { } -- can be weak
-
-local function pdfnumber(n,default) -- 0-10
- n = n or default
- local c = cache[n]
- if not c then
- c = setmetatable({ n },mt_n)
- -- cache[n] = c -- too many numbers
- end
- return c
-end
-
-for i=-1,9 do cache[i] = pdfnumber(i) end
-
-local cache = { } -- can be weak
-
-local forbidden, replacements = "\0\t\n\r\f ()[]{}/%%#\\", { } -- table faster than function
-
-for s in gmatch(forbidden,".") do
- replacements[s] = format("#%02x",byte(s))
-end
-
-local escaped = Cs(Cc("/") * (S(forbidden)/replacements + P(1))^0)
-
-local function pdfconstant(str,default)
- str = str or default or ""
- local c = cache[str]
- if not c then
- -- c = setmetatable({ "/" .. str },mt_c)
- c = setmetatable({ lpegmatch(escaped,str) },mt_c)
- cache[str] = c
- end
- return c
-end
-
-local p_null = { } setmetatable(p_null, mt_z)
-local p_true = { } setmetatable(p_true, mt_t)
-local p_false = { } setmetatable(p_false,mt_f)
-
-local function pdfnull()
- return p_null
-end
-
---~ print(pdfboolean(false),pdfboolean(false,false),pdfboolean(false,true))
---~ print(pdfboolean(true),pdfboolean(true,false),pdfboolean(true,true))
---~ print(pdfboolean(nil,true),pdfboolean(nil,false))
-
-local function pdfboolean(b,default)
- if type(b) == "boolean" then
- return b and p_true or p_false
- else
- return default and p_true or p_false
- end
-end
-
-local function pdfreference(r)
- return setmetatable({ r or 0 },mt_r)
-end
-
-local function pdfverbose(t) -- maybe check for type
- return setmetatable({ t or "" },mt_v)
-end
-
-lpdf.stream = pdfstream -- THIS WILL PROBABLY CHANGE
-lpdf.dictionary = pdfdictionary
-lpdf.array = pdfarray
-lpdf.string = pdfstring
-lpdf.unicode = pdfunicode
-lpdf.number = pdfnumber
-lpdf.constant = pdfconstant
-lpdf.null = pdfnull
-lpdf.boolean = pdfboolean
-lpdf.reference = pdfreference
-lpdf.verbose = pdfverbose
-
--- n = pdf.obj(n, str)
--- n = pdf.obj(n, "file", filename)
--- n = pdf.obj(n, "stream", streamtext, attrtext)
--- n = pdf.obj(n, "streamfile", filename, attrtext)
-
--- we only use immediate objects
-
--- todo: tracing
-
-local names, cache = { }, { }
-
-function lpdf.reserveobject(name)
- if name == "annot" then
- -- catch misuse
- return pdfreserveobject("annot")
- else
- local r = pdfreserveobject()
- if name then
- names[name] = r
- if trace_objects then
- report_objects("reserving number %a under name %a",r,name)
- end
- elseif trace_objects then
- report_objects("reserving number %a",r)
- end
- return r
- end
-end
-
-function lpdf.reserveannotation()
- return pdfreserveobject("annot")
-end
-
--- lpdf.immediateobject = pdfimmediateobject
--- lpdf.deferredobject = pdfdeferredobject
--- lpdf.object = pdfdeferredobject
--- lpdf.referenceobject = pdfreferenceobject
-
-lpdf.pagereference = pdf.pageref or tex.pdfpageref
-lpdf.registerannotation = pdf.registerannot
-
-function lpdf.delayedobject(data) -- we will get rid of this one
- local n = pdfdeferredobject(data)
- pdfreferenceobject(n)
- return n
-end
-
-function lpdf.flushobject(name,data)
- if data then
- local named = names[name]
- if named then
- if not trace_objects then
- elseif trace_detail then
- report_objects("flushing data to reserved object with name %a, data: %S",name,data)
- else
- report_objects("flushing data to reserved object with name %a",name)
- end
- return pdfimmediateobject(named,tostring(data))
- else
- if not trace_objects then
- elseif trace_detail then
- report_objects("flushing data to reserved object with number %s, data: %S",name,data)
- else
- report_objects("flushing data to reserved object with number %s",name)
- end
- return pdfimmediateobject(name,tostring(data))
- end
- else
- if trace_objects and trace_detail then
- report_objects("flushing data: %S",name)
- end
- return pdfimmediateobject(tostring(name))
- end
-end
-
-
-function lpdf.flushstreamobject(data,dict,compressed) -- default compressed
- if trace_objects then
- report_objects("flushing stream object of %s bytes",#data)
- end
- local dtype = type(dict)
- return pdfdeferredobject {
- immediate = true,
- compresslevel = compressed == false and 0 or nil,
- type = "stream",
- string = data,
- attr = (dtype == "string" and dict) or (dtype == "table" and dict()) or nil,
- }
-end
-
-function lpdf.flushstreamfileobject(filename,dict,compressed) -- default compressed
- if trace_objects then
- report_objects("flushing stream file object %a",filename)
- end
- local dtype = type(dict)
- return pdfdeferredobject {
- immediate = true,
- compresslevel = compressed == false and 0 or nil,
- type = "stream",
- file = filename,
- attr = (dtype == "string" and dict) or (dtype == "table" and dict()) or nil,
- }
-end
-
-local shareobjectcache, shareobjectreferencecache = { }, { }
-
-function lpdf.shareobject(content)
- if content == nil then
- -- invalid object not created
- else
- content = tostring(content)
- local o = shareobjectcache[content]
- if not o then
- o = pdfimmediateobject(content)
- shareobjectcache[content] = o
- end
- return o
- end
-end
-
-function lpdf.shareobjectreference(content)
- if content == nil then
- -- invalid object not created
- else
- content = tostring(content)
- local r = shareobjectreferencecache[content]
- if not r then
- local o = shareobjectcache[content]
- if not o then
- o = pdfimmediateobject(content)
- shareobjectcache[content] = o
- end
- r = pdfreference(o)
- shareobjectreferencecache[content] = r
- end
- return r
- end
-end
-
---~ local d = lpdf.dictionary()
---~ local e = lpdf.dictionary { ["e"] = "abc", x = lpdf.dictionary { ["f"] = "ABC" } }
---~ local f = lpdf.dictionary { ["f"] = "ABC" }
---~ local a = lpdf.array { lpdf.array { lpdf.string("xxx") } }
-
---~ print(a)
---~ os.exit()
-
---~ d["test"] = lpdf.string ("test")
---~ d["more"] = "more"
---~ d["bool"] = true
---~ d["numb"] = 1234
---~ d["oeps"] = lpdf.dictionary { ["hans"] = "ton" }
---~ d["whow"] = lpdf.array { lpdf.string("ton") }
-
---~ a[#a+1] = lpdf.string("xxx")
---~ a[#a+1] = lpdf.string("yyy")
-
---~ d.what = a
-
---~ print(e)
-
---~ local d = lpdf.dictionary()
---~ d["abcd"] = { 1, 2, 3, "test" }
---~ print(d)
---~ print(d())
-
---~ local d = lpdf.array()
---~ d[#d+1] = 1
---~ d[#d+1] = 2
---~ d[#d+1] = 3
---~ d[#d+1] = "test"
---~ print(d)
-
---~ local d = lpdf.array()
---~ d[#d+1] = { 1, 2, 3, "test" }
---~ print(d)
-
---~ local d = lpdf.array()
---~ d[#d+1] = { a=1, b=2, c=3, d="test" }
---~ print(d)
-
---~ local s = lpdf.constant("xx")
---~ print(s) -- fails somehow
---~ print(s()) -- fails somehow
-
---~ local s = lpdf.boolean(false)
---~ s.value = true
---~ print(s)
---~ print(s())
-
--- three priority levels, default=2
-
-local pagefinalizers, documentfinalizers = { { }, { }, { } }, { { }, { }, { } }
-
-local pageresources, pageattributes, pagesattributes
-
-local function resetpageproperties()
- pageresources = pdfdictionary()
- pageattributes = pdfdictionary()
- pagesattributes = pdfdictionary()
-end
-
-resetpageproperties()
-
-local function setpageproperties()
- pdf.pageresources = pageresources ()
- pdf.pageattributes = pageattributes ()
- pdf.pagesattributes = pagesattributes()
-end
-
-local function addtopageresources (k,v) pageresources [k] = v end
-local function addtopageattributes (k,v) pageattributes [k] = v end
-local function addtopagesattributes(k,v) pagesattributes[k] = v end
-
-lpdf.addtopageresources = addtopageresources
-lpdf.addtopageattributes = addtopageattributes
-lpdf.addtopagesattributes = addtopagesattributes
-
-local function set(where,what,f,when,comment)
- if type(when) == "string" then
- when, comment = 2, when
- elseif not when then
- when = 2
- end
- local w = where[when]
- w[#w+1] = { f, comment }
- if trace_finalizers then
- report_finalizing("%s set: [%s,%s]",what,when,#w)
- end
-end
-
-local function run(where,what)
- if trace_finalizers then
- report_finalizing("start backend, category %a, n %a",what,#where)
- end
- for i=1,#where do
- local w = where[i]
- for j=1,#w do
- local wj = w[j]
- if trace_finalizers then
- report_finalizing("%s finalizer: [%s,%s] %s",what,i,j,wj[2] or "")
- end
- wj[1]()
- end
- end
- if trace_finalizers then
- report_finalizing("stop finalizing")
- end
-end
-
-local function registerpagefinalizer(f,when,comment)
- set(pagefinalizers,"page",f,when,comment)
-end
-
-local function registerdocumentfinalizer(f,when,comment)
- set(documentfinalizers,"document",f,when,comment)
-end
-
-lpdf.registerpagefinalizer = registerpagefinalizer
-lpdf.registerdocumentfinalizer = registerdocumentfinalizer
-
-function lpdf.finalizepage()
- if not environment.initex then
- -- resetpageproperties() -- maybe better before
- run(pagefinalizers,"page")
- setpageproperties()
- resetpageproperties() -- maybe better before
- end
-end
-
-function lpdf.finalizedocument()
- if not environment.initex then
- run(documentfinalizers,"document")
- function lpdf.finalizedocument()
- report_finalizing("serious error: the document is finalized multiple times")
- function lpdf.finalizedocument() end
- end
- end
-end
-
-backends.pdf.codeinjections.finalizepage = lpdf.finalizepage -- will go when we have hook
-
---~ callbacks.register("finish_pdfpage", lpdf.finalizepage)
-callbacks.register("finish_pdffile", lpdf.finalizedocument)
-
--- some minimal tracing, handy for checking the order
-
-local function trace_set(what,key)
- if trace_resources then
- report_finalizing("setting key %a in %a",key,what)
- end
-end
-local function trace_flush(what)
- if trace_resources then
- report_finalizing("flushing %a",what)
- end
-end
-
-lpdf.protectresources = true
-
-local catalog = pdfdictionary { Type = pdfconstant("Catalog") } -- nicer, but when we assign we nil the Type
-local info = pdfdictionary { Type = pdfconstant("Info") } -- nicer, but when we assign we nil the Type
-local names = pdfdictionary { Type = pdfconstant("Names") } -- nicer, but when we assign we nil the Type
-
-local function flushcatalog() if not environment.initex then trace_flush("catalog") catalog.Type = nil pdf.catalog = catalog() end end
-local function flushinfo () if not environment.initex then trace_flush("info") info .Type = nil pdf.info = info () end end
-local function flushnames () if not environment.initex then trace_flush("names") names .Type = nil pdf.names = names () end end
-
-function lpdf.addtocatalog(k,v) if not (lpdf.protectresources and catalog[k]) then trace_set("catalog",k) catalog[k] = v end end
-function lpdf.addtoinfo (k,v) if not (lpdf.protectresources and info [k]) then trace_set("info", k) info [k] = v end end
-function lpdf.addtonames (k,v) if not (lpdf.protectresources and names [k]) then trace_set("names", k) names [k] = v end end
-
-local dummy = pdfreserveobject() -- else bug in hvmd due so some internal luatex conflict
-
--- Some day I will implement a proper minimalized resource management.
-
-local r_extgstates, d_extgstates = pdfreserveobject(), pdfdictionary() local p_extgstates = pdfreference(r_extgstates)
-local r_colorspaces, d_colorspaces = pdfreserveobject(), pdfdictionary() local p_colorspaces = pdfreference(r_colorspaces)
-local r_patterns, d_patterns = pdfreserveobject(), pdfdictionary() local p_patterns = pdfreference(r_patterns)
-local r_shades, d_shades = pdfreserveobject(), pdfdictionary() local p_shades = pdfreference(r_shades)
-
-local function checkextgstates () if next(d_extgstates ) then addtopageresources("ExtGState", p_extgstates ) end end
-local function checkcolorspaces() if next(d_colorspaces) then addtopageresources("ColorSpace",p_colorspaces) end end
-local function checkpatterns () if next(d_patterns ) then addtopageresources("Pattern", p_patterns ) end end
-local function checkshades () if next(d_shades ) then addtopageresources("Shading", p_shades ) end end
-
-local function flushextgstates () if next(d_extgstates ) then trace_flush("extgstates") pdfimmediateobject(r_extgstates, tostring(d_extgstates )) end end
-local function flushcolorspaces() if next(d_colorspaces) then trace_flush("colorspaces") pdfimmediateobject(r_colorspaces,tostring(d_colorspaces)) end end
-local function flushpatterns () if next(d_patterns ) then trace_flush("patterns") pdfimmediateobject(r_patterns, tostring(d_patterns )) end end
-local function flushshades () if next(d_shades ) then trace_flush("shades") pdfimmediateobject(r_shades, tostring(d_shades )) end end
-
-function lpdf.collectedresources()
- local ExtGState = next(d_extgstates ) and p_extgstates
- local ColorSpace = next(d_colorspaces) and p_colorspaces
- local Pattern = next(d_patterns ) and p_patterns
- local Shading = next(d_shades ) and p_shades
- if ExtGState or ColorSpace or Pattern or Shading then
- local collected = pdfdictionary {
- ExtGState = ExtGState,
- ColorSpace = ColorSpace,
- Pattern = Pattern,
- Shading = Shading,
- -- ProcSet = pdfarray { pdfconstant("PDF") },
- }
- return collected()
- else
- return ""
- end
-end
-
-function lpdf.adddocumentextgstate (k,v) d_extgstates [k] = v end
-function lpdf.adddocumentcolorspace(k,v) d_colorspaces[k] = v end
-function lpdf.adddocumentpattern (k,v) d_patterns [k] = v end
-function lpdf.adddocumentshade (k,v) d_shades [k] = v end
-
-registerdocumentfinalizer(flushextgstates,3,"extended graphic states")
-registerdocumentfinalizer(flushcolorspaces,3,"color spaces")
-registerdocumentfinalizer(flushpatterns,3,"patterns")
-registerdocumentfinalizer(flushshades,3,"shades")
-
-registerdocumentfinalizer(flushcatalog,3,"catalog")
-registerdocumentfinalizer(flushinfo,3,"info")
-registerdocumentfinalizer(flushnames,3,"names") -- before catalog
-
-registerpagefinalizer(checkextgstates,3,"extended graphic states")
-registerpagefinalizer(checkcolorspaces,3,"color spaces")
-registerpagefinalizer(checkpatterns,3,"patterns")
-registerpagefinalizer(checkshades,3,"shades")
-
--- in strc-bkm: lpdf.registerdocumentfinalizer(function() structures.bookmarks.place() end,1)
-
-function lpdf.rotationcm(a)
- local s, c = sind(a), cosd(a)
- return format("%0.6f %0.6f %0.6f %0.6f 0 0 cm",c,s,-s,c)
-end
-
--- ! -> universaltime
-
-local timestamp = os.date("%Y-%m-%dT%X") .. os.timezone(true)
-
-function lpdf.timestamp()
- return timestamp
-end
-
-function lpdf.pdftimestamp(str)
- local Y, M, D, h, m, s, Zs, Zh, Zm = match(str,"^(%d%d%d%d)%-(%d%d)%-(%d%d)T(%d%d):(%d%d):(%d%d)([%+%-])(%d%d):(%d%d)$")
- return Y and format("D:%s%s%s%s%s%s%s%s'%s'",Y,M,D,h,m,s,Zs,Zh,Zm)
-end
-
-function lpdf.id()
- return format("%s.%s",tex.jobname,timestamp)
-end
-
-function lpdf.checkedkey(t,key,variant)
- local pn = t and t[key]
- if pn then
- local tn = type(pn)
- if tn == variant then
- if variant == "string" then
- return pn ~= "" and pn or nil
- elseif variant == "table" then
- return next(pn) and pn or nil
- else
- return pn
- end
- elseif tn == "string" and variant == "number" then
- return tonumber(pn)
- end
- end
-end
-
-function lpdf.checkedvalue(value,variant) -- code not shared
- if value then
- local tv = type(value)
- if tv == variant then
- if variant == "string" then
- return value ~= "" and value
- elseif variant == "table" then
- return next(value) and value
- else
- return value
- end
- elseif tv == "string" and variant == "number" then
- return tonumber(value)
- end
- end
-end
-
-function lpdf.limited(n,min,max,default)
- if not n then
- return default
- else
- n = tonumber(n)
- if not n then
- return default
- elseif n > max then
- return max
- elseif n < min then
- return min
- else
- return n
- end
- end
-end
-
--- lpdf.addtoinfo("ConTeXt.Version", tex.contextversiontoks)
--- lpdf.addtoinfo("ConTeXt.Time", os.date("%Y.%m.%d %H:%M")) -- :%S
--- lpdf.addtoinfo("ConTeXt.Jobname", environment.jobname)
--- lpdf.addtoinfo("ConTeXt.Url", "www.pragma-ade.com")
-
-if not pdfreferenceobject then
-
- local delayed = { }
-
- local function flush()
- local n = 0
- for k,v in next, delayed do
- pdfimmediateobject(k,v)
- n = n + 1
- end
- if trace_objects then
- report_objects("%s objects flushed",n)
- end
- delayed = { }
- end
-
- lpdf.registerdocumentfinalizer(flush,3,"objects") -- so we need a final flush too
- lpdf.registerpagefinalizer (flush,3,"objects") -- somehow this lags behind .. I need to look into that some day
-
- function lpdf.delayedobject(data)
- local n = pdfreserveobject()
- delayed[n] = data
- return n
- end
-
-end
+if not modules then modules = { } end modules ['lpdf-ini'] = {
+ version = 1.001,
+ comment = "companion to lpdf-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local setmetatable, getmetatable, type, next, tostring, tonumber, rawset = setmetatable, getmetatable, type, next, tostring, tonumber, rawset
+local char, byte, format, gsub, concat, match, sub, gmatch = string.char, string.byte, string.format, string.gsub, table.concat, string.match, string.sub, string.gmatch
+local utfchar, utfvalues = utf.char, utf.values
+local sind, cosd = math.sind, math.cosd
+local lpegmatch, P, C, R, S, Cc, Cs = lpeg.match, lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cc, lpeg.Cs
+local formatters = string.formatters
+
+local pdfreserveobject = pdf.reserveobj
+local pdfimmediateobject = pdf.immediateobj
+local pdfdeferredobject = pdf.obj
+local pdfreferenceobject = pdf.refobj
+
+local trace_finalizers = false trackers.register("backend.finalizers", function(v) trace_finalizers = v end)
+local trace_resources = false trackers.register("backend.resources", function(v) trace_resources = v end)
+local trace_objects = false trackers.register("backend.objects", function(v) trace_objects = v end)
+local trace_detail = false trackers.register("backend.detail", function(v) trace_detail = v end)
+
+local report_objects = logs.reporter("backend","objects")
+local report_finalizing = logs.reporter("backend","finalizing")
+
+local backends = backends
+
+backends.pdf = backends.pdf or {
+ comment = "backend for directly generating pdf output",
+ nodeinjections = { },
+ codeinjections = { },
+ registrations = { },
+ tables = { },
+}
+
+lpdf = lpdf or { }
+local lpdf = lpdf
+
+local function tosixteen(str) -- an lpeg might be faster (no table)
+    if not str or str == "" then
+        return "<feff>" -- not () as we want an indication that it's unicode
+    else
+        local r, n = { "<feff" }, 1
+        for b in utfvalues(str) do
+            n = n + 1
+            if b < 0x10000 then
+                r[n] = format("%04x",b)
+            else
+                -- characters beyond the basic multilingual plane become a surrogate pair
+                b = b - 0x10000
+                r[n] = format("%04x%04x",0xD800 + (b - b % 1024)/1024,0xDC00 + b % 1024)
+            end
+        end
+        n = n + 1
+        r[n] = ">"
+        return concat(r)
+    end
+end
+
+lpdf.tosixteen = tosixteen
+
+-- lpeg is some 5 times faster than gsub (in test) on escaping
+
+-- local escapes = {
+-- ["\\"] = "\\\\",
+-- ["/"] = "\\/", ["#"] = "\\#",
+-- ["<"] = "\\<", [">"] = "\\>",
+-- ["["] = "\\[", ["]"] = "\\]",
+-- ["("] = "\\(", [")"] = "\\)",
+-- }
+--
+-- local escaped = Cs(Cc("(") * (S("\\/#<>[]()")/escapes + P(1))^0 * Cc(")"))
+--
+-- local function toeight(str)
+-- if not str or str == "" then
+-- return "()"
+-- else
+-- return lpegmatch(escaped,str)
+-- end
+-- end
+--
+-- -- no need for escaping .. just use unicode instead
+
+-- \0 \t \n \r \f ( ) [ ] { } / %
+
+local function toeight(str)
+ return "(" .. str .. ")"
+end
+
+lpdf.toeight = toeight
+
+--~ local escaped = lpeg.Cs((lpeg.S("\0\t\n\r\f ()[]{}/%")/function(s) return format("#%02X",byte(s)) end + lpeg.P(1))^0)
+
+--~ local function cleaned(str)
+--~ return (str and str ~= "" and lpegmatch(escaped,str)) or ""
+--~ end
+
+--~ lpdf.cleaned = cleaned -- not public yet
+
+local function merge_t(a,b)
+ local t = { }
+ for k,v in next, a do t[k] = v end
+ for k,v in next, b do t[k] = v end
+ return setmetatable(t,getmetatable(a))
+end
+
+local f_key_value = formatters["/%s %s"]
+local f_key_dictionary = formatters["/%s << % t >>"]
+local f_dictionary = formatters["<< % t >>"]
+local f_key_array = formatters["/%s [ % t ]"]
+local f_array = formatters["[ % t ]"]
+
+local tostring_a, tostring_d
+
+tostring_d = function(t,contentonly,key)
+ if not next(t) then
+ if contentonly then
+ return ""
+ else
+ return "<< >>"
+ end
+ else
+ local r, rn = { }, 0
+ for k, v in next, t do
+ rn = rn + 1
+ local tv = type(v)
+ if tv == "string" then
+ r[rn] = f_key_value(k,toeight(v))
+ elseif tv == "unicode" then
+ r[rn] = f_key_value(k,tosixteen(v))
+ elseif tv == "table" then
+ local mv = getmetatable(v)
+ if mv and mv.__lpdftype then
+ r[rn] = f_key_value(k,tostring(v))
+ elseif v[1] then
+ r[rn] = f_key_value(k,tostring_a(v))
+ else
+ r[rn] = f_key_value(k,tostring_d(v))
+ end
+ else
+ r[rn] = f_key_value(k,tostring(v))
+ end
+ end
+ if contentonly then
+ return concat(r," ")
+ elseif key then
+ return f_key_dictionary(key,r)
+ else
+ return f_dictionary(r)
+ end
+ end
+end
+
+tostring_a = function(t,contentonly,key)
+ local tn = #t
+ if tn == 0 then
+ if contentonly then
+ return ""
+ else
+ return "[ ]"
+ end
+ else
+ local r = { }
+ for k=1,tn do
+ local v = t[k]
+ local tv = type(v)
+ if tv == "string" then
+ r[k] = toeight(v)
+ elseif tv == "unicode" then
+ r[k] = tosixteen(v)
+ elseif tv == "table" then
+ local mv = getmetatable(v)
+ local mt = mv and mv.__lpdftype
+ if mt then
+ r[k] = tostring(v)
+ elseif v[1] then
+ r[k] = tostring_a(v)
+ else
+ r[k] = tostring_d(v)
+ end
+ else
+ r[k] = tostring(v)
+ end
+ end
+ if contentonly then
+ return concat(r, " ")
+ elseif key then
+ return f_key_array(key,r)
+ else
+ return f_array(r)
+ end
+ end
+end
+
+local tostring_x = function(t) return concat(t, " ") end
+local tostring_s = function(t) return toeight(t[1]) end
+local tostring_u = function(t) return tosixteen(t[1]) end
+local tostring_n = function(t) return tostring(t[1]) end -- tostring not needed
+local tostring_c = function(t) return t[1] end -- already prefixed (hashed)
+local tostring_z = function() return "null" end
+local tostring_t = function() return "true" end
+local tostring_f = function() return "false" end
+local tostring_r = function(t) local n = t[1] return n and n > 0 and (n .. " 0 R") or "NULL" end
+
+local tostring_v = function(t)
+ local s = t[1]
+ if type(s) == "table" then
+ return concat(s,"")
+ else
+ return s
+ end
+end
+
+local function value_x(t) return t end -- the call is experimental
+local function value_s(t,key) return t[1] end -- the call is experimental
+local function value_u(t,key) return t[1] end -- the call is experimental
+local function value_n(t,key) return t[1] end -- the call is experimental
+local function value_c(t) return sub(t[1],2) end -- the call is experimental
+local function value_d(t) return tostring_d(t,true) end -- the call is experimental
+local function value_a(t) return tostring_a(t,true) end -- the call is experimental
+local function value_z() return nil end -- the call is experimental
+local function value_t(t) return t.value or true end -- the call is experimental
+local function value_f(t) return t.value or false end -- the call is experimental
+local function value_r(t) return t[1] or 0 end -- the call is experimental -- NULL
+local function value_v(t) return t[1] end -- the call is experimental
+
+local function add_x(t,k,v) rawset(t,k,tostring(v)) end
+
+local mt_x = { __lpdftype = "stream", __tostring = tostring_x, __call = value_x, __newindex = add_x }
+local mt_d = { __lpdftype = "dictionary", __tostring = tostring_d, __call = value_d }
+local mt_a = { __lpdftype = "array", __tostring = tostring_a, __call = value_a }
+local mt_u = { __lpdftype = "unicode", __tostring = tostring_u, __call = value_u }
+local mt_s = { __lpdftype = "string", __tostring = tostring_s, __call = value_s }
+local mt_n = { __lpdftype = "number", __tostring = tostring_n, __call = value_n }
+local mt_c = { __lpdftype = "constant", __tostring = tostring_c, __call = value_c }
+local mt_z = { __lpdftype = "null", __tostring = tostring_z, __call = value_z }
+local mt_t = { __lpdftype = "true", __tostring = tostring_t, __call = value_t }
+local mt_f = { __lpdftype = "false", __tostring = tostring_f, __call = value_f }
+local mt_r = { __lpdftype = "reference", __tostring = tostring_r, __call = value_r }
+local mt_v = { __lpdftype = "verbose", __tostring = tostring_v, __call = value_v }
+
+local function pdfstream(t) -- we need to add attributes
+ if t then
+ for i=1,#t do
+ t[i] = tostring(t[i])
+ end
+ end
+ return setmetatable(t or { },mt_x)
+end
+
+local function pdfdictionary(t)
+ return setmetatable(t or { },mt_d)
+end
+
+local function pdfarray(t)
+ if type(t) == "string" then
+ return setmetatable({ t },mt_a)
+ else
+ return setmetatable(t or { },mt_a)
+ end
+end
+
+local function pdfstring(str,default)
+ return setmetatable({ str or default or "" },mt_s)
+end
+
+local function pdfunicode(str,default)
+ return setmetatable({ str or default or "" },mt_u)
+end
+
+local cache = { } -- can be weak
+
+local function pdfnumber(n,default) -- 0-10
+ n = n or default
+ local c = cache[n]
+ if not c then
+ c = setmetatable({ n },mt_n)
+ -- cache[n] = c -- too many numbers
+ end
+ return c
+end
+
+for i=-1,9 do cache[i] = pdfnumber(i) end
+
+local cache = { } -- can be weak
+
+local forbidden, replacements = "\0\t\n\r\f ()[]{}/%%#\\", { } -- table faster than function
+
+for s in gmatch(forbidden,".") do
+ replacements[s] = format("#%02x",byte(s))
+end
+
+local escaped = Cs(Cc("/") * (S(forbidden)/replacements + P(1))^0)
+
+local function pdfconstant(str,default)
+ str = str or default or ""
+ local c = cache[str]
+ if not c then
+ -- c = setmetatable({ "/" .. str },mt_c)
+ c = setmetatable({ lpegmatch(escaped,str) },mt_c)
+ cache[str] = c
+ end
+ return c
+end
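+
+-- a small sketch of what the escaping does (the names below are just examples):
+-- forbidden characters in a constant end up as #hh, as in pdf name syntax
+--
+--~ print(pdfconstant("a b"))  -- /a#20b
+--~ print(pdfconstant("x(y)")) -- /x#28y#29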
+
+local p_null = { } setmetatable(p_null, mt_z)
+local p_true = { } setmetatable(p_true, mt_t)
+local p_false = { } setmetatable(p_false,mt_f)
+
+local function pdfnull()
+ return p_null
+end
+
+--~ print(pdfboolean(false),pdfboolean(false,false),pdfboolean(false,true))
+--~ print(pdfboolean(true),pdfboolean(true,false),pdfboolean(true,true))
+--~ print(pdfboolean(nil,true),pdfboolean(nil,false))
+
+local function pdfboolean(b,default)
+ if type(b) == "boolean" then
+ return b and p_true or p_false
+ else
+ return default and p_true or p_false
+ end
+end
+
+local function pdfreference(r)
+ return setmetatable({ r or 0 },mt_r)
+end
+
+local function pdfverbose(t) -- maybe check for type
+ return setmetatable({ t or "" },mt_v)
+end
+
+lpdf.stream = pdfstream -- THIS WILL PROBABLY CHANGE
+lpdf.dictionary = pdfdictionary
+lpdf.array = pdfarray
+lpdf.string = pdfstring
+lpdf.unicode = pdfunicode
+lpdf.number = pdfnumber
+lpdf.constant = pdfconstant
+lpdf.null = pdfnull
+lpdf.boolean = pdfboolean
+lpdf.reference = pdfreference
+lpdf.verbose = pdfverbose
+
+-- n = pdf.obj(n, str)
+-- n = pdf.obj(n, "file", filename)
+-- n = pdf.obj(n, "stream", streamtext, attrtext)
+-- n = pdf.obj(n, "streamfile", filename, attrtext)
+
+-- we only use immediate objects
+
+-- todo: tracing
+
+local names, cache = { }, { }
+
+function lpdf.reserveobject(name)
+ if name == "annot" then
+ -- catch misuse
+ return pdfreserveobject("annot")
+ else
+ local r = pdfreserveobject()
+ if name then
+ names[name] = r
+ if trace_objects then
+ report_objects("reserving number %a under name %a",r,name)
+ end
+ elseif trace_objects then
+ report_objects("reserving number %a",r)
+ end
+ return r
+ end
+end
+
+function lpdf.reserveannotation()
+ return pdfreserveobject("annot")
+end
+
+-- lpdf.immediateobject = pdfimmediateobject
+-- lpdf.deferredobject = pdfdeferredobject
+-- lpdf.object = pdfdeferredobject
+-- lpdf.referenceobject = pdfreferenceobject
+
+lpdf.pagereference = pdf.pageref or tex.pdfpageref
+lpdf.registerannotation = pdf.registerannot
+
+function lpdf.delayedobject(data) -- we will get rid of this one
+ local n = pdfdeferredobject(data)
+ pdfreferenceobject(n)
+ return n
+end
+
+function lpdf.flushobject(name,data)
+ if data then
+ local named = names[name]
+ if named then
+ if not trace_objects then
+ elseif trace_detail then
+ report_objects("flushing data to reserved object with name %a, data: %S",name,data)
+ else
+ report_objects("flushing data to reserved object with name %a",name)
+ end
+ return pdfimmediateobject(named,tostring(data))
+ else
+ if not trace_objects then
+ elseif trace_detail then
+ report_objects("flushing data to reserved object with number %s, data: %S",name,data)
+ else
+ report_objects("flushing data to reserved object with number %s",name)
+ end
+ return pdfimmediateobject(name,tostring(data))
+ end
+ else
+ if trace_objects and trace_detail then
+ report_objects("flushing data: %S",name)
+ end
+ return pdfimmediateobject(tostring(name))
+ end
+end
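+
+-- a usage sketch (the name "mydict" and the key "Foo" are just examples): reserve an
+-- object early, pass references around, and flush the real content later
+--
+--~ local n = lpdf.reserveobject("mydict")
+--~ local r = lpdf.reference(n)  -- tostring(r) gives "n 0 R" with n the object number
+--~ lpdf.flushobject("mydict",lpdf.dictionary { Foo = 123 })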
+
+
+function lpdf.flushstreamobject(data,dict,compressed) -- default compressed
+ if trace_objects then
+ report_objects("flushing stream object of %s bytes",#data)
+ end
+ local dtype = type(dict)
+ return pdfdeferredobject {
+ immediate = true,
+ compresslevel = compressed == false and 0 or nil,
+ type = "stream",
+ string = data,
+ attr = (dtype == "string" and dict) or (dtype == "table" and dict()) or nil,
+ }
+end
+
+function lpdf.flushstreamfileobject(filename,dict,compressed) -- default compressed
+ if trace_objects then
+ report_objects("flushing stream file object %a",filename)
+ end
+ local dtype = type(dict)
+ return pdfdeferredobject {
+ immediate = true,
+ compresslevel = compressed == false and 0 or nil,
+ type = "stream",
+ file = filename,
+ attr = (dtype == "string" and dict) or (dtype == "table" and dict()) or nil,
+ }
+end
+
+local shareobjectcache, shareobjectreferencecache = { }, { }
+
+function lpdf.shareobject(content)
+ if content == nil then
+ -- invalid object not created
+ else
+ content = tostring(content)
+ local o = shareobjectcache[content]
+ if not o then
+ o = pdfimmediateobject(content)
+ shareobjectcache[content] = o
+ end
+ return o
+ end
+end
+
+function lpdf.shareobjectreference(content)
+ if content == nil then
+ -- invalid object not created
+ else
+ content = tostring(content)
+ local r = shareobjectreferencecache[content]
+ if not r then
+ local o = shareobjectcache[content]
+ if not o then
+ o = pdfimmediateobject(content)
+ shareobjectcache[content] = o
+ end
+ r = pdfreference(o)
+ shareobjectreferencecache[content] = r
+ end
+ return r
+ end
+end
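+
+-- a sharing sketch (the content is made up): equal serialized content maps onto one
+-- object, so the second call returns the cached reference
+--
+--~ local r1 = lpdf.shareobjectreference(lpdf.dictionary { S = lpdf.constant("GoTo") })
+--~ local r2 = lpdf.shareobjectreference(lpdf.dictionary { S = lpdf.constant("GoTo") })
+--~ -- r1 == r2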
+
+--~ local d = lpdf.dictionary()
+--~ local e = lpdf.dictionary { ["e"] = "abc", x = lpdf.dictionary { ["f"] = "ABC" } }
+--~ local f = lpdf.dictionary { ["f"] = "ABC" }
+--~ local a = lpdf.array { lpdf.array { lpdf.string("xxx") } }
+
+--~ print(a)
+--~ os.exit()
+
+--~ d["test"] = lpdf.string ("test")
+--~ d["more"] = "more"
+--~ d["bool"] = true
+--~ d["numb"] = 1234
+--~ d["oeps"] = lpdf.dictionary { ["hans"] = "ton" }
+--~ d["whow"] = lpdf.array { lpdf.string("ton") }
+
+--~ a[#a+1] = lpdf.string("xxx")
+--~ a[#a+1] = lpdf.string("yyy")
+
+--~ d.what = a
+
+--~ print(e)
+
+--~ local d = lpdf.dictionary()
+--~ d["abcd"] = { 1, 2, 3, "test" }
+--~ print(d)
+--~ print(d())
+
+--~ local d = lpdf.array()
+--~ d[#d+1] = 1
+--~ d[#d+1] = 2
+--~ d[#d+1] = 3
+--~ d[#d+1] = "test"
+--~ print(d)
+
+--~ local d = lpdf.array()
+--~ d[#d+1] = { 1, 2, 3, "test" }
+--~ print(d)
+
+--~ local d = lpdf.array()
+--~ d[#d+1] = { a=1, b=2, c=3, d="test" }
+--~ print(d)
+
+--~ local s = lpdf.constant("xx")
+--~ print(s) -- fails somehow
+--~ print(s()) -- fails somehow
+
+--~ local s = lpdf.boolean(false)
+--~ s.value = true
+--~ print(s)
+--~ print(s())
+
+-- three priority levels, default=2
+
+local pagefinalizers, documentfinalizers = { { }, { }, { } }, { { }, { }, { } }
+
+local pageresources, pageattributes, pagesattributes
+
+local function resetpageproperties()
+ pageresources = pdfdictionary()
+ pageattributes = pdfdictionary()
+ pagesattributes = pdfdictionary()
+end
+
+resetpageproperties()
+
+local function setpageproperties()
+ pdf.pageresources = pageresources ()
+ pdf.pageattributes = pageattributes ()
+ pdf.pagesattributes = pagesattributes()
+end
+
+local function addtopageresources (k,v) pageresources [k] = v end
+local function addtopageattributes (k,v) pageattributes [k] = v end
+local function addtopagesattributes(k,v) pagesattributes[k] = v end
+
+lpdf.addtopageresources = addtopageresources
+lpdf.addtopageattributes = addtopageattributes
+lpdf.addtopagesattributes = addtopagesattributes
+
+local function set(where,what,f,when,comment)
+ if type(when) == "string" then
+ when, comment = 2, when
+ elseif not when then
+ when = 2
+ end
+ local w = where[when]
+ w[#w+1] = { f, comment }
+ if trace_finalizers then
+ report_finalizing("%s set: [%s,%s]",what,when,#w)
+ end
+end
+
+local function run(where,what)
+ if trace_finalizers then
+ report_finalizing("start backend, category %a, n %a",what,#where)
+ end
+ for i=1,#where do
+ local w = where[i]
+ for j=1,#w do
+ local wj = w[j]
+ if trace_finalizers then
+ report_finalizing("%s finalizer: [%s,%s] %s",what,i,j,wj[2] or "")
+ end
+ wj[1]()
+ end
+ end
+ if trace_finalizers then
+ report_finalizing("stop finalizing")
+ end
+end
+
+local function registerpagefinalizer(f,when,comment)
+ set(pagefinalizers,"page",f,when,comment)
+end
+
+local function registerdocumentfinalizer(f,when,comment)
+ set(documentfinalizers,"document",f,when,comment)
+end
+
+lpdf.registerpagefinalizer = registerpagefinalizer
+lpdf.registerdocumentfinalizer = registerdocumentfinalizer
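+
+-- a registration sketch (the function bodies and comments are just examples): the second
+-- argument is the priority (1..3, default 2), the third a comment shown when tracing
+--
+--~ lpdf.registerpagefinalizer    (function() print("page done")     end,2,"demo")
+--~ lpdf.registerdocumentfinalizer(function() print("document done") end,1,"demo")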
+
+function lpdf.finalizepage()
+ if not environment.initex then
+ -- resetpageproperties() -- maybe better before
+ run(pagefinalizers,"page")
+ setpageproperties()
+ resetpageproperties() -- maybe better before
+ end
+end
+
+function lpdf.finalizedocument()
+ if not environment.initex then
+ run(documentfinalizers,"document")
+ function lpdf.finalizedocument()
+ report_finalizing("serious error: the document is finalized multiple times")
+ function lpdf.finalizedocument() end
+ end
+ end
+end
+
+backends.pdf.codeinjections.finalizepage = lpdf.finalizepage -- will go when we have hook
+
+--~ callbacks.register("finish_pdfpage", lpdf.finalizepage)
+callbacks.register("finish_pdffile", lpdf.finalizedocument)
+
+-- some minimal tracing, handy for checking the order
+
+local function trace_set(what,key)
+ if trace_resources then
+ report_finalizing("setting key %a in %a",key,what)
+ end
+end
+local function trace_flush(what)
+ if trace_resources then
+ report_finalizing("flushing %a",what)
+ end
+end
+
+lpdf.protectresources = true
+
+local catalog = pdfdictionary { Type = pdfconstant("Catalog") } -- nicer, but when we assign we nil the Type
+local info = pdfdictionary { Type = pdfconstant("Info") } -- nicer, but when we assign we nil the Type
+local names = pdfdictionary { Type = pdfconstant("Names") } -- nicer, but when we assign we nil the Type
+
+local function flushcatalog() if not environment.initex then trace_flush("catalog") catalog.Type = nil pdf.catalog = catalog() end end
+local function flushinfo () if not environment.initex then trace_flush("info") info .Type = nil pdf.info = info () end end
+local function flushnames () if not environment.initex then trace_flush("names") names .Type = nil pdf.names = names () end end
+
+function lpdf.addtocatalog(k,v) if not (lpdf.protectresources and catalog[k]) then trace_set("catalog",k) catalog[k] = v end end
+function lpdf.addtoinfo (k,v) if not (lpdf.protectresources and info [k]) then trace_set("info", k) info [k] = v end end
+function lpdf.addtonames (k,v) if not (lpdf.protectresources and names [k]) then trace_set("names", k) names [k] = v end end
+
+local dummy = pdfreserveobject() -- else bug in hvmd due to some internal luatex conflict
+
+-- Some day I will implement a proper minimalized resource management.
+
+local r_extgstates, d_extgstates = pdfreserveobject(), pdfdictionary() local p_extgstates = pdfreference(r_extgstates)
+local r_colorspaces, d_colorspaces = pdfreserveobject(), pdfdictionary() local p_colorspaces = pdfreference(r_colorspaces)
+local r_patterns, d_patterns = pdfreserveobject(), pdfdictionary() local p_patterns = pdfreference(r_patterns)
+local r_shades, d_shades = pdfreserveobject(), pdfdictionary() local p_shades = pdfreference(r_shades)
+
+local function checkextgstates () if next(d_extgstates ) then addtopageresources("ExtGState", p_extgstates ) end end
+local function checkcolorspaces() if next(d_colorspaces) then addtopageresources("ColorSpace",p_colorspaces) end end
+local function checkpatterns () if next(d_patterns ) then addtopageresources("Pattern", p_patterns ) end end
+local function checkshades () if next(d_shades ) then addtopageresources("Shading", p_shades ) end end
+
+local function flushextgstates () if next(d_extgstates ) then trace_flush("extgstates") pdfimmediateobject(r_extgstates, tostring(d_extgstates )) end end
+local function flushcolorspaces() if next(d_colorspaces) then trace_flush("colorspaces") pdfimmediateobject(r_colorspaces,tostring(d_colorspaces)) end end
+local function flushpatterns () if next(d_patterns ) then trace_flush("patterns") pdfimmediateobject(r_patterns, tostring(d_patterns )) end end
+local function flushshades () if next(d_shades ) then trace_flush("shades") pdfimmediateobject(r_shades, tostring(d_shades )) end end
+
+function lpdf.collectedresources()
+ local ExtGState = next(d_extgstates ) and p_extgstates
+ local ColorSpace = next(d_colorspaces) and p_colorspaces
+ local Pattern = next(d_patterns ) and p_patterns
+ local Shading = next(d_shades ) and p_shades
+ if ExtGState or ColorSpace or Pattern or Shading then
+ local collected = pdfdictionary {
+ ExtGState = ExtGState,
+ ColorSpace = ColorSpace,
+ Pattern = Pattern,
+ Shading = Shading,
+ -- ProcSet = pdfarray { pdfconstant("PDF") },
+ }
+ return collected()
+ else
+ return ""
+ end
+end
+
+function lpdf.adddocumentextgstate (k,v) d_extgstates [k] = v end
+function lpdf.adddocumentcolorspace(k,v) d_colorspaces[k] = v end
+function lpdf.adddocumentpattern (k,v) d_patterns [k] = v end
+function lpdf.adddocumentshade (k,v) d_shades [k] = v end
+
+registerdocumentfinalizer(flushextgstates,3,"extended graphic states")
+registerdocumentfinalizer(flushcolorspaces,3,"color spaces")
+registerdocumentfinalizer(flushpatterns,3,"patterns")
+registerdocumentfinalizer(flushshades,3,"shades")
+
+registerdocumentfinalizer(flushcatalog,3,"catalog")
+registerdocumentfinalizer(flushinfo,3,"info")
+registerdocumentfinalizer(flushnames,3,"names") -- before catalog
+
+registerpagefinalizer(checkextgstates,3,"extended graphic states")
+registerpagefinalizer(checkcolorspaces,3,"color spaces")
+registerpagefinalizer(checkpatterns,3,"patterns")
+registerpagefinalizer(checkshades,3,"shades")
+
+-- in strc-bkm: lpdf.registerdocumentfinalizer(function() structures.bookmarks.place() end,1)
+
+function lpdf.rotationcm(a)
+ local s, c = sind(a), cosd(a)
+ return format("%0.6f %0.6f %0.6f %0.6f 0 0 cm",c,s,-s,c)
+end
+
+-- ! -> universaltime
+
+local timestamp = os.date("%Y-%m-%dT%X") .. os.timezone(true)
+
+function lpdf.timestamp()
+ return timestamp
+end
+
+function lpdf.pdftimestamp(str)
+ local Y, M, D, h, m, s, Zs, Zh, Zm = match(str,"^(%d%d%d%d)%-(%d%d)%-(%d%d)T(%d%d):(%d%d):(%d%d)([%+%-])(%d%d):(%d%d)$")
+ return Y and format("D:%s%s%s%s%s%s%s%s'%s'",Y,M,D,h,m,s,Zs,Zh,Zm)
+end
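+
+-- a conversion sketch (the date is made up):
+--
+--~ print(lpdf.pdftimestamp("2010-05-20T12:34:56+02:00")) -- D:20100520123456+02'00'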
+
+function lpdf.id()
+ return format("%s.%s",tex.jobname,timestamp)
+end
+
+function lpdf.checkedkey(t,key,variant)
+ local pn = t and t[key]
+ if pn then
+ local tn = type(pn)
+ if tn == variant then
+ if variant == "string" then
+ return pn ~= "" and pn or nil
+ elseif variant == "table" then
+ return next(pn) and pn or nil
+ else
+ return pn
+ end
+ elseif tn == "string" and variant == "number" then
+ return tonumber(pn)
+ end
+ end
+end
+
+function lpdf.checkedvalue(value,variant) -- code not shared
+ if value then
+ local tv = type(value)
+ if tv == variant then
+ if variant == "string" then
+ return value ~= "" and value
+ elseif variant == "table" then
+ return next(value) and value
+ else
+ return value
+ end
+ elseif tv == "string" and variant == "number" then
+ return tonumber(value)
+ end
+ end
+end
+
+function lpdf.limited(n,min,max,default)
+ if not n then
+ return default
+ else
+ n = tonumber(n)
+ if not n then
+ return default
+ elseif n > max then
+ return max
+ elseif n < min then
+ return min
+ else
+ return n
+ end
+ end
+end
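+
+-- a clipping sketch (the values are made up):
+--
+--~ print(lpdf.limited("250",0,100,50)) -- 100 (clipped to max)
+--~ print(lpdf.limited(nil,0,100,50))   -- 50  (default)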
+
+-- lpdf.addtoinfo("ConTeXt.Version", tex.contextversiontoks)
+-- lpdf.addtoinfo("ConTeXt.Time", os.date("%Y.%m.%d %H:%M")) -- :%S
+-- lpdf.addtoinfo("ConTeXt.Jobname", environment.jobname)
+-- lpdf.addtoinfo("ConTeXt.Url", "www.pragma-ade.com")
+
+if not pdfreferenceobject then
+
+ local delayed = { }
+
+ local function flush()
+ local n = 0
+ for k,v in next, delayed do
+ pdfimmediateobject(k,v)
+ n = n + 1
+ end
+ if trace_objects then
+ report_objects("%s objects flushed",n)
+ end
+ delayed = { }
+ end
+
+ lpdf.registerdocumentfinalizer(flush,3,"objects") -- so we need a final flush too
+ lpdf.registerpagefinalizer (flush,3,"objects") -- somehow this lags behind .. I need to look into that some day
+
+ function lpdf.delayedobject(data)
+ local n = pdfreserveobject()
+ delayed[n] = data
+ return n
+ end
+
+end
diff --git a/tex/context/base/lpdf-mov.lua b/tex/context/base/lpdf-mov.lua
index 41db97e0c..2f0033d1a 100644
--- a/tex/context/base/lpdf-mov.lua
+++ b/tex/context/base/lpdf-mov.lua
@@ -1,63 +1,63 @@
-if not modules then modules = { } end modules ['lpdf-mov'] = {
- version = 1.001,
- comment = "companion to lpdf-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format = string.format
-
-local lpdf = lpdf
-
-local nodeinjections = backends.pdf.nodeinjections
-local pdfannotation_node = nodes.pool.pdfannotation
-local pdfconstant = lpdf.constant
-local pdfdictionary = lpdf.dictionary
-local pdfarray = lpdf.array
-local write_node = node.write
-
-function nodeinjections.insertmovie(specification)
- -- managed in figure inclusion: width, height, factor, repeat, controls, preview, label, foundname
- local width = specification.width
- local height = specification.height
- local factor = specification.factor or number.dimenfactors.bp
- local moviedict = pdfdictionary {
- F = specification.foundname,
- Aspect = pdfarray { factor * width, factor * height },
- Poster = (specification.preview and true) or false,
- }
- local controldict = pdfdictionary {
- ShowControls = (specification.controls and true) or false,
- Mode = (specification["repeat"] and pdfconstant("Repeat")) or nil,
- }
- local action = pdfdictionary {
- Subtype = pdfconstant("Movie"),
- Border = pdfarray { 0, 0, 0 },
- T = format("movie %s",specification.label),
- Movie = moviedict,
- A = controldict,
- }
- write_node(pdfannotation_node(width,height,0,action())) -- test: context(...)
-end
-
-function nodeinjections.insertsound(specification)
- -- rmanaged in interaction: repeat, label, foundname
- local soundclip = interactions.soundclips.soundclip(specification.label)
- if soundclip then
- local controldict = pdfdictionary {
- Mode = (specification["repeat"] and pdfconstant("Repeat")) or nil
- }
- local sounddict = pdfdictionary {
- F = soundclip.filename
- }
- local action = pdfdictionary {
- Subtype = pdfconstant("Movie"),
- Border = pdfarray { 0, 0, 0 },
- T = format("sound %s",specification.label),
- Movie = sounddict,
- A = controldict,
- }
- write_node(pdfannotation_node(0,0,0,action())) -- test: context(...)
- end
-end
+if not modules then modules = { } end modules ['lpdf-mov'] = {
+ version = 1.001,
+ comment = "companion to lpdf-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format = string.format
+
+local lpdf = lpdf
+
+local nodeinjections = backends.pdf.nodeinjections
+local pdfannotation_node = nodes.pool.pdfannotation
+local pdfconstant = lpdf.constant
+local pdfdictionary = lpdf.dictionary
+local pdfarray = lpdf.array
+local write_node = node.write
+
+function nodeinjections.insertmovie(specification)
+ -- managed in figure inclusion: width, height, factor, repeat, controls, preview, label, foundname
+ local width = specification.width
+ local height = specification.height
+ local factor = specification.factor or number.dimenfactors.bp
+ local moviedict = pdfdictionary {
+ F = specification.foundname,
+ Aspect = pdfarray { factor * width, factor * height },
+ Poster = (specification.preview and true) or false,
+ }
+ local controldict = pdfdictionary {
+ ShowControls = (specification.controls and true) or false,
+ Mode = (specification["repeat"] and pdfconstant("Repeat")) or nil,
+ }
+ local action = pdfdictionary {
+ Subtype = pdfconstant("Movie"),
+ Border = pdfarray { 0, 0, 0 },
+ T = format("movie %s",specification.label),
+ Movie = moviedict,
+ A = controldict,
+ }
+ write_node(pdfannotation_node(width,height,0,action())) -- test: context(...)
+end
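+
+-- an input sketch (all field values are made up); this is the kind of specification
+-- that the figure inclusion code passes on:
+--
+--~ nodeinjections.insertmovie {
+--~     width     = 100*65536,
+--~     height    =  75*65536,
+--~     factor    = number.dimenfactors.bp,
+--~     label     = "demo",
+--~     foundname = "demo.mov",
+--~ }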
+
+function nodeinjections.insertsound(specification)
+    -- managed in interaction: repeat, label, foundname
+ local soundclip = interactions.soundclips.soundclip(specification.label)
+ if soundclip then
+ local controldict = pdfdictionary {
+ Mode = (specification["repeat"] and pdfconstant("Repeat")) or nil
+ }
+ local sounddict = pdfdictionary {
+ F = soundclip.filename
+ }
+ local action = pdfdictionary {
+ Subtype = pdfconstant("Movie"),
+ Border = pdfarray { 0, 0, 0 },
+ T = format("sound %s",specification.label),
+ Movie = sounddict,
+ A = controldict,
+ }
+ write_node(pdfannotation_node(0,0,0,action())) -- test: context(...)
+ end
+end
diff --git a/tex/context/base/lpdf-nod.lua b/tex/context/base/lpdf-nod.lua
index 60d3fcd5b..9c57d6289 100644
--- a/tex/context/base/lpdf-nod.lua
+++ b/tex/context/base/lpdf-nod.lua
@@ -1,136 +1,136 @@
-if not modules then modules = { } end modules ['lpdf-nod'] = {
- version = 1.001,
- comment = "companion to lpdf-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format = string.format
-
-local copy_node = node.copy
-local new_node = node.new
-
-local nodepool = nodes.pool
-local register = nodepool.register
-local whatsitcodes = nodes.whatsitcodes
-local nodeinjections = backends.nodeinjections
-
-local pdfliteral = register(new_node("whatsit", whatsitcodes.pdfliteral)) pdfliteral.mode = 1
-local pdfsave = register(new_node("whatsit", whatsitcodes.pdfsave))
-local pdfrestore = register(new_node("whatsit", whatsitcodes.pdfrestore))
-local pdfsetmatrix = register(new_node("whatsit", whatsitcodes.pdfsetmatrix))
-local pdfdest = register(new_node("whatsit", whatsitcodes.pdfdest)) pdfdest.named_id = 1 -- xyz_zoom untouched
-local pdfannot = register(new_node("whatsit", whatsitcodes.pdfannot))
-
-local variables = interfaces.variables
-
-local views = { -- beware, we do support the pdf keys but this is *not* official
- xyz = 0, [variables.standard] = 0,
- fit = 1, [variables.fit] = 1,
- fith = 2, [variables.width] = 2,
- fitv = 3, [variables.height] = 3,
- fitb = 4,
- fitbh = 5, [variables.minwidth] = 5,
- fitbv = 6, [variables.minheight] = 6,
- fitr = 7,
-}
-
-function nodepool.pdfliteral(str)
- local t = copy_node(pdfliteral)
- t.data = str
- return t
-end
-
-function nodepool.pdfdirect(str)
- local t = copy_node(pdfliteral)
- t.data = str
- t.mode = 1
- return t
-end
-
-function nodepool.pdfsave()
- return copy_node(pdfsave)
-end
-
-function nodepool.pdfrestore()
- return copy_node(pdfrestore)
-end
-
-function nodepool.pdfsetmatrix(rx,sx,sy,ry,tx,ty)
- local t = copy_node(pdfsetmatrix)
- t.data = format("%s %s %s %s",rx or 0,sx or 0,sy or 0,ry or 0) -- todo: tx ty
- return t
-end
-
-nodeinjections.save = nodepool.pdfsave
-nodeinjections.restore = nodepool.pdfrestore
-nodeinjections.transform = nodepool.pdfsetmatrix
-
-function nodepool.pdfannotation(w,h,d,data,n)
- local t = copy_node(pdfannot)
- if w and w ~= 0 then
- t.width = w
- end
- if h and h ~= 0 then
- t.height = h
- end
- if d and d ~= 0 then
- t.depth = d
- end
- if n then
- t.objnum = n
- end
- if data and data ~= "" then
- t.data = data
- end
- return t
-end
-
--- (!) The next code in pdfdest.w is wrong:
---
--- case pdf_dest_xyz:
--- if (matrixused()) {
--- set_rect_dimens(pdf, p, parent_box, cur, alt_rule, pdf_dest_margin) ;
--- } else {
--- pdf_ann_left(p) = pos.h ;
--- pdf_ann_top (p) = pos.v ;
--- }
--- break ;
---
--- so we need to force a matrix.
-
-function nodepool.pdfdestination(w,h,d,name,view,n)
- local t = copy_node(pdfdest)
- local hasdimensions = false
- if w and w ~= 0 then
- t.width = w
- hasdimensions = true
- end
- if h and h ~= 0 then
- t.height = h
- hasdimensions = true
- end
- if d and d ~= 0 then
- t.depth = d
- hasdimensions = true
- end
- if n then
- t.objnum = n
- end
- view = views[view] or view or 1 -- fit is default
- t.dest_id = name
- t.dest_type = view
- if hasdimensions and view == 0 then -- xyz
- -- see (!) s -> m -> t -> r
- local s = copy_node(pdfsave)
- local m = copy_node(pdfsetmatrix)
- local r = copy_node(pdfrestore)
- m.data = "1 0 0 1"
- s.next = m m.next = t t.next = r
- m.prev = s t.prev = m r.prev = t
- return s -- a list
- else
- return t
- end
-end
+if not modules then modules = { } end modules ['lpdf-nod'] = {
+ version = 1.001,
+ comment = "companion to lpdf-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format = string.format
+
+local copy_node = node.copy
+local new_node = node.new
+
+local nodepool = nodes.pool
+local register = nodepool.register
+local whatsitcodes = nodes.whatsitcodes
+local nodeinjections = backends.nodeinjections
+
+local pdfliteral = register(new_node("whatsit", whatsitcodes.pdfliteral)) pdfliteral.mode = 1
+local pdfsave = register(new_node("whatsit", whatsitcodes.pdfsave))
+local pdfrestore = register(new_node("whatsit", whatsitcodes.pdfrestore))
+local pdfsetmatrix = register(new_node("whatsit", whatsitcodes.pdfsetmatrix))
+local pdfdest = register(new_node("whatsit", whatsitcodes.pdfdest)) pdfdest.named_id = 1 -- xyz_zoom untouched
+local pdfannot = register(new_node("whatsit", whatsitcodes.pdfannot))
+
+local variables = interfaces.variables
+
+local views = { -- beware, we do support the pdf keys but this is *not* official
+ xyz = 0, [variables.standard] = 0,
+ fit = 1, [variables.fit] = 1,
+ fith = 2, [variables.width] = 2,
+ fitv = 3, [variables.height] = 3,
+ fitb = 4,
+ fitbh = 5, [variables.minwidth] = 5,
+ fitbv = 6, [variables.minheight] = 6,
+ fitr = 7,
+}
+
+function nodepool.pdfliteral(str)
+ local t = copy_node(pdfliteral)
+ t.data = str
+ return t
+end
+
+function nodepool.pdfdirect(str)
+ local t = copy_node(pdfliteral)
+ t.data = str
+ t.mode = 1
+ return t
+end
+
+function nodepool.pdfsave()
+ return copy_node(pdfsave)
+end
+
+function nodepool.pdfrestore()
+ return copy_node(pdfrestore)
+end
+
+function nodepool.pdfsetmatrix(rx,sx,sy,ry,tx,ty)
+ local t = copy_node(pdfsetmatrix)
+ t.data = format("%s %s %s %s",rx or 0,sx or 0,sy or 0,ry or 0) -- todo: tx ty
+ return t
+end
+
+nodeinjections.save = nodepool.pdfsave
+nodeinjections.restore = nodepool.pdfrestore
+nodeinjections.transform = nodepool.pdfsetmatrix
+
+function nodepool.pdfannotation(w,h,d,data,n)
+ local t = copy_node(pdfannot)
+ if w and w ~= 0 then
+ t.width = w
+ end
+ if h and h ~= 0 then
+ t.height = h
+ end
+ if d and d ~= 0 then
+ t.depth = d
+ end
+ if n then
+ t.objnum = n
+ end
+ if data and data ~= "" then
+ t.data = data
+ end
+ return t
+end
+
+-- (!) The next code in pdfdest.w is wrong:
+--
+-- case pdf_dest_xyz:
+-- if (matrixused()) {
+-- set_rect_dimens(pdf, p, parent_box, cur, alt_rule, pdf_dest_margin) ;
+-- } else {
+-- pdf_ann_left(p) = pos.h ;
+-- pdf_ann_top (p) = pos.v ;
+-- }
+-- break ;
+--
+-- so we need to force a matrix.
+
+function nodepool.pdfdestination(w,h,d,name,view,n)
+ local t = copy_node(pdfdest)
+ local hasdimensions = false
+ if w and w ~= 0 then
+ t.width = w
+ hasdimensions = true
+ end
+ if h and h ~= 0 then
+ t.height = h
+ hasdimensions = true
+ end
+ if d and d ~= 0 then
+ t.depth = d
+ hasdimensions = true
+ end
+ if n then
+ t.objnum = n
+ end
+ view = views[view] or view or 1 -- fit is default
+ t.dest_id = name
+ t.dest_type = view
+ if hasdimensions and view == 0 then -- xyz
+ -- see (!) s -> m -> t -> r
+ local s = copy_node(pdfsave)
+ local m = copy_node(pdfsetmatrix)
+ local r = copy_node(pdfrestore)
+ m.data = "1 0 0 1"
+ s.next = m m.next = t t.next = r
+ m.prev = s t.prev = m r.prev = t
+ return s -- a list
+ else
+ return t
+ end
+end
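+
+-- a usage sketch (the name and height are made up): an xyz destination with dimensions
+-- gets wrapped in the save/matrix/restore combination mentioned above
+--
+--~ local d = nodepool.pdfdestination(0,10*65536,0,"aut:1","xyz")
+--~ node.write(d)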
diff --git a/tex/context/base/lpdf-ren.lua b/tex/context/base/lpdf-ren.lua
index 6af65f9de..19582817d 100644
--- a/tex/context/base/lpdf-ren.lua
+++ b/tex/context/base/lpdf-ren.lua
@@ -1,349 +1,349 @@
-if not modules then modules = { } end modules ['lpdf-ren'] = {
- version = 1.001,
- comment = "companion to lpdf-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- rendering
-
-local tostring, tonumber, next = tostring, tonumber, next
-local format, rep = string.format, string.rep
-local concat = table.concat
-local settings_to_array = utilities.parsers.settings_to_array
-
-local backends, lpdf, nodes, node = backends, lpdf, nodes, node
-
-local nodeinjections = backends.pdf.nodeinjections
-local codeinjections = backends.pdf.codeinjections
-local registrations = backends.pdf.registrations
-local viewerlayers = attributes.viewerlayers
-
-local references = structures.references
-
-references.executers = references.executers or { }
-local executers = references.executers
-
-local variables = interfaces.variables
-
-local v_no = variables.no
-local v_yes = variables.yes
-local v_start = variables.start
-local v_stop = variables.stop
-local v_reset = variables.reset
-local v_auto = variables.auto
-local v_random = variables.random
-
-local pdfconstant = lpdf.constant
-local pdfdictionary = lpdf.dictionary
-local pdfarray = lpdf.array
-local pdfreference = lpdf.reference
-local pdfflushobject = lpdf.flushobject
-local pdfreserveobject = lpdf.reserveobject
-
-local nodepool = nodes.pool
-local register = nodepool.register
-local pdfliteral = nodepool.pdfliteral
-
-local pdf_ocg = pdfconstant("OCG")
-local pdf_ocmd = pdfconstant("OCMD")
-local pdf_off = pdfconstant("OFF")
-local pdf_on = pdfconstant("ON")
-local pdf_toggle = pdfconstant("Toggle")
-local pdf_setocgstate = pdfconstant("SetOCGState")
-
-local copy_node = node.copy
-
-local lpdf_usage = pdfdictionary { Print = pdfdictionary { PrintState = pdf_off } }
-
--- We can have references to layers before they are places, for instance from
--- hide and vide actions. This is why we need to be able to force usage of layers
--- at several moments.
-
--- management
-
-local pdfln, pdfld = { }, { }
-local textlayers, hidelayers, videlayers = pdfarray(), pdfarray(), pdfarray()
-local pagelayers, pagelayersreference, cache = nil, nil, { }
-local alphabetic = { }
-
-local specifications = { }
-local initialized = { }
-
-function codeinjections.defineviewerlayer(specification)
- if viewerlayers.supported and textlayers then
- local tag = specification.tag
- if not specifications[tag] then
- specifications[tag] = specification
- end
- end
-end
-
-local function useviewerlayer(name) -- move up so that we can use it as local
- if not environment.initex and not initialized[name] then
- local specification = specifications[name]
- if specification then
- specifications[name] = nil -- or not
- initialized [name] = true
- if not pagelayers then
- pagelayers = pdfdictionary()
- pagelayersreference = pdfreserveobject()
- end
- local tag = specification.tag
- -- todo: reserve
- local nn = pdfreserveobject()
- local nr = pdfreference(nn)
- local nd = pdfdictionary {
- Type = pdf_ocg,
- Name = specification.title or "unknown",
- Intent = ((specification.editable ~= v_no) and pdf_design) or nil, -- disable layer hiding by user
- Usage = ((specification.printable == v_no) and lpdf_usage) or nil, -- printable or not
- }
- cache[#cache+1] = { nn, nd }
- pdfln[tag] = nr -- was n
- local dn = pdfreserveobject()
- local dr = pdfreference(dn)
- local dd = pdfdictionary {
- Type = pdf_ocmd,
- OCGs = pdfarray { nr },
- }
- cache[#cache+1] = { dn, dd }
- pdfld[tag] = dr
- textlayers[#textlayers+1] = nr
- alphabetic[tag] = nr
- if specification.visible == v_start then
- videlayers[#videlayers+1] = nr
- else
- hidelayers[#hidelayers+1] = nr
- end
- pagelayers[tag] = dr -- check
- else
- -- todo: message
- end
- end
-end
-
-codeinjections.useviewerlayer = useviewerlayer
-
-local function layerreference(name)
- local r = pdfln[name]
- if r then
- return r
- else
- useviewerlayer(name)
- return pdfln[name]
- end
-end
-
-lpdf.layerreference = layerreference -- also triggered when a hide or vide happens
-
-local function flushtextlayers()
- if viewerlayers.supported then
- if pagelayers then
- pdfflushobject(pagelayersreference,pagelayers)
- end
- for i=1,#cache do
- local ci = cache[i]
- pdfflushobject(ci[1],ci[2])
- end
- if textlayers and #textlayers > 0 then -- we can group them if needed, like: layout
- local sortedlayers = { }
- for k, v in table.sortedhash(alphabetic) do
- sortedlayers[#sortedlayers+1] = v -- maybe do a proper numeric sort as well
- end
- local d = pdfdictionary {
- OCGs = textlayers,
- D = pdfdictionary {
- Name = "Document",
- -- Order = (viewerlayers.hasorder and textlayers) or nil,
- Order = (viewerlayers.hasorder and sortedlayers) or nil,
- ON = videlayers,
- OFF = hidelayers,
- BaseState = pdf_on,
- },
- }
- lpdf.addtocatalog("OCProperties",d)
- textlayers = nil
- end
- end
-end
-
-local function flushpagelayers() -- we can share these
- if pagelayers then
- lpdf.addtopageresources("Properties",pdfreference(pagelayersreference)) -- we could cache this
- end
-end
-
-lpdf.registerpagefinalizer (flushpagelayers,"layers")
-lpdf.registerdocumentfinalizer(flushtextlayers,"layers")
-
-local function setlayer(what,arguments)
-    -- maybe just a gmatch or, even better, earlier in lpeg
- arguments = (type(arguments) == "table" and arguments) or settings_to_array(arguments)
- local state = pdfarray { what }
- for i=1,#arguments do
- local p = layerreference(arguments[i])
- if p then
- state[#state+1] = p
- end
- end
- return pdfdictionary {
- S = pdf_setocgstate,
- State = state,
- }
-end
-
-function executers.hidelayer (arguments) return setlayer(pdf_off, arguments) end
-function executers.videlayer (arguments) return setlayer(pdf_on, arguments) end
-function executers.togglelayer(arguments) return setlayer(pdf_toggle,arguments) end
-
--- injection
-
-function codeinjections.startlayer(name) -- used in mp
- if not name then
- name = "unknown"
- end
- useviewerlayer(name)
- return format("/OC /%s BDC",name)
-end
-
-function codeinjections.stoplayer(name) -- used in mp
- return "EMC"
-end
-
-local cache = { }
-
-function nodeinjections.startlayer(name)
- local c = cache[name]
- if not c then
- useviewerlayer(name)
- c = register(pdfliteral(format("/OC /%s BDC",name)))
- cache[name] = c
- end
- return copy_node(c)
-end
-
-local stop = register(pdfliteral("EMC"))
-
-function nodeinjections.stoplayer()
- return copy_node(stop)
-end
-
--- experimental stacker code (slow, can be optimized): !!!! TEST CODE !!!!
-
-local values = viewerlayers.values
-local startlayer = codeinjections.startlayer
-local stoplayer = codeinjections.stoplayer
-
-function nodeinjections.startstackedlayer(s,t,first,last)
- local r = { }
- for i=first,last do
- r[#r+1] = startlayer(values[t[i]])
- end
- r = concat(r," ")
- return pdfliteral(r)
-end
-
-function nodeinjections.stopstackedlayer(s,t,first,last)
- local r = { }
- for i=last,first,-1 do
- r[#r+1] = stoplayer()
- end
- r = concat(r," ")
- return pdfliteral(r)
-end
-
-function nodeinjections.changestackedlayer(s,t1,first1,last1,t2,first2,last2)
- local r = { }
- for i=last1,first1,-1 do
- r[#r+1] = stoplayer()
- end
- for i=first2,last2 do
- r[#r+1] = startlayer(values[t2[i]])
- end
- r = concat(r," ")
- return pdfliteral(r)
-end
-
--- transitions
-
-local pagetransitions = {
- {"split","in","vertical"}, {"split","in","horizontal"},
- {"split","out","vertical"}, {"split","out","horizontal"},
- {"blinds","horizontal"}, {"blinds","vertical"},
- {"box","in"}, {"box","out"},
- {"wipe","east"}, {"wipe","west"}, {"wipe","north"}, {"wipe","south"},
- {"dissolve"},
- {"glitter","east"}, {"glitter","south"},
- {"fly","in","east"}, {"fly","in","west"}, {"fly","in","north"}, {"fly","in","south"},
- {"fly","out","east"}, {"fly","out","west"}, {"fly","out","north"}, {"fly","out","south"},
- {"push","east"}, {"push","west"}, {"push","north"}, {"push","south"},
- {"cover","east"}, {"cover","west"}, {"cover","north"}, {"cover","south"},
- {"uncover","east"}, {"uncover","west"}, {"uncover","north"}, {"uncover","south"},
- {"fade"},
-}
-
-local mapping = {
- split = { "S" , pdfconstant("Split") },
- blinds = { "S" , pdfconstant("Blinds") },
- box = { "S" , pdfconstant("Box") },
- wipe = { "S" , pdfconstant("Wipe") },
- dissolve = { "S" , pdfconstant("Dissolve") },
- glitter = { "S" , pdfconstant("Glitter") },
- replace = { "S" , pdfconstant("R") },
- fly = { "S" , pdfconstant("Fly") },
- push = { "S" , pdfconstant("Push") },
- cover = { "S" , pdfconstant("Cover") },
- uncover = { "S" , pdfconstant("Uncover") },
- fade = { "S" , pdfconstant("Fade") },
- horizontal = { "Dm" , pdfconstant("H") },
- vertical = { "Dm" , pdfconstant("V") },
- ["in"] = { "M" , pdfconstant("I") },
- out = { "M" , pdfconstant("O") },
- east = { "Di" , 0 },
- north = { "Di" , 90 },
- west = { "Di" , 180 },
- south = { "Di" , 270 },
-}
-
-local last = 0
-
--- n: number, "stop", "reset", "random", "a,b,c" delay: number, "none"
-
-function codeinjections.setpagetransition(specification)
- local n, delay = specification.n, specification.delay
- if not n or n == "" then
- return -- let's forget about it
- elseif n == v_auto then
- if last >= #pagetransitions then
- last = 0
- end
- n = last + 1
- elseif n == v_stop then
- return
- elseif n == v_reset then
- last = 0
- return
- elseif n == v_random then
- n = math.random(1,#pagetransitions)
- else
- n = tonumber(n)
- end
- local t = n and pagetransitions[n] or pagetransitions[1]
- if not t then
- t = settings_to_array(n)
- end
- if t and #t > 0 then
- local d = pdfdictionary()
- for i=1,#t do
- local m = mapping[t[i]]
- d[m[1]] = m[2]
- end
- delay = tonumber(delay)
- if delay and delay > 0 then
- lpdf.addtopageattributes("Dur",delay)
- end
- lpdf.addtopageattributes("Trans",d)
- end
-end
+if not modules then modules = { } end modules ['lpdf-ren'] = {
+ version = 1.001,
+ comment = "companion to lpdf-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- rendering
+
+local tostring, tonumber, next = tostring, tonumber, next
+local format, rep = string.format, string.rep
+local concat = table.concat
+local settings_to_array = utilities.parsers.settings_to_array
+
+local backends, lpdf, nodes, node = backends, lpdf, nodes, node
+
+local nodeinjections = backends.pdf.nodeinjections
+local codeinjections = backends.pdf.codeinjections
+local registrations = backends.pdf.registrations
+local viewerlayers = attributes.viewerlayers
+
+local references = structures.references
+
+references.executers = references.executers or { }
+local executers = references.executers
+
+local variables = interfaces.variables
+
+local v_no = variables.no
+local v_yes = variables.yes
+local v_start = variables.start
+local v_stop = variables.stop
+local v_reset = variables.reset
+local v_auto = variables.auto
+local v_random = variables.random
+
+local pdfconstant = lpdf.constant
+local pdfdictionary = lpdf.dictionary
+local pdfarray = lpdf.array
+local pdfreference = lpdf.reference
+local pdfflushobject = lpdf.flushobject
+local pdfreserveobject = lpdf.reserveobject
+
+local nodepool = nodes.pool
+local register = nodepool.register
+local pdfliteral = nodepool.pdfliteral
+
+local pdf_ocg = pdfconstant("OCG")
+local pdf_ocmd = pdfconstant("OCMD")
+local pdf_off = pdfconstant("OFF")
+local pdf_on = pdfconstant("ON")
+local pdf_toggle = pdfconstant("Toggle")
+local pdf_setocgstate = pdfconstant("SetOCGState")
+local pdf_design      = pdfconstant("Design") -- needed by the Intent key in useviewerlayer below
+
+local copy_node = node.copy
+
+local lpdf_usage = pdfdictionary { Print = pdfdictionary { PrintState = pdf_off } }
+
+-- We can have references to layers before they are placed, for instance from
+-- hide and vide actions. This is why we need to be able to force usage of layers
+-- at several moments.
+
+-- management
+
+local pdfln, pdfld = { }, { }
+local textlayers, hidelayers, videlayers = pdfarray(), pdfarray(), pdfarray()
+local pagelayers, pagelayersreference, cache = nil, nil, { }
+local alphabetic = { }
+
+local specifications = { }
+local initialized = { }
+
+function codeinjections.defineviewerlayer(specification)
+ if viewerlayers.supported and textlayers then
+ local tag = specification.tag
+ if not specifications[tag] then
+ specifications[tag] = specification
+ end
+ end
+end
+
+local function useviewerlayer(name) -- move up so that we can use it as local
+ if not environment.initex and not initialized[name] then
+ local specification = specifications[name]
+ if specification then
+ specifications[name] = nil -- or not
+ initialized [name] = true
+ if not pagelayers then
+ pagelayers = pdfdictionary()
+ pagelayersreference = pdfreserveobject()
+ end
+ local tag = specification.tag
+ -- todo: reserve
+ local nn = pdfreserveobject()
+ local nr = pdfreference(nn)
+ local nd = pdfdictionary {
+ Type = pdf_ocg,
+ Name = specification.title or "unknown",
+ Intent = ((specification.editable ~= v_no) and pdf_design) or nil, -- disable layer hiding by user
+ Usage = ((specification.printable == v_no) and lpdf_usage) or nil, -- printable or not
+ }
+ cache[#cache+1] = { nn, nd }
+ pdfln[tag] = nr -- was n
+ local dn = pdfreserveobject()
+ local dr = pdfreference(dn)
+ local dd = pdfdictionary {
+ Type = pdf_ocmd,
+ OCGs = pdfarray { nr },
+ }
+ cache[#cache+1] = { dn, dd }
+ pdfld[tag] = dr
+ textlayers[#textlayers+1] = nr
+ alphabetic[tag] = nr
+ if specification.visible == v_start then
+ videlayers[#videlayers+1] = nr
+ else
+ hidelayers[#hidelayers+1] = nr
+ end
+ pagelayers[tag] = dr -- check
+ else
+ -- todo: message
+ end
+ end
+end
+
+codeinjections.useviewerlayer = useviewerlayer
+
+local function layerreference(name)
+ local r = pdfln[name]
+ if r then
+ return r
+ else
+ useviewerlayer(name)
+ return pdfln[name]
+ end
+end
+
+lpdf.layerreference = layerreference -- also triggered when a hide or vide happens
+
+local function flushtextlayers()
+ if viewerlayers.supported then
+ if pagelayers then
+ pdfflushobject(pagelayersreference,pagelayers)
+ end
+ for i=1,#cache do
+ local ci = cache[i]
+ pdfflushobject(ci[1],ci[2])
+ end
+ if textlayers and #textlayers > 0 then -- we can group them if needed, like: layout
+ local sortedlayers = { }
+ for k, v in table.sortedhash(alphabetic) do
+ sortedlayers[#sortedlayers+1] = v -- maybe do a proper numeric sort as well
+ end
+ local d = pdfdictionary {
+ OCGs = textlayers,
+ D = pdfdictionary {
+ Name = "Document",
+ -- Order = (viewerlayers.hasorder and textlayers) or nil,
+ Order = (viewerlayers.hasorder and sortedlayers) or nil,
+ ON = videlayers,
+ OFF = hidelayers,
+ BaseState = pdf_on,
+ },
+ }
+ lpdf.addtocatalog("OCProperties",d)
+ textlayers = nil
+ end
+ end
+end
+
+local function flushpagelayers() -- we can share these
+ if pagelayers then
+ lpdf.addtopageresources("Properties",pdfreference(pagelayersreference)) -- we could cache this
+ end
+end
+
+lpdf.registerpagefinalizer (flushpagelayers,"layers")
+lpdf.registerdocumentfinalizer(flushtextlayers,"layers")
+
+local function setlayer(what,arguments)
+    -- maybe just a gmatch or, even better, earlier in lpeg
+ arguments = (type(arguments) == "table" and arguments) or settings_to_array(arguments)
+ local state = pdfarray { what }
+ for i=1,#arguments do
+ local p = layerreference(arguments[i])
+ if p then
+ state[#state+1] = p
+ end
+ end
+ return pdfdictionary {
+ S = pdf_setocgstate,
+ State = state,
+ }
+end
+
+function executers.hidelayer (arguments) return setlayer(pdf_off, arguments) end
+function executers.videlayer (arguments) return setlayer(pdf_on, arguments) end
+function executers.togglelayer(arguments) return setlayer(pdf_toggle,arguments) end
+
+-- injection
+
+function codeinjections.startlayer(name) -- used in mp
+ if not name then
+ name = "unknown"
+ end
+ useviewerlayer(name)
+ return format("/OC /%s BDC",name)
+end
+
+function codeinjections.stoplayer(name) -- used in mp
+ return "EMC"
+end
+
+local cache = { }
+
+function nodeinjections.startlayer(name)
+ local c = cache[name]
+ if not c then
+ useviewerlayer(name)
+ c = register(pdfliteral(format("/OC /%s BDC",name)))
+ cache[name] = c
+ end
+ return copy_node(c)
+end
+
+local stop = register(pdfliteral("EMC"))
+
+function nodeinjections.stoplayer()
+ return copy_node(stop)
+end
+
+-- experimental stacker code (slow, can be optimized): !!!! TEST CODE !!!!
+
+local values = viewerlayers.values
+local startlayer = codeinjections.startlayer
+local stoplayer = codeinjections.stoplayer
+
+function nodeinjections.startstackedlayer(s,t,first,last)
+ local r = { }
+ for i=first,last do
+ r[#r+1] = startlayer(values[t[i]])
+ end
+ r = concat(r," ")
+ return pdfliteral(r)
+end
+
+function nodeinjections.stopstackedlayer(s,t,first,last)
+ local r = { }
+ for i=last,first,-1 do
+ r[#r+1] = stoplayer()
+ end
+ r = concat(r," ")
+ return pdfliteral(r)
+end
+
+function nodeinjections.changestackedlayer(s,t1,first1,last1,t2,first2,last2)
+ local r = { }
+ for i=last1,first1,-1 do
+ r[#r+1] = stoplayer()
+ end
+ for i=first2,last2 do
+ r[#r+1] = startlayer(values[t2[i]])
+ end
+ r = concat(r," ")
+ return pdfliteral(r)
+end
+
+-- transitions
+
+local pagetransitions = {
+ {"split","in","vertical"}, {"split","in","horizontal"},
+ {"split","out","vertical"}, {"split","out","horizontal"},
+ {"blinds","horizontal"}, {"blinds","vertical"},
+ {"box","in"}, {"box","out"},
+ {"wipe","east"}, {"wipe","west"}, {"wipe","north"}, {"wipe","south"},
+ {"dissolve"},
+ {"glitter","east"}, {"glitter","south"},
+ {"fly","in","east"}, {"fly","in","west"}, {"fly","in","north"}, {"fly","in","south"},
+ {"fly","out","east"}, {"fly","out","west"}, {"fly","out","north"}, {"fly","out","south"},
+ {"push","east"}, {"push","west"}, {"push","north"}, {"push","south"},
+ {"cover","east"}, {"cover","west"}, {"cover","north"}, {"cover","south"},
+ {"uncover","east"}, {"uncover","west"}, {"uncover","north"}, {"uncover","south"},
+ {"fade"},
+}
+
+local mapping = {
+ split = { "S" , pdfconstant("Split") },
+ blinds = { "S" , pdfconstant("Blinds") },
+ box = { "S" , pdfconstant("Box") },
+ wipe = { "S" , pdfconstant("Wipe") },
+ dissolve = { "S" , pdfconstant("Dissolve") },
+ glitter = { "S" , pdfconstant("Glitter") },
+ replace = { "S" , pdfconstant("R") },
+ fly = { "S" , pdfconstant("Fly") },
+ push = { "S" , pdfconstant("Push") },
+ cover = { "S" , pdfconstant("Cover") },
+ uncover = { "S" , pdfconstant("Uncover") },
+ fade = { "S" , pdfconstant("Fade") },
+ horizontal = { "Dm" , pdfconstant("H") },
+ vertical = { "Dm" , pdfconstant("V") },
+ ["in"] = { "M" , pdfconstant("I") },
+ out = { "M" , pdfconstant("O") },
+ east = { "Di" , 0 },
+ north = { "Di" , 90 },
+ west = { "Di" , 180 },
+ south = { "Di" , 270 },
+}
+
+local last = 0
+
+-- n: number, "stop", "reset", "random", "a,b,c" delay: number, "none"
+
+function codeinjections.setpagetransition(specification)
+ local n, delay = specification.n, specification.delay
+ if not n or n == "" then
+ return -- let's forget about it
+ elseif n == v_auto then
+ if last >= #pagetransitions then
+ last = 0
+ end
+ n = last + 1
+ elseif n == v_stop then
+ return
+ elseif n == v_reset then
+ last = 0
+ return
+ elseif n == v_random then
+ n = math.random(1,#pagetransitions)
+ else
+ n = tonumber(n)
+ end
+ local t = n and pagetransitions[n] or pagetransitions[1]
+ if not t then
+ t = settings_to_array(n)
+ end
+ if t and #t > 0 then
+ local d = pdfdictionary()
+ for i=1,#t do
+ local m = mapping[t[i]]
+ d[m[1]] = m[2]
+ end
+ delay = tonumber(delay)
+ if delay and delay > 0 then
+ lpdf.addtopageattributes("Dur",delay)
+ end
+ lpdf.addtopageattributes("Trans",d)
+ end
+end
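
-- A hedged sketch of the viewer layer plumbing above, assuming the same MkIV
-- environment; the tag and title are invented. Defining a layer is cheap: the
-- OCG/OCMD objects are only created lazily, when useviewerlayer kicks in, here
-- through nodeinjections.startlayer.

local codeinjections = backends.pdf.codeinjections
local nodeinjections = backends.pdf.nodeinjections
local v              = interfaces.variables

codeinjections.defineviewerlayer {
    tag       = "watermark",
    title     = "Watermark",
    visible   = v.start, -- ends up in the ON array
    editable  = v.yes,   -- user may toggle it in the viewer
    printable = v.yes,
}

-- wrap some content in /OC ... BDC ... EMC markers
local b = nodeinjections.startlayer("watermark")
local e = nodeinjections.stoplayer()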
diff --git a/tex/context/base/lpdf-swf.lua b/tex/context/base/lpdf-swf.lua
index 12c80036f..0267e5255 100644
--- a/tex/context/base/lpdf-swf.lua
+++ b/tex/context/base/lpdf-swf.lua
@@ -1,306 +1,306 @@
-if not modules then modules = { } end modules ['lpdf-swf'] = {
- version = 1.001,
- comment = "companion to lpdf-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- The following code is based on tests by Luigi Scarso. His prototype
--- used TeX code; this is the official implementation.
-
-local format, gsub = string.format, string.gsub
-
-local backends, lpdf = backends, lpdf
-
-local pdfconstant = lpdf.constant
-local pdfboolean = lpdf.boolean
-local pdfstring = lpdf.string
-local pdfunicode = lpdf.unicode
-local pdfdictionary = lpdf.dictionary
-local pdfarray = lpdf.array
-local pdfnull = lpdf.null
-local pdfreference = lpdf.reference
-local pdfflushobject = lpdf.flushobject
-
-local checkedkey = lpdf.checkedkey
-
-local codeinjections = backends.pdf.codeinjections
-local nodeinjections = backends.pdf.nodeinjections
-
-local pdfannotation_node = nodes.pool.pdfannotation
-
-local trace_swf = false trackers.register("backend.swf", function(v) trace_swf = v end)
-
-local report_swf = logs.reporter("backend","swf")
-
-local activations = {
- click = "XA",
- page = "PO",
- focus = "PV",
-}
-
-local deactivations = {
- click = "XD",
- page = "PI",
- focus = "PC",
-}
-
-table.setmetatableindex(activations, function() return activations .click end)
-table.setmetatableindex(deactivations,function() return deactivations.focus end)
-
-local function insertswf(spec)
-
- local width = spec.width
- local height = spec.height
- local filename = spec.foundname
- local resources = spec.resources
- local display = spec.display
- local controls = spec.controls
-
- local resources = resources and parametersets[resources]
- local display = display and parametersets[display]
- local controls = controls and parametersets[controls] -- not yet used
-
- local preview = checkedkey(display,"preview","string")
- local toolbar = checkedkey(display,"toolbar","boolean")
-
- local embeddedreference = codeinjections.embedfile { file = filename }
-
- local flash = pdfdictionary {
- Subtype = pdfconstant("Flash"),
- Instances = pdfarray {
- pdfdictionary {
- Asset = embeddedreference,
- Params = pdfdictionary {
-                    Binding = pdfconstant("Background") -- Foreground makes swf behave erratically
- }
- },
- },
- }
-
- local flashreference = pdfreference(pdfflushobject(flash))
-
- local configuration = pdfdictionary {
- Configurations = pdfarray { flashreference },
- Assets = pdfdictionary {
- Names = pdfarray {
- pdfstring(filename),
- embeddedreference,
- }
- },
- }
-
-    -- todo: check for a subpath figure (relative)
-
- -- filename : ./test.swf (graphic)
- -- root : .
- -- prefix : ^%./
- -- fullname : ./assets/whatever.xml
- -- usedname : assets/whatever.xml
- -- filename : assets/whatever.xml
-
- local root = file.dirname(filename)
- local relativepaths = nil
- local paths = nil
-
- if resources then
- local names = configuration.Assets.Names
- local prefix = false
- if root ~= "" and root ~= "." then
- prefix = format("^%s/",string.topattern(root))
- end
- if prefix and trace_swf then
- report_swf("using strip pattern %a",prefix)
- end
- local function add(fullname,strip)
- local filename = gsub(fullname,"^%./","")
- local usedname = strip and prefix and gsub(filename,prefix,"") or filename
- local embeddedreference = codeinjections.embedfile {
- file = fullname,
- usedname = usedname,
- keepdir = true,
- }
- names[#names+1] = pdfstring(filename)
- names[#names+1] = embeddedreference
- if trace_swf then
- report_swf("embedding file %a as %a",fullname,usedname)
- end
- end
- relativepaths = resources.relativepaths
- if relativepaths then
- if trace_swf then
- report_swf("checking %s relative paths",#relativepaths)
- end
- for i=1,#relativepaths do
- local relativepath = relativepaths[i]
- if trace_swf then
- report_swf("checking path %a relative to %a",relativepath,root)
- end
- local path = file.join(root == "" and "." or root,relativepath)
- local files = dir.glob(path .. "/**")
- for i=1,#files do
- add(files[i],true)
- end
- end
- end
- paths = resources.paths
- if paths then
- if trace_swf then
-                report_swf("checking %s absolute paths",#paths)
- end
- for i=1,#paths do
- local path = paths[i]
- if trace_swf then
- report_swf("checking path %a",path)
- end
- local files = dir.glob(path .. "/**")
- for i=1,#files do
- add(files[i],false)
- end
- end
- end
- local relativefiles = resources.relativefiles
- if relativefiles then
- if trace_swf then
- report_swf("checking %s relative files",#relativefiles)
- end
- for i=1,#relativefiles do
- add(relativefiles[i],true)
- end
- end
- local files = resources.files
- if files then
- if trace_swf then
-                report_swf("checking %s absolute files",#files)
- end
- for i=1,#files do
- add(files[i],false)
- end
- end
- end
-
- local opendisplay = display and display.open or false
- local closedisplay = display and display.close or false
-
- local configurationreference = pdfreference(pdfflushobject(configuration))
-
- local activation = pdfdictionary {
- Type = pdfconstant("RichMediaActivation"),
- Condition = pdfconstant(activations[opendisplay]),
- Configuration = flashreference,
- Animation = pdfdictionary {
- Subtype = pdfconstant("Linear"),
- Speed = 1,
- Playcount = 1,
- },
- Presentation = pdfdictionary {
- PassContextClick = false,
- Style = pdfconstant("Embedded"),
- Toolbar = toolbar,
- NavigationPane = false,
- Transparent = true,
- Window = pdfdictionary {
- Type = pdfconstant("RichMediaWindow"),
- Width = pdfdictionary {
- Default = 100,
- Min = 100,
- Max = 100,
- },
- Height = pdfdictionary {
- Default = 100,
- Min = 100,
- Max = 100,
- },
- Position = pdfdictionary {
- Type = pdfconstant("RichMediaPosition"),
- HAlign = pdfconstant("Near"),
- VAlign = pdfconstant("Near"),
- HOffset = 0,
- VOffset = 0,
- }
- }
- },
- -- View
- -- Scripts
- }
-
- local deactivation = pdfdictionary {
- Type = pdfconstant("RichMediaDeactivation"),
- Condition = pdfconstant(deactivations[closedisplay]),
- }
-
- local richmediasettings = pdfdictionary {
- Type = pdfconstant("RichMediaSettings"),
- Activation = activation,
- Deactivation = deactivation,
- }
-
- local settingsreference = pdfreference(pdfflushobject(richmediasettings))
-
- local appearance
-
- if preview then
- preview = gsub(preview,"%*",file.nameonly(filename))
- local figure = codeinjections.getpreviewfigure { name = preview, width = width, height = height }
- if relativepaths and not figure then
- for i=1,#relativepaths do
- local path = file.join(root == "" and "." or root,relativepaths[i])
- if trace_swf then
- report_swf("checking preview on relative path %s",path)
- end
- local p = file.join(path,preview)
- figure = codeinjections.getpreviewfigure { name = p, width = width, height = height }
- if figure then
- preview = p
- break
- end
- end
- end
- if paths and not figure then
- for i=1,#paths do
- local path = paths[i]
- if trace_swf then
- report_swf("checking preview on absolute path %s",path)
- end
- local p = file.join(path,preview)
- figure = codeinjections.getpreviewfigure { name = p, width = width, height = height }
- if figure then
- preview = p
- break
- end
- end
- end
- if figure then
- local image = img.package(figure.status.private)
- appearance = pdfdictionary { N = pdfreference(image.objnum) }
- if trace_swf then
- report_swf("using preview %s",preview)
- end
- end
- end
-
- local annotation = pdfdictionary {
- Subtype = pdfconstant("RichMedia"),
- RichMediaContent = configurationreference,
- RichMediaSettings = settingsreference,
- AP = appearance,
- }
-
- return annotation, nil, nil
-
-end
-
-function backends.pdf.nodeinjections.insertswf(spec)
- local annotation, preview, ref = insertswf {
- foundname = spec.foundname,
- width = spec.width,
- height = spec.height,
- display = spec.display,
- controls = spec.controls,
- resources = spec.resources,
- -- factor = spec.factor,
- -- label = spec.label,
- }
- context(pdfannotation_node(spec.width,spec.height,0,annotation())) -- the context wrap is probably also needed elsewhere
-end
+if not modules then modules = { } end modules ['lpdf-swf'] = {
+ version = 1.001,
+ comment = "companion to lpdf-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- The following code is based on tests by Luigi Scarso. His prototype
+-- used TeX code; this is the official implementation.
+
+local format, gsub = string.format, string.gsub
+
+local backends, lpdf = backends, lpdf
+
+local pdfconstant = lpdf.constant
+local pdfboolean = lpdf.boolean
+local pdfstring = lpdf.string
+local pdfunicode = lpdf.unicode
+local pdfdictionary = lpdf.dictionary
+local pdfarray = lpdf.array
+local pdfnull = lpdf.null
+local pdfreference = lpdf.reference
+local pdfflushobject = lpdf.flushobject
+
+local checkedkey = lpdf.checkedkey
+
+local codeinjections = backends.pdf.codeinjections
+local nodeinjections = backends.pdf.nodeinjections
+
+local pdfannotation_node = nodes.pool.pdfannotation
+
+local trace_swf = false trackers.register("backend.swf", function(v) trace_swf = v end)
+
+local report_swf = logs.reporter("backend","swf")
+
+local activations = {
+ click = "XA",
+ page = "PO",
+ focus = "PV",
+}
+
+local deactivations = {
+ click = "XD",
+ page = "PI",
+ focus = "PC",
+}
+
+table.setmetatableindex(activations, function() return activations .click end)
+table.setmetatableindex(deactivations,function() return deactivations.focus end)
+
+local function insertswf(spec)
+
+ local width = spec.width
+ local height = spec.height
+ local filename = spec.foundname
+ local resources = spec.resources
+ local display = spec.display
+ local controls = spec.controls
+
+ local resources = resources and parametersets[resources]
+ local display = display and parametersets[display]
+ local controls = controls and parametersets[controls] -- not yet used
+
+ local preview = checkedkey(display,"preview","string")
+ local toolbar = checkedkey(display,"toolbar","boolean")
+
+ local embeddedreference = codeinjections.embedfile { file = filename }
+
+ local flash = pdfdictionary {
+ Subtype = pdfconstant("Flash"),
+ Instances = pdfarray {
+ pdfdictionary {
+ Asset = embeddedreference,
+ Params = pdfdictionary {
+                    Binding = pdfconstant("Background") -- Foreground makes swf behave erratically
+ }
+ },
+ },
+ }
+
+ local flashreference = pdfreference(pdfflushobject(flash))
+
+ local configuration = pdfdictionary {
+ Configurations = pdfarray { flashreference },
+ Assets = pdfdictionary {
+ Names = pdfarray {
+ pdfstring(filename),
+ embeddedreference,
+ }
+ },
+ }
+
+    -- todo: check for a subpath figure (relative)
+
+ -- filename : ./test.swf (graphic)
+ -- root : .
+ -- prefix : ^%./
+ -- fullname : ./assets/whatever.xml
+ -- usedname : assets/whatever.xml
+ -- filename : assets/whatever.xml
+
+ local root = file.dirname(filename)
+ local relativepaths = nil
+ local paths = nil
+
+ if resources then
+ local names = configuration.Assets.Names
+ local prefix = false
+ if root ~= "" and root ~= "." then
+ prefix = format("^%s/",string.topattern(root))
+ end
+ if prefix and trace_swf then
+ report_swf("using strip pattern %a",prefix)
+ end
+ local function add(fullname,strip)
+ local filename = gsub(fullname,"^%./","")
+ local usedname = strip and prefix and gsub(filename,prefix,"") or filename
+ local embeddedreference = codeinjections.embedfile {
+ file = fullname,
+ usedname = usedname,
+ keepdir = true,
+ }
+ names[#names+1] = pdfstring(filename)
+ names[#names+1] = embeddedreference
+ if trace_swf then
+ report_swf("embedding file %a as %a",fullname,usedname)
+ end
+ end
+ relativepaths = resources.relativepaths
+ if relativepaths then
+ if trace_swf then
+ report_swf("checking %s relative paths",#relativepaths)
+ end
+ for i=1,#relativepaths do
+ local relativepath = relativepaths[i]
+ if trace_swf then
+ report_swf("checking path %a relative to %a",relativepath,root)
+ end
+ local path = file.join(root == "" and "." or root,relativepath)
+ local files = dir.glob(path .. "/**")
+ for i=1,#files do
+ add(files[i],true)
+ end
+ end
+ end
+ paths = resources.paths
+ if paths then
+ if trace_swf then
+                report_swf("checking %s absolute paths",#paths)
+ end
+ for i=1,#paths do
+ local path = paths[i]
+ if trace_swf then
+ report_swf("checking path %a",path)
+ end
+ local files = dir.glob(path .. "/**")
+ for i=1,#files do
+ add(files[i],false)
+ end
+ end
+ end
+ local relativefiles = resources.relativefiles
+ if relativefiles then
+ if trace_swf then
+ report_swf("checking %s relative files",#relativefiles)
+ end
+ for i=1,#relativefiles do
+ add(relativefiles[i],true)
+ end
+ end
+ local files = resources.files
+ if files then
+ if trace_swf then
+                report_swf("checking %s absolute files",#files)
+ end
+ for i=1,#files do
+ add(files[i],false)
+ end
+ end
+ end
+
+ local opendisplay = display and display.open or false
+ local closedisplay = display and display.close or false
+
+ local configurationreference = pdfreference(pdfflushobject(configuration))
+
+ local activation = pdfdictionary {
+ Type = pdfconstant("RichMediaActivation"),
+ Condition = pdfconstant(activations[opendisplay]),
+ Configuration = flashreference,
+ Animation = pdfdictionary {
+ Subtype = pdfconstant("Linear"),
+ Speed = 1,
+ Playcount = 1,
+ },
+ Presentation = pdfdictionary {
+ PassContextClick = false,
+ Style = pdfconstant("Embedded"),
+ Toolbar = toolbar,
+ NavigationPane = false,
+ Transparent = true,
+ Window = pdfdictionary {
+ Type = pdfconstant("RichMediaWindow"),
+ Width = pdfdictionary {
+ Default = 100,
+ Min = 100,
+ Max = 100,
+ },
+ Height = pdfdictionary {
+ Default = 100,
+ Min = 100,
+ Max = 100,
+ },
+ Position = pdfdictionary {
+ Type = pdfconstant("RichMediaPosition"),
+ HAlign = pdfconstant("Near"),
+ VAlign = pdfconstant("Near"),
+ HOffset = 0,
+ VOffset = 0,
+ }
+ }
+ },
+ -- View
+ -- Scripts
+ }
+
+ local deactivation = pdfdictionary {
+ Type = pdfconstant("RichMediaDeactivation"),
+ Condition = pdfconstant(deactivations[closedisplay]),
+ }
+
+ local richmediasettings = pdfdictionary {
+ Type = pdfconstant("RichMediaSettings"),
+ Activation = activation,
+ Deactivation = deactivation,
+ }
+
+ local settingsreference = pdfreference(pdfflushobject(richmediasettings))
+
+ local appearance
+
+ if preview then
+ preview = gsub(preview,"%*",file.nameonly(filename))
+ local figure = codeinjections.getpreviewfigure { name = preview, width = width, height = height }
+ if relativepaths and not figure then
+ for i=1,#relativepaths do
+ local path = file.join(root == "" and "." or root,relativepaths[i])
+ if trace_swf then
+ report_swf("checking preview on relative path %s",path)
+ end
+ local p = file.join(path,preview)
+ figure = codeinjections.getpreviewfigure { name = p, width = width, height = height }
+ if figure then
+ preview = p
+ break
+ end
+ end
+ end
+ if paths and not figure then
+ for i=1,#paths do
+ local path = paths[i]
+ if trace_swf then
+ report_swf("checking preview on absolute path %s",path)
+ end
+ local p = file.join(path,preview)
+ figure = codeinjections.getpreviewfigure { name = p, width = width, height = height }
+ if figure then
+ preview = p
+ break
+ end
+ end
+ end
+ if figure then
+ local image = img.package(figure.status.private)
+ appearance = pdfdictionary { N = pdfreference(image.objnum) }
+ if trace_swf then
+ report_swf("using preview %s",preview)
+ end
+ end
+ end
+
+ local annotation = pdfdictionary {
+ Subtype = pdfconstant("RichMedia"),
+ RichMediaContent = configurationreference,
+ RichMediaSettings = settingsreference,
+ AP = appearance,
+ }
+
+ return annotation, nil, nil
+
+end
+
+function backends.pdf.nodeinjections.insertswf(spec)
+ local annotation, preview, ref = insertswf {
+ foundname = spec.foundname,
+ width = spec.width,
+ height = spec.height,
+ display = spec.display,
+ controls = spec.controls,
+ resources = spec.resources,
+ -- factor = spec.factor,
+ -- label = spec.label,
+ }
+ context(pdfannotation_node(spec.width,spec.height,0,annotation())) -- the context wrap is probably also needed elsewhere
+end
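
-- A sketch of the spec table that the injection above expects; the file name and
-- the parameter set names are invented, and width/height are scaled points. The
-- display set may provide preview/toolbar/open/close, the resources set lists
-- extra files to embed, and controls is parsed but not used yet.

backends.pdf.nodeinjections.insertswf {
    foundname = "demo.swf",
    width     = 400 * 65536,
    height    = 300 * 65536,
    display   = "swfdisplay",
    controls  = "swfcontrols",
    resources = "swfresources",
}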
diff --git a/tex/context/base/lpdf-tag.lua b/tex/context/base/lpdf-tag.lua
index 8cdb5f6a4..f5766996c 100644
--- a/tex/context/base/lpdf-tag.lua
+++ b/tex/context/base/lpdf-tag.lua
@@ -1,313 +1,313 @@
-if not modules then modules = { } end modules ['lpdf-tag'] = {
- version = 1.001,
- comment = "companion to lpdf-tag.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format, match, concat = string.format, string.match, table.concat
-local lpegmatch = lpeg.match
-local utfchar = utf.char
-
-local trace_tags = false trackers.register("structures.tags", function(v) trace_tags = v end)
-
-local report_tags = logs.reporter("backend","tags")
-
-local backends, lpdf, nodes = backends, lpdf, nodes
-
-local nodeinjections = backends.pdf.nodeinjections
-local codeinjections = backends.pdf.codeinjections
-
-local tasks = nodes.tasks
-
-local pdfdictionary = lpdf.dictionary
-local pdfarray = lpdf.array
-local pdfboolean = lpdf.boolean
-local pdfconstant = lpdf.constant
-local pdfreference = lpdf.reference
-local pdfunicode = lpdf.unicode
-local pdfstring = lpdf.string
-local pdfflushobject = lpdf.flushobject
-local pdfreserveobject = lpdf.reserveobject
-local pdfpagereference = lpdf.pagereference
-
-local nodepool = nodes.pool
-
-local pdfliteral = nodepool.pdfliteral
-
-local nodecodes = nodes.nodecodes
-
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local glyph_code = nodecodes.glyph
-
-local a_tagged = attributes.private('tagged')
-local a_image = attributes.private('image')
-
-local traverse_nodes = node.traverse
-local traverse_id = node.traverse_id
-local tosequence = nodes.tosequence
-local copy_node = node.copy
-local slide_nodelist = node.slide
-
-local structure_stack = { }
-local structure_kids = pdfarray()
-local structure_ref = pdfreserveobject()
-local parent_ref = pdfreserveobject()
-local root = { pref = pdfreference(structure_ref), kids = structure_kids }
-local tree = { }
-local elements = { }
-local names = pdfarray()
-local taglist = structures.tags.taglist
-local usedlabels = structures.tags.labels
-local properties = structures.tags.properties
-local usedmapping = { }
-
-local colonsplitter = lpeg.splitat(":")
-local dashsplitter = lpeg.splitat("-")
-
-local add_ids = false -- true
-
-
---~ function codeinjections.maptag(original,target,kind)
---~ mapping[original] = { target, kind or "inline" }
---~ end
-
-local function finishstructure()
- if #structure_kids > 0 then
- local nums, n = pdfarray(), 0
- for i=1,#tree do
- n = n + 1 ; nums[n] = i-1
- n = n + 1 ; nums[n] = pdfreference(pdfflushobject(tree[i]))
- end
- local parenttree = pdfdictionary {
- Nums = nums
- }
- -- we need to split names into smaller parts (e.g. alphabetic or so)
- if add_ids then
- local kids = pdfdictionary {
- Limits = pdfarray { names[1], names[#names-1] },
- Names = names,
- }
- local idtree = pdfdictionary {
- Kids = pdfarray { pdfreference(pdfflushobject(kids)) },
- }
- end
- --
- local rolemap = pdfdictionary()
- for k, v in next, usedmapping do
- k = usedlabels[k] or k
- local p = properties[k]
- rolemap[k] = pdfconstant(p and p.pdf or "Span") -- or "Div"
- end
- local structuretree = pdfdictionary {
- Type = pdfconstant("StructTreeRoot"),
- K = pdfreference(pdfflushobject(structure_kids)),
- ParentTree = pdfreference(pdfflushobject(parent_ref,parenttree)),
- IDTree = (add_ids and pdfreference(pdfflushobject(idtree))) or nil,
- RoleMap = rolemap,
- }
- pdfflushobject(structure_ref,structuretree)
- lpdf.addtocatalog("StructTreeRoot",pdfreference(structure_ref))
- --
- local markinfo = pdfdictionary {
- Marked = pdfboolean(true),
- -- UserProperties = pdfboolean(true),
- -- Suspects = pdfboolean(true),
- }
- lpdf.addtocatalog("MarkInfo",pdfreference(pdfflushobject(markinfo)))
- --
- for fulltag, element in next, elements do
- pdfflushobject(element.knum,element.kids)
- end
- end
-end
-
-lpdf.registerdocumentfinalizer(finishstructure,"document structure")
-
-local index, pageref, pagenum, list = 0, nil, 0, nil
-
-local pdf_mcr = pdfconstant("MCR")
-local pdf_struct_element = pdfconstant("StructElem")
-
-local function initializepage()
- index = 0
- pagenum = tex.count.realpageno
- pageref = pdfreference(pdfpagereference(pagenum))
- list = pdfarray()
- tree[pagenum] = list -- we can flush after done, todo
-end
-
-local function finishpage()
- -- flush what can be flushed
- lpdf.addtopageattributes("StructParents",pagenum-1)
-end
-
--- here we can flush and free elements that are finished
-
-local function makeelement(fulltag,parent)
- local tag, n = lpegmatch(dashsplitter,fulltag)
- local tg, detail = lpegmatch(colonsplitter,tag)
- local k, r = pdfarray(), pdfreserveobject()
- usedmapping[tg] = true
- tg = usedlabels[tg] or tg
- local d = pdfdictionary {
- Type = pdf_struct_element,
- S = pdfconstant(tg),
- ID = (add_ids and fulltag) or nil,
- T = detail and detail or nil,
- P = parent.pref,
- Pg = pageref,
- K = pdfreference(r),
- -- Alt = " Who cares ",
- -- ActualText = " Hi Hans ",
- }
- local s = pdfreference(pdfflushobject(d))
- if add_ids then
- names[#names+1] = fulltag
- names[#names+1] = s
- end
- local kids = parent.kids
- kids[#kids+1] = s
- elements[fulltag] = { tag = tag, pref = s, kids = k, knum = r, pnum = pagenum }
-end
-
-local function makecontent(parent,start,stop,slist,id)
- local tag, kids = parent.tag, parent.kids
- local last = index
- if id == "image" then
- local d = pdfdictionary {
- Type = pdf_mcr,
- Pg = pageref,
- MCID = last,
- Alt = "image",
- }
- kids[#kids+1] = d
- elseif pagenum == parent.pnum then
- kids[#kids+1] = last
- else
- local d = pdfdictionary {
- Type = pdf_mcr,
- Pg = pageref,
- MCID = last,
- }
- -- kids[#kids+1] = pdfreference(pdfflushobject(d))
- kids[#kids+1] = d
- end
- --
-    local bliteral = pdfliteral(format("/%s <</MCID %s>>BDC",tag,last))
- local prev = start.prev
- if prev then
- prev.next, bliteral.prev = bliteral, prev
- end
- start.prev, bliteral.next = bliteral, start
- if slist and slist.list == start then
- slist.list = bliteral
- elseif not prev then
- report_tags("this can't happen: injection in front of nothing")
- end
- --
- local eliteral = pdfliteral("EMC")
- local next = stop.next
- if next then
- next.prev, eliteral.next = eliteral, next
- end
- stop.next, eliteral.prev = eliteral, stop
- --
- index = index + 1
- list[index] = parent.pref
- return bliteral, eliteral
-end
-
--- -- --
-
-local level, last, ranges, range = 0, nil, { }, nil
-
-local function collectranges(head,list)
- for n in traverse_nodes(head) do
- local id = n.id -- 14: image, 8: literal (mp)
- if id == glyph_code then
- local at = n[a_tagged]
- if not at then
- range = nil
- elseif last ~= at then
- range = { at, "glyph", n, n, list } -- attr id start stop list
- ranges[#ranges+1] = range
- last = at
- elseif range then
- range[4] = n -- stop
- end
- elseif id == hlist_code or id == vlist_code then
- local at = n[a_image]
- if at then
- local at = n[a_tagged]
- if not at then
- range = nil
- else
- ranges[#ranges+1] = { at, "image", n, n, list } -- attr id start stop list
- end
- last = nil
- else
- local nl = n.list
-                slide_nodelist(nl) -- temporary hack till math gets slid (tracker item)
- collectranges(nl,n)
- end
- end
- end
-end
-
-function nodeinjections.addtags(head)
- -- no need to adapt head, as we always operate on lists
- level, last, ranges, range = 0, nil, { }, nil
- initializepage()
- collectranges(head)
- if trace_tags then
- for i=1,#ranges do
- local range = ranges[i]
- local attr, id, start, stop = range[1], range[2], range[3], range[4]
- local tags = taglist[attr]
- if tags then -- not ok ... only first lines
- report_tags("%s => %s : %05i % t",tosequence(start,start),tosequence(stop,stop),attr,tags)
- end
- end
- end
- for i=1,#ranges do
- local range = ranges[i]
- local attr, id, start, stop, list = range[1], range[2], range[3], range[4], range[5]
- local tags = taglist[attr]
- local prev = root
- local noftags, tag = #tags, nil
- for j=1,noftags do
- local tag = tags[j]
- if not elements[tag] then
- makeelement(tag,prev)
- end
- prev = elements[tag]
- end
- local b, e = makecontent(prev,start,stop,list,id)
- if start == head then
- report_tags("this can't happen: parent list gets tagged")
- head = b
- end
- end
- finishpage()
- -- can be separate feature
- --
-    -- injectspans(head) -- does not work yet
- --
- return head, true
-end
-
--- this belongs elsewhere (export is not pdf related)
-
-function codeinjections.enabletags(tg,lb)
- structures.tags.handler = nodeinjections.addtags
- tasks.enableaction("shipouts","structures.tags.handler")
- tasks.enableaction("shipouts","nodes.handlers.accessibility")
- tasks.enableaction("math","noads.handlers.tags")
- -- maybe also textblock
- if trace_tags then
- report_tags("enabling structure tags")
- end
-end
+if not modules then modules = { } end modules ['lpdf-tag'] = {
+ version = 1.001,
+ comment = "companion to lpdf-tag.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format, match, concat = string.format, string.match, table.concat
+local lpegmatch = lpeg.match
+local utfchar = utf.char
+
+local trace_tags = false trackers.register("structures.tags", function(v) trace_tags = v end)
+
+local report_tags = logs.reporter("backend","tags")
+
+local backends, lpdf, nodes = backends, lpdf, nodes
+
+local nodeinjections = backends.pdf.nodeinjections
+local codeinjections = backends.pdf.codeinjections
+
+local tasks = nodes.tasks
+
+local pdfdictionary = lpdf.dictionary
+local pdfarray = lpdf.array
+local pdfboolean = lpdf.boolean
+local pdfconstant = lpdf.constant
+local pdfreference = lpdf.reference
+local pdfunicode = lpdf.unicode
+local pdfstring = lpdf.string
+local pdfflushobject = lpdf.flushobject
+local pdfreserveobject = lpdf.reserveobject
+local pdfpagereference = lpdf.pagereference
+
+local nodepool = nodes.pool
+
+local pdfliteral = nodepool.pdfliteral
+
+local nodecodes = nodes.nodecodes
+
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local glyph_code = nodecodes.glyph
+
+local a_tagged = attributes.private('tagged')
+local a_image = attributes.private('image')
+
+local traverse_nodes = node.traverse
+local traverse_id = node.traverse_id
+local tosequence = nodes.tosequence
+local copy_node = node.copy
+local slide_nodelist = node.slide
+
+local structure_stack = { }
+local structure_kids = pdfarray()
+local structure_ref = pdfreserveobject()
+local parent_ref = pdfreserveobject()
+local root = { pref = pdfreference(structure_ref), kids = structure_kids }
+local tree = { }
+local elements = { }
+local names = pdfarray()
+local taglist = structures.tags.taglist
+local usedlabels = structures.tags.labels
+local properties = structures.tags.properties
+local usedmapping = { }
+
+local colonsplitter = lpeg.splitat(":")
+local dashsplitter = lpeg.splitat("-")
+
+local add_ids = false -- true
+
+
+--~ function codeinjections.maptag(original,target,kind)
+--~ mapping[original] = { target, kind or "inline" }
+--~ end
+
+local function finishstructure()
+ if #structure_kids > 0 then
+ local nums, n = pdfarray(), 0
+ for i=1,#tree do
+ n = n + 1 ; nums[n] = i-1
+ n = n + 1 ; nums[n] = pdfreference(pdfflushobject(tree[i]))
+ end
+ local parenttree = pdfdictionary {
+ Nums = nums
+ }
+ -- we need to split names into smaller parts (e.g. alphabetic or so)
+ if add_ids then
+ local kids = pdfdictionary {
+ Limits = pdfarray { names[1], names[#names-1] },
+ Names = names,
+ }
+ local idtree = pdfdictionary {
+ Kids = pdfarray { pdfreference(pdfflushobject(kids)) },
+ }
+ end
+ --
+ local rolemap = pdfdictionary()
+ for k, v in next, usedmapping do
+ k = usedlabels[k] or k
+ local p = properties[k]
+ rolemap[k] = pdfconstant(p and p.pdf or "Span") -- or "Div"
+ end
+ local structuretree = pdfdictionary {
+ Type = pdfconstant("StructTreeRoot"),
+ K = pdfreference(pdfflushobject(structure_kids)),
+ ParentTree = pdfreference(pdfflushobject(parent_ref,parenttree)),
+ IDTree = (add_ids and pdfreference(pdfflushobject(idtree))) or nil,
+ RoleMap = rolemap,
+ }
+ pdfflushobject(structure_ref,structuretree)
+ lpdf.addtocatalog("StructTreeRoot",pdfreference(structure_ref))
+ --
+ local markinfo = pdfdictionary {
+ Marked = pdfboolean(true),
+ -- UserProperties = pdfboolean(true),
+ -- Suspects = pdfboolean(true),
+ }
+ lpdf.addtocatalog("MarkInfo",pdfreference(pdfflushobject(markinfo)))
+ --
+ for fulltag, element in next, elements do
+ pdfflushobject(element.knum,element.kids)
+ end
+ end
+end
+
+lpdf.registerdocumentfinalizer(finishstructure,"document structure")
+
+local index, pageref, pagenum, list = 0, nil, 0, nil
+
+local pdf_mcr = pdfconstant("MCR")
+local pdf_struct_element = pdfconstant("StructElem")
+
+local function initializepage()
+ index = 0
+ pagenum = tex.count.realpageno
+ pageref = pdfreference(pdfpagereference(pagenum))
+ list = pdfarray()
+ tree[pagenum] = list -- we can flush after done, todo
+end
+
+local function finishpage()
+ -- flush what can be flushed
+ lpdf.addtopageattributes("StructParents",pagenum-1)
+end
+
+-- here we can flush and free elements that are finished
+
+local function makeelement(fulltag,parent)
+ local tag, n = lpegmatch(dashsplitter,fulltag)
+ local tg, detail = lpegmatch(colonsplitter,tag)
+ local k, r = pdfarray(), pdfreserveobject()
+ usedmapping[tg] = true
+ tg = usedlabels[tg] or tg
+ local d = pdfdictionary {
+ Type = pdf_struct_element,
+ S = pdfconstant(tg),
+ ID = (add_ids and fulltag) or nil,
+ T = detail and detail or nil,
+ P = parent.pref,
+ Pg = pageref,
+ K = pdfreference(r),
+ -- Alt = " Who cares ",
+ -- ActualText = " Hi Hans ",
+ }
+ local s = pdfreference(pdfflushobject(d))
+ if add_ids then
+ names[#names+1] = fulltag
+ names[#names+1] = s
+ end
+ local kids = parent.kids
+ kids[#kids+1] = s
+ elements[fulltag] = { tag = tag, pref = s, kids = k, knum = r, pnum = pagenum }
+end
+
+local function makecontent(parent,start,stop,slist,id)
+ local tag, kids = parent.tag, parent.kids
+ local last = index
+ if id == "image" then
+ local d = pdfdictionary {
+ Type = pdf_mcr,
+ Pg = pageref,
+ MCID = last,
+ Alt = "image",
+ }
+ kids[#kids+1] = d
+ elseif pagenum == parent.pnum then
+ kids[#kids+1] = last
+ else
+ local d = pdfdictionary {
+ Type = pdf_mcr,
+ Pg = pageref,
+ MCID = last,
+ }
+ -- kids[#kids+1] = pdfreference(pdfflushobject(d))
+ kids[#kids+1] = d
+ end
+ --
+    local bliteral = pdfliteral(format("/%s <</MCID %s>>BDC",tag,last))
+ local prev = start.prev
+ if prev then
+ prev.next, bliteral.prev = bliteral, prev
+ end
+ start.prev, bliteral.next = bliteral, start
+ if slist and slist.list == start then
+ slist.list = bliteral
+ elseif not prev then
+ report_tags("this can't happen: injection in front of nothing")
+ end
+ --
+ local eliteral = pdfliteral("EMC")
+ local next = stop.next
+ if next then
+ next.prev, eliteral.next = eliteral, next
+ end
+ stop.next, eliteral.prev = eliteral, stop
+ --
+ index = index + 1
+ list[index] = parent.pref
+ return bliteral, eliteral
+end
+
+-- -- --
+
+local level, last, ranges, range = 0, nil, { }, nil
+
+local function collectranges(head,list)
+ for n in traverse_nodes(head) do
+ local id = n.id -- 14: image, 8: literal (mp)
+ if id == glyph_code then
+ local at = n[a_tagged]
+ if not at then
+ range = nil
+ elseif last ~= at then
+ range = { at, "glyph", n, n, list } -- attr id start stop list
+ ranges[#ranges+1] = range
+ last = at
+ elseif range then
+ range[4] = n -- stop
+ end
+ elseif id == hlist_code or id == vlist_code then
+ local at = n[a_image]
+ if at then
+ local at = n[a_tagged]
+ if not at then
+ range = nil
+ else
+ ranges[#ranges+1] = { at, "image", n, n, list } -- attr id start stop list
+ end
+ last = nil
+ else
+ local nl = n.list
+                slide_nodelist(nl) -- temporary hack till math gets slid (tracker item)
+ collectranges(nl,n)
+ end
+ end
+ end
+end
+
+function nodeinjections.addtags(head)
+ -- no need to adapt head, as we always operate on lists
+ level, last, ranges, range = 0, nil, { }, nil
+ initializepage()
+ collectranges(head)
+ if trace_tags then
+ for i=1,#ranges do
+ local range = ranges[i]
+ local attr, id, start, stop = range[1], range[2], range[3], range[4]
+ local tags = taglist[attr]
+ if tags then -- not ok ... only first lines
+ report_tags("%s => %s : %05i % t",tosequence(start,start),tosequence(stop,stop),attr,tags)
+ end
+ end
+ end
+ for i=1,#ranges do
+ local range = ranges[i]
+ local attr, id, start, stop, list = range[1], range[2], range[3], range[4], range[5]
+ local tags = taglist[attr]
+ local prev = root
+ local noftags, tag = #tags, nil
+ for j=1,noftags do
+ local tag = tags[j]
+ if not elements[tag] then
+ makeelement(tag,prev)
+ end
+ prev = elements[tag]
+ end
+ local b, e = makecontent(prev,start,stop,list,id)
+ if start == head then
+ report_tags("this can't happen: parent list gets tagged")
+ head = b
+ end
+ end
+ finishpage()
+ -- can be separate feature
+ --
+    -- injectspans(head) -- does not work yet
+ --
+ return head, true
+end
+
+-- this belongs elsewhere (export is not pdf related)
+
+function codeinjections.enabletags(tg,lb)
+ structures.tags.handler = nodeinjections.addtags
+ tasks.enableaction("shipouts","structures.tags.handler")
+ tasks.enableaction("shipouts","nodes.handlers.accessibility")
+ tasks.enableaction("math","noads.handlers.tags")
+ -- maybe also textblock
+ if trace_tags then
+ report_tags("enabling structure tags")
+ end
+end
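
-- A sketch of how the tagging code above is switched on, assuming the usual MkIV
-- setup. enabletags hooks addtags into the shipout tasks; from then on every page
-- is scanned by collectranges and the structure tree is flushed by the
-- "document structure" finalizer at the end of the run.

backends.pdf.codeinjections.enabletags()
trackers.enable("structures.tags") -- optional: report what gets tagged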
diff --git a/tex/context/base/lpdf-u3d.lua b/tex/context/base/lpdf-u3d.lua
index 33269486c..464ea6fa7 100644
--- a/tex/context/base/lpdf-u3d.lua
+++ b/tex/context/base/lpdf-u3d.lua
@@ -1,488 +1,488 @@
-if not modules then modules = { } end modules ['lpdf-u3d'] = {
- version = 1.001,
- comment = "companion to lpdf-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- The following code is based on a working prototype provided
--- by Michael Vidiassov. It is rewritten using the lpdf library
--- and different checking is used. The macro calls are adapted
--- (and will eventually be removed). The user interface needs
--- an overhaul. There are some messy leftovers that will be
--- removed in future versions.
-
--- For some reason no one really tested this code so at some
--- point we will end up with a reimplementation. For instance
--- it makes sense to add the same activation code as with swf.
-
-local format, find = string.format, string.find
-local cos, sin, sqrt, pi, atan2, abs = math.cos, math.sin, math.sqrt, math.pi, math.atan2, math.abs
-
-local backends, lpdf = backends, lpdf
-
-local nodeinjections = backends.pdf.nodeinjections
-
-local pdfconstant = lpdf.constant
-local pdfboolean = lpdf.boolean
-local pdfnumber = lpdf.number
-local pdfunicode = lpdf.unicode
-local pdfdictionary = lpdf.dictionary
-local pdfarray = lpdf.array
-local pdfnull = lpdf.null
-local pdfreference = lpdf.reference
-local pdfflushstreamobject = lpdf.flushstreamobject
-local pdfflushstreamfileobject = lpdf.flushstreamfileobject
-
-local checkedkey = lpdf.checkedkey
-local limited = lpdf.limited
-
-local pdfannotation_node = nodes.pool.pdfannotation
-
-local schemes = table.tohash {
- "Artwork", "None", "White", "Day", "Night", "Hard",
- "Primary", "Blue", "Red", "Cube", "CAD", "Headlamp",
-}
-
-local modes = table.tohash {
- "Solid", "SolidWireframe", "Transparent", "TransparentWireframe", "BoundingBox",
- "TransparentBoundingBox", "TransparentBoundingBoxOutline", "Wireframe",
- "ShadedWireframe", "HiddenWireframe", "Vertices", "ShadedVertices", "Illustration",
- "SolidOutline", "ShadedIllustration",
-}
-
-local function normalize(x, y, z)
- local modulo = sqrt(x*x + y*y + z*z);
- if modulo ~= 0 then
- return x/modulo, y/modulo, z/modulo
- else
- return x, y, z
- end
-end
-
-local function rotate(vect_x,vect_y,vect_z, tet, axis_x,axis_y,axis_z)
- -- rotate vect by tet about axis counterclockwise
- local c, s = cos(tet*pi/180), sin(tet*pi/180)
- local r = 1 - c
- local n = sqrt(axis_x*axis_x+axis_y*axis_y+axis_z*axis_z)
- axis_x, axis_y, axis_z = axis_x/n, axis_y/n, axis_z/n
- return
- (axis_x*axis_x*r+c )*vect_x + (axis_x*axis_y*r-axis_z*s)*vect_y + (axis_x*axis_z*r+axis_y*s)*vect_z,
- (axis_x*axis_y*r+axis_z*s)*vect_x + (axis_y*axis_y*r+c )*vect_y + (axis_y*axis_z*r-axis_x*s)*vect_z,
- (axis_x*axis_z*r-axis_y*s)*vect_x + (axis_y*axis_z*r+axis_x*s)*vect_y + (axis_z*axis_z*r+c )*vect_z
-end
-
-local function make3dview(view)
-
- local name = view.name
- local name = pdfunicode(name ~= "" and name or "unknown view")
-
- local viewdict = pdfdictionary {
- Type = pdfconstant("3DView"),
- XN = name,
- IN = name,
- NR = true,
- }
-
- local bg = checkedkey(view,"bg","table")
- if bg then
- viewdict.BG = pdfdictionary {
- Type = pdfconstant("3DBG"),
- C = pdfarray { limited(bg[1],1,1,1), limited(bg[2],1,1,1), limited(bg[3],1,1,1) },
- }
- end
-
- local lights = checkedkey(view,"lights","string")
- if lights and schemes[lights] then
- viewdict.LS = pdfdictionary {
- Type = pdfconstant("3DLightingScheme"),
- Subtype = pdfconstant(lights),
- }
- end
-
- -- camera position is taken from 3d model
-
- local u3dview = checkedkey(view, "u3dview", "string")
- if u3dview then
- viewdict.MS = pdfconstant("U3D")
- viewdict.U3DPath = u3dview
- end
-
- -- position the camera as given
-
- local c2c = checkedkey(view, "c2c", "table")
- local coo = checkedkey(view, "coo", "table")
- local roo = checkedkey(view, "roo", "number")
- local azimuth = checkedkey(view, "azimuth", "number")
- local altitude = checkedkey(view, "altitude", "number")
-
- if c2c or coo or roo or azimuth or altitude then
-
- local pos = checkedkey(view, "pos", "table")
- local dir = checkedkey(view, "dir", "table")
- local upv = checkedkey(view, "upv", "table")
- local roll = checkedkey(view, "roll", "table")
-
- local coo_x, coo_y, coo_z = 0, 0, 0
- local dir_x, dir_y, dir_z = 0, 0, 0
- local trans_x, trans_y, trans_z = 0, 0, 0
- local left_x, left_y, left_z = 0, 0, 0
- local up_x, up_y, up_z = 0, 0, 0
-
- -- point camera is aimed at
-
- if coo then
- coo_x, coo_y, coo_z = tonumber(coo[1]) or 0, tonumber(coo[2]) or 0, tonumber(coo[3]) or 0
- end
-
- -- distance from camera to target
-
- if roo then
- roo = abs(roo)
- end
- if not roo or roo == 0 then
- roo = 0.000000000000000001
- end
-
- -- set it via camera position
-
- if pos then
- dir_x = coo_x - (tonumber(pos[1]) or 0)
- dir_y = coo_y - (tonumber(pos[2]) or 0)
- dir_z = coo_z - (tonumber(pos[3]) or 0)
- if not roo then
- roo = sqrt(dir_x*dir_x + dir_y*dir_y + dir_z*dir_z)
- end
- if dir_x == 0 and dir_y == 0 and dir_z == 0 then dir_y = 1 end
- dir_x, dir_y, dir_z = normalize(dir_x,dir_y,dir_z)
- end
-
- -- set it directly
-
- if dir then
- dir_x, dir_y, dir_z = tonumber(dir[1] or 0), tonumber(dir[2] or 0), tonumber(dir[3] or 0)
- if dir_x == 0 and dir_y == 0 and dir_z == 0 then dir_y = 1 end
- dir_x, dir_y, dir_z = normalize(dir_x,dir_y,dir_z)
- end
-
- -- set it movie15 style with vector from target to camera
-
- if c2c then
- dir_x, dir_y, dir_z = - tonumber(c2c[1] or 0), - tonumber(c2c[2] or 0), - tonumber(c2c[3] or 0)
- if dir_x == 0 and dir_y == 0 and dir_z == 0 then dir_y = 1 end
- dir_x, dir_y, dir_z = normalize(dir_x,dir_y,dir_z)
- end
-
- -- set it with azimuth and altitude
-
- if altitude or azimuth then
- dir_x, dir_y, dir_z = -1, 0, 0
- if altitude then dir_x, dir_y, dir_z = rotate(dir_x,dir_y,dir_z, -altitude, 0,1,0) end
- if azimuth then dir_x, dir_y, dir_z = rotate(dir_x,dir_y,dir_z, azimuth, 0,0,1) end
- end
-
- -- set it with rotation like in MathGL
-
- if rot then
- if dir_x == 0 and dir_y == 0 and dir_z == 0 then dir_z = -1 end
- dir_x,dir_y,dir_z = rotate(dir_x,dir_y,dir_z, tonumber(rot[1]) or 0, 1,0,0)
- dir_x,dir_y,dir_z = rotate(dir_x,dir_y,dir_z, tonumber(rot[2]) or 0, 0,1,0)
- dir_x,dir_y,dir_z = rotate(dir_x,dir_y,dir_z, tonumber(rot[3]) or 0, 0,0,1)
- end
-
- -- set it with default movie15 orientation looking up y axis
-
- if dir_x == 0 and dir_y == 0 and dir_z == 0 then dir_y = 1 end
-
- -- left-vector
- -- up-vector
-
- if upv then
- up_x, up_y, up_z = tonumber(upv[1]) or 0, tonumber(upv[2]) or 0, tonumber(upv[3]) or 0
- else
- -- set default up-vector
- if abs(dir_x) == 0 and abs(dir_y) == 0 then
- if dir_z < 0 then
- up_y = 1 -- top view
- else
- up_y = -1 -- bottom view
- end
- else
- -- other camera positions than top and bottom, up-vector = up_world - (up_world dot dir) dir
- up_x, up_y, up_z = - dir_z*dir_x, - dir_z*dir_y, - dir_z*dir_z + 1
- end
- end
-
- -- normalize up-vector
-
- up_x, up_y, up_z = normalize(up_x,up_y,up_z)
-
- -- left vector = up x dir
-
- left_x, left_y, left_z = dir_z*up_y - dir_y*up_z, dir_x*up_z - dir_z*up_x, dir_y*up_x - dir_x*up_y
-
- -- normalize left vector
-
- left_x, left_y, left_z = normalize(left_x,left_y,left_z)
-
- -- apply camera roll
-
- if roll then
- local sinroll = sin((roll/180.0)*pi)
- local cosroll = cos((roll/180.0)*pi)
- -- rotate left and up about dir, using the unrotated left so the frame stays orthonormal
- local old_left_x, old_left_y, old_left_z = left_x, left_y, left_z
- left_x = left_x*cosroll + up_x*sinroll
- left_y = left_y*cosroll + up_y*sinroll
- left_z = left_z*cosroll + up_z*sinroll
- up_x = up_x*cosroll - old_left_x*sinroll
- up_y = up_y*cosroll - old_left_y*sinroll
- up_z = up_z*cosroll - old_left_z*sinroll
- end
-
- -- translation vector
-
- trans_x, trans_y, trans_z = coo_x - roo*dir_x, coo_y - roo*dir_y, coo_z - roo*dir_z
-
- viewdict.MS = pdfconstant("M")
- viewdict.CO = roo
- viewdict.C2W = pdfarray {
- left_x, left_y, left_z,
- up_x, up_y, up_z,
- dir_x, dir_y, dir_z,
- trans_x, trans_y, trans_z,
- }
-
- end
-
- local aac = tonumber(view.aac) -- perspective projection
- local mag = tonumber(view.mag) -- ortho projection
-
- if aac and aac > 0 and aac < 180 then
- viewdict.P = pdfdictionary {
- Subtype = pdfconstant("P"),
- PS = pdfconstant("Min"),
- FOV = aac,
- }
- elseif mag and mag > 0 then
- viewdict.P = pdfdictionary {
- Subtype = pdfconstant("O"),
- OS = mag,
- }
- end
-
- local mode = modes[view.rendermode]
- if mode then
- viewdict.RM = pdfdictionary {
- Type = pdfconstant("3DRenderMode"),
- Subtype = pdfconstant(mode),
- }
- end
-
- -- crosssection
-
- local crosssection = checkedkey(view,"crosssection","table")
- if crosssection then
- local crossdict = pdfdictionary {
- Type = pdfconstant("3DCrossSection")
- }
-
- local c = checkedkey(crosssection,"point","table") or checkedkey(crosssection,"center","table")
- if c then
- crossdict.C = pdfarray { tonumber(c[1]) or 0, tonumber(c[2]) or 0, tonumber(c[3]) or 0 }
- end
-
- local normal = checkedkey(crosssection,"normal","table")
- if normal then
- local x, y, z = tonumber(normal[1]) or 0, tonumber(normal[2]) or 0, tonumber(normal[3]) or 0
- if sqrt(x*x + y*y + z*z) == 0 then
- x, y, z = 1, 0, 0
- end
- crossdict.O = pdfarray {
- pdfnull,
- atan2(-z,sqrt(x*x + y*y))*180/pi,
- atan2(y,x)*180/pi,
- }
- end
-
- local orient = checkedkey(crosssection,"orient","table")
- if orient then
- crossdict.O = pdfarray {
- tonumber(orient[1]) or 1,
- tonumber(orient[2]) or 0,
- tonumber(orient[3]) or 0,
- }
- end
-
- crossdict.IV = crosssection.intersection or false
- crossdict.ST = crosssection.transparent or false
-
- viewdict.SA = next(crossdict) and pdfarray { crossdict } -- maybe test if # > 1
- end
-
- local nodes = checkedkey(view,"nodes","table")
- if nodes then
- local nodelist = pdfarray()
- for i=1,#nodes do
- local node = checkedkey(nodes,i,"table")
- if node then
- local position = checkedkey(node,"position","table")
- nodelist[#nodelist+1] = pdfdictionary {
- Type = pdfconstant("3DNode"),
- N = node.name or ("node_" .. i), -- pdfunicode ?
- M = position and #position == 12 and pdfarray(position),
- V = node.visible or true,
- O = node.opacity or 0,
- RM = pdfdictionary {
- Type = pdfconstant("3DRenderMode"),
- Subtype = pdfconstant(node.rendermode or "Solid"),
- },
- }
- end
- end
- viewdict.NA = nodelist
- end
-
- return viewdict
-
-end
-
-local stored_js, stored_3d, stored_pr, streams = { }, { }, { }, { }
-
-local function insert3d(spec) -- width, height, factor, display, controls, label, foundname
-
- local width, height, factor = spec.width, spec.height, spec.factor or number.dimenfactors.bp
- local display, controls, label, foundname = spec.display, spec.controls, spec.label, spec.foundname
-
- local param = (display and parametersets[display]) or { }
- local streamparam = (controls and parametersets[controls]) or { }
- local name = "3D Artwork " .. (param.name or label or "Unknown")
-
- local activationdict = pdfdictionary {
- TB = pdfboolean(param.toolbar,true),
- NP = pdfboolean(param.tree,false),
- }
-
- local stream = streams[label]
- if not stream then
-
- local subtype, subdata = "U3D", io.loaddata(foundname) or ""
- if find(subdata,"^PRC") then
- subtype = "PRC"
- elseif find(subdata,"^U3D") then
- subtype = "U3D"
- elseif file.suffix(foundname) == "prc" then
- subtype = "PRC"
- end
-
- local attr = pdfdictionary {
- Type = pdfconstant("3D"),
- Subtype = pdfconstant(subtype),
- }
- local streamviews = checkedkey(streamparam, "views", "table")
- if streamviews then
- local list = pdfarray()
- for i=1,#streamviews do
- local v = checkedkey(streamviews, i, "table")
- if v then
- list[#list+1] = make3dview(v)
- end
- end
- attr.VA = list
- end
- if checkedkey(streamparam, "view", "table") then
- attr.DV = make3dview(streamparam.view)
- elseif checkedkey(streamparam, "view", "string") then
- attr.DV = streamparam.view
- end
- local js = checkedkey(streamparam, "js", "string")
- if js then
- local jsref = stored_js[js]
- if not jsref then
- jsref = pdfflushstreamfileobject(js)
- stored_js[js] = jsref
- end
- attr.OnInstantiate = pdfreference(jsref)
- end
- stored_3d[label] = pdfflushstreamfileobject(foundname,attr)
- stream = 1
- else
- stream = stream + 1
- end
- streams[label] = stream
-
- local name = pdfunicode(name)
-
- local annot = pdfdictionary {
- Subtype = pdfconstant("3D"),
- T = name,
- Contents = name,
- NM = name,
- ["3DD"] = pdfreference(stored_3d[label]),
- ["3DA"] = activationdict,
- }
- if checkedkey(param,"view","table") then
- annot["3DV"] = make3dview(param.view)
- elseif checkedkey(param,"view","string") then
- annot["3DV"] = param.view
- end
-
- local preview = checkedkey(param,"preview","string")
- if preview then
- activationdict.A = pdfconstant("XA")
- local tag = format("%s:%s:%s",label,stream,preview)
- local ref = stored_pr[tag]
- local figure -- declared here so the return below can see it
- if not ref then
- figure = img.immediatewrite {
- filename = preview,
- width = width,
- height = height
- }
- ref = figure.objnum
- stored_pr[tag] = ref
- end
- if ref then -- see back-pdf ** .. here we have a local /IM !
- local zero, one = pdfnumber(0), pdfnumber(1) -- not really needed
- local pw = pdfdictionary {
- Type = pdfconstant("XObject"),
- Subtype = pdfconstant("Form"),
- FormType = one,
- BBox = pdfarray { zero, zero, pdfnumber(factor*width), pdfnumber(factor*height) },
- Matrix = pdfarray { one, zero, zero, one, zero, zero },
- Resources = pdfdictionary {
- XObject = pdfdictionary {
- IM = pdfreference(ref)
- }
- },
- ExtGState = pdfdictionary {
- GS = pdfdictionary {
- Type = pdfconstant("ExtGState"),
- CA = one,
- ca = one,
- }
- },
- ProcSet = pdfarray { pdfconstant("PDF"), pdfconstant("ImageC") },
- }
- local pwd = pdfflushstreamobject(format("q /GS gs %f 0 0 %f 0 0 cm /IM Do Q",factor*width,factor*height),pw)
- annot.AP = pdfdictionary {
- N = pdfreference(pwd)
- }
- end
- return annot, figure, ref
- else
- activationdict.A = pdfconstant("PV")
- return annot, nil, nil
- end
-end
-
-function nodeinjections.insertu3d(spec)
- local annotation, preview, ref = insert3d { -- just spec
- foundname = spec.foundname,
- width = spec.width,
- height = spec.height,
- factor = spec.factor,
- display = spec.display,
- controls = spec.controls,
- label = spec.label,
- }
- node.write(pdfannotation_node(spec.width,spec.height,0,annotation()))
-end
+if not modules then modules = { } end modules ['lpdf-u3d'] = {
+ version = 1.001,
+ comment = "companion to lpdf-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- The following code is based on a working prototype provided
+-- by Michael Vidiassov. It is rewritten using the lpdf library
+-- and different checking is used. The macro calls are adapted
+-- (and will eventually be removed). The user interface needs
+-- an overhaul. There are some messy leftovers that will be
+-- removed in future versions.
+
+-- For some reason no one really tested this code so at some
+-- point we will end up with a reimplementation. For instance
+-- it makes sense to add the same activation code as with swf.
+
+local format, find = string.format, string.find
+local cos, sin, sqrt, pi, atan2, abs = math.cos, math.sin, math.sqrt, math.pi, math.atan2, math.abs
+
+local backends, lpdf = backends, lpdf
+
+local nodeinjections = backends.pdf.nodeinjections
+
+local pdfconstant = lpdf.constant
+local pdfboolean = lpdf.boolean
+local pdfnumber = lpdf.number
+local pdfunicode = lpdf.unicode
+local pdfdictionary = lpdf.dictionary
+local pdfarray = lpdf.array
+local pdfnull = lpdf.null
+local pdfreference = lpdf.reference
+local pdfflushstreamobject = lpdf.flushstreamobject
+local pdfflushstreamfileobject = lpdf.flushstreamfileobject
+
+local checkedkey = lpdf.checkedkey
+local limited = lpdf.limited
+
+local pdfannotation_node = nodes.pool.pdfannotation
+
+local schemes = table.tohash {
+ "Artwork", "None", "White", "Day", "Night", "Hard",
+ "Primary", "Blue", "Red", "Cube", "CAD", "Headlamp",
+}
+
+local modes = table.tohash {
+ "Solid", "SolidWireframe", "Transparent", "TransparentWireframe", "BoundingBox",
+ "TransparentBoundingBox", "TransparentBoundingBoxOutline", "Wireframe",
+ "ShadedWireframe", "HiddenWireframe", "Vertices", "ShadedVertices", "Illustration",
+ "SolidOutline", "ShadedIllustration",
+}
+
+local function normalize(x, y, z)
+ local modulo = sqrt(x*x + y*y + z*z);
+ if modulo ~= 0 then
+ return x/modulo, y/modulo, z/modulo
+ else
+ return x, y, z
+ end
+end
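+
+-- A quick commented sketch of what normalize does (plain Lua, kept as a comment
+-- so nothing runs at load time): a (3,4,0) vector becomes the unit vector
+-- (0.6,0.8,0), and the zero vector comes back unchanged instead of dividing by zero.
+--
+-- local x, y, z = normalize(3, 4, 0) -- 0.6  0.8  0
+-- local a, b, c = normalize(0, 0, 0) -- 0    0    0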
+
+local function rotate(vect_x,vect_y,vect_z, tet, axis_x,axis_y,axis_z)
+ -- rotate vect by tet about axis counterclockwise
+ local c, s = cos(tet*pi/180), sin(tet*pi/180)
+ local r = 1 - c
+ local n = sqrt(axis_x*axis_x+axis_y*axis_y+axis_z*axis_z)
+ axis_x, axis_y, axis_z = axis_x/n, axis_y/n, axis_z/n
+ return
+ (axis_x*axis_x*r+c )*vect_x + (axis_x*axis_y*r-axis_z*s)*vect_y + (axis_x*axis_z*r+axis_y*s)*vect_z,
+ (axis_x*axis_y*r+axis_z*s)*vect_x + (axis_y*axis_y*r+c )*vect_y + (axis_y*axis_z*r-axis_x*s)*vect_z,
+ (axis_x*axis_z*r-axis_y*s)*vect_x + (axis_y*axis_z*r+axis_x*s)*vect_y + (axis_z*axis_z*r+c )*vect_z
+end
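+
+-- Another commented sketch (plain Lua): this is Rodrigues' rotation written out,
+-- so rotating the x unit vector by 90 degrees counterclockwise about the z axis
+-- gives the y unit vector, up to floating point noise.
+--
+-- local x, y, z = rotate(1, 0, 0, 90, 0, 0, 1)
+-- print(x, y, z) -- ~0  1  ~0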
+
+local function make3dview(view)
+
+ local name = view.name
+ local name = pdfunicode(name ~= "" and name or "unknown view")
+
+ local viewdict = pdfdictionary {
+ Type = pdfconstant("3DView"),
+ XN = name,
+ IN = name,
+ NR = true,
+ }
+
+ local bg = checkedkey(view,"bg","table")
+ if bg then
+ viewdict.BG = pdfdictionary {
+ Type = pdfconstant("3DBG"),
+ C = pdfarray { limited(bg[1],1,1,1), limited(bg[2],1,1,1), limited(bg[3],1,1,1) },
+ }
+ end
+
+ local lights = checkedkey(view,"lights","string")
+ if lights and schemes[lights] then
+ viewdict.LS = pdfdictionary {
+ Type = pdfconstant("3DLightingScheme"),
+ Subtype = pdfconstant(lights),
+ }
+ end
+
+ -- camera position is taken from 3d model
+
+ local u3dview = checkedkey(view, "u3dview", "string")
+ if u3dview then
+ viewdict.MS = pdfconstant("U3D")
+ viewdict.U3DPath = u3dview
+ end
+
+ -- position the camera as given
+
+ local c2c = checkedkey(view, "c2c", "table")
+ local coo = checkedkey(view, "coo", "table")
+ local roo = checkedkey(view, "roo", "number")
+ local azimuth = checkedkey(view, "azimuth", "number")
+ local altitude = checkedkey(view, "altitude", "number")
+
+ if c2c or coo or roo or azimuth or altitude then
+
+ local pos = checkedkey(view, "pos", "table")
+ local dir = checkedkey(view, "dir", "table")
+ local upv = checkedkey(view, "upv", "table")
+ local roll = checkedkey(view, "roll", "number") -- an angle in degrees, so a number
+ local rot = checkedkey(view, "rot", "table") -- used by the MathGL style rotation below
+
+ local coo_x, coo_y, coo_z = 0, 0, 0
+ local dir_x, dir_y, dir_z = 0, 0, 0
+ local trans_x, trans_y, trans_z = 0, 0, 0
+ local left_x, left_y, left_z = 0, 0, 0
+ local up_x, up_y, up_z = 0, 0, 0
+
+ -- point camera is aimed at
+
+ if coo then
+ coo_x, coo_y, coo_z = tonumber(coo[1]) or 0, tonumber(coo[2]) or 0, tonumber(coo[3]) or 0
+ end
+
+ -- distance from camera to target
+
+ if roo then
+ roo = abs(roo)
+ end
+ if not roo or roo == 0 then
+ roo = 0.000000000000000001
+ end
+
+ -- set it via camera position
+
+ if pos then
+ dir_x = coo_x - (tonumber(pos[1]) or 0)
+ dir_y = coo_y - (tonumber(pos[2]) or 0)
+ dir_z = coo_z - (tonumber(pos[3]) or 0)
+ if not roo then
+ roo = sqrt(dir_x*dir_x + dir_y*dir_y + dir_z*dir_z)
+ end
+ if dir_x == 0 and dir_y == 0 and dir_z == 0 then dir_y = 1 end
+ dir_x, dir_y, dir_z = normalize(dir_x,dir_y,dir_z)
+ end
+
+ -- set it directly
+
+ if dir then
+ dir_x, dir_y, dir_z = tonumber(dir[1]) or 0, tonumber(dir[2]) or 0, tonumber(dir[3]) or 0
+ if dir_x == 0 and dir_y == 0 and dir_z == 0 then dir_y = 1 end
+ dir_x, dir_y, dir_z = normalize(dir_x,dir_y,dir_z)
+ end
+
+ -- set it movie15 style with vector from target to camera
+
+ if c2c then
+ dir_x, dir_y, dir_z = - (tonumber(c2c[1]) or 0), - (tonumber(c2c[2]) or 0), - (tonumber(c2c[3]) or 0)
+ if dir_x == 0 and dir_y == 0 and dir_z == 0 then dir_y = 1 end
+ dir_x, dir_y, dir_z = normalize(dir_x,dir_y,dir_z)
+ end
+
+ -- set it with azimuth and altitude
+
+ if altitude or azimuth then
+ dir_x, dir_y, dir_z = -1, 0, 0
+ if altitude then dir_x, dir_y, dir_z = rotate(dir_x,dir_y,dir_z, -altitude, 0,1,0) end
+ if azimuth then dir_x, dir_y, dir_z = rotate(dir_x,dir_y,dir_z, azimuth, 0,0,1) end
+ end
+
+ -- set it with rotation like in MathGL
+
+ if rot then
+ if dir_x == 0 and dir_y == 0 and dir_z == 0 then dir_z = -1 end
+ dir_x,dir_y,dir_z = rotate(dir_x,dir_y,dir_z, tonumber(rot[1]) or 0, 1,0,0)
+ dir_x,dir_y,dir_z = rotate(dir_x,dir_y,dir_z, tonumber(rot[2]) or 0, 0,1,0)
+ dir_x,dir_y,dir_z = rotate(dir_x,dir_y,dir_z, tonumber(rot[3]) or 0, 0,0,1)
+ end
+
+ -- set it with default movie15 orientation looking up y axis
+
+ if dir_x == 0 and dir_y == 0 and dir_z == 0 then dir_y = 1 end
+
+ -- left-vector
+ -- up-vector
+
+ if upv then
+ up_x, up_y, up_z = tonumber(upv[1]) or 0, tonumber(upv[2]) or 0, tonumber(upv[3]) or 0
+ else
+ -- set default up-vector
+ if abs(dir_x) == 0 and abs(dir_y) == 0 then
+ if dir_z < 0 then
+ up_y = 1 -- top view
+ else
+ up_y = -1 -- bottom view
+ end
+ else
+ -- other camera positions than top and bottom, up-vector = up_world - (up_world dot dir) dir
+ up_x, up_y, up_z = - dir_z*dir_x, - dir_z*dir_y, - dir_z*dir_z + 1
+ end
+ end
+
+ -- normalize up-vector
+
+ up_x, up_y, up_z = normalize(up_x,up_y,up_z)
+
+ -- left vector = up x dir
+
+ left_x, left_y, left_z = dir_z*up_y - dir_y*up_z, dir_x*up_z - dir_z*up_x, dir_y*up_x - dir_x*up_y
+
+ -- normalize left vector
+
+ left_x, left_y, left_z = normalize(left_x,left_y,left_z)
+
+ -- apply camera roll
+
+ if roll then
+ local sinroll = sin((roll/180.0)*pi)
+ local cosroll = cos((roll/180.0)*pi)
+ -- rotate left and up about dir, using the unrotated left so the frame stays orthonormal
+ local old_left_x, old_left_y, old_left_z = left_x, left_y, left_z
+ left_x = left_x*cosroll + up_x*sinroll
+ left_y = left_y*cosroll + up_y*sinroll
+ left_z = left_z*cosroll + up_z*sinroll
+ up_x = up_x*cosroll - old_left_x*sinroll
+ up_y = up_y*cosroll - old_left_y*sinroll
+ up_z = up_z*cosroll - old_left_z*sinroll
+ end
+
+ -- translation vector
+
+ trans_x, trans_y, trans_z = coo_x - roo*dir_x, coo_y - roo*dir_y, coo_z - roo*dir_z
+
+ viewdict.MS = pdfconstant("M")
+ viewdict.CO = roo
+ viewdict.C2W = pdfarray {
+ left_x, left_y, left_z,
+ up_x, up_y, up_z,
+ dir_x, dir_y, dir_z,
+ trans_x, trans_y, trans_z,
+ }
+
+ end
+
+ local aac = tonumber(view.aac) -- perspective projection
+ local mag = tonumber(view.mag) -- ortho projection
+
+ if aac and aac > 0 and aac < 180 then
+ viewdict.P = pdfdictionary {
+ Subtype = pdfconstant("P"),
+ PS = pdfconstant("Min"),
+ FOV = aac,
+ }
+ elseif mag and mag > 0 then
+ viewdict.P = pdfdictionary {
+ Subtype = pdfconstant("O"),
+ OS = mag,
+ }
+ end
+
+ local mode = modes[view.rendermode]
+ if mode then
+ viewdict.RM = pdfdictionary {
+ Type = pdfconstant("3DRenderMode"),
+ Subtype = pdfconstant(mode),
+ }
+ end
+
+ -- crosssection
+
+ local crosssection = checkedkey(view,"crosssection","table")
+ if crosssection then
+ local crossdict = pdfdictionary {
+ Type = pdfconstant("3DCrossSection")
+ }
+
+ local c = checkedkey(crosssection,"point","table") or checkedkey(crosssection,"center","table")
+ if c then
+ crossdict.C = pdfarray { tonumber(c[1]) or 0, tonumber(c[2]) or 0, tonumber(c[3]) or 0 }
+ end
+
+ local normal = checkedkey(crosssection,"normal","table")
+ if normal then
+ local x, y, z = tonumber(normal[1]) or 0, tonumber(normal[2]) or 0, tonumber(normal[3]) or 0
+ if sqrt(x*x + y*y + z*z) == 0 then
+ x, y, z = 1, 0, 0
+ end
+ crossdict.O = pdfarray {
+ pdfnull,
+ atan2(-z,sqrt(x*x + y*y))*180/pi,
+ atan2(y,x)*180/pi,
+ }
+ end
+
+ local orient = checkedkey(crosssection,"orient","table")
+ if orient then
+ crossdict.O = pdfarray {
+ tonumber(orient[1]) or 1,
+ tonumber(orient[2]) or 0,
+ tonumber(orient[3]) or 0,
+ }
+ end
+
+ crossdict.IV = crosssection.intersection or false
+ crossdict.ST = crosssection.transparent or false
+
+ viewdict.SA = next(crossdict) and pdfarray { crossdict } -- maybe test if # > 1
+ end
+
+ local nodes = checkedkey(view,"nodes","table")
+ if nodes then
+ local nodelist = pdfarray()
+ for i=1,#nodes do
+ local node = checkedkey(nodes,i,"table")
+ if node then
+ local position = checkedkey(node,"position","table")
+ nodelist[#nodelist+1] = pdfdictionary {
+ Type = pdfconstant("3DNode"),
+ N = node.name or ("node_" .. i), -- pdfunicode ?
+ M = position and #position == 12 and pdfarray(position),
+ V = node.visible or true,
+ O = node.opacity or 0,
+ RM = pdfdictionary {
+ Type = pdfconstant("3DRenderMode"),
+ Subtype = pdfconstant(node.rendermode or "Solid"),
+ },
+ }
+ end
+ end
+ viewdict.NA = nodelist
+ end
+
+ return viewdict
+
+end
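+
+-- For reference, a commented example of the kind of view table that make3dview
+-- inspects; every key is optional and the values below are made up. The function
+-- returns a pdfdictionary that ends up as a /3DView entry in the stream
+-- attributes or in the annotation.
+--
+-- local exampleview = {
+--     name       = "front",
+--     bg         = { 1, 1, 1 }, -- background color
+--     lights     = "CAD", -- one of the lighting schemes listed above
+--     coo        = { 0, 0, 0 }, -- center of orbit
+--     roo        = 250, -- radius of orbit
+--     azimuth    = 30,
+--     altitude   = 60,
+--     aac        = 45, -- aperture angle, i.e. perspective projection
+--     rendermode = "SolidWireframe",
+--     -- crosssection = { normal = { 0, 0, 1 }, transparent = true },
+-- }
+--
+-- make3dview(exampleview)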
+
+local stored_js, stored_3d, stored_pr, streams = { }, { }, { }, { }
+
+local function insert3d(spec) -- width, height, factor, display, controls, label, foundname
+
+ local width, height, factor = spec.width, spec.height, spec.factor or number.dimenfactors.bp
+ local display, controls, label, foundname = spec.display, spec.controls, spec.label, spec.foundname
+
+ local param = (display and parametersets[display]) or { }
+ local streamparam = (controls and parametersets[controls]) or { }
+ local name = "3D Artwork " .. (param.name or label or "Unknown")
+
+ local activationdict = pdfdictionary {
+ TB = pdfboolean(param.toolbar,true),
+ NP = pdfboolean(param.tree,false),
+ }
+
+ local stream = streams[label]
+ if not stream then
+
+ local subtype, subdata = "U3D", io.loaddata(foundname) or ""
+ if find(subdata,"^PRC") then
+ subtype = "PRC"
+ elseif find(subdata,"^U3D") then
+ subtype = "U3D"
+ elseif file.suffix(foundname) == "prc" then
+ subtype = "PRC"
+ end
+
+ local attr = pdfdictionary {
+ Type = pdfconstant("3D"),
+ Subtype = pdfconstant(subtype),
+ }
+ local streamviews = checkedkey(streamparam, "views", "table")
+ if streamviews then
+ local list = pdfarray()
+ for i=1,#streamviews do
+ local v = checkedkey(streamviews, i, "table")
+ if v then
+ list[#list+1] = make3dview(v)
+ end
+ end
+ attr.VA = list
+ end
+ if checkedkey(streamparam, "view", "table") then
+ attr.DV = make3dview(streamparam.view)
+ elseif checkedkey(streamparam, "view", "string") then
+ attr.DV = streamparam.view
+ end
+ local js = checkedkey(streamparam, "js", "string")
+ if js then
+ local jsref = stored_js[js]
+ if not jsref then
+ jsref = pdfflushstreamfileobject(js)
+ stored_js[js] = jsref
+ end
+ attr.OnInstantiate = pdfreference(jsref)
+ end
+ stored_3d[label] = pdfflushstreamfileobject(foundname,attr)
+ stream = 1
+ else
+ stream = stream + 1
+ end
+ streams[label] = stream
+
+ local name = pdfunicode(name)
+
+ local annot = pdfdictionary {
+ Subtype = pdfconstant("3D"),
+ T = name,
+ Contents = name,
+ NM = name,
+ ["3DD"] = pdfreference(stored_3d[label]),
+ ["3DA"] = activationdict,
+ }
+ if checkedkey(param,"view","table") then
+ annot["3DV"] = make3dview(param.view)
+ elseif checkedkey(param,"view","string") then
+ annot["3DV"] = param.view
+ end
+
+ local preview = checkedkey(param,"preview","string")
+ if preview then
+ activationdict.A = pdfconstant("XA")
+ local tag = format("%s:%s:%s",label,stream,preview)
+ local ref = stored_pr[tag]
+ local figure -- declared here so the return below can see it
+ if not ref then
+ figure = img.immediatewrite {
+ filename = preview,
+ width = width,
+ height = height
+ }
+ ref = figure.objnum
+ stored_pr[tag] = ref
+ end
+ if ref then -- see back-pdf ** .. here we have a local /IM !
+ local zero, one = pdfnumber(0), pdfnumber(1) -- not really needed
+ local pw = pdfdictionary {
+ Type = pdfconstant("XObject"),
+ Subtype = pdfconstant("Form"),
+ FormType = one,
+ BBox = pdfarray { zero, zero, pdfnumber(factor*width), pdfnumber(factor*height) },
+ Matrix = pdfarray { one, zero, zero, one, zero, zero },
+ Resources = pdfdictionary {
+ XObject = pdfdictionary {
+ IM = pdfreference(ref)
+ }
+ },
+ ExtGState = pdfdictionary {
+ GS = pdfdictionary {
+ Type = pdfconstant("ExtGState"),
+ CA = one,
+ ca = one,
+ }
+ },
+ ProcSet = pdfarray { pdfconstant("PDF"), pdfconstant("ImageC") },
+ }
+ local pwd = pdfflushstreamobject(format("q /GS gs %f 0 0 %f 0 0 cm /IM Do Q",factor*width,factor*height),pw)
+ annot.AP = pdfdictionary {
+ N = pdfreference(pwd)
+ }
+ end
+ return annot, figure, ref
+ else
+ activationdict.A = pdfconstant("PV")
+ return annot, nil, nil
+ end
+end
+
+function nodeinjections.insertu3d(spec)
+ local annotation, preview, ref = insert3d { -- just spec
+ foundname = spec.foundname,
+ width = spec.width,
+ height = spec.height,
+ factor = spec.factor,
+ display = spec.display,
+ controls = spec.controls,
+ label = spec.label,
+ }
+ node.write(pdfannotation_node(spec.width,spec.height,0,annotation()))
+end
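+
+-- A commented usage sketch; the filename, label and dimensions are made up. The
+-- spec mirrors the fields picked up in insert3d above, where display and controls
+-- name parametersets entries that in turn provide toolbar, view(s), js and preview.
+--
+-- backends.pdf.nodeinjections.insertu3d {
+--     foundname = "gear.u3d",
+--     width     = 400 * 65536, -- dimensions in scaled points
+--     height    = 300 * 65536,
+--     factor    = number.dimenfactors.bp,
+--     display   = "myscene", -- parametersets entry: toolbar, tree, view, preview, ...
+--     controls  = "myscene", -- parametersets entry: views, view, js, ...
+--     label     = "gear",
+-- }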
diff --git a/tex/context/base/lpdf-wid.lua b/tex/context/base/lpdf-wid.lua
index 9ea4744f1..20fc14679 100644
--- a/tex/context/base/lpdf-wid.lua
+++ b/tex/context/base/lpdf-wid.lua
@@ -1,645 +1,645 @@
-if not modules then modules = { } end modules ['lpdf-wid'] = {
- version = 1.001,
- comment = "companion to lpdf-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local gmatch, gsub, find, lower, format = string.gmatch, string.gsub, string.find, string.lower, string.format
-local stripstring = string.strip
-local texbox, texcount = tex.box, tex.count
-local settings_to_array = utilities.parsers.settings_to_array
-local settings_to_hash = utilities.parsers.settings_to_hash
-
-local report_media = logs.reporter("backend","media")
-local report_attachment = logs.reporter("backend","attachment")
-
-local backends, lpdf, nodes = backends, lpdf, nodes
-
-local nodeinjections = backends.pdf.nodeinjections
-local codeinjections = backends.pdf.codeinjections
-local registrations = backends.pdf.registrations
-
-local executers = structures.references.executers
-local variables = interfaces.variables
-
-local v_hidden = variables.hidden
-local v_normal = variables.normal
-local v_auto = variables.auto
-local v_embed = variables.embed
-local v_unknown = variables.unknown
-local v_max = variables.max
-
-local pdfconstant = lpdf.constant
-local pdfdictionary = lpdf.dictionary
-local pdfarray = lpdf.array
-local pdfreference = lpdf.reference
-local pdfunicode = lpdf.unicode
-local pdfstring = lpdf.string
-local pdfboolean = lpdf.boolean
-local pdfcolorspec = lpdf.colorspec
-local pdfflushobject = lpdf.flushobject
-local pdfflushstreamobject = lpdf.flushstreamobject
-local pdfflushstreamfileobject = lpdf.flushstreamfileobject
-local pdfreserveannotation = lpdf.reserveannotation
-local pdfreserveobject = lpdf.reserveobject
-local pdfpagereference = lpdf.pagereference
-local pdfshareobjectreference = lpdf.shareobjectreference
-
-local nodepool = nodes.pool
-
-local pdfannotation_node = nodepool.pdfannotation
-
-local hpack_node = node.hpack
-local write_node = node.write -- test context(...) instead
-
-local pdf_border = pdfarray { 0, 0, 0 } -- can be shared
-
--- symbols
-
-local presets = { } -- xforms
-
-local function registersymbol(name,n)
- presets[name] = pdfreference(n)
-end
-
-local function registeredsymbol(name)
- return presets[name]
-end
-
-local function presetsymbol(symbol)
- if not presets[symbol] then
- context.predefinesymbol { symbol }
- end
-end
-
-local function presetsymbollist(list)
- if list then
- for symbol in gmatch(list,"[^, ]+") do
- presetsymbol(symbol)
- end
- end
-end
-
-codeinjections.registersymbol = registersymbol
-codeinjections.registeredsymbol = registeredsymbol
-codeinjections.presetsymbol = presetsymbol
-codeinjections.presetsymbollist = presetsymbollist
-
--- comments
-
--- local symbols = {
--- Addition = pdfconstant("NewParagraph"),
--- Attachment = pdfconstant("Attachment"),
--- Balloon = pdfconstant("Comment"),
--- Check = pdfconstant("Check Mark"),
--- CheckMark = pdfconstant("Check Mark"),
--- Circle = pdfconstant("Circle"),
--- Cross = pdfconstant("Cross"),
--- CrossHairs = pdfconstant("Cross Hairs"),
--- Graph = pdfconstant("Graph"),
--- InsertText = pdfconstant("Insert Text"),
--- New = pdfconstant("Insert"),
--- Paperclip = pdfconstant("Paperclip"),
--- RightArrow = pdfconstant("Right Arrow"),
--- RightPointer = pdfconstant("Right Pointer"),
--- Star = pdfconstant("Star"),
--- Tag = pdfconstant("Tag"),
--- Text = pdfconstant("Note"),
--- TextNote = pdfconstant("Text Note"),
--- UpArrow = pdfconstant("Up Arrow"),
--- UpLeftArrow = pdfconstant("Up-Left Arrow"),
--- }
-
-local attachment_symbols = {
- Graph = pdfconstant("GraphPushPin"),
- Paperclip = pdfconstant("PaperclipTag"),
- Pushpin = pdfconstant("PushPin"),
-}
-
-attachment_symbols.PushPin = attachment_symbols.Pushpin
-attachment_symbols.Default = attachment_symbols.Pushpin
-
-local comment_symbols = {
- Comment = pdfconstant("Comment"),
- Help = pdfconstant("Help"),
- Insert = pdfconstant("Insert"),
- Key = pdfconstant("Key"),
- Newparagraph = pdfconstant("NewParagraph"),
- Note = pdfconstant("Note"),
- Paragraph = pdfconstant("Paragraph"),
-}
-
-comment_symbols.NewParagraph = comment_symbols.Newparagraph
-comment_symbols.Default = comment_symbols.Note
-
-local function analyzesymbol(symbol,collection)
- if not symbol or symbol == "" then
- return collection.Default, nil
- elseif collection[symbol] then
- return collection[symbol], nil
- else
- local setn, setr, setd
- local set = settings_to_array(symbol)
- if #set == 1 then
- setn, setr, setd = set[1], set[1], set[1]
- elseif #set == 2 then
- setn, setr, setd = set[1], set[1], set[2]
- else
- setn, setr, setd = set[1], set[2], set[3]
- end
- local appearance = pdfdictionary {
- N = setn and registeredsymbol(setn),
- R = setr and registeredsymbol(setr),
- D = setd and registeredsymbol(setd),
- }
- local appearanceref = pdfshareobjectreference(appearance)
- return nil, appearanceref
- end
-end
-
-local function analyzelayer(layer)
- -- todo: (specification.layer ~= "" and pdfreference(specification.layer)) or nil, -- todo: ref to layer
-end
-
-local function analyzecolor(colorvalue,colormodel)
- local cvalue = colorvalue and tonumber(colorvalue)
- local cmodel = colormodel and tonumber(colormodel) or 3
- return cvalue and pdfarray { lpdf.colorvalues(cmodel,cvalue) } or nil
-end
-
-local function analyzetransparency(transparencyvalue)
- local tvalue = transparencyvalue and tonumber(transparencyvalue)
- return tvalue and lpdf.transparencyvalue(tvalue) or nil
-end
-
--- Attachments
-
-local nofattachments, attachments, filestreams, referenced = 0, { }, { }, { }
-
-local ignorereferenced = true -- fuzzy pdf spec .. twice in attachment list, can become an option
-
-local function flushembeddedfiles()
- if next(filestreams) then
- local e = pdfarray()
- for tag, reference in next, filestreams do
- if not reference then
- report_attachment("unreferenced file, tag %a",tag)
- elseif referenced[tag] == "hidden" then
- e[#e+1] = pdfstring(tag)
- e[#e+1] = reference -- already a reference
- else
- -- messy spec ... when annot not in named else twice in menu list acrobat
- end
- end
- lpdf.addtonames("EmbeddedFiles",pdfreference(pdfflushobject(pdfdictionary{ Names = e })))
- end
-end
-
-lpdf.registerdocumentfinalizer(flushembeddedfiles,"embeddedfiles")
-
-function codeinjections.embedfile(specification)
- local data = specification.data
- local filename = specification.file
- local name = specification.name or ""
- local title = specification.title or ""
- local hash = specification.hash or filename
- local keepdir = specification.keepdir -- can change
- local usedname = specification.usedname
- if filename == "" then
- filename = nil
- end
- if data then
- local r = filestreams[hash]
- if r == false then
- return nil
- elseif r then
- return r
- elseif not filename then
- filename = specification.tag
- if not filename or filename == "" then
- filename = specification.registered
- end
- if not filename or filename == "" then
- filename = hash
- end
- end
- else
- if not filename then
- return nil
- end
- local r = filestreams[hash]
- if r == false then
- return nil
- elseif r then
- return r
- else
- local foundname = resolvers.findbinfile(filename) or ""
- if foundname == "" or not lfs.isfile(foundname) then
- filestreams[filename] = false
- return nil
- else
- specification.foundname = foundname
- end
- end
- end
- usedname = usedname ~= "" and usedname or filename
- local basename = keepdir == true and usedname or file.basename(usedname)
-local basename = gsub(basename,"%./","")
- local savename = file.addsuffix(name ~= "" and name or basename,"txt") -- else no valid file
- local a = pdfdictionary { Type = pdfconstant("EmbeddedFile") }
- local f
- if data then
- f = pdfflushstreamobject(data,a)
- specification.data = true -- signal that still data but already flushed
- else
- local foundname = specification.foundname or filename
- f = pdfflushstreamfileobject(foundname,a)
- end
- local d = pdfdictionary {
- Type = pdfconstant("Filespec"),
- F = pdfstring(savename),
- UF = pdfstring(savename),
- EF = pdfdictionary { F = pdfreference(f) },
- Desc = title ~= "" and pdfunicode(title) or nil,
- }
- local r = pdfreference(pdfflushobject(d))
- filestreams[hash] = r
- return r
-end
-
-function nodeinjections.attachfile(specification)
- local registered = specification.registered or ""
- local data = specification.data
- local hash
- local filename
- if data then
- hash = md5.HEX(data)
- else
- filename = specification.file
- if not filename or filename == "" then
- report_attachment("no file specified, using registered %a instead",registered)
- filename = registered
- specification.file = registered
- end
- local foundname = resolvers.findbinfile(filename) or ""
- if foundname == "" or not lfs.isfile(foundname) then
- report_attachment("invalid filename %a, ignoring registered %a",filename,registered)
- return nil
- else
- specification.foundname = foundname
- end
- hash = filename
- end
- specification.hash = hash
- nofattachments = nofattachments + 1
- local registered = specification.registered or ""
- local title = specification.title or ""
- local subtitle = specification.subtitle or ""
- local author = specification.author or ""
- if registered == "" then
- registered = filename
- end
- if author == "" then
- author = title
- title = ""
- end
- if author == "" then
- author = filename or ""
- end
- if title == "" then
- title = registered
- end
- local aref = attachments[registered]
- if not aref then
- aref = codeinjections.embedfile(specification)
- attachments[registered] = aref
- end
- if not aref then
- report_attachment("skipping attachment, registered %a",registered)
- -- already reported
- elseif specification.method == v_hidden then
- referenced[hash] = "hidden"
- else
- referenced[hash] = "annotation"
- local name, appearance = analyzesymbol(specification.symbol,attachment_symbols)
- local d = pdfdictionary {
- Subtype = pdfconstant("FileAttachment"),
- FS = aref,
- Contents = pdfunicode(title),
- Name = name,
- NM = pdfstring(format("attachment:%s",nofattachments)),
- T = author ~= "" and pdfunicode(author) or nil,
- Subj = subtitle ~= "" and pdfunicode(subtitle) or nil,
- C = analyzecolor(specification.colorvalue,specification.colormodel),
- CA = analyzetransparency(specification.transparencyvalue),
- AP = appearance,
- OC = analyzelayer(specification.layer),
- }
- local width, height, depth = specification.width or 0, specification.height or 0, specification.depth
- local box = hpack_node(pdfannotation_node(width,height,depth,d()))
- box.width, box.height, box.depth = width, height, depth
- return box
- end
-end
-
-function codeinjections.attachmentid(filename) -- not used in context
- return filestreams[filename]
-end
-
-local nofcomments, usepopupcomments, stripleading = 0, false, true
-
-local defaultattributes = {
- ["xmlns"] = "http://www.w3.org/1999/xhtml",
- ["xmlns:xfa"] = "http://www.xfa.org/schema/xfa-data/1.0/",
- ["xfa:contentType"] = "text/html",
- ["xfa:APIVersion"] = "Acrobat:8.0.0",
- ["xfa:spec"] = "2.4",
-}
-
-local function checkcontent(text,option)
- if option and option.xml then
- local root = xml.convert(text)
- if root and not root.er then
- xml.checkbom(root)
- local body = xml.first(root,"/body")
- if body then
- local at = body.at
- for k, v in next, defaultattributes do
- if not at[k] then
- at[k] = v
- end
- end
- -- local content = xml.textonly(root)
- local richcontent = xml.tostring(root)
- return nil, pdfunicode(richcontent)
- end
- end
- end
- return pdfunicode(text)
-end
-
-function nodeinjections.comment(specification) -- brrr: seems to be done twice
- nofcomments = nofcomments + 1
- local text = stripstring(specification.data or "")
- if stripleading then
- text = gsub(text,"[\n\r] *","\n")
- end
- local name, appearance = analyzesymbol(specification.symbol,comment_symbols)
- local tag = specification.tag or "" -- this is somewhat messy as recent
- local title = specification.title or "" -- versions of acrobat see the title
- local subtitle = specification.subtitle or "" -- as author
- local author = specification.author or ""
- local option = settings_to_hash(specification.option or "")
- if author == "" then
- if title == "" then
- title = tag
- end
- else
- if subtitle == "" then
- subtitle = title
- elseif title ~= "" then
- subtitle = subtitle .. ", " .. title
- end
- title = author
- end
- local content, richcontent = checkcontent(text,option)
- local d = pdfdictionary {
- Subtype = pdfconstant("Text"),
- Open = option[v_max] and pdfboolean(true) or nil,
- Contents = content,
- RC = richcontent,
- T = title ~= "" and pdfunicode(title) or nil,
- Subj = subtitle ~= "" and pdfunicode(subtitle) or nil,
- C = analyzecolor(specification.colorvalue,specification.colormodel),
- CA = analyzetransparency(specification.transparencyvalue),
- OC = analyzelayer(specification.layer),
- Name = name,
- NM = pdfstring(format("comment:%s",nofcomments)),
- AP = appearance,
- }
- local width, height, depth = specification.width or 0, specification.height or 0, specification.depth
- local box
- if usepopupcomments then
- -- rather useless as we can hide/vide
- local nd = pdfreserveannotation()
- local nc = pdfreserveannotation()
- local c = pdfdictionary {
- Subtype = pdfconstant("Popup"),
- Parent = pdfreference(nd),
- }
- d.Popup = pdfreference(nc)
- box = hpack_node(
- pdfannotation_node(0,0,0,d(),nd),
- pdfannotation_node(width,height,depth,c(),nc)
- )
- else
- box = hpack_node(pdfannotation_node(width,height,depth,d()))
- end
- box.width, box.height, box.depth = width, height, depth -- redundant
- return box
-end
-
--- rendering stuff
---
--- object_1 -> <> >>
--- object_2 -> <> >>
--- rendering -> <>
---
--- we only work forward here (currently)
--- annotation is to be packed at the tex end
-
--- aiff audio/aiff
--- au audio/basic
--- avi video/avi
--- mid audio/midi
--- mov video/quicktime
--- mp3 audio/x-mp3 (mpeg)
--- mp4 audio/mp4
--- mp4 video/mp4
--- mpeg video/mpeg
--- smil application/smil
--- swf application/x-shockwave-flash
-
--- P media play parameters (possibly /BE for controls etc.)
--- A boolean (audio)
--- C boolean (captions)
--- O boolean (overdubs)
--- S boolean (subtitles)
--- PL pdfconstant("ADBE_MCI"),
-
--- F = flags,
--- T = title,
--- Contents = rubbish,
--- AP = irrelevant,
-
--- sound is different, no window (or zero) so we need to collect them and
--- force them if not set
-
-local ms, mu, mf = { }, { }, { }
-
-local function delayed(label)
- local a = pdfreserveannotation()
- mu[label] = a
- return pdfreference(a)
-end
-
-local function insertrenderingwindow(specification)
- local label = specification.label
- local openpage = specification.openpage
- local closepage = specification.closepage
- if specification.option == v_auto then
- if openpageaction then
- -- \handlereferenceactions{\v!StartRendering{#2}}
- end
- if closepageaction then
- -- \handlereferenceactions{\v!StopRendering {#2}}
- end
- end
- local actions = nil
- if openpage or closepage then
- actions = pdfdictionary {
- PO = (openpage and lpdf.action(openpage )) or nil,
- PC = (closepage and lpdf.action(closepage)) or nil,
- }
- end
- local page = tonumber(specification.page) or texcount.realpageno -- todo
- local r = mu[label] or pdfreserveannotation() -- why the reserve here?
- local a = pdfdictionary {
- S = pdfconstant("Rendition"),
- R = mf[label],
- OP = 0,
- AN = pdfreference(r),
- }
- local d = pdfdictionary {
- Subtype = pdfconstant("Screen"),
- P = pdfreference(pdfpagereference(page)),
- A = a, -- needed in order to make the annotation clickable (i.e. don't bark)
- Border = pdf_border,
- AA = actions,
- }
- local width = specification.width or 0
- local height = specification.height or 0
- if height == 0 or width == 0 then
- -- todo: sound needs no window
- end
- write_node(pdfannotation_node(width,height,0,d(),r)) -- save ref
- return pdfreference(r)
-end
-
--- some dictionaries can have a MH (must honor) or BE (best effort) capsule
-
-local function insertrendering(specification)
- local label = specification.label
- local option = settings_to_hash(specification.option)
- if not mf[label] then
- local filename = specification.filename
- local isurl = find(filename,"://")
- --~ local start = pdfdictionary {
- --~ Type = pdfconstant("MediaOffset"),
- --~ S = pdfconstant("T"), -- time
- --~ T = pdfdictionary { -- time
- --~ Type = pdfconstant("Timespan"),
- --~ S = pdfconstant("S"),
- --~ V = 3, -- time in seconds
- --~ },
- --~ }
- --~ local start = pdfdictionary {
- --~ Type = pdfconstant("MediaOffset"),
- --~ S = pdfconstant("F"), -- frame
- --~ F = 100 -- framenumber
- --~ }
- --~ local start = pdfdictionary {
- --~ Type = pdfconstant("MediaOffset"),
- --~ S = pdfconstant("M"), -- mark
- --~ M = "somemark",
- --~ }
- --~ local parameters = pdfdictionary {
- --~ BE = pdfdictionary {
- --~ B = start,
- --~ }
- --~ }
- --~ local parameters = pdfdictionary {
- --~ Type = pdfconstant(MediaPermissions),
- --~ TF = pdfstring("TEMPALWAYS") }, -- TEMPNEVER TEMPEXTRACT TEMPACCESS TEMPALWAYS
- --~ }
- local descriptor = pdfdictionary {
- Type = pdfconstant("Filespec"),
- F = filename,
- }
- if isurl then
- descriptor.FS = pdfconstant("URL")
- elseif option[v_embed] then
- descriptor.EF = codeinjections.embedfile { file = filename }
- end
- local clip = pdfdictionary {
- Type = pdfconstant("MediaClip"),
- S = pdfconstant("MCD"),
- N = label,
- CT = specification.mime,
- Alt = pdfarray { "", "file not found" }, -- language id + message
- D = pdfreference(pdfflushobject(descriptor)),
- -- P = pdfreference(pdfflushobject(parameters)),
- }
- local rendition = pdfdictionary {
- Type = pdfconstant("Rendition"),
- S = pdfconstant("MR"),
- N = label,
- C = pdfreference(pdfflushobject(clip)),
- }
- mf[label] = pdfreference(pdfflushobject(rendition))
- end
-end
-
-local function insertrenderingobject(specification) -- todo
- local label = specification.label
- if not mf[label] then
- report_media("unknown medium, label %a",label)
- local clip = pdfdictionary { -- does not work that well one level up
- Type = pdfconstant("MediaClip"),
- S = pdfconstant("MCD"),
- N = label,
- D = pdfreference(unknown), -- not label but objectname, hm .. todo?
- }
- local rendition = pdfdictionary {
- Type = pdfconstant("Rendition"),
- S = pdfconstant("MR"),
- N = label,
- C = pdfreference(pdfflushobject(clip)),
- }
- mf[label] = pdfreference(pdfflushobject(rendition))
- end
-end
-
-function codeinjections.processrendering(label)
- local specification = interactions.renderings.rendering(label)
- if not specification then
- -- error
- elseif specification.type == "external" then
- insertrendering(specification)
- else
- insertrenderingobject(specification)
- end
-end
-
-function codeinjections.insertrenderingwindow(specification)
- local label = specification.label
- codeinjections.processrendering(label)
- ms[label] = insertrenderingwindow(specification)
-end
-
-local function set(operation,arguments)
- codeinjections.processrendering(arguments)
- return pdfdictionary {
- S = pdfconstant("Rendition"),
- OP = operation,
- R = mf[arguments],
- AN = ms[arguments] or delayed(arguments),
- }
-end
-
-function executers.startrendering (arguments) return set(0,arguments) end
-function executers.stoprendering (arguments) return set(1,arguments) end
-function executers.pauserendering (arguments) return set(2,arguments) end
-function executers.resumerendering(arguments) return set(3,arguments) end
+if not modules then modules = { } end modules ['lpdf-wid'] = {
+ version = 1.001,
+ comment = "companion to lpdf-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local gmatch, gsub, find, lower, format = string.gmatch, string.gsub, string.find, string.lower, string.format
+local stripstring = string.strip
+local texbox, texcount = tex.box, tex.count
+local settings_to_array = utilities.parsers.settings_to_array
+local settings_to_hash = utilities.parsers.settings_to_hash
+
+local report_media = logs.reporter("backend","media")
+local report_attachment = logs.reporter("backend","attachment")
+
+local backends, lpdf, nodes = backends, lpdf, nodes
+
+local nodeinjections = backends.pdf.nodeinjections
+local codeinjections = backends.pdf.codeinjections
+local registrations = backends.pdf.registrations
+
+local executers = structures.references.executers
+local variables = interfaces.variables
+
+local v_hidden = variables.hidden
+local v_normal = variables.normal
+local v_auto = variables.auto
+local v_embed = variables.embed
+local v_unknown = variables.unknown
+local v_max = variables.max
+
+local pdfconstant = lpdf.constant
+local pdfdictionary = lpdf.dictionary
+local pdfarray = lpdf.array
+local pdfreference = lpdf.reference
+local pdfunicode = lpdf.unicode
+local pdfstring = lpdf.string
+local pdfboolean = lpdf.boolean
+local pdfcolorspec = lpdf.colorspec
+local pdfflushobject = lpdf.flushobject
+local pdfflushstreamobject = lpdf.flushstreamobject
+local pdfflushstreamfileobject = lpdf.flushstreamfileobject
+local pdfreserveannotation = lpdf.reserveannotation
+local pdfreserveobject = lpdf.reserveobject
+local pdfpagereference = lpdf.pagereference
+local pdfshareobjectreference = lpdf.shareobjectreference
+
+local nodepool = nodes.pool
+
+local pdfannotation_node = nodepool.pdfannotation
+
+local hpack_node = node.hpack
+local write_node = node.write -- test context(...) instead
+
+local pdf_border = pdfarray { 0, 0, 0 } -- can be shared
+
+-- symbols
+
+local presets = { } -- xforms
+
+local function registersymbol(name,n)
+ presets[name] = pdfreference(n)
+end
+
+local function registeredsymbol(name)
+ return presets[name]
+end
+
+local function presetsymbol(symbol)
+ if not presets[symbol] then
+ context.predefinesymbol { symbol }
+ end
+end
+
+local function presetsymbollist(list)
+ if list then
+ for symbol in gmatch(list,"[^, ]+") do
+ presetsymbol(symbol)
+ end
+ end
+end
+
+codeinjections.registersymbol = registersymbol
+codeinjections.registeredsymbol = registeredsymbol
+codeinjections.presetsymbol = presetsymbol
+codeinjections.presetsymbollist = presetsymbollist
+
+-- comments
+
+-- local symbols = {
+-- Addition = pdfconstant("NewParagraph"),
+-- Attachment = pdfconstant("Attachment"),
+-- Balloon = pdfconstant("Comment"),
+-- Check = pdfconstant("Check Mark"),
+-- CheckMark = pdfconstant("Check Mark"),
+-- Circle = pdfconstant("Circle"),
+-- Cross = pdfconstant("Cross"),
+-- CrossHairs = pdfconstant("Cross Hairs"),
+-- Graph = pdfconstant("Graph"),
+-- InsertText = pdfconstant("Insert Text"),
+-- New = pdfconstant("Insert"),
+-- Paperclip = pdfconstant("Paperclip"),
+-- RightArrow = pdfconstant("Right Arrow"),
+-- RightPointer = pdfconstant("Right Pointer"),
+-- Star = pdfconstant("Star"),
+-- Tag = pdfconstant("Tag"),
+-- Text = pdfconstant("Note"),
+-- TextNote = pdfconstant("Text Note"),
+-- UpArrow = pdfconstant("Up Arrow"),
+-- UpLeftArrow = pdfconstant("Up-Left Arrow"),
+-- }
+
+local attachment_symbols = {
+ Graph = pdfconstant("GraphPushPin"),
+ Paperclip = pdfconstant("PaperclipTag"),
+ Pushpin = pdfconstant("PushPin"),
+}
+
+attachment_symbols.PushPin = attachment_symbols.Pushpin
+attachment_symbols.Default = attachment_symbols.Pushpin
+
+local comment_symbols = {
+ Comment = pdfconstant("Comment"),
+ Help = pdfconstant("Help"),
+ Insert = pdfconstant("Insert"),
+ Key = pdfconstant("Key"),
+ Newparagraph = pdfconstant("NewParagraph"),
+ Note = pdfconstant("Note"),
+ Paragraph = pdfconstant("Paragraph"),
+}
+
+comment_symbols.NewParagraph = comment_symbols.Newparagraph
+comment_symbols.Default = comment_symbols.Note
+
+local function analyzesymbol(symbol,collection)
+ if not symbol or symbol == "" then
+ return collection.Default, nil
+ elseif collection[symbol] then
+ return collection[symbol], nil
+ else
+ local setn, setr, setd
+ local set = settings_to_array(symbol)
+ if #set == 1 then
+ setn, setr, setd = set[1], set[1], set[1]
+ elseif #set == 2 then
+ setn, setr, setd = set[1], set[1], set[2]
+ else
+ setn, setr, setd = set[1], set[2], set[3]
+ end
+ local appearance = pdfdictionary {
+ N = setn and registeredsymbol(setn),
+ R = setr and registeredsymbol(setr),
+ D = setd and registeredsymbol(setd),
+ }
+ local appearanceref = pdfshareobjectreference(appearance)
+ return nil, appearanceref
+ end
+end
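+
+-- A commented sketch of the three ways a symbol can be specified (the symbol
+-- names are made up and must have been registered via registersymbol for the
+-- last case to resolve): empty falls back to the collection default, a known
+-- key maps to its constant, and a list of up to three names becomes an N/R/D
+-- appearance dictionary reference.
+--
+-- analyzesymbol("",comment_symbols) -- the default (Note)
+-- analyzesymbol("Help",comment_symbols) -- a predefined constant
+-- analyzesymbol("normalsym,rollsym,downsym",comment_symbols) -- appearance reference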
+
+local function analyzelayer(layer)
+ -- todo: (specification.layer ~= "" and pdfreference(specification.layer)) or nil, -- todo: ref to layer
+end
+
+local function analyzecolor(colorvalue,colormodel)
+ local cvalue = colorvalue and tonumber(colorvalue)
+ local cmodel = colormodel and tonumber(colormodel) or 3
+ return cvalue and pdfarray { lpdf.colorvalues(cmodel,cvalue) } or nil
+end
+
+local function analyzetransparency(transparencyvalue)
+ local tvalue = transparencyvalue and tonumber(transparencyvalue)
+ return tvalue and lpdf.transparencyvalue(tvalue) or nil
+end
+
+-- Attachments
+
+local nofattachments, attachments, filestreams, referenced = 0, { }, { }, { }
+
+local ignorereferenced = true -- fuzzy pdf spec .. twice in attachment list, can become an option
+
+local function flushembeddedfiles()
+ if next(filestreams) then
+ local e = pdfarray()
+ for tag, reference in next, filestreams do
+ if not reference then
+ report_attachment("unreferenced file, tag %a",tag)
+ elseif referenced[tag] == "hidden" then
+ e[#e+1] = pdfstring(tag)
+ e[#e+1] = reference -- already a reference
+ else
+ -- messy spec ... when annot not in named else twice in menu list acrobat
+ end
+ end
+ lpdf.addtonames("EmbeddedFiles",pdfreference(pdfflushobject(pdfdictionary{ Names = e })))
+ end
+end
+
+lpdf.registerdocumentfinalizer(flushembeddedfiles,"embeddedfiles")
+
+function codeinjections.embedfile(specification)
+ local data = specification.data
+ local filename = specification.file
+ local name = specification.name or ""
+ local title = specification.title or ""
+ local hash = specification.hash or filename
+ local keepdir = specification.keepdir -- can change
+ local usedname = specification.usedname
+ if filename == "" then
+ filename = nil
+ end
+ if data then
+ local r = filestreams[hash]
+ if r == false then
+ return nil
+ elseif r then
+ return r
+ elseif not filename then
+ filename = specification.tag
+ if not filename or filename == "" then
+ filename = specification.registered
+ end
+ if not filename or filename == "" then
+ filename = hash
+ end
+ end
+ else
+ if not filename then
+ return nil
+ end
+ local r = filestreams[hash]
+ if r == false then
+ return nil
+ elseif r then
+ return r
+ else
+ local foundname = resolvers.findbinfile(filename) or ""
+ if foundname == "" or not lfs.isfile(foundname) then
+ filestreams[filename] = false
+ return nil
+ else
+ specification.foundname = foundname
+ end
+ end
+ end
+ usedname = usedname ~= "" and usedname or filename
+ local basename = keepdir == true and usedname or file.basename(usedname)
+local basename = gsub(basename,"%./","")
+ local savename = file.addsuffix(name ~= "" and name or basename,"txt") -- else no valid file
+ local a = pdfdictionary { Type = pdfconstant("EmbeddedFile") }
+ local f
+ if data then
+ f = pdfflushstreamobject(data,a)
+ specification.data = true -- signal that still data but already flushed
+ else
+ local foundname = specification.foundname or filename
+ f = pdfflushstreamfileobject(foundname,a)
+ end
+ local d = pdfdictionary {
+ Type = pdfconstant("Filespec"),
+ F = pdfstring(savename),
+ UF = pdfstring(savename),
+ EF = pdfdictionary { F = pdfreference(f) },
+ Desc = title ~= "" and pdfunicode(title) or nil,
+ }
+ local r = pdfreference(pdfflushobject(d))
+ filestreams[hash] = r
+ return r
+end
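+
+-- Commented usage sketches (filenames and data made up): either an existing file
+-- is embedded, or raw data is flushed as a stream; in both cases the resulting
+-- filespec reference is cached under the hash so the same payload is only
+-- embedded once.
+--
+-- local r1 = codeinjections.embedfile { file = "dataset.csv", title = "raw data" }
+-- local r2 = codeinjections.embedfile { data = "x,y\n1,2\n", hash = "inline-1", name = "points.csv" }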
+
+function nodeinjections.attachfile(specification)
+ local registered = specification.registered or ""
+ local data = specification.data
+ local hash
+ local filename
+ if data then
+ hash = md5.HEX(data)
+ else
+ filename = specification.file
+ if not filename or filename == "" then
+ report_attachment("no file specified, using registered %a instead",registered)
+ filename = registered
+ specification.file = registered
+ end
+ local foundname = resolvers.findbinfile(filename) or ""
+ if foundname == "" or not lfs.isfile(foundname) then
+ report_attachment("invalid filename %a, ignoring registered %a",filename,registered)
+ return nil
+ else
+ specification.foundname = foundname
+ end
+ hash = filename
+ end
+ specification.hash = hash
+ nofattachments = nofattachments + 1
+ local registered = specification.registered or ""
+ local title = specification.title or ""
+ local subtitle = specification.subtitle or ""
+ local author = specification.author or ""
+ if registered == "" then
+ registered = filename
+ end
+ if author == "" then
+ author = title
+ title = ""
+ end
+ if author == "" then
+ author = filename or ""
+ end
+ if title == "" then
+ title = registered
+ end
+ local aref = attachments[registered]
+ if not aref then
+ aref = codeinjections.embedfile(specification)
+ attachments[registered] = aref
+ end
+ if not aref then
+ report_attachment("skipping attachment, registered %a",registered)
+ -- already reported
+ elseif specification.method == v_hidden then
+ referenced[hash] = "hidden"
+ else
+ referenced[hash] = "annotation"
+ local name, appearance = analyzesymbol(specification.symbol,attachment_symbols)
+ local d = pdfdictionary {
+ Subtype = pdfconstant("FileAttachment"),
+ FS = aref,
+ Contents = pdfunicode(title),
+ Name = name,
+ NM = pdfstring(format("attachment:%s",nofattachments)),
+ T = author ~= "" and pdfunicode(author) or nil,
+ Subj = subtitle ~= "" and pdfunicode(subtitle) or nil,
+ C = analyzecolor(specification.colorvalue,specification.colormodel),
+ CA = analyzetransparency(specification.transparencyvalue),
+ AP = appearance,
+ OC = analyzelayer(specification.layer),
+ }
+ local width, height, depth = specification.width or 0, specification.height or 0, specification.depth
+ local box = hpack_node(pdfannotation_node(width,height,depth,d()))
+ box.width, box.height, box.depth = width, height, depth
+ return box
+ end
+end
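+
+-- A commented sketch of an attachment annotation spec (all values made up); the
+-- symbol refers to the attachment_symbols collection above, the dimensions are
+-- in scaled points, and a method equal to v_hidden embeds the file without
+-- producing an annotation.
+--
+-- local box = nodeinjections.attachfile {
+--     file       = "source.zip",
+--     registered = "source",
+--     title      = "sources",
+--     author     = "the author",
+--     symbol     = "Paperclip",
+--     width      = 655360,
+--     height     = 655360,
+--     depth      = 0,
+-- }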
+
+function codeinjections.attachmentid(filename) -- not used in context
+ return filestreams[filename]
+end
+
+local nofcomments, usepopupcomments, stripleading = 0, false, true
+
+local defaultattributes = {
+ ["xmlns"] = "http://www.w3.org/1999/xhtml",
+ ["xmlns:xfa"] = "http://www.xfa.org/schema/xfa-data/1.0/",
+ ["xfa:contentType"] = "text/html",
+ ["xfa:APIVersion"] = "Acrobat:8.0.0",
+ ["xfa:spec"] = "2.4",
+}
+
+local function checkcontent(text,option)
+ if option and option.xml then
+ local root = xml.convert(text)
+ if root and not root.er then
+ xml.checkbom(root)
+ local body = xml.first(root,"/body")
+ if body then
+ local at = body.at
+ for k, v in next, defaultattributes do
+ if not at[k] then
+ at[k] = v
+ end
+ end
+ -- local content = xml.textonly(root)
+ local richcontent = xml.tostring(root)
+ return nil, pdfunicode(richcontent)
+ end
+ end
+ end
+ return pdfunicode(text)
+end
+
+function nodeinjections.comment(specification) -- brrr: seems to be done twice
+ nofcomments = nofcomments + 1
+ local text = stripstring(specification.data or "")
+ if stripleading then
+ text = gsub(text,"[\n\r] *","\n")
+ end
+ local name, appearance = analyzesymbol(specification.symbol,comment_symbols)
+ local tag = specification.tag or "" -- this is somewhat messy as recent
+ local title = specification.title or "" -- versions of acrobat see the title
+ local subtitle = specification.subtitle or "" -- as author
+ local author = specification.author or ""
+ local option = settings_to_hash(specification.option or "")
+ if author == "" then
+ if title == "" then
+ title = tag
+ end
+ else
+ if subtitle == "" then
+ subtitle = title
+ elseif title ~= "" then
+ subtitle = subtitle .. ", " .. title
+ end
+ title = author
+ end
+ local content, richcontent = checkcontent(text,option)
+ local d = pdfdictionary {
+ Subtype = pdfconstant("Text"),
+ Open = option[v_max] and pdfboolean(true) or nil,
+ Contents = content,
+ RC = richcontent,
+ T = title ~= "" and pdfunicode(title) or nil,
+ Subj = subtitle ~= "" and pdfunicode(subtitle) or nil,
+ C = analyzecolor(specification.colorvalue,specification.colormodel),
+ CA = analyzetransparency(specification.transparencyvalue),
+ OC = analyzelayer(specification.layer),
+ Name = name,
+ NM = pdfstring(format("comment:%s",nofcomments)),
+ AP = appearance,
+ }
+ local width, height, depth = specification.width or 0, specification.height or 0, specification.depth
+ local box
+ if usepopupcomments then
+ -- rather useless as we can hide/vide
+ local nd = pdfreserveannotation()
+ local nc = pdfreserveannotation()
+ local c = pdfdictionary {
+ Subtype = pdfconstant("Popup"),
+ Parent = pdfreference(nd),
+ }
+ d.Popup = pdfreference(nc)
+ box = hpack_node(
+ pdfannotation_node(0,0,0,d(),nd),
+ pdfannotation_node(width,height,depth,c(),nc)
+ )
+ else
+ box = hpack_node(pdfannotation_node(width,height,depth,d()))
+ end
+ box.width, box.height, box.depth = width, height, depth -- redundant
+ return box
+end
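+
+-- A commented sketch of a comment annotation spec (values made up); in the
+-- english interface option can contain "max" to open the note and "xml" to
+-- treat the data as xhtml so that it ends up in the rich text (/RC) entry.
+--
+-- local box = nodeinjections.comment {
+--     data   = "remember to check this paragraph",
+--     title  = "todo",
+--     author = "editor",
+--     symbol = "Note",
+--     option = "max",
+--     width  = 0,
+--     height = 0,
+--     depth  = 0,
+-- }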
+
+-- rendering stuff
+--
+-- object_1 -> <> >>
+-- object_2 -> <> >>
+-- rendering -> <>
+--
+-- we only work forward here (currently)
+-- annotation is to be packed at the tex end
+
+-- aiff audio/aiff
+-- au audio/basic
+-- avi video/avi
+-- mid audio/midi
+-- mov video/quicktime
+-- mp3 audio/x-mp3 (mpeg)
+-- mp4 audio/mp4
+-- mp4 video/mp4
+-- mpeg video/mpeg
+-- smil application/smil
+-- swf application/x-shockwave-flash
+
+-- P media play parameters (possibly /BE for controls etc.)
+-- A boolean (audio)
+-- C boolean (captions)
+-- O boolean (overdubs)
+-- S boolean (subtitles)
+-- PL pdfconstant("ADBE_MCI"),
+
+-- F = flags,
+-- T = title,
+-- Contents = rubbish,
+-- AP = irrelevant,
+
+-- sound is different, no window (or zero) so we need to collect them and
+-- force them if not set
+
+local ms, mu, mf = { }, { }, { }
+
+local function delayed(label)
+ local a = pdfreserveannotation()
+ mu[label] = a
+ return pdfreference(a)
+end
+
+local function insertrenderingwindow(specification)
+ local label = specification.label
+--~ local openpage = specification.openpage
+--~ local closepage = specification.closepage
+ if specification.option == v_auto then
+ if openpageaction then
+ -- \handlereferenceactions{\v!StartRendering{#2}}
+ end
+ if closepageaction then
+ -- \handlereferenceactions{\v!StopRendering {#2}}
+ end
+ end
+ local actions = nil
+ if openpage or closepage then
+ actions = pdfdictionary {
+ PO = (openpage and lpdf.action(openpage )) or nil,
+ PC = (closepage and lpdf.action(closepage)) or nil,
+ }
+ end
+ local page = tonumber(specification.page) or texcount.realpageno -- todo
+ local r = mu[label] or pdfreserveannotation() -- why the reserve here?
+ local a = pdfdictionary {
+ S = pdfconstant("Rendition"),
+ R = mf[label],
+ OP = 0,
+ AN = pdfreference(r),
+ }
+ local d = pdfdictionary {
+ Subtype = pdfconstant("Screen"),
+ P = pdfreference(pdfpagereference(page)),
+ A = a, -- needed in order to make the annotation clickable (i.e. don't bark)
+ Border = pdf_border,
+ AA = actions,
+ }
+ local width = specification.width or 0
+ local height = specification.height or 0
+ if height == 0 or width == 0 then
+ -- todo: sound needs no window
+ end
+ write_node(pdfannotation_node(width,height,0,d(),r)) -- save ref
+ return pdfreference(r)
+end
+
+-- some dictionaries can have a MH (must honor) or BE (best effort) capsule
+
+local function insertrendering(specification)
+ local label = specification.label
+ local option = settings_to_hash(specification.option)
+ if not mf[label] then
+ local filename = specification.filename
+ local isurl = find(filename,"://")
+ --~ local start = pdfdictionary {
+ --~ Type = pdfconstant("MediaOffset"),
+ --~ S = pdfconstant("T"), -- time
+ --~ T = pdfdictionary { -- time
+ --~ Type = pdfconstant("Timespan"),
+ --~ S = pdfconstant("S"),
+ --~ V = 3, -- time in seconds
+ --~ },
+ --~ }
+ --~ local start = pdfdictionary {
+ --~ Type = pdfconstant("MediaOffset"),
+ --~ S = pdfconstant("F"), -- frame
+ --~ F = 100 -- framenumber
+ --~ }
+ --~ local start = pdfdictionary {
+ --~ Type = pdfconstant("MediaOffset"),
+ --~ S = pdfconstant("M"), -- mark
+ --~ M = "somemark",
+ --~ }
+ --~ local parameters = pdfdictionary {
+ --~ BE = pdfdictionary {
+ --~ B = start,
+ --~ }
+ --~ }
+ --~ local parameters = pdfdictionary {
+ --~ Type = pdfconstant(MediaPermissions),
+ --~ TF = pdfstring("TEMPALWAYS") }, -- TEMPNEVER TEMPEXTRACT TEMPACCESS TEMPALWAYS
+ --~ }
+ local descriptor = pdfdictionary {
+ Type = pdfconstant("Filespec"),
+ F = filename,
+ }
+ if isurl then
+ descriptor.FS = pdfconstant("URL")
+ elseif option[v_embed] then
+ descriptor.EF = codeinjections.embedfile { file = filename }
+ end
+ local clip = pdfdictionary {
+ Type = pdfconstant("MediaClip"),
+ S = pdfconstant("MCD"),
+ N = label,
+ CT = specification.mime,
+ Alt = pdfarray { "", "file not found" }, -- language id + message
+ D = pdfreference(pdfflushobject(descriptor)),
+ -- P = pdfreference(pdfflushobject(parameters)),
+ }
+ local rendition = pdfdictionary {
+ Type = pdfconstant("Rendition"),
+ S = pdfconstant("MR"),
+ N = label,
+ C = pdfreference(pdfflushobject(clip)),
+ }
+ mf[label] = pdfreference(pdfflushobject(rendition))
+ end
+end
+
+local function insertrenderingobject(specification) -- todo
+ local label = specification.label
+ if not mf[label] then
+ report_media("unknown medium, label %a",label)
+ local clip = pdfdictionary { -- does not work that well one level up
+ Type = pdfconstant("MediaClip"),
+ S = pdfconstant("MCD"),
+ N = label,
+ D = pdfreference(unknown), -- not label but objectname, hm .. todo?
+ }
+ local rendition = pdfdictionary {
+ Type = pdfconstant("Rendition"),
+ S = pdfconstant("MR"),
+ N = label,
+ C = pdfreference(pdfflushobject(clip)),
+ }
+ mf[label] = pdfreference(pdfflushobject(rendition))
+ end
+end
+
+function codeinjections.processrendering(label)
+ local specification = interactions.renderings.rendering(label)
+ if not specification then
+ -- error
+ elseif specification.type == "external" then
+ insertrendering(specification)
+ else
+ insertrenderingobject(specification)
+ end
+end
+
+function codeinjections.insertrenderingwindow(specification)
+ local label = specification.label
+ codeinjections.processrendering(label)
+ ms[label] = insertrenderingwindow(specification)
+end
+
+local function set(operation,arguments)
+ codeinjections.processrendering(arguments)
+ return pdfdictionary {
+ S = pdfconstant("Rendition"),
+ OP = operation,
+ R = mf[arguments],
+ AN = ms[arguments] or delayed(arguments),
+ }
+end
+
+function executers.startrendering (arguments) return set(0,arguments) end
+function executers.stoprendering (arguments) return set(1,arguments) end
+function executers.pauserendering (arguments) return set(2,arguments) end
+function executers.resumerendering(arguments) return set(3,arguments) end
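The hunk above wires media playback into PDF: insertrendering builds a /Rendition object per label (cached in mf), insertrenderingwindow adds the /Screen annotation that displays it (cached in ms), and the four executers wrap those references in an action dictionary whose OP value 0..3 selects start, stop, pause or resume. The sketch below is editorial and not part of the patch; it only shows how the pieces in this hunk fit together, and it assumes the rendering has already been registered at the TeX end under the given label (via interactions.renderings, defined elsewhere) and that dimensions are scaled points.

    -- hypothetical specification; field names taken from the functions above
    local specification = {
        label    = "demo-movie",
        filename = "demo.mp4",        -- a "...://..." name would switch FS to /URL
        mime     = "video/mp4",       -- stored in the MediaClip /CT entry
        option   = "embed",           -- v_embed embeds the file via codeinjections.embedfile
        width    = 320 * 65536,       -- assumed to be scaled points
        height   = 240 * 65536,
    }

    codeinjections.insertrenderingwindow(specification)  -- fills mf[label] and ms[label]

    -- the executers then only combine the cached references into an action:
    local action = executers.startrendering(specification.label)  -- OP = 0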
diff --git a/tex/context/base/luat-bwc.lua b/tex/context/base/luat-bwc.lua
index 993de7bf3..b8672469e 100644
--- a/tex/context/base/luat-bwc.lua
+++ b/tex/context/base/luat-bwc.lua
@@ -1,32 +1,32 @@
-if not modules then modules = { } end modules ['luat-bwc'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- backward compatibility
-
-if not tex.wd then
-
- local box = tex.box
-
- local wd = { } setmetatable(wd, {
- __index = function(t,k) local bk = box[k] return bk and bk.width or 0 end,
- __newindex = function(t,k,v) local bk = box[k] if bk then bk.width = v end end,
- } )
-
- local ht = { } setmetatable(ht, {
- __index = function(t,k) local bk = box[k] return bk and bk.height or 0 end,
- __newindex = function(t,k,v) local bk = box[k] if bk then bk.height = v end end,
- } )
-
- local dp = { } setmetatable(dp, {
- __index = function(t,k) local bk = box[k] return bk and bk.depth or 0 end,
- __newindex = function(t,k,v) local bk = box[k] if bk then bk.depth = v end end,
- } )
-
- -- tex.wd, tex.ht, tex.dp = wd, ht, dp
-
-end
+if not modules then modules = { } end modules ['luat-bwc'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- backward compatibility
+
+if not tex.wd then
+
+ local box = tex.box
+
+ local wd = { } setmetatable(wd, {
+ __index = function(t,k) local bk = box[k] return bk and bk.width or 0 end,
+ __newindex = function(t,k,v) local bk = box[k] if bk then bk.width = v end end,
+ } )
+
+ local ht = { } setmetatable(ht, {
+ __index = function(t,k) local bk = box[k] return bk and bk.height or 0 end,
+ __newindex = function(t,k,v) local bk = box[k] if bk then bk.height = v end end,
+ } )
+
+ local dp = { } setmetatable(dp, {
+ __index = function(t,k) local bk = box[k] return bk and bk.depth or 0 end,
+ __newindex = function(t,k,v) local bk = box[k] if bk then bk.depth = v end end,
+ } )
+
+ -- tex.wd, tex.ht, tex.dp = wd, ht, dp
+
+end
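The backward-compatibility shim above recreates tex.wd, tex.ht and tex.dp as metatable proxies over tex.box (although the final assignment is commented out, so the tables are currently not installed). As an editorial aside, here is the same proxy pattern applied to a plain table, which shows why reads of an unknown box fall back to zero and writes to an unknown box are silently dropped:

    local boxes = { [0] = { width = 10 } }   -- stand-in for tex.box

    local wd = setmetatable({ }, {
        __index    = function(t,k) local bk = boxes[k] return bk and bk.width or 0 end,
        __newindex = function(t,k,v) local bk = boxes[k] if bk then bk.width = v end end,
    })

    print(wd[0])   -- 10 : forwarded to boxes[0].width
    print(wd[99])  -- 0  : unknown box, default value
    wd[0]  = 20    -- updates boxes[0].width
    wd[99] = 20    -- ignored, there is no box 99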
diff --git a/tex/context/base/luat-cbk.lua b/tex/context/base/luat-cbk.lua
index 5aa12005b..4a88cfed7 100644
--- a/tex/context/base/luat-cbk.lua
+++ b/tex/context/base/luat-cbk.lua
@@ -1,320 +1,320 @@
-if not modules then modules = { } end modules ['luat-cbk'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local insert, remove, find, format = table.insert, table.remove, string.find, string.format
-local collectgarbage, type, next = collectgarbage, type, next
-local round = math.round
-local sortedhash, tohash = table.sortedhash, table.tohash
-
-local trace_checking = false trackers.register("memory.checking", function(v) trace_checking = v end)
-
-local report_callbacks = logs.reporter("system","callbacks")
-local report_memory = logs.reporter("system","memory")
-
---[[ldx--
-Callbacks are the real asset of LuaTeX. They permit you to hook
-your own code into the TeX engine. Here we implement a few handy
-auxiliary functions.
-
-When you (temporarily) want to install a callback function, and after a
-while want to revert to the original one, you can use the following two
-functions.
---ldx]]--
-
-local trace_callbacks = false trackers.register("system.callbacks", function(v) trace_callbacks = v end)
-local trace_calls = false -- only used when analyzing performance and initializations
-
-local register_callback = callback.register
-local find_callback = callback.find
-local list_callbacks = callback.list
-
-local frozen, stack, list = { }, { }, callbacks.list
-
-if not list then -- otherwise counters get reset
-
- list = utilities.storage.allocate(list_callbacks())
-
- for k, _ in next, list do
- list[k] = 0
- end
-
- callbacks.list = list
-
-end
-
-local delayed = tohash {
- "buildpage_filter",
-}
-
-
-if trace_calls then
-
- local functions = { }
- local original = register_callback
-
- register_callback = function(name,func)
- if type(func) == "function" then
- if functions[name] then
- functions[name] = func
- return find_callback(name)
- else
- functions[name] = func
- local cnuf = function(...)
- list[name] = list[name] + 1
- return functions[name](...)
- end
- return original(name,cnuf)
- end
- else
- return original(name,func)
- end
- end
-
-end
-
-local function frozen_message(what,name)
- report_callbacks("not %s frozen %a to %a",what,name,frozen[name])
-end
-
-local function frozen_callback(name)
- return nil, format("callback '%s' is frozen to '%s'",name,frozen[name]) -- no formatter yet
-end
-
-local function state(name)
- local f = find_callback(name)
- if f == false then
- return "disabled"
- elseif f then
- return "enabled"
- else
- return "undefined"
- end
-end
-
-function callbacks.known(name)
- return list[name]
-end
-
-function callbacks.report()
- for name, _ in sortedhash(list) do
- local str = frozen[name]
- if str then
- report_callbacks("%s: %s -> %s",state(name),name,str)
- else
- report_callbacks("%s: %s",state(name),name)
- end
- end
-end
-
-function callbacks.freeze(name,freeze)
- freeze = type(freeze) == "string" and freeze
- if find(name,"%*") then
- local pattern = name
- for name, _ in next, list do
- if find(name,pattern) then
- frozen[name] = freeze or frozen[name] or "frozen"
- end
- end
- else
- frozen[name] = freeze or frozen[name] or "frozen"
- end
-end
-
-function callbacks.register(name,func,freeze)
- if frozen[name] then
- if trace_callbacks then
- frozen_message("registering",name)
- end
- return frozen_callback(name)
- elseif freeze then
- frozen[name] = type(freeze) == "string" and freeze or "registered"
- end
- if delayed[name] and environment.initex then
- return nil
- end
- return register_callback(name,func)
-end
-
-function callback.register(name,func) -- original
- if not frozen[name] then
- return register_callback(name,func)
- elseif trace_callbacks then
- frozen_message("registering",name)
- end
- return frozen_callback(name)
-end
-
-function callbacks.push(name,func)
- if not frozen[name] then
- local sn = stack[name]
- if not sn then
- sn = { }
- stack[name] = sn
- end
- insert(sn,find_callback(name))
- register_callback(name, func)
- elseif trace_callbacks then
- frozen_message("pushing",name)
- end
-end
-
-function callbacks.pop(name)
- if not frozen[name] then
- local sn = stack[name]
- if not sn or #sn == 0 then
- -- some error
- register_callback(name, nil) -- ! really needed
- else
- -- this fails: register_callback(name, remove(stack[name]))
- local func = remove(sn)
- register_callback(name, func)
- end
- end
-end
-
-if trace_calls then
- statistics.register("callback details", function()
- local t = { } -- todo: pass function to register and quit at nil
- for name, n in sortedhash(list) do
- if n > 0 then
- t[#t+1] = format("%s -> %s",name,n)
- end
- end
- return t
- end)
-end
-
--- -- somehow crashes later on
---
--- callbacks.freeze("find_.*_file","finding file")
--- callbacks.freeze("read_.*_file","reading file")
--- callbacks.freeze("open_.*_file","opening file")
-
---[[ldx--
-Callbacks may result in Lua doing some hard work
-which takes time and above all resources. Sometimes it makes
-sense to disable or tune the garbage collector in order to
-keep the use of resources acceptable.
-
-At some point in the development we did some tests with counting
-nodes (in this case 121049).
-
-  setstepmul   seconds   megabytes
-  200          24.0      80.5
-  175          21.0      78.2
-  150          22.0      74.6
-  160          22.0      74.6
-  165          21.0      77.6
-  125          21.5      89.2
-  100          21.5      88.4
-
-The following code is kind of experimental. In the documents
-that describe the development of LuaTeX we report
-on speed tests. One observation is that it sometimes helps to
-restart the collector. Okay, experimental code has been removed,
-because messing around with the gc is too unpredictable.
---ldx]]--
-
--- For the moment we keep this here and not in util-gbc.lua or so.
-
-utilities = utilities or { }
-utilities.garbagecollector = utilities.garbagecollector or { }
-local garbagecollector = utilities.garbagecollector
-
-garbagecollector.enabled = false -- could become a directive
-garbagecollector.criterium = 4*1024*1024
-
--- Lua allocates up to 12 times the amount of memory needed for
--- handling a string, and for large binary chunks (like chinese otf
--- files) we get a prominent memory consumption. Even when a variable
--- is nilled, there is some delay in freeing the associated memory (the
--- hashed string) because if we do the same thing directly afterwards,
--- we see only a slight increase in memory. For that reason it makes
--- sense to do a collector pass after a huge file.
---
--- test file:
---
--- function test()
--- local b = collectgarbage("count")
--- local s = io.loaddata("some font table, e.g. a big tmc file")
--- local a = collectgarbage("count")
--- print(">>> STATUS",b,a,a-b,#s,1000*(a-b)/#s)
--- end
---
--- test() test() test() test() collectgarbage("collect") test() test() test() test()
---
--- As a result of this, LuaTeX now uses an optimized version of f:read("*a"),
--- one that does not use the 4K allocations but allocates in one step.
-
-function garbagecollector.check(size,criterium)
- if garbagecollector.enabled then
- criterium = criterium or garbagecollector.criterium
- if not size or (criterium and criterium > 0 and size > criterium) then
- if trace_checking then
- local b = collectgarbage("count")
- collectgarbage("collect")
- local a = collectgarbage("count")
- report_memory("forced sweep, collected: %s MB, used: %s MB",round((b-a)/1000),round(a/1000))
- else
- collectgarbage("collect")
- end
- end
- end
-end
-
--- this will move
-
-commands = commands or { }
-
-function commands.showcallbacks()
- local NC, NR, verbatim = context.NC, context.NR, context.type
- context.starttabulate { "|l|l|p|" }
- for name, _ in sortedhash(list) do
- NC() verbatim(name) NC() verbatim(state(name)) NC() context(frozen[name] or "") NC() NR()
- end
- context.stoptabulate()
-end
+if not modules then modules = { } end modules ['luat-cbk'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local insert, remove, find, format = table.insert, table.remove, string.find, string.format
+local collectgarbage, type, next = collectgarbage, type, next
+local round = math.round
+local sortedhash, tohash = table.sortedhash, table.tohash
+
+local trace_checking = false trackers.register("memory.checking", function(v) trace_checking = v end)
+
+local report_callbacks = logs.reporter("system","callbacks")
+local report_memory = logs.reporter("system","memory")
+
+--[[ldx--
+Callbacks are the real asset of LuaTeX. They permit you to hook
+your own code into the TeX engine. Here we implement a few handy
+auxiliary functions.
+
+When you (temporarily) want to install a callback function, and after a
+while want to revert to the original one, you can use the following two
+functions.
+--ldx]]--
+
+local trace_callbacks = false trackers.register("system.callbacks", function(v) trace_callbacks = v end)
+local trace_calls = false -- only used when analyzing performance and initializations
+
+local register_callback = callback.register
+local find_callback = callback.find
+local list_callbacks = callback.list
+
+local frozen, stack, list = { }, { }, callbacks.list
+
+if not list then -- otherwise counters get reset
+
+ list = utilities.storage.allocate(list_callbacks())
+
+ for k, _ in next, list do
+ list[k] = 0
+ end
+
+ callbacks.list = list
+
+end
+
+local delayed = tohash {
+ "buildpage_filter",
+}
+
+
+if trace_calls then
+
+ local functions = { }
+ local original = register_callback
+
+ register_callback = function(name,func)
+ if type(func) == "function" then
+ if functions[name] then
+ functions[name] = func
+ return find_callback(name)
+ else
+ functions[name] = func
+ local cnuf = function(...)
+ list[name] = list[name] + 1
+ return functions[name](...)
+ end
+ return original(name,cnuf)
+ end
+ else
+ return original(name,func)
+ end
+ end
+
+end
+
+local function frozen_message(what,name)
+ report_callbacks("not %s frozen %a to %a",what,name,frozen[name])
+end
+
+local function frozen_callback(name)
+ return nil, format("callback '%s' is frozen to '%s'",name,frozen[name]) -- no formatter yet
+end
+
+local function state(name)
+ local f = find_callback(name)
+ if f == false then
+ return "disabled"
+ elseif f then
+ return "enabled"
+ else
+ return "undefined"
+ end
+end
+
+function callbacks.known(name)
+ return list[name]
+end
+
+function callbacks.report()
+ for name, _ in sortedhash(list) do
+ local str = frozen[name]
+ if str then
+ report_callbacks("%s: %s -> %s",state(name),name,str)
+ else
+ report_callbacks("%s: %s",state(name),name)
+ end
+ end
+end
+
+function callbacks.freeze(name,freeze)
+ freeze = type(freeze) == "string" and freeze
+ if find(name,"%*") then
+ local pattern = name
+ for name, _ in next, list do
+ if find(name,pattern) then
+ frozen[name] = freeze or frozen[name] or "frozen"
+ end
+ end
+ else
+ frozen[name] = freeze or frozen[name] or "frozen"
+ end
+end
+
+function callbacks.register(name,func,freeze)
+ if frozen[name] then
+ if trace_callbacks then
+ frozen_message("registering",name)
+ end
+ return frozen_callback(name)
+ elseif freeze then
+ frozen[name] = type(freeze) == "string" and freeze or "registered"
+ end
+ if delayed[name] and environment.initex then
+ return nil
+ end
+ return register_callback(name,func)
+end
+
+function callback.register(name,func) -- original
+ if not frozen[name] then
+ return register_callback(name,func)
+ elseif trace_callbacks then
+ frozen_message("registering",name)
+ end
+ return frozen_callback(name)
+end
+
+function callbacks.push(name,func)
+ if not frozen[name] then
+ local sn = stack[name]
+ if not sn then
+ sn = { }
+ stack[name] = sn
+ end
+ insert(sn,find_callback(name))
+ register_callback(name, func)
+ elseif trace_callbacks then
+ frozen_message("pushing",name)
+ end
+end
+
+function callbacks.pop(name)
+ if not frozen[name] then
+ local sn = stack[name]
+ if not sn or #sn == 0 then
+ -- some error
+ register_callback(name, nil) -- ! really needed
+ else
+ -- this fails: register_callback(name, remove(stack[name]))
+ local func = remove(sn)
+ register_callback(name, func)
+ end
+ end
+end
+
+if trace_calls then
+ statistics.register("callback details", function()
+ local t = { } -- todo: pass function to register and quit at nil
+ for name, n in sortedhash(list) do
+ if n > 0 then
+ t[#t+1] = format("%s -> %s",name,n)
+ end
+ end
+ return t
+ end)
+end
+
+-- -- somehow crashes later on
+--
+-- callbacks.freeze("find_.*_file","finding file")
+-- callbacks.freeze("read_.*_file","reading file")
+-- callbacks.freeze("open_.*_file","opening file")
+
+--[[ldx--
+Callbacks may result in Lua doing some hard work
+which takes time and above all resources. Sometimes it makes
+sense to disable or tune the garbage collector in order to
+keep the use of resources acceptable.
+
+At some point in the development we did some tests with counting
+nodes (in this case 121049).
+
+  setstepmul   seconds   megabytes
+  200          24.0      80.5
+  175          21.0      78.2
+  150          22.0      74.6
+  160          22.0      74.6
+  165          21.0      77.6
+  125          21.5      89.2
+  100          21.5      88.4
+
+The following code is kind of experimental. In the documents
+that describe the development of LuaTeX we report
+on speed tests. One observation is that it sometimes helps to
+restart the collector. Okay, experimental code has been removed,
+because messing around with the gc is too unpredictable.
+--ldx]]--
+
+-- For the moment we keep this here and not in util-gbc.lua or so.
+
+utilities = utilities or { }
+utilities.garbagecollector = utilities.garbagecollector or { }
+local garbagecollector = utilities.garbagecollector
+
+garbagecollector.enabled = false -- could become a directive
+garbagecollector.criterium = 4*1024*1024
+
+-- Lua allocates up to 12 times the amount of memory needed for
+-- handling a string, and for large binary chunks (like chinese otf
+-- files) we get a prominent memory consumption. Even when a variable
+-- is nilled, there is some delay in freeing the associated memory (the
+-- hashed string) because if we do the same thing directly afterwards,
+-- we see only a slight increase in memory. For that reason it makes
+-- sense to do a collector pass after a huge file.
+--
+-- test file:
+--
+-- function test()
+-- local b = collectgarbage("count")
+-- local s = io.loaddata("some font table, e.g. a big tmc file")
+-- local a = collectgarbage("count")
+-- print(">>> STATUS",b,a,a-b,#s,1000*(a-b)/#s)
+-- end
+--
+-- test() test() test() test() collectgarbage("collect") test() test() test() test()
+--
+-- As a result of this, LuaTeX now uses an optimized version of f:read("*a"),
+-- one that does not use the 4K allocations but allocates in one step.
+
+function garbagecollector.check(size,criterium)
+ if garbagecollector.enabled then
+ criterium = criterium or garbagecollector.criterium
+ if not size or (criterium and criterium > 0 and size > criterium) then
+ if trace_checking then
+ local b = collectgarbage("count")
+ collectgarbage("collect")
+ local a = collectgarbage("count")
+ report_memory("forced sweep, collected: %s MB, used: %s MB",round((b-a)/1000),round(a/1000))
+ else
+ collectgarbage("collect")
+ end
+ end
+ end
+end
+
+-- this will move
+
+commands = commands or { }
+
+function commands.showcallbacks()
+ local NC, NR, verbatim = context.NC, context.NR, context.type
+ context.starttabulate { "|l|l|p|" }
+ for name, _ in sortedhash(list) do
+ NC() verbatim(name) NC() verbatim(state(name)) NC() context(frozen[name] or "") NC() NR()
+ end
+ context.stoptabulate()
+end
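For readers less familiar with this module: callbacks.push and callbacks.pop provide a temporary installation mechanism, while callbacks.freeze makes later registrations fail with an explanatory message. A small editorial sketch, not part of the patch; the callback name is just a common LuaTeX callback used as an example:

    local function myfilter(head)
        -- inspect or manipulate the node list here
        return true
    end

    callbacks.push("pre_linebreak_filter", myfilter)   -- saves the current function
    -- ... typeset some material with the temporary filter active ...
    callbacks.pop("pre_linebreak_filter")              -- restores the saved one

    callbacks.freeze("pre_linebreak_filter", "demo freeze")
    local ok, message = callbacks.register("pre_linebreak_filter", myfilter)
    -- ok == nil and message explains that the callback is frozen

The garbage collector helper at the end of the file is opt-in; once enabled, a check with a size above the criterium (4 MB by default) forces a sweep:

    utilities.garbagecollector.enabled = true
    utilities.garbagecollector.check(8*1024*1024)   -- larger than the criterium, so collect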
diff --git a/tex/context/base/luat-cnf.lua b/tex/context/base/luat-cnf.lua
index 3672c603e..4020f0b12 100644
--- a/tex/context/base/luat-cnf.lua
+++ b/tex/context/base/luat-cnf.lua
@@ -1,197 +1,197 @@
-if not modules then modules = { } end modules ['luat-cnf'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local type, next, tostring, tonumber = type, next, tostring, tonumber
-local format, concat, find = string.format, table.concat, string.find
-
-local allocate = utilities.storage.allocate
-
-texconfig.kpse_init = false
-texconfig.shell_escape = 't'
-
-luatex = luatex or { }
-local luatex = luatex
-
-texconfig.error_line = 79 -- 79 -- obsolete
-texconfig.half_error_line = 50 -- 50 -- obsolete
-
-texconfig.expand_depth = 10000 -- 10000
-texconfig.hash_extra = 100000 -- 0
-texconfig.nest_size = 1000 -- 50
-texconfig.max_in_open = 500 -- 15
-texconfig.max_print_line = 10000 -- 79
-texconfig.max_strings = 500000 -- 15000
-texconfig.param_size = 25000 -- 60
-texconfig.save_size = 50000 -- 4000
-texconfig.stack_size = 10000 -- 300
-
--- local function initialize()
--- local t, variable = allocate(), resolvers.variable
--- for name, default in next, variablenames do
--- local name = variablenames[i]
--- local value = variable(name)
--- value = tonumber(value)
--- if not value or value == "" or value == 0 then
--- value = default
--- end
--- texconfig[name], t[name] = value, value
--- end
--- initialize = nil
--- return t
--- end
---
--- luatex.variables = initialize()
-
-local stub = [[
-
--- checking
-
-storage = storage or { }
-luatex = luatex or { }
-
--- we provide our own file handling
-
-texconfig.kpse_init = false
-texconfig.shell_escape = 't'
-
--- as soon as possible
-
-luatex.starttime = os.gettimeofday()
-
--- this will happen after the format is loaded
-
-function texconfig.init()
-
- -- development
-
- local builtin, globals = { }, { }
-
- libraries = { -- we set it here as we want libraries also 'indexed'
- basiclua = {
- "string", "table", "coroutine", "debug", "file", "io", "lpeg", "math", "os", "package", "bit32",
- },
- basictex = { -- noad
- "callback", "font", "img", "lang", "lua", "node", "pdf", "status", "tex", "texconfig", "texio", "token",
- },
- extralua = {
- "gzip", "zip", "zlib", "lfs", "ltn12", "mime", "socket", "md5", "profiler", "unicode", "utf",
- },
- extratex = {
- "epdf", "fontloader", "kpse", "mplib",
- },
- obsolete = {
- "fontforge", -- can be filled by luat-log
- "kpse",
- },
- functions = {
- "assert", "pcall", "xpcall", "error", "collectgarbage",
- "dofile", "load","loadfile", "require", "module",
- "getmetatable", "setmetatable",
- "ipairs", "pairs", "rawequal", "rawget", "rawset", "next",
- "tonumber", "tostring",
- "type", "unpack", "select", "print",
- },
- builtin = builtin, -- to be filled
- globals = globals, -- to be filled
- }
-
- for k, v in next, _G do
- globals[k] = tostring(v)
- end
-
- local function collect(t,fnc)
- local lib = { }
- for k, v in next, t do
- if fnc then
- lib[v] = _G[v]
- else
- local keys = { }
- local gv = _G[v]
- local tv = type(gv)
- if tv == "table" then
- for k, v in next, gv do
- keys[k] = tostring(v) -- true -- by tostring we cannot call overloaded functions (security)
- end
- end
- lib[v] = keys
- builtin[v] = keys
- end
- end
- return lib
- end
-
- libraries.basiclua = collect(libraries.basiclua)
- libraries.basictex = collect(libraries.basictex)
- libraries.extralua = collect(libraries.extralua)
- libraries.extratex = collect(libraries.extratex)
- libraries.functions = collect(libraries.functions,true)
- libraries.obsolete = collect(libraries.obsolete)
-
- -- shortcut and helper
-
- local function init(start)
- local b = lua.bytecode
- local i = start
- local t = os.clock()
- while b[i] do
- b[i]() ;
- b[i] = nil ;
- i = i + 1
- -- collectgarbage('step')
- end
- return i - start, os.clock() - t
- end
-
- -- the stored tables and modules
-
- storage.noftables , storage.toftables = init(0)
- storage.nofmodules, storage.tofmodules = init(%s)
-
- if modules then
- local loaded = package.loaded
- for module, _ in next, modules do
- loaded[module] = true
- end
- end
-
-end
-
--- we provide a qualified path
-
-callback.register('find_format_file',function(name)
- texconfig.formatname = name
- return name
-end)
-
--- done, from now on input and callbacks are internal
-]]
-
-local variablenames = {
- "error_line", "half_error_line",
- "expand_depth", "hash_extra", "nest_size",
- "max_in_open", "max_print_line", "max_strings",
- "param_size", "save_size", "stack_size",
-}
-
-local function makestub()
- name = name or (environment.jobname .. ".lui")
- firsttable = firsttable or lua.firstbytecode
- local t = {
- "-- this file is generated, don't change it\n",
- "-- configuration (can be overloaded later)\n"
- }
- for _,v in next, variablenames do
- local tv = texconfig[v]
- if tv and tv ~= "" then
- t[#t+1] = format("texconfig.%s=%s",v,tv)
- end
- end
- io.savedata(name,format("%s\n\n%s",concat(t,"\n"),format(stub,firsttable)))
-end
-
-lua.registerfinalizer(makestub,"create stub file")
+if not modules then modules = { } end modules ['luat-cnf'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local type, next, tostring, tonumber = type, next, tostring, tonumber
+local format, concat, find = string.format, table.concat, string.find
+
+local allocate = utilities.storage.allocate
+
+texconfig.kpse_init = false
+texconfig.shell_escape = 't'
+
+luatex = luatex or { }
+local luatex = luatex
+
+texconfig.error_line = 79 -- 79 -- obsolete
+texconfig.half_error_line = 50 -- 50 -- obsolete
+
+texconfig.expand_depth = 10000 -- 10000
+texconfig.hash_extra = 100000 -- 0
+texconfig.nest_size = 1000 -- 50
+texconfig.max_in_open = 500 -- 15
+texconfig.max_print_line = 10000 -- 79
+texconfig.max_strings = 500000 -- 15000
+texconfig.param_size = 25000 -- 60
+texconfig.save_size = 50000 -- 4000
+texconfig.stack_size = 10000 -- 300
+
+-- local function initialize()
+-- local t, variable = allocate(), resolvers.variable
+-- for name, default in next, variablenames do
+-- local name = variablenames[i]
+-- local value = variable(name)
+-- value = tonumber(value)
+-- if not value or value == "" or value == 0 then
+-- value = default
+-- end
+-- texconfig[name], t[name] = value, value
+-- end
+-- initialize = nil
+-- return t
+-- end
+--
+-- luatex.variables = initialize()
+
+local stub = [[
+
+-- checking
+
+storage = storage or { }
+luatex = luatex or { }
+
+-- we provide our own file handling
+
+texconfig.kpse_init = false
+texconfig.shell_escape = 't'
+
+-- as soon as possible
+
+luatex.starttime = os.gettimeofday()
+
+-- this will happen after the format is loaded
+
+function texconfig.init()
+
+ -- development
+
+ local builtin, globals = { }, { }
+
+ libraries = { -- we set it here as we want libraries also 'indexed'
+ basiclua = {
+ "string", "table", "coroutine", "debug", "file", "io", "lpeg", "math", "os", "package", "bit32",
+ },
+ basictex = { -- noad
+ "callback", "font", "img", "lang", "lua", "node", "pdf", "status", "tex", "texconfig", "texio", "token",
+ },
+ extralua = {
+ "gzip", "zip", "zlib", "lfs", "ltn12", "mime", "socket", "md5", "profiler", "unicode", "utf",
+ },
+ extratex = {
+ "epdf", "fontloader", "kpse", "mplib",
+ },
+ obsolete = {
+ "fontforge", -- can be filled by luat-log
+ "kpse",
+ },
+ functions = {
+ "assert", "pcall", "xpcall", "error", "collectgarbage",
+ "dofile", "load","loadfile", "require", "module",
+ "getmetatable", "setmetatable",
+ "ipairs", "pairs", "rawequal", "rawget", "rawset", "next",
+ "tonumber", "tostring",
+ "type", "unpack", "select", "print",
+ },
+ builtin = builtin, -- to be filled
+ globals = globals, -- to be filled
+ }
+
+ for k, v in next, _G do
+ globals[k] = tostring(v)
+ end
+
+ local function collect(t,fnc)
+ local lib = { }
+ for k, v in next, t do
+ if fnc then
+ lib[v] = _G[v]
+ else
+ local keys = { }
+ local gv = _G[v]
+ local tv = type(gv)
+ if tv == "table" then
+ for k, v in next, gv do
+ keys[k] = tostring(v) -- true -- by tostring we cannot call overloaded functions (security)
+ end
+ end
+ lib[v] = keys
+ builtin[v] = keys
+ end
+ end
+ return lib
+ end
+
+ libraries.basiclua = collect(libraries.basiclua)
+ libraries.basictex = collect(libraries.basictex)
+ libraries.extralua = collect(libraries.extralua)
+ libraries.extratex = collect(libraries.extratex)
+ libraries.functions = collect(libraries.functions,true)
+ libraries.obsolete = collect(libraries.obsolete)
+
+ -- shortcut and helper
+
+ local function init(start)
+ local b = lua.bytecode
+ local i = start
+ local t = os.clock()
+ while b[i] do
+ b[i]() ;
+ b[i] = nil ;
+ i = i + 1
+ -- collectgarbage('step')
+ end
+ return i - start, os.clock() - t
+ end
+
+ -- the stored tables and modules
+
+ storage.noftables , storage.toftables = init(0)
+ storage.nofmodules, storage.tofmodules = init(%s)
+
+ if modules then
+ local loaded = package.loaded
+ for module, _ in next, modules do
+ loaded[module] = true
+ end
+ end
+
+end
+
+-- we provide a qualified path
+
+callback.register('find_format_file',function(name)
+ texconfig.formatname = name
+ return name
+end)
+
+-- done, from now on input and callbacks are internal
+]]
+
+local variablenames = {
+ "error_line", "half_error_line",
+ "expand_depth", "hash_extra", "nest_size",
+ "max_in_open", "max_print_line", "max_strings",
+ "param_size", "save_size", "stack_size",
+}
+
+local function makestub()
+ name = name or (environment.jobname .. ".lui")
+ firsttable = firsttable or lua.firstbytecode
+ local t = {
+ "-- this file is generated, don't change it\n",
+ "-- configuration (can be overloaded later)\n"
+ }
+ for _,v in next, variablenames do
+ local tv = texconfig[v]
+ if tv and tv ~= "" then
+ t[#t+1] = format("texconfig.%s=%s",v,tv)
+ end
+ end
+ io.savedata(name,format("%s\n\n%s",concat(t,"\n"),format(stub,firsttable)))
+end
+
+lua.registerfinalizer(makestub,"create stub file")
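The makestub finalizer above writes a <jobname>.lui file that replays the texconfig settings before the format's bytecode is initialized. As an editorial illustration, derived from the assignments at the top of this file (so the exact values depend on the configuration in effect), the generated stub starts roughly like this:

    -- this file is generated, don't change it

    -- configuration (can be overloaded later)

    texconfig.error_line=79
    texconfig.expand_depth=10000
    texconfig.hash_extra=100000
    texconfig.max_strings=500000
    ...
    -- followed by the stub template above, with %s replaced by lua.firstbytecode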
diff --git a/tex/context/base/luat-cod.lua b/tex/context/base/luat-cod.lua
index 8b015477f..8fc94779c 100644
--- a/tex/context/base/luat-cod.lua
+++ b/tex/context/base/luat-cod.lua
@@ -1,181 +1,181 @@
-if not modules then modules = { } end modules ['luat-cod'] = {
- version = 1.001,
- comment = "companion to luat-cod.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local type, loadfile = type, loadfile
-local match, gsub, find, format = string.match, string.gsub, string.find, string.format
-
-local texconfig, lua = texconfig, lua
-
--- some basic housekeeping
-
-texconfig.kpse_init = false
-texconfig.shell_escape = 't'
-texconfig.max_print_line = 100000
-texconfig.max_in_open = 127
-
--- registering bytecode chunks
-
-local bytecode = lua.bytecode or { }
-local bytedata = lua.bytedata or { }
-local bytedone = lua.bytedone or { }
-
-lua.bytecode = bytecode -- built in anyway
-lua.bytedata = bytedata
-lua.bytedone = bytedone
-
-lua.firstbytecode = 501
-lua.lastbytecode = lua.lastbytecode or (lua.firstbytecode - 1) -- as we load ourselves again ... maybe return earlier
-
-function lua.registeredcodes()
- return lua.lastbytecode - lua.firstbytecode + 1
-end
-
--- no file.* functions yet
-
-function lua.registercode(filename,version)
- local barename = gsub(filename,"%.[%a%d]+$","")
- if barename == filename then filename = filename .. ".lua" end
- local basename = match(barename,"^.+[/\\](.-)$") or barename
- if not bytedone[basename] then
- local code = environment.luafilechunk(filename)
- if code then
- bytedone[basename] = true
- if environment.initex then
- local n = lua.lastbytecode + 1
- bytedata[n] = { barename, version or "0.000" }
- bytecode[n] = code
- lua.lastbytecode = n
- end
- end
- end
-end
-
-local finalizers = { }
-
-function lua.registerfinalizer(f,comment)
- comment = comment or "unknown"
- if type(f) == "function" then
- finalizers[#finalizers+1] = { action = f, comment = comment }
- else
- print(format("\nfatal error: invalid finalizer, action: %s\n",comment))
- os.exit()
- end
-end
-
-function lua.finalize(logger)
- for i=1,#finalizers do
- local finalizer = finalizers[i]
- finalizer.action()
- if logger then
- logger("finalize action: %s",finalizer.comment)
- end
- end
-end
-
--- A first start with environments. This will be overloaded later.
-
-environment = environment or { }
-local environment = environment
-
--- no string.unquoted yet
-
-local sourcefile = gsub(arg and arg[1] or "","^\"(.*)\"$","%1")
-local sourcepath = find(sourcefile,"/") and gsub(sourcefile,"/[^/]+$","") or ""
-local targetpath = "."
-
--- delayed (via metatable):
---
--- environment.jobname = tex.jobname
--- environment.version = tostring(tex.toks.contextversiontoks)
-
-environment.initex = tex.formatname == ""
-
-if not environment.luafilechunk then
-
- function environment.luafilechunk(filename)
- if sourcepath ~= "" then
- filename = sourcepath .. "/" .. filename
- end
- local data = loadfile(filename)
- texio.write("<",data and "+ " or "- ",filename,">")
- if data then
- data()
- end
- return data
- end
-
-end
-
-if not environment.engineflags then -- raw flags
-
- local engineflags = { }
-
- for i=-10,#arg do
- local a = arg[i]
- if a then
- local flag, content = match(a,"^%-%-([^=]+)=?(.-)$")
- if flag then
- engineflags[flag] = content or ""
- end
- end
- end
-
- environment.engineflags = engineflags
-
-end
-
--- We need a few premature callbacks in the format generator. We
--- also do this when the format is loaded as otherwise we get
--- a kpse error when disabled. This is an engine issue that will
--- be sorted out in due time.
-
-local isfile = lfs.isfile
-
-local function source_file(name)
- local fullname = sourcepath .. "/" .. name
- if isfile(fullname) then
- return fullname
- end
- fullname = fullname .. ".tex"
- if isfile(fullname) then
- return fullname
- end
- if isfile(name) then
- return name
- end
- name = name .. ".tex"
- if isfile(name) then
- return name
- end
- return nil
-end
-
-local function target_file(name)
- return targetpath .. "/" .. name
-end
-
-local function find_read_file (id,name)
- return source_file(name)
-end
-
-local function find_write_file(id,name)
- return target_file(name)
-end
-
-local function open_read_file(name)
- local f = io.open(name,'rb')
- return {
- reader = function()
- return f:read("*line")
- end
- }
-end
-
-callback.register('find_read_file' , find_read_file )
-callback.register('open_read_file' , open_read_file )
-callback.register('find_write_file', find_write_file)
+if not modules then modules = { } end modules ['luat-cod'] = {
+ version = 1.001,
+ comment = "companion to luat-cod.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local type, loadfile = type, loadfile
+local match, gsub, find, format = string.match, string.gsub, string.find, string.format
+
+local texconfig, lua = texconfig, lua
+
+-- some basic housekeeping
+
+texconfig.kpse_init = false
+texconfig.shell_escape = 't'
+texconfig.max_print_line = 100000
+texconfig.max_in_open = 127
+
+-- registering bytecode chunks
+
+local bytecode = lua.bytecode or { }
+local bytedata = lua.bytedata or { }
+local bytedone = lua.bytedone or { }
+
+lua.bytecode = bytecode -- built in anyway
+lua.bytedata = bytedata
+lua.bytedone = bytedone
+
+lua.firstbytecode = 501
+lua.lastbytecode = lua.lastbytecode or (lua.firstbytecode - 1) -- as we load ourselves again ... maybe return earlier
+
+function lua.registeredcodes()
+ return lua.lastbytecode - lua.firstbytecode + 1
+end
+
+-- no file.* functions yet
+
+function lua.registercode(filename,version)
+ local barename = gsub(filename,"%.[%a%d]+$","")
+ if barename == filename then filename = filename .. ".lua" end
+ local basename = match(barename,"^.+[/\\](.-)$") or barename
+ if not bytedone[basename] then
+ local code = environment.luafilechunk(filename)
+ if code then
+ bytedone[basename] = true
+ if environment.initex then
+ local n = lua.lastbytecode + 1
+ bytedata[n] = { barename, version or "0.000" }
+ bytecode[n] = code
+ lua.lastbytecode = n
+ end
+ end
+ end
+end
+
+local finalizers = { }
+
+function lua.registerfinalizer(f,comment)
+ comment = comment or "unknown"
+ if type(f) == "function" then
+ finalizers[#finalizers+1] = { action = f, comment = comment }
+ else
+ print(format("\nfatal error: invalid finalizer, action: %s\n",comment))
+ os.exit()
+ end
+end
+
+function lua.finalize(logger)
+ for i=1,#finalizers do
+ local finalizer = finalizers[i]
+ finalizer.action()
+ if logger then
+ logger("finalize action: %s",finalizer.comment)
+ end
+ end
+end
+
+-- A first start with environments. This will be overloaded later.
+
+environment = environment or { }
+local environment = environment
+
+-- no string.unquoted yet
+
+local sourcefile = gsub(arg and arg[1] or "","^\"(.*)\"$","%1")
+local sourcepath = find(sourcefile,"/") and gsub(sourcefile,"/[^/]+$","") or ""
+local targetpath = "."
+
+-- delayed (via metatable):
+--
+-- environment.jobname = tex.jobname
+-- environment.version = tostring(tex.toks.contextversiontoks)
+
+environment.initex = tex.formatname == ""
+
+if not environment.luafilechunk then
+
+ function environment.luafilechunk(filename)
+ if sourcepath ~= "" then
+ filename = sourcepath .. "/" .. filename
+ end
+ local data = loadfile(filename)
+ texio.write("<",data and "+ " or "- ",filename,">")
+ if data then
+ data()
+ end
+ return data
+ end
+
+end
+
+if not environment.engineflags then -- raw flags
+
+ local engineflags = { }
+
+ for i=-10,#arg do
+ local a = arg[i]
+ if a then
+ local flag, content = match(a,"^%-%-([^=]+)=?(.-)$")
+ if flag then
+ engineflags[flag] = content or ""
+ end
+ end
+ end
+
+ environment.engineflags = engineflags
+
+end
+
+-- We need a few premature callbacks in the format generator. We
+-- also do this when the format is loaded as otherwise we get
+-- a kpse error when disabled. This is an engine issue that will
+-- be sorted out in due time.
+
+local isfile = lfs.isfile
+
+local function source_file(name)
+ local fullname = sourcepath .. "/" .. name
+ if isfile(fullname) then
+ return fullname
+ end
+ fullname = fullname .. ".tex"
+ if isfile(fullname) then
+ return fullname
+ end
+ if isfile(name) then
+ return name
+ end
+ name = name .. ".tex"
+ if isfile(name) then
+ return name
+ end
+ return nil
+end
+
+local function target_file(name)
+ return targetpath .. "/" .. name
+end
+
+local function find_read_file (id,name)
+ return source_file(name)
+end
+
+local function find_write_file(id,name)
+ return target_file(name)
+end
+
+local function open_read_file(name)
+ local f = io.open(name,'rb')
+ return {
+ reader = function()
+ return f:read("*line")
+ end
+ }
+end
+
+callback.register('find_read_file' , find_read_file )
+callback.register('open_read_file' , open_read_file )
+callback.register('find_write_file', find_write_file)
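A brief editorial note on how the bytecode registry above is normally used: during iniTeX each core module announces itself with lua.registercode, which loads the file once and, in iniTeX mode, appends the chunk to lua.bytecode starting at slot 501; functions passed to lua.registerfinalizer run later when lua.finalize is called. A sketch with a made-up module name:

    lua.registercode("luat-xyz", "1.001")   -- loads luat-xyz.lua once; in iniTeX the
                                            -- chunk is stored in lua.bytecode[n] and
                                            -- lua.lastbytecode is bumped

    lua.registerfinalizer(function()
        texio.write_nl("all finalizers done")
    end, "report completion")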
diff --git a/tex/context/base/luat-env.lua b/tex/context/base/luat-env.lua
index 8753972c6..2f8f9e28d 100644
--- a/tex/context/base/luat-env.lua
+++ b/tex/context/base/luat-env.lua
@@ -1,176 +1,176 @@
- if not modules then modules = { } end modules ['luat-env'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- A former version provided functionality for non embedded core scripts i.e. runtime
--- library loading. Given the amount of Lua code we use now, this no longer makes
--- sense. Much of this evolved before bytecode arrays were available and so a lot of
--- code has disappeared already.
-
-local rawset, rawget, loadfile, assert = rawset, rawget, loadfile, assert
-
-local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
-
-local report_lua = logs.reporter("resolvers","lua")
-
-local luautilities = utilities.lua
-local luasuffixes = luautilities.suffixes
-
-environment = environment or { }
-local environment = environment
-
--- environment
-
-local mt = {
- __index = function(_,k)
- if k == "version" then
- local version = tex.toks and tex.toks.contextversiontoks
- if version and version ~= "" then
- rawset(environment,"version",version)
- return version
- else
- return "unknown"
- end
- elseif k == "kind" then
- local kind = tex.toks and tex.toks.contextkindtoks
- if kind and kind ~= "" then
- rawset(environment,"kind",kind)
- return kind
- else
- return "unknown"
- end
- elseif k == "jobname" or k == "formatname" then
- local name = tex and tex[k]
- if name or name == "" then
- rawset(environment,k,name)
- return name
- else
- return "unknown"
- end
- elseif k == "outputfilename" then
- local name = environment.jobname
- rawset(environment,k,name)
- return name
- end
- end
-}
-
-setmetatable(environment,mt)
-
--- weird place ... depends on a not yet loaded module
-
-function environment.texfile(filename)
- return resolvers.findfile(filename,'tex')
-end
-
-function environment.luafile(filename) -- needs checking
- local resolved = resolvers.findfile(filename,'tex') or ""
- if resolved ~= "" then
- return resolved
- end
- resolved = resolvers.findfile(filename,'texmfscripts') or ""
- if resolved ~= "" then
- return resolved
- end
- return resolvers.findfile(filename,'luatexlibs') or ""
-end
-
--- local function checkstrip(filename)
--- local modu = modules[file.nameonly(filename)]
--- return modu and modu.dataonly
--- end
-
-local stripindeed = false directives.register("system.compile.strip", function(v) stripindeed = v end)
-
-local function strippable(filename)
- if stripindeed then
- local modu = modules[file.nameonly(filename)]
- return modu and modu.dataonly
- else
- return false
- end
-end
-
-function environment.luafilechunk(filename,silent) -- used for loading lua bytecode in the format
- filename = file.replacesuffix(filename, "lua")
- local fullname = environment.luafile(filename)
- if fullname and fullname ~= "" then
- local data = luautilities.loadedluacode(fullname,strippable,filename) -- can be overloaded
- if trace_locating then
- report_lua("loading file %a %s",fullname,not data and "failed" or "succeeded")
- elseif not silent then
- texio.write("<",data and "+ " or "- ",fullname,">")
- end
- return data
- else
- if trace_locating then
- report_lua("unknown file %a",filename)
- end
- return nil
- end
-end
-
--- the next ones can use the previous ones / combine
-
-function environment.loadluafile(filename, version)
- local lucname, luaname, chunk
- local basename = file.removesuffix(filename)
- if basename == filename then
- luaname = file.addsuffix(basename,luasuffixes.lua)
- lucname = file.addsuffix(basename,luasuffixes.luc)
- else
- luaname = basename -- forced suffix
- lucname = nil
- end
- -- when not overloaded by explicit suffix we look for a luc file first
- local fullname = (lucname and environment.luafile(lucname)) or ""
- if fullname ~= "" then
- if trace_locating then
- report_lua("loading %a",fullname)
- end
- -- maybe: package.loaded[file.nameonly(fullname)] = true
- chunk = loadfile(fullname) -- this way we don't need a file exists check
- end
- if chunk then
- assert(chunk)()
- if version then
- -- we check if the version number of this chunk matches
- local v = version -- can be nil
- if modules and modules[filename] then
- v = modules[filename].version -- new method
- elseif versions and versions[filename] then
- v = versions[filename] -- old method
- end
- if v == version then
- return true
- else
- if trace_locating then
- report_lua("version mismatch for %a, lua version %a, luc version %a",filename,v,version)
- end
- environment.loadluafile(filename)
- end
- else
- return true
- end
- end
- fullname = (luaname and environment.luafile(luaname)) or ""
- if fullname ~= "" then
- if trace_locating then
- report_lua("loading %a",fullname)
- end
- chunk = loadfile(fullname) -- this way we don't need a file exists check
- if not chunk then
- if trace_locating then
- report_lua("unknown file %a",filename)
- end
- else
- assert(chunk)()
- return true
- end
- end
- return false
-end
+ if not modules then modules = { } end modules ['luat-env'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- A former version provided functionality for non embedded core scripts i.e. runtime
+-- library loading. Given the amount of Lua code we use now, this no longer makes
+-- sense. Much of this evolved before bytecode arrays were available and so a lot of
+-- code has disappeared already.
+
+local rawset, rawget, loadfile, assert = rawset, rawget, loadfile, assert
+
+local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
+
+local report_lua = logs.reporter("resolvers","lua")
+
+local luautilities = utilities.lua
+local luasuffixes = luautilities.suffixes
+
+environment = environment or { }
+local environment = environment
+
+-- environment
+
+local mt = {
+ __index = function(_,k)
+ if k == "version" then
+ local version = tex.toks and tex.toks.contextversiontoks
+ if version and version ~= "" then
+ rawset(environment,"version",version)
+ return version
+ else
+ return "unknown"
+ end
+ elseif k == "kind" then
+ local kind = tex.toks and tex.toks.contextkindtoks
+ if kind and kind ~= "" then
+ rawset(environment,"kind",kind)
+ return kind
+ else
+ return "unknown"
+ end
+ elseif k == "jobname" or k == "formatname" then
+ local name = tex and tex[k]
+ if name or name == "" then
+ rawset(environment,k,name)
+ return name
+ else
+ return "unknown"
+ end
+ elseif k == "outputfilename" then
+ local name = environment.jobname
+ rawset(environment,k,name)
+ return name
+ end
+ end
+}
+
+setmetatable(environment,mt)
+
+-- weird place ... depends on a not yet loaded module
+
+function environment.texfile(filename)
+ return resolvers.findfile(filename,'tex')
+end
+
+function environment.luafile(filename) -- needs checking
+ local resolved = resolvers.findfile(filename,'tex') or ""
+ if resolved ~= "" then
+ return resolved
+ end
+ resolved = resolvers.findfile(filename,'texmfscripts') or ""
+ if resolved ~= "" then
+ return resolved
+ end
+ return resolvers.findfile(filename,'luatexlibs') or ""
+end
+
+-- local function checkstrip(filename)
+-- local modu = modules[file.nameonly(filename)]
+-- return modu and modu.dataonly
+-- end
+
+local stripindeed = false directives.register("system.compile.strip", function(v) stripindeed = v end)
+
+local function strippable(filename)
+ if stripindeed then
+ local modu = modules[file.nameonly(filename)]
+ return modu and modu.dataonly
+ else
+ return false
+ end
+end
+
+function environment.luafilechunk(filename,silent) -- used for loading lua bytecode in the format
+ filename = file.replacesuffix(filename, "lua")
+ local fullname = environment.luafile(filename)
+ if fullname and fullname ~= "" then
+ local data = luautilities.loadedluacode(fullname,strippable,filename) -- can be overloaded
+ if trace_locating then
+ report_lua("loading file %a %s",fullname,not data and "failed" or "succeeded")
+ elseif not silent then
+ texio.write("<",data and "+ " or "- ",fullname,">")
+ end
+ return data
+ else
+ if trace_locating then
+ report_lua("unknown file %a",filename)
+ end
+ return nil
+ end
+end
+
+-- the next ones can use the previous ones / combine
+
+function environment.loadluafile(filename, version)
+ local lucname, luaname, chunk
+ local basename = file.removesuffix(filename)
+ if basename == filename then
+ luaname = file.addsuffix(basename,luasuffixes.lua)
+ lucname = file.addsuffix(basename,luasuffixes.luc)
+ else
+ luaname = basename -- forced suffix
+ lucname = nil
+ end
+ -- when not overloaded by explicit suffix we look for a luc file first
+ local fullname = (lucname and environment.luafile(lucname)) or ""
+ if fullname ~= "" then
+ if trace_locating then
+ report_lua("loading %a",fullname)
+ end
+ -- maybe: package.loaded[file.nameonly(fullname)] = true
+ chunk = loadfile(fullname) -- this way we don't need a file exists check
+ end
+ if chunk then
+ assert(chunk)()
+ if version then
+ -- we check if the version number of this chunk matches
+ local v = version -- can be nil
+ if modules and modules[filename] then
+ v = modules[filename].version -- new method
+ elseif versions and versions[filename] then
+ v = versions[filename] -- old method
+ end
+ if v == version then
+ return true
+ else
+ if trace_locating then
+ report_lua("version mismatch for %a, lua version %a, luc version %a",filename,v,version)
+ end
+ environment.loadluafile(filename)
+ end
+ else
+ return true
+ end
+ end
+ fullname = (luaname and environment.luafile(luaname)) or ""
+ if fullname ~= "" then
+ if trace_locating then
+ report_lua("loading %a",fullname)
+ end
+ chunk = loadfile(fullname) -- this way we don't need a file exists check
+ if not chunk then
+ if trace_locating then
+ report_lua("unknown file %a",filename)
+ end
+ else
+ assert(chunk)()
+ return true
+ end
+ end
+ return false
+end
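As an editorial usage sketch for the loader above (the module name and version are hypothetical): environment.loadluafile prefers a compiled .luc companion and falls back to the .lua source when the embedded version number does not match.

    if not environment.loadluafile("my-data", 1.001) then
        -- neither my-data.luc (with a matching version) nor my-data.lua could be loaded
    end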
diff --git a/tex/context/base/luat-exe.lua b/tex/context/base/luat-exe.lua
index a57a5a006..6f7137cad 100644
--- a/tex/context/base/luat-exe.lua
+++ b/tex/context/base/luat-exe.lua
@@ -1,126 +1,126 @@
-if not modules then modules = { } end modules ['luat-exe'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- this module needs checking (very old and never really used, not even enabled)
-
-local match, find, gmatch = string.match, string.find, string.gmatch
-local concat = table.concat
-local select = select
-
-local report_executers = logs.reporter("system","executers")
-
-resolvers.executers = resolvers.executers or { }
-local executers = resolvers.executers
-
-local permitted = { }
-
-local osexecute = os.execute
-local osexec = os.exec
-local osspawn = os.spawn
-local iopopen = io.popen
-
-local execute = osexecute
-local exec = osexec
-local spawn = osspawn
-local popen = iopopen
-
-local function register(...)
- for k=1,select("#",...) do
- local v = select(k,...)
- permitted[#permitted+1] = v == "*" and ".*" or v
- end
-end
-
-local function prepare(...)
- -- todo: make more clever first split
- local t = { ... }
- local n = #t -- number of arguments
- local one = t[1]
- if n == 1 then
- if type(one) == 'table' then
- return one, concat(t," ",2,n)
- else
- local name, arguments = match(one,"^(.-)%s+(.+)$")
- if name and arguments then
- return name, arguments
- else
- return one, ""
- end
- end
- else
- return one, concat(t," ",2,n)
- end
-end
-
-local function executer(action)
- return function(...)
- local name, arguments = prepare(...)
- for k=1,#permitted do
- local v = permitted[k]
- if find(name,v) then
- return action(name .. " " .. arguments)
- else
- report_executers("not permitted: %s %s",name,arguments)
- end
- end
- return action("")
- end
-end
-
-local function finalize() -- todo: os.exec, todo: report ipv print
- execute = executer(osexecute)
- exec = executer(osexec)
- spawn = executer(osspawn)
- popen = executer(iopopen)
- finalize = function()
- report_executers("already finalized")
- end
- register = function()
- report_executers("already finalized, no registration permitted")
- end
- os.execute = execute
- os.exec = exec
- os.spawn = spawn
- io.popen = popen
-end
-
-executers.finalize = function(...) return finalize(...) end
-executers.register = function(...) return register(...) end
-executers.execute = function(...) return execute (...) end
-executers.exec = function(...) return exec (...) end
-executers.spawn = function(...) return spawn (...) end
-executers.popen = function(...) return popen (...) end
-
-local execution_mode directives.register("system.executionmode", function(v) execution_mode = v end)
-local execution_list directives.register("system.executionlist", function(v) execution_list = v end)
-
-function executers.check()
- if execution_mode == "none" then
- finalize()
- elseif execution_mode == "list" and execution_list ~= "" then
- for s in gmatch(execution_list,"[^%s,]+") do
- register(s)
- end
- finalize()
- else
- -- all
- end
-end
-
---~ resolvers.executers.register('.*')
---~ resolvers.executers.register('*')
---~ resolvers.executers.register('dir','ls')
---~ resolvers.executers.register('dir')
-
---~ resolvers.executers.finalize()
---~ resolvers.executers.execute('dir',"*.tex")
---~ resolvers.executers.execute("dir *.tex")
---~ resolvers.executers.execute("ls *.tex")
---~ os.execute('ls')
-
---~ resolvers.executers.check()
+if not modules then modules = { } end modules ['luat-exe'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- this module needs checking (very old and never really used, not even enabled)
+
+local match, find, gmatch = string.match, string.find, string.gmatch
+local concat = table.concat
+local select = select
+
+local report_executers = logs.reporter("system","executers")
+
+resolvers.executers = resolvers.executers or { }
+local executers = resolvers.executers
+
+local permitted = { }
+
+local osexecute = os.execute
+local osexec = os.exec
+local osspawn = os.spawn
+local iopopen = io.popen
+
+local execute = osexecute
+local exec = osexec
+local spawn = osspawn
+local popen = iopopen
+
+local function register(...)
+ for k=1,select("#",...) do
+ local v = select(k,...)
+ permitted[#permitted+1] = v == "*" and ".*" or v
+ end
+end
+
+local function prepare(...)
+ -- todo: make more clever first split
+ local t = { ... }
+ local n = #t
+ local one = t[1]
+ if n == 1 then
+ if type(one) == 'table' then
+ return one, concat(t," ",2,n)
+ else
+ local name, arguments = match(one,"^(.-)%s+(.+)$")
+ if name and arguments then
+ return name, arguments
+ else
+ return one, ""
+ end
+ end
+ else
+ return one, concat(t," ",2,n)
+ end
+end
+
+local function executer(action)
+ return function(...)
+ local name, arguments = prepare(...)
+ -- only act when one of the permitted patterns matches the command name
+ for k=1,#permitted do
+ local v = permitted[k]
+ if find(name,v) then
+ return action(name .. " " .. arguments)
+ end
+ end
+ report_executers("not permitted: %s %s",name,arguments)
+ return action("")
+ end
+end
+
+local function finalize() -- todo: os.exec, todo: report instead of print
+ execute = executer(osexecute)
+ exec = executer(osexec)
+ spawn = executer(osspawn)
+ popen = executer(iopopen)
+ finalize = function()
+ report_executers("already finalized")
+ end
+ register = function()
+ report_executers("already finalized, no registration permitted")
+ end
+ os.execute = execute
+ os.exec = exec
+ os.spawn = spawn
+ io.popen = popen
+end
+
+executers.finalize = function(...) return finalize(...) end
+executers.register = function(...) return register(...) end
+executers.execute = function(...) return execute (...) end
+executers.exec = function(...) return exec (...) end
+executers.spawn = function(...) return spawn (...) end
+executers.popen = function(...) return popen (...) end
+
+local execution_mode directives.register("system.executionmode", function(v) execution_mode = v end)
+local execution_list directives.register("system.executionlist", function(v) execution_list = v end)
+
+function executers.check()
+ if execution_mode == "none" then
+ finalize()
+ elseif execution_mode == "list" and execution_list ~= "" then
+ for s in gmatch(execution_list,"[^%s,]+") do
+ register(s)
+ end
+ finalize()
+ else
+ -- all
+ end
+end
+
+--~ resolvers.executers.register('.*')
+--~ resolvers.executers.register('*')
+--~ resolvers.executers.register('dir','ls')
+--~ resolvers.executers.register('dir')
+
+--~ resolvers.executers.finalize()
+--~ resolvers.executers.execute('dir',"*.tex")
+--~ resolvers.executers.execute("dir *.tex")
+--~ resolvers.executers.execute("ls *.tex")
+--~ os.execute('ls')
+
+--~ resolvers.executers.check()
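--
-- A minimal usage sketch of the guard above (hedged: "bibtex" and the sample
-- commands are illustrative only, they are not part of this module):
--
-- resolvers.executers.register("bibtex","makeindex") -- only these name patterns pass
-- resolvers.executers.finalize()                     -- wraps os.execute, os.spawn, io.popen
-- os.execute("bibtex jobname")                       -- permitted: "bibtex" matches
-- os.execute("format c:")                            -- reported as not permitted
--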
diff --git a/tex/context/base/luat-fio.lua b/tex/context/base/luat-fio.lua
index d61c6f142..bc8c6677b 100644
--- a/tex/context/base/luat-fio.lua
+++ b/tex/context/base/luat-fio.lua
@@ -1,117 +1,117 @@
-if not modules then modules = { } end modules ['luat-fio'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format = string.format
-local concat = table.concat
-local sequenced = table.sequenced
-
-texconfig.kpse_init = false
-texconfig.shell_escape = 't'
-texconfig.max_in_open = 127
-texconfig.max_print_line = 100000
-
-if not resolvers.instance then
-
- resolvers.reset()
-
- resolvers.instance.validfile = resolvers.validctxfile
-
- -- we now load the file database as we might need files other than
- -- tex and lua file on the given path
-
- -- trackers.enable("resolvers.*")
- resolvers.load()
- -- trackers.disable("resolvers.*")
-
- local findbinfile, loadbinfile = resolvers.findbinfile, resolvers.loadbinfile
- local findtexfile, opentexfile = resolvers.findtexfile, resolvers.opentexfile
-
- if callback then
-
- local register = callbacks.register
-
- -- register('process_jobname' , function(name) return name end, true)
-
- register('find_read_file' , function(id,name) return findtexfile(name) end, true)
- register('open_read_file' , function( name) return opentexfile(name) end, true)
-
- register('find_data_file' , function(name) return findbinfile(name,"tex") end, true)
- register('find_enc_file' , function(name) return findbinfile(name,"enc") end, true)
- register('find_font_file' , function(name) return findbinfile(name,"tfm") end, true)
- register('find_format_file' , function(name) return findbinfile(name,"fmt") end, true)
- register('find_image_file' , function(name) return findbinfile(name,"tex") end, true)
- register('find_map_file' , function(name) return findbinfile(name,"map") end, true)
- register('find_opentype_file' , function(name) return findbinfile(name,"otf") end, true)
- register('find_output_file' , function(name) return name end, true)
- register('find_pk_file' , function(name) return findbinfile(name,"pk") end, true)
- register('find_sfd_file' , function(name) return findbinfile(name,"sfd") end, true)
- register('find_truetype_file' , function(name) return findbinfile(name,"ttf") end, true)
- register('find_type1_file' , function(name) return findbinfile(name,"pfb") end, true)
- register('find_vf_file' , function(name) return findbinfile(name,"vf") end, true)
- register('find_cidmap_file' , function(name) return findbinfile(name,"cidmap") end, true)
-
- register('read_data_file' , function(file) return loadbinfile(file,"tex") end, true)
- register('read_enc_file' , function(file) return loadbinfile(file,"enc") end, true)
- register('read_font_file' , function(file) return loadbinfile(file,"tfm") end, true)
- -- format
- -- image
- register('read_map_file' , function(file) return loadbinfile(file,"map") end, true)
- -- output
- register('read_pk_file' , function(file) return loadbinfile(file,"pk") end, true) -- 600dpi/manfnt.720pk
- register('read_sfd_file' , function(file) return loadbinfile(file,"sfd") end, true)
- register('read_vf_file' , function(file) return loadbinfile(file,"vf" ) end, true)
-
- register('find_font_file' , function(name) return findbinfile(name,"ofm") end, true)
- register('find_vf_file' , function(name) return findbinfile(name,"ovf") end, true)
-
- register('read_font_file' , function(file) return loadbinfile(file,"ofm") end, true)
- register('read_vf_file' , function(file) return loadbinfile(file,"ovf") end, true)
-
- -- register('read_opentype_file' , function(file) return loadbinfile(file,"otf") end, true)
- -- register('read_truetype_file' , function(file) return loadbinfile(file,"ttf") end, true)
- -- register('read_type1_file' , function(file) return loadbinfile(file,"pfb") end, true)
- -- register('read_cidmap_file' , function(file) return loadbinfile(file,"cidmap") end, true)
-
- register('find_write_file' , function(id,name) return name end, true)
- register('find_format_file' , function(name) return name end, true)
-
- end
-
-end
-
-local report_system = logs.reporter("system","files")
-local report_files = logs.reporter("used files")
-
-luatex.registerstopactions(function()
- local foundintrees = resolvers.instance.foundintrees
- if #foundintrees > 0 then
- logs.pushtarget("logfile")
- logs.newline()
- report_system("start used files")
- logs.newline()
- for i=1,#foundintrees do
- report_files("%4i: % T",i,foundintrees[i])
- end
- logs.newline()
- report_system("stop used files")
- logs.newline()
- logs.poptarget()
- end
-end)
-
-statistics.register("resource resolver", function()
- local scandata = resolvers.scandata()
- return format("loadtime %s seconds, %s scans with scantime %s seconds, %s shared scans, %s found files, scanned paths: %s",
- resolvers.loadtime(),
- scandata.n,
- scandata.time,
- scandata.shared,
- #resolvers.instance.foundintrees,
- #scandata.paths > 0 and concat(scandata.paths," ") or ""
- )
-end)
+if not modules then modules = { } end modules ['luat-fio'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format = string.format
+local concat = table.concat
+local sequenced = table.sequenced
+
+texconfig.kpse_init = false
+texconfig.shell_escape = 't'
+texconfig.max_in_open = 127
+texconfig.max_print_line = 100000
+
+if not resolvers.instance then
+
+ resolvers.reset()
+
+ resolvers.instance.validfile = resolvers.validctxfile
+
+ -- we now load the file database as we might need files other than
+ -- tex and lua files on the given path
+
+ -- trackers.enable("resolvers.*")
+ resolvers.load()
+ -- trackers.disable("resolvers.*")
+
+ local findbinfile, loadbinfile = resolvers.findbinfile, resolvers.loadbinfile
+ local findtexfile, opentexfile = resolvers.findtexfile, resolvers.opentexfile
+
+ if callback then
+
+ local register = callbacks.register
+
+ -- register('process_jobname' , function(name) return name end, true)
+
+ register('find_read_file' , function(id,name) return findtexfile(name) end, true)
+ register('open_read_file' , function( name) return opentexfile(name) end, true)
+
+ register('find_data_file' , function(name) return findbinfile(name,"tex") end, true)
+ register('find_enc_file' , function(name) return findbinfile(name,"enc") end, true)
+ register('find_font_file' , function(name) return findbinfile(name,"tfm") end, true)
+ register('find_format_file' , function(name) return findbinfile(name,"fmt") end, true)
+ register('find_image_file' , function(name) return findbinfile(name,"tex") end, true)
+ register('find_map_file' , function(name) return findbinfile(name,"map") end, true)
+ register('find_opentype_file' , function(name) return findbinfile(name,"otf") end, true)
+ register('find_output_file' , function(name) return name end, true)
+ register('find_pk_file' , function(name) return findbinfile(name,"pk") end, true)
+ register('find_sfd_file' , function(name) return findbinfile(name,"sfd") end, true)
+ register('find_truetype_file' , function(name) return findbinfile(name,"ttf") end, true)
+ register('find_type1_file' , function(name) return findbinfile(name,"pfb") end, true)
+ register('find_vf_file' , function(name) return findbinfile(name,"vf") end, true)
+ register('find_cidmap_file' , function(name) return findbinfile(name,"cidmap") end, true)
+
+ register('read_data_file' , function(file) return loadbinfile(file,"tex") end, true)
+ register('read_enc_file' , function(file) return loadbinfile(file,"enc") end, true)
+ register('read_font_file' , function(file) return loadbinfile(file,"tfm") end, true)
+ -- format
+ -- image
+ register('read_map_file' , function(file) return loadbinfile(file,"map") end, true)
+ -- output
+ register('read_pk_file' , function(file) return loadbinfile(file,"pk") end, true) -- 600dpi/manfnt.720pk
+ register('read_sfd_file' , function(file) return loadbinfile(file,"sfd") end, true)
+ register('read_vf_file' , function(file) return loadbinfile(file,"vf" ) end, true)
+
+ register('find_font_file' , function(name) return findbinfile(name,"ofm") end, true)
+ register('find_vf_file' , function(name) return findbinfile(name,"ovf") end, true)
+
+ register('read_font_file' , function(file) return loadbinfile(file,"ofm") end, true)
+ register('read_vf_file' , function(file) return loadbinfile(file,"ovf") end, true)
+
+ -- register('read_opentype_file' , function(file) return loadbinfile(file,"otf") end, true)
+ -- register('read_truetype_file' , function(file) return loadbinfile(file,"ttf") end, true)
+ -- register('read_type1_file' , function(file) return loadbinfile(file,"pfb") end, true)
+ -- register('read_cidmap_file' , function(file) return loadbinfile(file,"cidmap") end, true)
+
+ register('find_write_file' , function(id,name) return name end, true)
+ register('find_format_file' , function(name) return name end, true)
+
+ end
+
+end
+
+local report_system = logs.reporter("system","files")
+local report_files = logs.reporter("used files")
+
+luatex.registerstopactions(function()
+ local foundintrees = resolvers.instance.foundintrees
+ if #foundintrees > 0 then
+ logs.pushtarget("logfile")
+ logs.newline()
+ report_system("start used files")
+ logs.newline()
+ for i=1,#foundintrees do
+ report_files("%4i: % T",i,foundintrees[i])
+ end
+ logs.newline()
+ report_system("stop used files")
+ logs.newline()
+ logs.poptarget()
+ end
+end)
+
+statistics.register("resource resolver", function()
+ local scandata = resolvers.scandata()
+ return format("loadtime %s seconds, %s scans with scantime %s seconds, %s shared scans, %s found files, scanned paths: %s",
+ resolvers.loadtime(),
+ scandata.n,
+ scandata.time,
+ scandata.shared,
+ #resolvers.instance.foundintrees,
+ #scandata.paths > 0 and concat(scandata.paths," ") or ""
+ )
+end)
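--
-- All find_*/read_* pairs above follow the same two-call pattern; a hypothetical
-- resource type "xyz" (not a real kpse format, shown only as an illustration)
-- would be hooked in like this, inside the callback block:
--
-- register('find_xyz_file', function(name) return findbinfile(name,"xyz") end, true)
-- register('read_xyz_file', function(file) return loadbinfile(file,"xyz") end, true)
--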
diff --git a/tex/context/base/luat-fmt.lua b/tex/context/base/luat-fmt.lua
index 20a4a8fcd..2eb5b89c9 100644
--- a/tex/context/base/luat-fmt.lua
+++ b/tex/context/base/luat-fmt.lua
@@ -1,140 +1,140 @@
-if not modules then modules = { } end modules ['luat-fmt'] = {
- version = 1.001,
- comment = "companion to mtxrun",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format = string.format
-local concat = table.concat
-local quoted = string.quoted
-local luasuffixes = utilities.lua.suffixes
-
-local report_format = logs.reporter("resolvers","formats")
-
-local function primaryflags() -- not yet ok
- local trackers = environment.argument("trackers")
- local directives = environment.argument("directives")
- local flags = { }
- if trackers and trackers ~= "" then
- flags = { "--trackers=" .. quoted(trackers) }
- end
- if directives and directives ~= "" then
- flags = { "--directives=" .. quoted(directives) }
- end
- if environment.argument("jit") then
- flags = { "--jiton" }
- end
- return concat(flags," ")
-end
-
-function environment.make_format(name)
- local engine = environment.ownmain or "luatex"
- -- change to format path (early as we need expanded paths)
- local olddir = dir.current()
- local path = caches.getwritablepath("formats",engine) or "" -- maybe platform
- if path ~= "" then
- lfs.chdir(path)
- end
- report_format("using format path %a",dir.current())
- -- check source file
- local texsourcename = file.addsuffix(name,"mkiv")
- local fulltexsourcename = resolvers.findfile(texsourcename,"tex") or ""
- if fulltexsourcename == "" then
- texsourcename = file.addsuffix(name,"tex")
- fulltexsourcename = resolvers.findfile(texsourcename,"tex") or ""
- end
- if fulltexsourcename == "" then
- report_format("no tex source file with name %a (mkiv or tex)",name)
- lfs.chdir(olddir)
- return
- else
- report_format("using tex source file %a",fulltexsourcename)
- end
- local texsourcepath = dir.expandname(file.dirname(fulltexsourcename)) -- really needed
- -- check specification
- local specificationname = file.replacesuffix(fulltexsourcename,"lus")
- local fullspecificationname = resolvers.findfile(specificationname,"tex") or ""
- if fullspecificationname == "" then
- specificationname = file.join(texsourcepath,"context.lus")
- fullspecificationname = resolvers.findfile(specificationname,"tex") or ""
- end
- if fullspecificationname == "" then
- report_format("unknown stub specification %a",specificationname)
- lfs.chdir(olddir)
- return
- end
- local specificationpath = file.dirname(fullspecificationname)
- -- load specification
- local usedluastub = nil
- local usedlualibs = dofile(fullspecificationname)
- if type(usedlualibs) == "string" then
- usedluastub = file.join(file.dirname(fullspecificationname),usedlualibs)
- elseif type(usedlualibs) == "table" then
- report_format("using stub specification %a",fullspecificationname)
- local texbasename = file.basename(name)
- local luastubname = file.addsuffix(texbasename,luasuffixes.lua)
- local lucstubname = file.addsuffix(texbasename,luasuffixes.luc)
- -- pack libraries in stub
- report_format("creating initialization file %a",luastubname)
- utilities.merger.selfcreate(usedlualibs,specificationpath,luastubname)
- -- compile stub file (does not save that much as we don't use this stub at startup any more)
- if utilities.lua.compile(luastubname,lucstubname) and lfs.isfile(lucstubname) then
- report_format("using compiled initialization file %a",lucstubname)
- usedluastub = lucstubname
- else
- report_format("using uncompiled initialization file %a",luastubname)
- usedluastub = luastubname
- end
- else
- report_format("invalid stub specification %a",fullspecificationname)
- lfs.chdir(olddir)
- return
- end
- -- generate format
- local command = format("%s --ini %s --lua=%s %s %sdump",engine,primaryflags(),quoted(usedluastub),quoted(fulltexsourcename),os.platform == "unix" and "\\\\" or "\\")
- report_format("running command: %s\n",command)
- os.spawn(command)
- -- remove related mem files
- local pattern = file.removesuffix(file.basename(usedluastub)).."-*.mem"
- -- report_format("removing related mplib format with pattern %a", pattern)
- local mp = dir.glob(pattern)
- if mp then
- for i=1,#mp do
- local name = mp[i]
- report_format("removing related mplib format %a", file.basename(name))
- os.remove(name)
- end
- end
- lfs.chdir(olddir)
-end
-
-function environment.run_format(name,data,more)
- if name and name ~= "" then
- local engine = environment.ownmain or "luatex"
- local barename = file.removesuffix(name)
- local fmtname = caches.getfirstreadablefile(file.addsuffix(barename,"fmt"),"formats",engine)
- if fmtname == "" then
- fmtname = resolvers.findfile(file.addsuffix(barename,"fmt")) or ""
- end
- fmtname = resolvers.cleanpath(fmtname)
- if fmtname == "" then
- report_format("no format with name %a",name)
- else
- local barename = file.removesuffix(name) -- expanded name
- local luaname = file.addsuffix(barename,"luc")
- if not lfs.isfile(luaname) then
- luaname = file.addsuffix(barename,"lua")
- end
- if not lfs.isfile(luaname) then
- report_format("using format name %a",fmtname)
- report_format("no luc/lua file with name %a",barename)
- else
- local command = format("%s %s --fmt=%s --lua=%s %s %s",engine,primaryflags(),quoted(barename),quoted(luaname),quoted(data),more ~= "" and quoted(more) or "")
- report_format("running command: %s",command)
- os.spawn(command)
- end
- end
- end
-end
+if not modules then modules = { } end modules ['luat-fmt'] = {
+ version = 1.001,
+ comment = "companion to mtxrun",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format = string.format
+local concat = table.concat
+local quoted = string.quoted
+local luasuffixes = utilities.lua.suffixes
+
+local report_format = logs.reporter("resolvers","formats")
+
+local function primaryflags() -- not yet ok
+ local trackers = environment.argument("trackers")
+ local directives = environment.argument("directives")
+ local flags = { }
+ if trackers and trackers ~= "" then
+ flags[#flags+1] = "--trackers=" .. quoted(trackers)
+ end
+ if directives and directives ~= "" then
+ flags[#flags+1] = "--directives=" .. quoted(directives)
+ end
+ if environment.argument("jit") then
+ flags[#flags+1] = "--jiton"
+ end
+ return concat(flags," ")
+end
+
+function environment.make_format(name)
+ local engine = environment.ownmain or "luatex"
+ -- change to format path (early as we need expanded paths)
+ local olddir = dir.current()
+ local path = caches.getwritablepath("formats",engine) or "" -- maybe platform
+ if path ~= "" then
+ lfs.chdir(path)
+ end
+ report_format("using format path %a",dir.current())
+ -- check source file
+ local texsourcename = file.addsuffix(name,"mkiv")
+ local fulltexsourcename = resolvers.findfile(texsourcename,"tex") or ""
+ if fulltexsourcename == "" then
+ texsourcename = file.addsuffix(name,"tex")
+ fulltexsourcename = resolvers.findfile(texsourcename,"tex") or ""
+ end
+ if fulltexsourcename == "" then
+ report_format("no tex source file with name %a (mkiv or tex)",name)
+ lfs.chdir(olddir)
+ return
+ else
+ report_format("using tex source file %a",fulltexsourcename)
+ end
+ local texsourcepath = dir.expandname(file.dirname(fulltexsourcename)) -- really needed
+ -- check specification
+ local specificationname = file.replacesuffix(fulltexsourcename,"lus")
+ local fullspecificationname = resolvers.findfile(specificationname,"tex") or ""
+ if fullspecificationname == "" then
+ specificationname = file.join(texsourcepath,"context.lus")
+ fullspecificationname = resolvers.findfile(specificationname,"tex") or ""
+ end
+ if fullspecificationname == "" then
+ report_format("unknown stub specification %a",specificationname)
+ lfs.chdir(olddir)
+ return
+ end
+ local specificationpath = file.dirname(fullspecificationname)
+ -- load specification
+ local usedluastub = nil
+ local usedlualibs = dofile(fullspecificationname)
+ if type(usedlualibs) == "string" then
+ usedluastub = file.join(file.dirname(fullspecificationname),usedlualibs)
+ elseif type(usedlualibs) == "table" then
+ report_format("using stub specification %a",fullspecificationname)
+ local texbasename = file.basename(name)
+ local luastubname = file.addsuffix(texbasename,luasuffixes.lua)
+ local lucstubname = file.addsuffix(texbasename,luasuffixes.luc)
+ -- pack libraries in stub
+ report_format("creating initialization file %a",luastubname)
+ utilities.merger.selfcreate(usedlualibs,specificationpath,luastubname)
+ -- compile stub file (does not save that much as we don't use this stub at startup any more)
+ if utilities.lua.compile(luastubname,lucstubname) and lfs.isfile(lucstubname) then
+ report_format("using compiled initialization file %a",lucstubname)
+ usedluastub = lucstubname
+ else
+ report_format("using uncompiled initialization file %a",luastubname)
+ usedluastub = luastubname
+ end
+ else
+ report_format("invalid stub specification %a",fullspecificationname)
+ lfs.chdir(olddir)
+ return
+ end
+ -- generate format
+ local command = format("%s --ini %s --lua=%s %s %sdump",engine,primaryflags(),quoted(usedluastub),quoted(fulltexsourcename),os.platform == "unix" and "\\\\" or "\\")
+ report_format("running command: %s\n",command)
+ os.spawn(command)
+ -- remove related mem files
+ local pattern = file.removesuffix(file.basename(usedluastub)).."-*.mem"
+ -- report_format("removing related mplib format with pattern %a", pattern)
+ local mp = dir.glob(pattern)
+ if mp then
+ for i=1,#mp do
+ local name = mp[i]
+ report_format("removing related mplib format %a", file.basename(name))
+ os.remove(name)
+ end
+ end
+ lfs.chdir(olddir)
+end
+
+function environment.run_format(name,data,more)
+ if name and name ~= "" then
+ local engine = environment.ownmain or "luatex"
+ local barename = file.removesuffix(name)
+ local fmtname = caches.getfirstreadablefile(file.addsuffix(barename,"fmt"),"formats",engine)
+ if fmtname == "" then
+ fmtname = resolvers.findfile(file.addsuffix(barename,"fmt")) or ""
+ end
+ fmtname = resolvers.cleanpath(fmtname)
+ if fmtname == "" then
+ report_format("no format with name %a",name)
+ else
+ local barename = file.removesuffix(name) -- expanded name
+ local luaname = file.addsuffix(barename,"luc")
+ if not lfs.isfile(luaname) then
+ luaname = file.addsuffix(barename,"lua")
+ end
+ if not lfs.isfile(luaname) then
+ report_format("using format name %a",fmtname)
+ report_format("no luc/lua file with name %a",barename)
+ else
+ local command = format("%s %s --fmt=%s --lua=%s %s %s",engine,primaryflags(),quoted(barename),quoted(luaname),quoted(data),more ~= "" and quoted(more) or "")
+ report_format("running command: %s",command)
+ os.spawn(command)
+ end
+ end
+ end
+end
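--
-- A hedged usage sketch of the two helpers above, as a runner script might call
-- them ("cont-en" and "myfile.tex" are only example names):
--
-- environment.make_format("cont-en")                 -- generate the format in the cache path
-- environment.run_format("cont-en","myfile.tex","")  -- run the engine with that format
--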
diff --git a/tex/context/base/luat-ini.lua b/tex/context/base/luat-ini.lua
index 587214b93..d4eee7123 100644
--- a/tex/context/base/luat-ini.lua
+++ b/tex/context/base/luat-ini.lua
@@ -1,206 +1,206 @@
-if not modules then modules = { } end modules ['luat-ini'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- rather experimental down here ... adapted to lua 5.2 ... but still
--- experimental
-
-local debug = require("debug")
-
-local string, table, lpeg, math, io, system = string, table, lpeg, math, io, system
-local rawset, rawget, next, setmetatable = rawset, rawget, next, setmetatable
-
---[[ldx--
-
-We cannot load anything yet. However, what we will do is reserve a few tables.
-These can be used for runtime user data or third party modules and will not be
-cluttered by macro package code.
---ldx]]--
-
-userdata = userdata or { } -- for users (e.g. functions etc)
-thirddata = thirddata or { } -- only for third party modules
-moduledata = moduledata or { } -- only for development team
-documentdata = documentdata or { } -- for users (e.g. raw data)
-parametersets = parametersets or { } -- experimental for team
-
-table.setmetatableindex(moduledata,table.autokey)
-table.setmetatableindex(thirddata, table.autokey)
-
---[[ldx--
-
-Please create a namespace within these tables before using them!
-We could cook up a readonly model for global tables but it makes more sense
-to invite users to use one of the predefined namespaces. One can redefine the
-protector. After all, it's just a lightweight suggestive system, not a
-watertight one.
---ldx]]--
-
-local global = _G
-global.global = global
-
-local dummy = function() end
-
---[[ldx--
-
-Another approach is to freeze tables by using a metatable; this will be
-implemented stepwise.
---ldx]]--
-
--- moduledata : no need for protection (only for developers)
--- isolatedata : full protection
--- userdata : protected
--- thirddata : protected
-
---[[ldx--
-
-We could have a metatable that automatically creates a top-level namespace.
---ldx]]--
-
-local luanames = lua.name -- luatex itself
-
-lua.numbers = lua.numbers or { } local numbers = lua.numbers
-lua.messages = lua.messages or { } local messages = lua.messages
-
-storage.register("lua/numbers", numbers, "lua.numbers" )
-storage.register("lua/messages", messages, "lua.messages")
-
-local setfenv = setfenv or debug.setfenv -- < 5.2
-
-if setfenv then
-
- local protected = {
- -- global table
- global = global,
- -- user tables
- -- moduledata = moduledata,
- userdata = userdata,
- thirddata = thirddata,
- documentdata = documentdata,
- -- reserved
- protect = dummy,
- unprotect = dummy,
- -- luatex
- tex = tex,
- -- lua
- string = string,
- table = table,
- lpeg = lpeg,
- math = math,
- io = io,
- file = file,
- bit32 = bit32,
- --
- context = context,
- }
-
- local protect_full = function(name)
- local t = { }
- for k, v in next, protected do
- t[k] = v
- end
- return t
- end
-
- local protect_part = function(name) -- adds
- local t = rawget(global,name)
- if not t then
- t = { }
- for k, v in next, protected do
- t[k] = v
- end
- rawset(global,name,t)
- end
- return t
- end
-
- protect = function(name)
- if name == "isolateddata" then
- setfenv(2,protect_full(name))
- else
- setfenv(2,protect_part(name or "shareddata"))
- end
- end
-
- function lua.registername(name,message)
- local lnn = lua.numbers[name]
- if not lnn then
- lnn = #messages + 1
- messages[lnn] = message
- numbers[name] = lnn
- end
- luanames[lnn] = message
- context(lnn)
- -- initialize once
- if name ~= "isolateddata" then
- protect_full(name or "shareddata")
- end
- end
-
-elseif libraries then -- assume >= 5.2
-
- local shared
-
- protect = function(name)
- if not shared then
- -- e.g. context is not yet known
- local public = {
- global = global,
- -- moduledata = moduledata,
- userdata = userdata,
- thirddata = thirddata,
- documentdata = documentdata,
- protect = dummy,
- unprotect = dummy,
- context = context,
- }
- --
- for k, v in next, libraries.builtin do public[k] = v end
- for k, v in next, libraries.functions do public[k] = v end
- for k, v in next, libraries.obsolete do public[k] = nil end
- --
- shared = { __index = public }
- protect = function(name)
- local t = global[name] or { }
- setmetatable(t,shared) -- set each time
- return t
- end
- end
- return protect(name)
- end
-
- function lua.registername(name,message)
- local lnn = lua.numbers[name]
- if not lnn then
- lnn = #messages + 1
- messages[lnn] = message
- numbers[name] = lnn
- end
- luanames[lnn] = message
- context(lnn)
- end
-
-else
-
- protect = dummy
-
- function lua.registername(name,message)
- local lnn = lua.numbers[name]
- if not lnn then
- lnn = #messages + 1
- messages[lnn] = message
- numbers[name] = lnn
- end
- luanames[lnn] = message
- context(lnn)
- end
-
-end
-
+if not modules then modules = { } end modules ['luat-ini'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- rather experimental down here ... adapted to lua 5.2 ... but still
+-- experimental
+
+local debug = require("debug")
+
+local string, table, lpeg, math, io, system = string, table, lpeg, math, io, system
+local rawset, rawget, next, setmetatable = rawset, rawget, next, setmetatable
+
+--[[ldx--
+
+We cannot load anything yet. However, what we will do is reserve a few tables.
+These can be used for runtime user data or third party modules and will not be
+cluttered by macro package code.
+--ldx]]--
+
+userdata = userdata or { } -- for users (e.g. functions etc)
+thirddata = thirddata or { } -- only for third party modules
+moduledata = moduledata or { } -- only for development team
+documentdata = documentdata or { } -- for users (e.g. raw data)
+parametersets = parametersets or { } -- experimental for team
+
+table.setmetatableindex(moduledata,table.autokey)
+table.setmetatableindex(thirddata, table.autokey)
+
+--[[ldx--
+
+Please create a namespace within these tables before using them!
+We could cook up a readonly model for global tables but it makes more sense
+to invite users to use one of the predefined namespaces. One can redefine the
+protector. After all, it's just a lightweight suggestive system, not a
+watertight one.
+--ldx]]--
+
+local global = _G
+global.global = global
+
+local dummy = function() end
+
+--[[ldx--
+
+Another approach is to freeze tables by using a metatable; this will be
+implemented stepwise.
+--ldx]]--
+
+-- moduledata : no need for protection (only for developers)
+-- isolateddata : full protection
+-- userdata : protected
+-- thirddata : protected
+
+--[[ldx--
+
+We could have a metatable that automatically creates a top-level namespace.
+--ldx]]--
+
+local luanames = lua.name -- luatex itself
+
+lua.numbers = lua.numbers or { } local numbers = lua.numbers
+lua.messages = lua.messages or { } local messages = lua.messages
+
+storage.register("lua/numbers", numbers, "lua.numbers" )
+storage.register("lua/messages", messages, "lua.messages")
+
+local setfenv = setfenv or debug.setfenv -- < 5.2
+
+if setfenv then
+
+ local protected = {
+ -- global table
+ global = global,
+ -- user tables
+ -- moduledata = moduledata,
+ userdata = userdata,
+ thirddata = thirddata,
+ documentdata = documentdata,
+ -- reserved
+ protect = dummy,
+ unprotect = dummy,
+ -- luatex
+ tex = tex,
+ -- lua
+ string = string,
+ table = table,
+ lpeg = lpeg,
+ math = math,
+ io = io,
+ file = file,
+ bit32 = bit32,
+ --
+ context = context,
+ }
+
+ local protect_full = function(name)
+ local t = { }
+ for k, v in next, protected do
+ t[k] = v
+ end
+ return t
+ end
+
+ local protect_part = function(name) -- adds
+ local t = rawget(global,name)
+ if not t then
+ t = { }
+ for k, v in next, protected do
+ t[k] = v
+ end
+ rawset(global,name,t)
+ end
+ return t
+ end
+
+ protect = function(name)
+ if name == "isolateddata" then
+ setfenv(2,protect_full(name))
+ else
+ setfenv(2,protect_part(name or "shareddata"))
+ end
+ end
+
+ function lua.registername(name,message)
+ local lnn = lua.numbers[name]
+ if not lnn then
+ lnn = #messages + 1
+ messages[lnn] = message
+ numbers[name] = lnn
+ end
+ luanames[lnn] = message
+ context(lnn)
+ -- initialize once
+ if name ~= "isolateddata" then
+ protect_full(name or "shareddata")
+ end
+ end
+
+elseif libraries then -- assume >= 5.2
+
+ local shared
+
+ protect = function(name)
+ if not shared then
+ -- e.g. context is not yet known
+ local public = {
+ global = global,
+ -- moduledata = moduledata,
+ userdata = userdata,
+ thirddata = thirddata,
+ documentdata = documentdata,
+ protect = dummy,
+ unprotect = dummy,
+ context = context,
+ }
+ --
+ for k, v in next, libraries.builtin do public[k] = v end
+ for k, v in next, libraries.functions do public[k] = v end
+ for k, v in next, libraries.obsolete do public[k] = nil end
+ --
+ shared = { __index = public }
+ protect = function(name)
+ local t = global[name] or { }
+ setmetatable(t,shared) -- set each time
+ return t
+ end
+ end
+ return protect(name)
+ end
+
+ function lua.registername(name,message)
+ local lnn = lua.numbers[name]
+ if not lnn then
+ lnn = #messages + 1
+ messages[lnn] = message
+ numbers[name] = lnn
+ end
+ luanames[lnn] = message
+ context(lnn)
+ end
+
+else
+
+ protect = dummy
+
+ function lua.registername(name,message)
+ local lnn = lua.numbers[name]
+ if not lnn then
+ lnn = #messages + 1
+ messages[lnn] = message
+ numbers[name] = lnn
+ end
+ luanames[lnn] = message
+ context(lnn)
+ end
+
+end
+
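--
-- A small sketch of the namespace convention promoted above ("mymodule" and
-- "somelib" are only example names):
--
-- userdata.mymodule = userdata.mymodule or { } -- user code lives here
-- thirddata.somelib = thirddata.somelib or { } -- third party code lives here
--
-- function userdata.mymodule.hello()
--     context("hello")
-- end
--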
diff --git a/tex/context/base/luat-iop.lua b/tex/context/base/luat-iop.lua
index 52f14683e..bcbfac73a 100644
--- a/tex/context/base/luat-iop.lua
+++ b/tex/context/base/luat-iop.lua
@@ -1,195 +1,195 @@
-if not modules then modules = { } end modules ['luat-iop'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- this paranoid stuff in web2c ... we cannot hook checks into the
--- input functions because one can always change the callback but
--- we can feed back specific patterns and paths into the next
--- mechanism
-
--- os.execute os.exec os.spawn io.fopen
--- os.remove lfs.chdir lfs.mkdir
--- io.open zip.open epdf.open mlib.new
-
--- cache
-
-local topattern, find = string.topattern, string.find
-
-local report_limiter = logs.reporter("system","limiter")
-
--- the basic methods
-
-local function match(ruleset,name)
- local n = #ruleset
- if n > 0 then
- for i=1,n do
- local r = ruleset[i]
- if find(name,r[1]) then
- return r[2]
- end
- end
- return false
- else
- -- nothing defined (or any)
- return true
- end
-end
-
-local function protect(ruleset,proc)
- return function(name,...)
- if name == "" then
- -- report_limiter("no access permitted: ") -- can happen in mplib code
- return nil, "no name given"
- elseif match(ruleset,name) then
- return proc(name,...)
- else
- report_limiter("no access permitted for %a",name)
- return nil, name .. ": no access permitted"
- end
- end
-end
-
-function io.limiter(preset)
- preset = preset or { }
- local ruleset = { }
- for i=1,#preset do
- local p = preset[i]
- local what, spec = p[1] or "", p[2] or ""
- if spec == "" then
- -- skip 'm
- elseif what == "tree" then
- resolvers.dowithpath(spec, function(r)
- local spec = resolvers.resolve(r) or ""
- if spec ~= "" then
- ruleset[#ruleset+1] = { topattern(spec,true), true }
- end
- end)
- elseif what == "permit" then
- ruleset[#ruleset+1] = { topattern(spec,true), true }
- elseif what == "forbid" then
- ruleset[#ruleset+1] = { topattern(spec,true), false }
- end
- end
- if #ruleset > 0 then
- return {
- match = function(name) return match (ruleset,name) end,
- protect = function(proc) return protect(ruleset,proc) end,
- }
- else
- return {
- match = function(name) return true end,
- protect = proc,
- }
- end
-end
-
--- a few handlers
-
-io.i_limiters = { }
-io.o_limiters = { }
-
-function io.i_limiter(v)
- local i = io.i_limiters[v]
- if i then
- local i_limiter = io.limiter(i)
- function io.i_limiter()
- return i_limiter
- end
- return i_limiter
- end
-end
-
-function io.o_limiter(v)
- local o = io.o_limiters[v]
- if o then
- local o_limiter = io.limiter(o)
- function io.o_limiter()
- return o_limiter
- end
- return o_limiter
- end
-end
-
--- the real thing (somewhat fuzzy as we need to know what gets done)
-
-local i_opener, i_limited = io.open, false
-local o_opener, o_limited = io.open, false
-
-local function i_register(v)
- if not i_limited then
- local i_limiter = io.i_limiter(v)
- if i_limiter then
- local protect = i_limiter.protect
- i_opener = protect(i_opener)
- i_limited = true
- report_limiter("input mode set to %a",v)
- end
- end
-end
-
-local function o_register(v)
- if not o_limited then
- local o_limiter = io.o_limiter(v)
- if o_limiter then
- local protect = o_limiter.protect
- o_opener = protect(o_opener)
- o_limited = true
- report_limiter("output mode set to %a",v)
- end
- end
-end
-
-function io.open(name,method)
- if method and find(method,"[wa]") then
- return o_opener(name,method)
- else
- return i_opener(name,method)
- end
-end
-
-directives.register("system.inputmode", i_register)
-directives.register("system.outputmode", o_register)
-
-local i_limited = false
-local o_limited = false
-
-local function i_register(v)
- if not i_limited then
- local i_limiter = io.i_limiter(v)
- if i_limiter then
- local protect = i_limiter.protect
- lfs.chdir = protect(lfs.chdir) -- needs checking
- i_limited = true
- end
- end
-end
-
-local function o_register(v)
- if not o_limited then
- local o_limiter = io.o_limiter(v)
- if o_limiter then
- local protect = o_limiter.protect
- os.remove = protect(os.remove) -- rather okay
- lfs.chdir = protect(lfs.chdir) -- needs checking
- lfs.mkdir = protect(lfs.mkdir) -- needs checking
- o_limited = true
- end
- end
-end
-
-directives.register("system.inputmode", i_register)
-directives.register("system.outputmode", o_register)
-
--- the definitions
-
-local limiters = resolvers.variable("limiters")
-
-if limiters then
- io.i_limiters = limiters.input or { }
- io.o_limiters = limiters.output or { }
-end
-
+if not modules then modules = { } end modules ['luat-iop'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- this paranoid stuff in web2c ... we cannot hook checks into the
+-- input functions because one can always change the callback but
+-- we can feed back specific patterns and paths into the next
+-- mechanism
+
+-- os.execute os.exec os.spawn io.fopen
+-- os.remove lfs.chdir lfs.mkdir
+-- io.open zip.open epdf.open mlib.new
+
+-- cache
+
+local topattern, find = string.topattern, string.find
+
+local report_limiter = logs.reporter("system","limiter")
+
+-- the basic methods
+
+local function match(ruleset,name)
+ local n = #ruleset
+ if n > 0 then
+ for i=1,n do
+ local r = ruleset[i]
+ if find(name,r[1]) then
+ return r[2]
+ end
+ end
+ return false
+ else
+ -- nothing defined (or any)
+ return true
+ end
+end
+
+local function protect(ruleset,proc)
+ return function(name,...)
+ if name == "" then
+ -- report_limiter("no access permitted: ") -- can happen in mplib code
+ return nil, "no name given"
+ elseif match(ruleset,name) then
+ return proc(name,...)
+ else
+ report_limiter("no access permitted for %a",name)
+ return nil, name .. ": no access permitted"
+ end
+ end
+end
+
+function io.limiter(preset)
+ preset = preset or { }
+ local ruleset = { }
+ for i=1,#preset do
+ local p = preset[i]
+ local what, spec = p[1] or "", p[2] or ""
+ if spec == "" then
+ -- skip 'm
+ elseif what == "tree" then
+ resolvers.dowithpath(spec, function(r)
+ local spec = resolvers.resolve(r) or ""
+ if spec ~= "" then
+ ruleset[#ruleset+1] = { topattern(spec,true), true }
+ end
+ end)
+ elseif what == "permit" then
+ ruleset[#ruleset+1] = { topattern(spec,true), true }
+ elseif what == "forbid" then
+ ruleset[#ruleset+1] = { topattern(spec,true), false }
+ end
+ end
+ if #ruleset > 0 then
+ return {
+ match = function(name) return match (ruleset,name) end,
+ protect = function(proc) return protect(ruleset,proc) end,
+ }
+ else
+ return {
+ match = function(name) return true end,
+ protect = function(proc) return proc end, -- no rules, so nothing to guard
+ }
+ end
+end
+
+-- a few handlers
+
+io.i_limiters = { }
+io.o_limiters = { }
+
+function io.i_limiter(v)
+ local i = io.i_limiters[v]
+ if i then
+ local i_limiter = io.limiter(i)
+ function io.i_limiter()
+ return i_limiter
+ end
+ return i_limiter
+ end
+end
+
+function io.o_limiter(v)
+ local o = io.o_limiters[v]
+ if o then
+ local o_limiter = io.limiter(o)
+ function io.o_limiter()
+ return o_limiter
+ end
+ return o_limiter
+ end
+end
+
+-- the real thing (somewhat fuzzy as we need to know what gets done)
+
+local i_opener, i_limited = io.open, false
+local o_opener, o_limited = io.open, false
+
+local function i_register(v)
+ if not i_limited then
+ local i_limiter = io.i_limiter(v)
+ if i_limiter then
+ local protect = i_limiter.protect
+ i_opener = protect(i_opener)
+ i_limited = true
+ report_limiter("input mode set to %a",v)
+ end
+ end
+end
+
+local function o_register(v)
+ if not o_limited then
+ local o_limiter = io.o_limiter(v)
+ if o_limiter then
+ local protect = o_limiter.protect
+ o_opener = protect(o_opener)
+ o_limited = true
+ report_limiter("output mode set to %a",v)
+ end
+ end
+end
+
+function io.open(name,method)
+ if method and find(method,"[wa]") then
+ return o_opener(name,method)
+ else
+ return i_opener(name,method)
+ end
+end
+
+directives.register("system.inputmode", i_register)
+directives.register("system.outputmode", o_register)
+
+local i_limited = false
+local o_limited = false
+
+local function i_register(v)
+ if not i_limited then
+ local i_limiter = io.i_limiter(v)
+ if i_limiter then
+ local protect = i_limiter.protect
+ lfs.chdir = protect(lfs.chdir) -- needs checking
+ i_limited = true
+ end
+ end
+end
+
+local function o_register(v)
+ if not o_limited then
+ local o_limiter = io.o_limiter(v)
+ if o_limiter then
+ local protect = o_limiter.protect
+ os.remove = protect(os.remove) -- rather okay
+ lfs.chdir = protect(lfs.chdir) -- needs checking
+ lfs.mkdir = protect(lfs.mkdir) -- needs checking
+ o_limited = true
+ end
+ end
+end
+
+directives.register("system.inputmode", i_register)
+directives.register("system.outputmode", o_register)
+
+-- the definitions
+
+local limiters = resolvers.variable("limiters")
+
+if limiters then
+ io.i_limiters = limiters.input or { }
+ io.o_limiters = limiters.output or { }
+end
+
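--
-- A hedged sketch of a ruleset as consumed by io.limiter above ("TEXMF" and the
-- patterns are illustrative, not a shipped configuration):
--
-- local limiter = io.limiter {
--     { "tree", "TEXMF" },   -- permit everything below the resolved TEXMF trees
--     { "permit", "*.tex" }, -- permit files matching this pattern
--     { "forbid", "*.exe" }, -- forbid these explicitly
-- }
-- local guardedopen = limiter.protect(io.open)
--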
diff --git a/tex/context/base/luat-lua.lua b/tex/context/base/luat-lua.lua
index 972004e88..fd899871f 100644
--- a/tex/context/base/luat-lua.lua
+++ b/tex/context/base/luat-lua.lua
@@ -1,45 +1,45 @@
-if not modules then modules = { } end modules ['luat-lua'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-if lua then do
-
- local delayed = { }
-
- function lua.flushdelayed(...)
- local t = delayed
- delayed = { }
- for i=1, #t do
- t[i](...)
- end
- end
-
- function lua.delay(f)
- delayed[#delayed+1] = f
- end
-
- function lua.flush(...)
- context.directlua("lua.flushdelayed(%,t)",{...})
- end
-
-end end
-
--- See mk.pdf for an explanation of the following code:
---
--- function test(n)
--- lua.delay(function(...)
--- context("pi: %s %s %s",...)
--- context.par()
--- end)
--- lua.delay(function(...)
--- context("more pi: %s %s %s",...)
--- context.par()
--- end)
--- context("\\setbox0=\\hbox{%s}",math.pi*n)
--- local box = tex.box[0]
--- lua.flush(box.width,box.height,box.depth)
--- end
+if not modules then modules = { } end modules ['luat-lua'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if lua then do
+
+ local delayed = { }
+
+ function lua.flushdelayed(...)
+ local t = delayed
+ delayed = { }
+ for i=1, #t do
+ t[i](...)
+ end
+ end
+
+ function lua.delay(f)
+ delayed[#delayed+1] = f
+ end
+
+ function lua.flush(...)
+ context.directlua("lua.flushdelayed(%,t)",{...})
+ end
+
+end end
+
+-- See mk.pdf for an explanation of the following code:
+--
+-- function test(n)
+-- lua.delay(function(...)
+-- context("pi: %s %s %s",...)
+-- context.par()
+-- end)
+-- lua.delay(function(...)
+-- context("more pi: %s %s %s",...)
+-- context.par()
+-- end)
+-- context("\\setbox0=\\hbox{%s}",math.pi*n)
+-- local box = tex.box[0]
+-- lua.flush(box.width,box.height,box.depth)
+-- end
diff --git a/tex/context/base/luat-mac.lua b/tex/context/base/luat-mac.lua
index c8be06b63..19f4d108b 100644
--- a/tex/context/base/luat-mac.lua
+++ b/tex/context/base/luat-mac.lua
@@ -1,434 +1,434 @@
-if not modules then modules = { } end modules ['luat-mac'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- Sometimes we run into situations like:
---
--- \def\foo#1{\expandafter\def\csname#1\endcsname}
---
--- As this confuses the parser, the following should be used instead:
---
--- \def\foo#1{\expandafter\normaldef\csname#1\endcsname}
-
-local P, V, S, R, C, Cs, Cmt, Carg = lpeg.P, lpeg.V, lpeg.S, lpeg.R, lpeg.C, lpeg.Cs, lpeg.Cmt, lpeg.Carg
-local lpegmatch, patterns = lpeg.match, lpeg.patterns
-
-local insert, remove = table.insert, table.remove
-local rep, sub = string.rep, string.sub
-local setmetatable = setmetatable
-local filesuffix = file.suffix
-local convertlmxstring = lmx and lmx.convertstring
-
-local pushtarget, poptarget = logs.pushtarget, logs.poptarget
-
-local report_macros = logs.reporter("interface","macros")
-
-local stack, top, n, hashes = { }, nil, 0, { }
-
-local function set(s)
- if top then
- n = n + 1
- if n > 9 then
- report_macros("number of arguments > 9, ignoring %s",s)
- else
- local ns = #stack
- local h = hashes[ns]
- if not h then
- h = rep("#",2^(ns-1))
- hashes[ns] = h
- end
- m = h .. n
- top[s] = m
- return m
- end
- end
-end
-
-local function get(s)
- if not top then
- report_macros("keeping #%s, no stack",s)
- return "#" .. s -- can be lua
- end
- local m = top[s]
- if m then
- return m
- else
- report_macros("keeping #%s, not on stack",s)
- return "#" .. s -- quite likely an error
- end
-end
-
-local function push()
- top = { }
- n = 0
- local s = stack[#stack]
- if s then
- setmetatable(top,{ __index = s })
- end
- insert(stack,top)
-end
-
-local function pop()
- top = remove(stack)
-end
-
-local leftbrace = P("{") -- will be in patterns
-local rightbrace = P("}")
-local escape = P("\\")
-
-local space = patterns.space
-local spaces = space^1
-local newline = patterns.newline
-local nobrace = 1 - leftbrace - rightbrace
-
-local longleft = leftbrace -- P("(")
-local longright = rightbrace -- P(")")
-local nolong = 1 - longleft - longright
-
-local name = R("AZ","az")^1
-local csname = (R("AZ","az") + S("@?!_"))^1
-local longname = (longleft/"") * (nolong^1) * (longright/"")
-local variable = P("#") * Cs(name + longname)
-local escapedname = escape * csname
-local definer = escape * (P("def") + S("egx") * P("def")) -- tex
-local setter = escape * P("set") * (P("u")^-1 * S("egx")^-1) * P("value") -- context specific
---- + escape * P("install") * (1-P("handler"))^1 * P("handler") -- context specific
-local startcode = P("\\starttexdefinition") -- context specific
-local stopcode = P("\\stoptexdefinition") -- context specific
-local anything = patterns.anything
-local always = patterns.alwaysmatched
-
-local definer = escape * (P("u")^-1 * S("egx")^-1 * P("def")) -- tex
-
--- The comment nilling can become an option but it nicely compensates the Lua
--- parsing here with less parsing at the TeX end. We keep lines so the errors
--- get reported all right, but comments are never seen there anyway. We keep
--- comment that starts inline as it can be something special with a % (at some
--- point we can do that as well, esp if we never use \% or `% somewhere
--- unpredictable). We need to skip comments anyway. Hm, too tricky, this
--- stripping as we can have Lua code etc.
-
-local commenttoken = P("%")
-local crorlf = S("\n\r")
------ commentline = commenttoken * ((Carg(1) * C((1-crorlf)^0))/function(strip,s) return strip and "" or s end)
-local commentline = commenttoken * ((1-crorlf)^0)
-local leadingcomment = (commentline * crorlf^1)^1
-local furthercomment = (crorlf^1 * commentline)^1
-
-local pushlocal = always / push
-local poplocal = always / pop
-local declaration = variable / set
-local identifier = variable / get
-
-local argument = P { leftbrace * ((identifier + V(1) + (1 - leftbrace - rightbrace))^0) * rightbrace }
-
-local function matcherror(str,pos)
- report_macros("runaway definition at: %s",sub(str,pos-30,pos))
-end
-
-local csname_endcsname = P("\\csname") * (identifier + (1 - P("\\endcsname")))^1
-
-local grammar = { "converter",
- texcode = pushlocal
- * startcode
- * spaces
- * (csname * spaces)^1 -- new: multiple, new:csname instead of name
- -- * (declaration + furthercomment + (1 - newline - space))^0
- * ((declaration * (space^0/""))^1 + furthercomment + (1 - newline - space))^0 -- accepts #a #b #c
- * V("texbody")
- * stopcode
- * poplocal,
- texbody = ( V("definition")
- + identifier
- + V("braced")
- + (1 - stopcode)
- )^0,
- definition = pushlocal
- * definer
- * spaces^0
- * escapedname
--- * (declaration + furthercomment + commentline + (1-leftbrace))^0
- * (declaration + furthercomment + commentline + csname_endcsname + (1-leftbrace))^0
- * V("braced")
- * poplocal,
- setcode = pushlocal
- * setter
- * argument
- * (declaration + furthercomment + commentline + (1-leftbrace))^0
- * V("braced")
- * poplocal,
- braced = leftbrace
- * ( V("definition")
- + identifier
- + V("setcode")
- + V("texcode")
- + V("braced")
- + furthercomment
- + leadingcomment -- new per 2012-05-15 (message on mailing list)
- + nobrace
- )^0
- -- * rightbrace^-1, -- the -1 catches errors
- * (rightbrace + Cmt(always,matcherror)),
-
- pattern = leadingcomment
- + V("definition")
- + V("setcode")
- + V("texcode")
- + furthercomment
- + anything,
-
- converter = V("pattern")^1,
-}
-
-local parser = Cs(grammar)
-
-local checker = P("%") * (1 - newline - P("macros"))^0
- * P("macros") * space^0 * P("=") * space^0 * C(patterns.letter^1)
-
--- maybe namespace
-
-local macros = { } resolvers.macros = macros
-
-function macros.preprocessed(str,strip)
- return lpegmatch(parser,str,1,strip)
-end
-
-function macros.convertfile(oldname,newname) -- beware, no testing on oldname == newname
- local data = resolvers.loadtexfile(oldname)
- data = interfaces.preprocessed(data) or ""
- io.savedata(newname,data)
-end
-
-function macros.version(data)
- return lpegmatch(checker,data)
-end
-
--- function macros.processmkvi(str,filename)
--- if filename and filesuffix(filename) == "mkvi" or lpegmatch(checker,str) == "mkvi" then
--- local oldsize = #str
--- str = lpegmatch(parser,str,1,true) or str
--- pushtarget("log")
--- report_macros("processed mkvi file %a, delta %s",filename,oldsize-#str)
--- poptarget("log")
--- end
--- return str
--- end
---
--- utilities.sequencers.appendaction(resolvers.openers.helpers.textfileactions,"system","resolvers.macros.processmkvi")
-
--- the document variables hack is temporary
-
-local processors = { }
-
-function processors.mkvi(str,filename)
- local oldsize = #str
- str = lpegmatch(parser,str,1,true) or str
- pushtarget("log")
- report_macros("processed mkvi file %a, delta %s",filename,oldsize-#str)
- poptarget("log")
- return str
-end
-
-function processors.mkix(str,filename) -- we could intercept earlier so that caching works better
- if not document then -- because now we hash the string as well as the
- document = { }
- end
- if not document.variables then
- document.variables = { }
- end
- local oldsize = #str
- str = convertlmxstring(str,document.variables,false) or str
- pushtarget("log")
- report_macros("processed mkix file %a, delta %s",filename,oldsize-#str)
- poptarget("log")
- return str
-end
-
-function processors.mkxi(str,filename)
- if not document then
- document = { }
- end
- if not document.variables then
- document.variables = { }
- end
- local oldsize = #str
- str = convertlmxstring(str,document.variables,false) or str
- str = lpegmatch(parser,str,1,true) or str
- pushtarget("log")
- report_macros("processed mkxi file %a, delta %s",filename,oldsize-#str)
- poptarget("log")
- return str
-end
-
-function macros.processmk(str,filename)
- if filename then
- local suffix = filesuffix(filename)
- local processor = processors[suffix] or processors[lpegmatch(checker,str)]
- if processor then
- str = processor(str,filename)
- end
- end
- return str
-end
-
-function macros.processmkvi(str,filename)
- if filename and filesuffix(filename) == "mkvi" or lpegmatch(checker,str) == "mkvi" then
- local oldsize = #str
- str = lpegmatch(parser,str,1,true) or str
- pushtarget("log")
- report_macros("processed mkvi file %a, delta %s",filename,oldsize-#str)
- poptarget("log")
- end
- return str
-end
-
-local sequencers = utilities.sequencers
-
-if sequencers then
-
- sequencers.appendaction(resolvers.openers.helpers.textfileactions,"system","resolvers.macros.processmk")
- sequencers.appendaction(resolvers.openers.helpers.textfileactions,"system","resolvers.macros.processmkvi")
-
-end
-
--- bonus
-
-if resolvers.schemes then
-
- local function handler(protocol,name,cachename)
- local hashed = url.hashed(name)
- local path = hashed.path
- if path and path ~= "" then
- local str = resolvers.loadtexfile(path)
- if filesuffix(path) == "mkvi" or lpegmatch(checker,str) == "mkvi" then
- -- already done automatically
- io.savedata(cachename,str)
- else
- local result = lpegmatch(parser,str,1,true) or str
- pushtarget("log")
- report_macros("processed scheme %a, delta %s",filename,#str-#result)
- poptarget("log")
- io.savedata(cachename,result)
- end
- end
- return cachename
- end
-
- resolvers.schemes.install('mkvi',handler,1) -- this will cache !
-
-end
-
--- print(macros.preprocessed(
--- [[
--- \starttexdefinition unexpanded test #aa #bb #cc
--- test
--- \stoptexdefinition
--- ]]))
-
--- print(macros.preprocessed([[\checked \def \bla #bla{bla#{bla}}]]))
--- print(macros.preprocessed([[\def\bla#bla{#{bla}bla}]]))
--- print(macros.preprocessed([[\def\blä#{blá}{blà :#{blá}}]]))
--- print(macros.preprocessed([[\def\blä#bla{blà :#bla}]]))
--- print(macros.preprocessed([[\setvalue{xx}#bla{blà :#bla}]]))
--- print(macros.preprocessed([[\def\foo#bar{\setvalue{xx#bar}{#bar}}]]))
--- print(macros.preprocessed([[\def\bla#bla{bla:#{bla}}]]))
--- print(macros.preprocessed([[\def\bla_bla#bla{bla:#bla}]]))
--- print(macros.preprocessed([[\def\test#oeps{test:#oeps}]]))
--- print(macros.preprocessed([[\def\test_oeps#oeps{test:#oeps}]]))
--- print(macros.preprocessed([[\def\test#oeps{test:#{oeps}}]]))
--- print(macros.preprocessed([[\def\test#{oeps:1}{test:#{oeps:1}}]]))
--- print(macros.preprocessed([[\def\test#{oeps}{test:#oeps}]]))
--- print(macros.preprocessed([[\def\x[#a][#b][#c]{\setvalue{\y{#a}\z{#b}}{#c}}]]))
--- print(macros.preprocessed([[\def\test#{oeps}{test:#oeps \halign{##\cr #oeps\cr}]]))
--- print(macros.preprocessed([[\def\test#{oeps}{test:#oeps \halign{##\cr #oeps\cr}}]]))
--- print(macros.preprocessed([[% test
--- \def\test#oeps{#oeps} % {test}
--- % test
---
--- % test
--- two
--- %test]]))
--- print(macros.preprocessed([[
--- \def\scrn_button_make_normal#namespace#current#currentparameter#text%
--- {\ctxlua{structures.references.injectcurrentset(nil,nil)}%
--- % \hbox attr \referenceattribute \lastreferenceattribute {\localframed[#namespace:#current]{#text}}}
--- \hbox attr \referenceattribute \lastreferenceattribute {\directlocalframed[#namespace:#current]{#text}}}
--- ]]))
---
--- print(macros.preprocessed([[
--- \def\definefoo[#name]%
--- {\setvalue{start#name}{\dostartfoo{#name}}}
--- \def\dostartfoo#name%
--- {\def\noexpand\next#content\expandafter\noexpand\csname stop#name\endcsname{#name : #content}%
--- \next}
--- \def\dostartfoo#name%
--- {\normalexpanded{\def\noexpand\next#content\expandafter\noexpand\csname stop#name\endcsname}{#name : #content}%
--- \next}
--- ]]))
---
--- print(macros.preprocessed([[
--- \def\dosomething#content{%%% {{
--- % { }{{ %%
--- \bgroup\italic#content\egroup
--- }
--- ]]))
---
--- print(macros.preprocessed([[
--- \unexpanded\def\start#tag#stoptag%
--- {\initialize{#tag}%
--- \normalexpanded
--- {\def\yes[#one]#two\csname\e!stop#stoptag\endcsname{\command_yes[#one]{#two}}%
--- \def\nop #one\csname\e!stop#stoptag\endcsname{\command_nop {#one}}}%
--- \doifnextoptionalelse\yes\nop}
--- ]]))
---
--- print(macros.preprocessed([[
--- \normalexpanded{\long\def\expandafter\noexpand\csname\e!start\v!interactionmenu\endcsname[#tag]#content\expandafter\noexpand\csname\e!stop\v!interactionmenu\endcsname}%
--- {\def\currentinteractionmenu{#tag}%
--- \expandafter\settrue\csname\??menustate\interactionmenuparameter\c!category\endcsname
--- \setinteractionmenuparameter\c!menu{#content}}
--- ]]))
---
--- Just an experiment:
---
--- \catcode\numexpr"10FF25=\commentcatcode %% > 110000 is invalid
---
--- We could have a push/pop mechanism but binding to txtcatcodes
--- is okay too.
-
-local txtcatcodes = false -- also signal and yet unknown
-
-local commentsignal = utf.char(0x10FF25)
-
-local encodecomment = P("%%") / commentsignal --
------ encodepattern = Cs(((1-encodecomment)^0 * encodecomment)) -- strips but not nice for verbatim
-local encodepattern = Cs((encodecomment + 1)^0)
-local decodecomment = P(commentsignal) / "%%%%" -- why doubles here?
-local decodepattern = Cs((decodecomment + 1)^0)
-
-function resolvers.macros.encodecomment(str)
- if txtcatcodes and tex.catcodetable == txtcatcodes then
- return lpegmatch(encodepattern,str) or str
- else
- return str
- end
-end
-
-function resolvers.macros.decodecomment(str) -- normally not needed
- return txtcatcodes and lpegmatch(decodepattern,str) or str
-end
-
--- resolvers.macros.commentsignal = commentsignal
--- resolvers.macros.encodecommentpattern = encodepattern
--- resolvers.macros.decodecommentpattern = decodepattern
-
-function resolvers.macros.enablecomment(thecatcodes)
- if not txtcatcodes then
- txtcatcodes = thecatcodes or catcodes.numbers.txtcatcodes
- utilities.sequencers.appendaction(resolvers.openers.helpers.textlineactions,"system","resolvers.macros.encodecomment")
- end
-end
+if not modules then modules = { } end modules ['luat-mac'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- Sometimes we run into situations like:
+--
+-- \def\foo#1{\expandafter\def\csname#1\endcsname}
+--
+-- As this confuses the parser, the following should be used instead:
+--
+-- \def\foo#1{\expandafter\normaldef\csname#1\endcsname}
+
+local P, V, S, R, C, Cs, Cmt, Carg = lpeg.P, lpeg.V, lpeg.S, lpeg.R, lpeg.C, lpeg.Cs, lpeg.Cmt, lpeg.Carg
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
+
+local insert, remove = table.insert, table.remove
+local rep, sub = string.rep, string.sub
+local setmetatable = setmetatable
+local filesuffix = file.suffix
+local convertlmxstring = lmx and lmx.convertstring
+
+local pushtarget, poptarget = logs.pushtarget, logs.poptarget
+
+local report_macros = logs.reporter("interface","macros")
+
+local stack, top, n, hashes = { }, nil, 0, { }
+
+local function set(s)
+ if top then
+ n = n + 1
+ if n > 9 then
+ report_macros("number of arguments > 9, ignoring %s",s)
+ else
+ local ns = #stack
+ local h = hashes[ns]
+ if not h then
+ h = rep("#",2^(ns-1))
+ hashes[ns] = h
+ end
+ local m = h .. n
+ top[s] = m
+ return m
+ end
+ end
+end
+
+local function get(s)
+ if not top then
+ report_macros("keeping #%s, no stack",s)
+ return "#" .. s -- can be lua
+ end
+ local m = top[s]
+ if m then
+ return m
+ else
+ report_macros("keeping #%s, not on stack",s)
+ return "#" .. s -- quite likely an error
+ end
+end
+
+local function push()
+ top = { }
+ n = 0
+ local s = stack[#stack]
+ if s then
+ setmetatable(top,{ __index = s })
+ end
+ insert(stack,top)
+end
+
+local function pop()
+ top = remove(stack)
+end
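+
+-- A small worked example (a sketch, using the local helpers directly): the
+-- number of hash characters doubles with each nesting level, which is what
+-- TeX expects for nested definitions.
+--
+-- push() set("oeps") -- returns "#1"  (level 1: #oeps becomes #1)
+-- push() set("more") -- returns "##1" (level 2: doubled hashes)
+-- pop()  pop()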
+
+local leftbrace = P("{") -- will be in patterns
+local rightbrace = P("}")
+local escape = P("\\")
+
+local space = patterns.space
+local spaces = space^1
+local newline = patterns.newline
+local nobrace = 1 - leftbrace - rightbrace
+
+local longleft = leftbrace -- P("(")
+local longright = rightbrace -- P(")")
+local nolong = 1 - longleft - longright
+
+local name = R("AZ","az")^1
+local csname = (R("AZ","az") + S("@?!_"))^1
+local longname = (longleft/"") * (nolong^1) * (longright/"")
+local variable = P("#") * Cs(name + longname)
+local escapedname = escape * csname
+local definer = escape * (P("def") + S("egx") * P("def")) -- tex
+local setter = escape * P("set") * (P("u")^-1 * S("egx")^-1) * P("value") -- context specific
+--- + escape * P("install") * (1-P("handler"))^1 * P("handler") -- context specific
+local startcode = P("\\starttexdefinition") -- context specific
+local stopcode = P("\\stoptexdefinition") -- context specific
+local anything = patterns.anything
+local always = patterns.alwaysmatched
+
+local definer = escape * (P("u")^-1 * S("egx")^-1 * P("def")) -- tex
+
+-- The comment nilling could become an option, but it nicely compensates for the
+-- Lua parsing here with less parsing at the TeX end. We keep the lines so that
+-- errors get reported with the right line numbers; the comments are never seen
+-- there anyway. We keep comments that start inline as they can be something
+-- special involving a % (at some point we can strip those as well, especially if
+-- we never use \% or `% somewhere unpredictable). We need to skip comments
+-- anyway. Hm, this stripping is too tricky as we can have Lua code etc.
+
+local commenttoken = P("%")
+local crorlf = S("\n\r")
+----- commentline = commenttoken * ((Carg(1) * C((1-crorlf)^0))/function(strip,s) return strip and "" or s end)
+local commentline = commenttoken * ((1-crorlf)^0)
+local leadingcomment = (commentline * crorlf^1)^1
+local furthercomment = (crorlf^1 * commentline)^1
+
+local pushlocal = always / push
+local poplocal = always / pop
+local declaration = variable / set
+local identifier = variable / get
+
+local argument = P { leftbrace * ((identifier + V(1) + (1 - leftbrace - rightbrace))^0) * rightbrace }
+
+local function matcherror(str,pos)
+ report_macros("runaway definition at: %s",sub(str,pos-30,pos))
+end
+
+local csname_endcsname = P("\\csname") * (identifier + (1 - P("\\endcsname")))^1
+
+local grammar = { "converter",
+ texcode = pushlocal
+ * startcode
+ * spaces
+ * (csname * spaces)^1 -- new: multiple, new:csname instead of name
+ -- * (declaration + furthercomment + (1 - newline - space))^0
+ * ((declaration * (space^0/""))^1 + furthercomment + (1 - newline - space))^0 -- accepts #a #b #c
+ * V("texbody")
+ * stopcode
+ * poplocal,
+ texbody = ( V("definition")
+ + identifier
+ + V("braced")
+ + (1 - stopcode)
+ )^0,
+ definition = pushlocal
+ * definer
+ * spaces^0
+ * escapedname
+-- * (declaration + furthercomment + commentline + (1-leftbrace))^0
+ * (declaration + furthercomment + commentline + csname_endcsname + (1-leftbrace))^0
+ * V("braced")
+ * poplocal,
+ setcode = pushlocal
+ * setter
+ * argument
+ * (declaration + furthercomment + commentline + (1-leftbrace))^0
+ * V("braced")
+ * poplocal,
+ braced = leftbrace
+ * ( V("definition")
+ + identifier
+ + V("setcode")
+ + V("texcode")
+ + V("braced")
+ + furthercomment
+ + leadingcomment -- new per 2012-05-15 (message on mailing list)
+ + nobrace
+ )^0
+ -- * rightbrace^-1, -- the -1 catches errors
+ * (rightbrace + Cmt(always,matcherror)),
+
+ pattern = leadingcomment
+ + V("definition")
+ + V("setcode")
+ + V("texcode")
+ + furthercomment
+ + anything,
+
+ converter = V("pattern")^1,
+}
+
+local parser = Cs(grammar)
+
+local checker = P("%") * (1 - newline - P("macros"))^0
+ * P("macros") * space^0 * P("=") * space^0 * C(patterns.letter^1)
+
+-- maybe namespace
+
+local macros = { } resolvers.macros = macros
+
+function macros.preprocessed(str,strip)
+ return lpegmatch(parser,str,1,strip)
+end
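+
+-- A minimal sketch of what the conversion does: named parameters in a mkvi
+-- definition become the usual numbered ones, so
+--
+-- macros.preprocessed([[\def\test#oeps{test:#oeps}]])
+--
+-- is expected to give \def\test#1{test:#1} (see also the commented test cases
+-- further down in this file).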
+
+function macros.convertfile(oldname,newname) -- beware, no testing on oldname == newname
+ local data = resolvers.loadtexfile(oldname)
+ data = macros.preprocessed(data) or ""
+ io.savedata(newname,data)
+end
+
+function macros.version(data)
+ return lpegmatch(checker,data)
+end
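+
+-- The checker looks for a "macros=..." tag in a leading comment line, so a file
+-- can announce its own flavour; a sketch:
+--
+-- macros.version("% macros=mkvi") -- returns "mkvi"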
+
+-- function macros.processmkvi(str,filename)
+-- if filename and filesuffix(filename) == "mkvi" or lpegmatch(checker,str) == "mkvi" then
+-- local oldsize = #str
+-- str = lpegmatch(parser,str,1,true) or str
+-- pushtarget("log")
+-- report_macros("processed mkvi file %a, delta %s",filename,oldsize-#str)
+-- poptarget("log")
+-- end
+-- return str
+-- end
+--
+-- utilities.sequencers.appendaction(resolvers.openers.helpers.textfileactions,"system","resolvers.macros.processmkvi")
+
+-- the document variables hack is temporary
+
+local processors = { }
+
+function processors.mkvi(str,filename)
+ local oldsize = #str
+ str = lpegmatch(parser,str,1,true) or str
+ pushtarget("log")
+ report_macros("processed mkvi file %a, delta %s",filename,oldsize-#str)
+ poptarget("log")
+ return str
+end
+
+function processors.mkix(str,filename) -- we could intercept earlier so that caching works better
+ if not document then -- because now we hash the string as well as the
+ document = { }
+ end
+ if not document.variables then
+ document.variables = { }
+ end
+ local oldsize = #str
+ str = convertlmxstring(str,document.variables,false) or str
+ pushtarget("log")
+ report_macros("processed mkix file %a, delta %s",filename,oldsize-#str)
+ poptarget("log")
+ return str
+end
+
+function processors.mkxi(str,filename)
+ if not document then
+ document = { }
+ end
+ if not document.variables then
+ document.variables = { }
+ end
+ local oldsize = #str
+ str = convertlmxstring(str,document.variables,false) or str
+ str = lpegmatch(parser,str,1,true) or str
+ pushtarget("log")
+ report_macros("processed mkxi file %a, delta %s",filename,oldsize-#str)
+ poptarget("log")
+ return str
+end
+
+function macros.processmk(str,filename)
+ if filename then
+ local suffix = filesuffix(filename)
+ local processor = processors[suffix] or processors[lpegmatch(checker,str)]
+ if processor then
+ str = processor(str,filename)
+ end
+ end
+ return str
+end
+
+function macros.processmkvi(str,filename)
+ if filename and filesuffix(filename) == "mkvi" or lpegmatch(checker,str) == "mkvi" then
+ local oldsize = #str
+ str = lpegmatch(parser,str,1,true) or str
+ pushtarget("log")
+ report_macros("processed mkvi file %a, delta %s",filename,oldsize-#str)
+ poptarget("log")
+ end
+ return str
+end
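+
+-- So a caller (normally the file reader via the sequencer below) can do, as a
+-- sketch with made-up filenames:
+--
+-- str = macros.processmk  (str,"somefile.mkix") -- expands document variables
+-- str = macros.processmkvi(str,"somefile.mkvi") -- converts named parameters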
+
+local sequencers = utilities.sequencers
+
+if sequencers then
+
+ sequencers.appendaction(resolvers.openers.helpers.textfileactions,"system","resolvers.macros.processmk")
+ sequencers.appendaction(resolvers.openers.helpers.textfileactions,"system","resolvers.macros.processmkvi")
+
+end
+
+-- bonus
+
+if resolvers.schemes then
+
+ local function handler(protocol,name,cachename)
+ local hashed = url.hashed(name)
+ local path = hashed.path
+ if path and path ~= "" then
+ local str = resolvers.loadtexfile(path)
+ if filesuffix(path) == "mkvi" or lpegmatch(checker,str) == "mkvi" then
+ -- already done automatically
+ io.savedata(cachename,str)
+ else
+ local result = lpegmatch(parser,str,1,true) or str
+ pushtarget("log")
+ report_macros("processed scheme %a, delta %s",path,#str-#result)
+ poptarget("log")
+ io.savedata(cachename,result)
+ end
+ end
+ return cachename
+ end
+
+ resolvers.schemes.install('mkvi',handler,1) -- this will cache !
+
+end
+
+-- print(macros.preprocessed(
+-- [[
+-- \starttexdefinition unexpanded test #aa #bb #cc
+-- test
+-- \stoptexdefinition
+-- ]]))
+
+-- print(macros.preprocessed([[\checked \def \bla #bla{bla#{bla}}]]))
+-- print(macros.preprocessed([[\def\bla#bla{#{bla}bla}]]))
+-- print(macros.preprocessed([[\def\blä#{blá}{blà :#{blá}}]]))
+-- print(macros.preprocessed([[\def\blä#bla{blà :#bla}]]))
+-- print(macros.preprocessed([[\setvalue{xx}#bla{blà :#bla}]]))
+-- print(macros.preprocessed([[\def\foo#bar{\setvalue{xx#bar}{#bar}}]]))
+-- print(macros.preprocessed([[\def\bla#bla{bla:#{bla}}]]))
+-- print(macros.preprocessed([[\def\bla_bla#bla{bla:#bla}]]))
+-- print(macros.preprocessed([[\def\test#oeps{test:#oeps}]]))
+-- print(macros.preprocessed([[\def\test_oeps#oeps{test:#oeps}]]))
+-- print(macros.preprocessed([[\def\test#oeps{test:#{oeps}}]]))
+-- print(macros.preprocessed([[\def\test#{oeps:1}{test:#{oeps:1}}]]))
+-- print(macros.preprocessed([[\def\test#{oeps}{test:#oeps}]]))
+-- print(macros.preprocessed([[\def\x[#a][#b][#c]{\setvalue{\y{#a}\z{#b}}{#c}}]]))
+-- print(macros.preprocessed([[\def\test#{oeps}{test:#oeps \halign{##\cr #oeps\cr}]]))
+-- print(macros.preprocessed([[\def\test#{oeps}{test:#oeps \halign{##\cr #oeps\cr}}]]))
+-- print(macros.preprocessed([[% test
+-- \def\test#oeps{#oeps} % {test}
+-- % test
+--
+-- % test
+-- two
+-- %test]]))
+-- print(macros.preprocessed([[
+-- \def\scrn_button_make_normal#namespace#current#currentparameter#text%
+-- {\ctxlua{structures.references.injectcurrentset(nil,nil)}%
+-- % \hbox attr \referenceattribute \lastreferenceattribute {\localframed[#namespace:#current]{#text}}}
+-- \hbox attr \referenceattribute \lastreferenceattribute {\directlocalframed[#namespace:#current]{#text}}}
+-- ]]))
+--
+-- print(macros.preprocessed([[
+-- \def\definefoo[#name]%
+-- {\setvalue{start#name}{\dostartfoo{#name}}}
+-- \def\dostartfoo#name%
+-- {\def\noexpand\next#content\expandafter\noexpand\csname stop#name\endcsname{#name : #content}%
+-- \next}
+-- \def\dostartfoo#name%
+-- {\normalexpanded{\def\noexpand\next#content\expandafter\noexpand\csname stop#name\endcsname}{#name : #content}%
+-- \next}
+-- ]]))
+--
+-- print(macros.preprocessed([[
+-- \def\dosomething#content{%%% {{
+-- % { }{{ %%
+-- \bgroup\italic#content\egroup
+-- }
+-- ]]))
+--
+-- print(macros.preprocessed([[
+-- \unexpanded\def\start#tag#stoptag%
+-- {\initialize{#tag}%
+-- \normalexpanded
+-- {\def\yes[#one]#two\csname\e!stop#stoptag\endcsname{\command_yes[#one]{#two}}%
+-- \def\nop #one\csname\e!stop#stoptag\endcsname{\command_nop {#one}}}%
+-- \doifnextoptionalelse\yes\nop}
+-- ]]))
+--
+-- print(macros.preprocessed([[
+-- \normalexpanded{\long\def\expandafter\noexpand\csname\e!start\v!interactionmenu\endcsname[#tag]#content\expandafter\noexpand\csname\e!stop\v!interactionmenu\endcsname}%
+-- {\def\currentinteractionmenu{#tag}%
+-- \expandafter\settrue\csname\??menustate\interactionmenuparameter\c!category\endcsname
+-- \setinteractionmenuparameter\c!menu{#content}}
+-- ]]))
+--
+-- Just an experiment:
+--
+-- \catcode\numexpr"10FF25=\commentcatcode %% > 110000 is invalid
+--
+-- We could have a push/pop mechanism but binding to txtcatcodes
+-- is okay too.
+
+local txtcatcodes = false -- also signal and yet unknown
+
+local commentsignal = utf.char(0x10FF25)
+
+local encodecomment = P("%%") / commentsignal --
+----- encodepattern = Cs(((1-encodecomment)^0 * encodecomment)) -- strips but not nice for verbatim
+local encodepattern = Cs((encodecomment + 1)^0)
+local decodecomment = P(commentsignal) / "%%%%" -- why doubles here?
+local decodepattern = Cs((decodecomment + 1)^0)
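+
+-- The two patterns are meant to be each other's inverse; a sketch using the
+-- local patterns directly:
+--
+-- local once = lpegmatch(encodepattern,"text %% comment") -- %% becomes the signal character
+-- local back = lpegmatch(decodepattern,once)              -- and is mapped back to %%
+-- -- back == "text %% comment"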
+
+function resolvers.macros.encodecomment(str)
+ if txtcatcodes and tex.catcodetable == txtcatcodes then
+ return lpegmatch(encodepattern,str) or str
+ else
+ return str
+ end
+end
+
+function resolvers.macros.decodecomment(str) -- normally not needed
+ return txtcatcodes and lpegmatch(decodepattern,str) or str
+end
+
+-- resolvers.macros.commentsignal = commentsignal
+-- resolvers.macros.encodecommentpattern = encodepattern
+-- resolvers.macros.decodecommentpattern = decodepattern
+
+function resolvers.macros.enablecomment(thecatcodes)
+ if not txtcatcodes then
+ txtcatcodes = thecatcodes or catcodes.numbers.txtcatcodes
+ utilities.sequencers.appendaction(resolvers.openers.helpers.textlineactions,"system","resolvers.macros.encodecomment")
+ end
+end
diff --git a/tex/context/base/luat-run.lua b/tex/context/base/luat-run.lua
index eaede1030..6291fef1b 100644
--- a/tex/context/base/luat-run.lua
+++ b/tex/context/base/luat-run.lua
@@ -1,158 +1,158 @@
-if not modules then modules = { } end modules ['luat-run'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format = string.format
-local insert = table.insert
-
--- trace_job_status is also controlled by statistics.enable that is set via the directive system.nostatistics
-
-local trace_lua_dump = false trackers.register("system.dump", function(v) trace_lua_dump = v end)
-local trace_temp_files = false trackers.register("system.tempfiles", function(v) trace_temp_files = v end)
-local trace_job_status = true trackers.register("system.jobstatus", function(v) trace_job_status = v end)
-local trace_tex_status = false trackers.register("system.texstatus", function(v) trace_tex_status = v end)
-
-local report_lua = logs.reporter("system","lua")
-local report_tex = logs.reporter("system","status")
-local report_tempfiles = logs.reporter("resolvers","tempfiles")
-
-luatex = luatex or { }
-local luatex = luatex
-
-local startactions = { }
-local stopactions = { }
-
-function luatex.registerstartactions(...) insert(startactions, ...) end
-function luatex.registerstopactions (...) insert(stopactions, ...) end
-
-local function start_run()
- if logs.start_run then
- logs.start_run()
- end
- for i=1,#startactions do
- startactions[i]()
- end
-end
-
-local function stop_run()
- for i=1,#stopactions do
- stopactions[i]()
- end
- if trace_job_status then
- statistics.show()
- end
- if trace_tex_status then
- for k, v in table.sortedhash(status.list()) do
- report_tex("%S=%S",k,v)
- end
- end
- if logs.stop_run then
- logs.stop_run()
- end
-end
-
-local function start_shipout_page()
- logs.start_page_number()
-end
-
-local function stop_shipout_page()
- logs.stop_page_number()
-end
-
-local function report_output_pages()
-end
-
-local function report_output_log()
-end
-
--- local function show_open()
--- end
-
--- local function show_close()
--- end
-
-local function pre_dump_actions()
- lua.finalize(trace_lua_dump and report_lua or nil)
- -- statistics.savefmtstatus("\jobname","\contextversion","context.tex")
-end
-
--- this can be done later
-
-callbacks.register('start_run', start_run, "actions performed at the beginning of a run")
-callbacks.register('stop_run', stop_run, "actions performed at the end of a run")
-
----------.register('show_open', show_open, "actions performed when opening a file")
----------.register('show_close', show_close, "actions performed when closing a file")
-
-callbacks.register('report_output_pages', report_output_pages, "actions performed when reporting pages")
-callbacks.register('report_output_log', report_output_log, "actions performed when reporting log file")
-
-callbacks.register('start_page_number', start_shipout_page, "actions performed at the beginning of a shipout")
-callbacks.register('stop_page_number', stop_shipout_page, "actions performed at the end of a shipout")
-
-callbacks.register('process_input_buffer', false, "actions performed when reading data")
-callbacks.register('process_output_buffer', false, "actions performed when writing data")
-
-callbacks.register("pre_dump", pre_dump_actions, "lua related finalizers called before we dump the format") -- comes after \everydump
-
--- an example:
-
-local tempfiles = { }
-
-function luatex.registertempfile(name,extrasuffix)
- if extrasuffix then
- name = name .. ".mkiv-tmp" -- maybe just .tmp
- end
- if trace_temp_files and not tempfiles[name] then
- report_tempfiles("registering temporary file %a",name)
- end
- tempfiles[name] = true
- return name
-end
-
-function luatex.cleanuptempfiles()
- for name, _ in next, tempfiles do
- if trace_temp_files then
- report_tempfiles("removing temporary file %a",name)
- end
- os.remove(name)
- end
- tempfiles = { }
-end
-
-luatex.registerstopactions(luatex.cleanuptempfiles)
-
--- for the moment here
-
-local synctex = false
-
-local report_system = logs.reporter("system")
-
-directives.register("system.synctex", function(v)
- synctex = v
- if v then
- report_system("synctex functionality is enabled!")
- else
- report_system("synctex functionality is disabled!")
- end
- synctex = tonumber(synctex) or (toboolean(synctex,true) and 1) or (synctex == "zipped" and 1) or (synctex == "unzipped" and -1) or false
- -- currently this is bugged:
- tex.synctex = synctex
- -- so for the moment we need:
- context.normalsynctex()
- if synctex then
- context.plusone()
- else
- context.zerocount()
- end
-end)
-
-statistics.register("synctex tracing",function()
- if synctex or tex.synctex ~= 0 then
- return "synctex has been enabled (extra log file generated)"
- end
-end)
+if not modules then modules = { } end modules ['luat-run'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format = string.format
+local insert = table.insert
+
+-- trace_job_status is also controlled by statistics.enable that is set via the directive system.nostatistics
+
+local trace_lua_dump = false trackers.register("system.dump", function(v) trace_lua_dump = v end)
+local trace_temp_files = false trackers.register("system.tempfiles", function(v) trace_temp_files = v end)
+local trace_job_status = true trackers.register("system.jobstatus", function(v) trace_job_status = v end)
+local trace_tex_status = false trackers.register("system.texstatus", function(v) trace_tex_status = v end)
+
+local report_lua = logs.reporter("system","lua")
+local report_tex = logs.reporter("system","status")
+local report_tempfiles = logs.reporter("resolvers","tempfiles")
+
+luatex = luatex or { }
+local luatex = luatex
+
+local startactions = { }
+local stopactions = { }
+
+function luatex.registerstartactions(...) insert(startactions, ...) end
+function luatex.registerstopactions (...) insert(stopactions, ...) end
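+
+-- Usage sketch: register a function that runs at the end of the job (this file
+-- itself registers luatex.cleanuptempfiles further down):
+--
+-- luatex.registerstopactions(function()
+--     logs.reporter("system")("all done")
+-- end)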
+
+local function start_run()
+ if logs.start_run then
+ logs.start_run()
+ end
+ for i=1,#startactions do
+ startactions[i]()
+ end
+end
+
+local function stop_run()
+ for i=1,#stopactions do
+ stopactions[i]()
+ end
+ if trace_job_status then
+ statistics.show()
+ end
+ if trace_tex_status then
+ for k, v in table.sortedhash(status.list()) do
+ report_tex("%S=%S",k,v)
+ end
+ end
+ if logs.stop_run then
+ logs.stop_run()
+ end
+end
+
+local function start_shipout_page()
+ logs.start_page_number()
+end
+
+local function stop_shipout_page()
+ logs.stop_page_number()
+end
+
+local function report_output_pages()
+end
+
+local function report_output_log()
+end
+
+-- local function show_open()
+-- end
+
+-- local function show_close()
+-- end
+
+local function pre_dump_actions()
+ lua.finalize(trace_lua_dump and report_lua or nil)
+ -- statistics.savefmtstatus("\jobname","\contextversion","context.tex")
+end
+
+-- this can be done later
+
+callbacks.register('start_run', start_run, "actions performed at the beginning of a run")
+callbacks.register('stop_run', stop_run, "actions performed at the end of a run")
+
+---------.register('show_open', show_open, "actions performed when opening a file")
+---------.register('show_close', show_close, "actions performed when closing a file")
+
+callbacks.register('report_output_pages', report_output_pages, "actions performed when reporting pages")
+callbacks.register('report_output_log', report_output_log, "actions performed when reporting log file")
+
+callbacks.register('start_page_number', start_shipout_page, "actions performed at the beginning of a shipout")
+callbacks.register('stop_page_number', stop_shipout_page, "actions performed at the end of a shipout")
+
+callbacks.register('process_input_buffer', false, "actions performed when reading data")
+callbacks.register('process_output_buffer', false, "actions performed when writing data")
+
+callbacks.register("pre_dump", pre_dump_actions, "lua related finalizers called before we dump the format") -- comes after \everydump
+
+-- an example:
+
+local tempfiles = { }
+
+function luatex.registertempfile(name,extrasuffix)
+ if extrasuffix then
+ name = name .. ".mkiv-tmp" -- maybe just .tmp
+ end
+ if trace_temp_files and not tempfiles[name] then
+ report_tempfiles("registering temporary file %a",name)
+ end
+ tempfiles[name] = true
+ return name
+end
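+
+-- Usage sketch (the names are made up); anything registered here is removed by
+-- luatex.cleanuptempfiles at the end of the run:
+--
+-- local name = luatex.registertempfile("whatever.tmp")
+-- local name = luatex.registertempfile("whatever.dat",true) -- becomes whatever.dat.mkiv-tmp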
+
+function luatex.cleanuptempfiles()
+ for name, _ in next, tempfiles do
+ if trace_temp_files then
+ report_tempfiles("removing temporary file %a",name)
+ end
+ os.remove(name)
+ end
+ tempfiles = { }
+end
+
+luatex.registerstopactions(luatex.cleanuptempfiles)
+
+-- for the moment here
+
+local synctex = false
+
+local report_system = logs.reporter("system")
+
+directives.register("system.synctex", function(v)
+ synctex = v
+ if v then
+ report_system("synctex functionality is enabled!")
+ else
+ report_system("synctex functionality is disabled!")
+ end
+ synctex = tonumber(synctex) or (toboolean(synctex,true) and 1) or (synctex == "zipped" and 1) or (synctex == "unzipped" and -1) or false
+ -- currently this is bugged:
+ tex.synctex = synctex
+ -- so for the moment we need:
+ context.normalsynctex()
+ if synctex then
+ context.plusone()
+ else
+ context.zerocount()
+ end
+end)
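+
+-- The directive is typically enabled from the TeX end, for instance with
+--
+-- \enabledirectives[system.synctex]
+--
+-- or via the runner's --directives switch.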
+
+statistics.register("synctex tracing",function()
+ if synctex or tex.synctex ~= 0 then
+ return "synctex has been enabled (extra log file generated)"
+ end
+end)
diff --git a/tex/context/base/luat-sta.lua b/tex/context/base/luat-sta.lua
index 8b58774d3..1e83083cd 100644
--- a/tex/context/base/luat-sta.lua
+++ b/tex/context/base/luat-sta.lua
@@ -1,211 +1,211 @@
-if not modules then modules = { } end modules ['luat-sta'] = {
- version = 1.001,
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- this code is used in the updater
-
-local gmatch, match = string.gmatch, string.match
-local type = type
-
-states = states or { }
-local states = states
-
-states.data = states.data or { }
-local data = states.data
-
-states.hash = states.hash or { }
-local hash = states.hash
-
-states.tag = states.tag or ""
-states.filename = states.filename or ""
-
-function states.save(filename,tag)
- tag = tag or states.tag
- filename = file.addsuffix(filename or states.filename,'lus')
- io.savedata(filename,
- "-- generator : luat-sta.lua\n" ..
- "-- state tag : " .. tag .. "\n\n" ..
- table.serialize(data[tag or states.tag] or {},true)
- )
-end
-
-function states.load(filename,tag)
- states.filename = filename
- states.tag = tag or "whatever"
- states.filename = file.addsuffix(states.filename,'lus')
- data[states.tag], hash[states.tag] = (io.exists(filename) and dofile(filename)) or { }, { }
-end
-
-local function set_by_tag(tag,key,value,default,persistent)
- local d, h = data[tag], hash[tag]
- if d then
- if type(d) == "table" then
- local dkey, hkey = key, key
- local pre, post = match(key,"(.+)%.([^%.]+)$")
- if pre and post then
- for k in gmatch(pre,"[^%.]+") do
- local dk = d[k]
- if not dk then
- dk = { }
- d[k] = dk
- elseif type(dk) == "string" then
- -- invalid table, unable to upgrade structure
- -- hope for the best or delete the state file
- break
- end
- d = dk
- end
- dkey, hkey = post, key
- end
- if value == nil then
- value = default
- elseif value == false then
- -- special case
- elseif persistent then
- value = value or d[dkey] or default
- else
- value = value or default
- end
- d[dkey], h[hkey] = value, value
- elseif type(d) == "string" then
- -- weird
- data[tag], hash[tag] = value, value
- end
- end
-end
-
-local function get_by_tag(tag,key,default)
- local h = hash[tag]
- if h and h[key] then
- return h[key]
- else
- local d = data[tag]
- if d then
- for k in gmatch(key,"[^%.]+") do
- local dk = d[k]
- if dk ~= nil then
- d = dk
- else
- return default
- end
- end
- if d == false then
- return false
- else
- return d or default
- end
- end
- end
-end
-
-states.set_by_tag = set_by_tag
-states.get_by_tag = get_by_tag
-
-function states.set(key,value,default,persistent)
- set_by_tag(states.tag,key,value,default,persistent)
-end
-
-function states.get(key,default)
- return get_by_tag(states.tag,key,default)
-end
-
---~ data.update = {
---~ ["version"] = {
---~ ["major"] = 0,
---~ ["minor"] = 1,
---~ },
---~ ["rsync"] = {
---~ ["server"] = "contextgarden.net",
---~ ["module"] = "minimals",
---~ ["repository"] = "current",
---~ ["flags"] = "-rpztlv --stats",
---~ },
---~ ["tasks"] = {
---~ ["update"] = true,
---~ ["make"] = true,
---~ ["delete"] = false,
---~ },
---~ ["platform"] = {
---~ ["host"] = true,
---~ ["other"] = {
---~ ["mswin"] = false,
---~ ["linux"] = false,
---~ ["linux-64"] = false,
---~ ["osx-intel"] = false,
---~ ["osx-ppc"] = false,
---~ ["sun"] = false,
---~ },
---~ },
---~ ["context"] = {
---~ ["available"] = {"current", "beta", "alpha", "experimental"},
---~ ["selected"] = "current",
---~ },
---~ ["formats"] = {
---~ ["cont-en"] = true,
---~ ["cont-nl"] = true,
---~ ["cont-de"] = false,
---~ ["cont-cz"] = false,
---~ ["cont-fr"] = false,
---~ ["cont-ro"] = false,
---~ },
---~ ["engine"] = {
---~ ["pdftex"] = {
---~ ["install"] = true,
---~ ["formats"] = {
---~ ["pdftex"] = true,
---~ },
---~ },
---~ ["luatex"] = {
---~ ["install"] = true,
---~ ["formats"] = {
---~ },
---~ },
---~ ["xetex"] = {
---~ ["install"] = true,
---~ ["formats"] = {
---~ ["xetex"] = false,
---~ },
---~ },
---~ ["metapost"] = {
---~ ["install"] = true,
---~ ["formats"] = {
---~ ["mpost"] = true,
---~ ["metafun"] = true,
---~ },
---~ },
---~ },
---~ ["fonts"] = {
---~ },
---~ ["doc"] = {
---~ },
---~ ["modules"] = {
---~ ["f-urwgaramond"] = false,
---~ ["f-urwgothic"] = false,
---~ ["t-bnf"] = false,
---~ ["t-chromato"] = false,
---~ ["t-cmscbf"] = false,
---~ ["t-cmttbf"] = false,
---~ ["t-construction-plan"] = false,
---~ ["t-degrade"] = false,
---~ ["t-french"] = false,
---~ ["t-lettrine"] = false,
---~ ["t-lilypond"] = false,
---~ ["t-mathsets"] = false,
---~ ["t-tikz"] = false,
---~ ["t-typearea"] = false,
---~ ["t-vim"] = false,
---~ },
---~ }
-
---~ states.save("teststate", "update")
---~ states.load("teststate", "update")
-
---~ print(states.get_by_tag("update","rsync.server","unknown"))
---~ states.set_by_tag("update","rsync.server","oeps")
---~ print(states.get_by_tag("update","rsync.server","unknown"))
---~ states.save("teststate", "update")
---~ states.load("teststate", "update")
---~ print(states.get_by_tag("update","rsync.server","unknown"))
+if not modules then modules = { } end modules ['luat-sta'] = {
+ version = 1.001,
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- this code is used in the updater
+
+local gmatch, match = string.gmatch, string.match
+local type = type
+
+states = states or { }
+local states = states
+
+states.data = states.data or { }
+local data = states.data
+
+states.hash = states.hash or { }
+local hash = states.hash
+
+states.tag = states.tag or ""
+states.filename = states.filename or ""
+
+function states.save(filename,tag)
+ tag = tag or states.tag
+ filename = file.addsuffix(filename or states.filename,'lus')
+ io.savedata(filename,
+ "-- generator : luat-sta.lua\n" ..
+ "-- state tag : " .. tag .. "\n\n" ..
+ table.serialize(data[tag or states.tag] or {},true)
+ )
+end
+
+function states.load(filename,tag)
+ states.filename = filename
+ states.tag = tag or "whatever"
+ states.filename = file.addsuffix(states.filename,'lus')
+ data[states.tag], hash[states.tag] = (io.exists(filename) and dofile(filename)) or { }, { }
+end
+
+local function set_by_tag(tag,key,value,default,persistent)
+ local d, h = data[tag], hash[tag]
+ if d then
+ if type(d) == "table" then
+ local dkey, hkey = key, key
+ local pre, post = match(key,"(.+)%.([^%.]+)$")
+ if pre and post then
+ for k in gmatch(pre,"[^%.]+") do
+ local dk = d[k]
+ if not dk then
+ dk = { }
+ d[k] = dk
+ elseif type(dk) == "string" then
+ -- invalid table, unable to upgrade structure
+ -- hope for the best or delete the state file
+ break
+ end
+ d = dk
+ end
+ dkey, hkey = post, key
+ end
+ if value == nil then
+ value = default
+ elseif value == false then
+ -- special case
+ elseif persistent then
+ value = value or d[dkey] or default
+ else
+ value = value or default
+ end
+ d[dkey], h[hkey] = value, value
+ elseif type(d) == "string" then
+ -- weird
+ data[tag], hash[tag] = value, value
+ end
+ end
+end
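+
+-- A sketch of the dotted key behaviour (assuming the "update" state has been
+-- loaded): the data side gets nested tables, the hash side keeps the full key.
+--
+-- set_by_tag("update","rsync.server","contextgarden.net")
+-- -- data.update.rsync.server    -> "contextgarden.net"
+-- -- hash.update["rsync.server"] -> "contextgarden.net"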
+
+local function get_by_tag(tag,key,default)
+ local h = hash[tag]
+ if h and h[key] then
+ return h[key]
+ else
+ local d = data[tag]
+ if d then
+ for k in gmatch(key,"[^%.]+") do
+ local dk = d[k]
+ if dk ~= nil then
+ d = dk
+ else
+ return default
+ end
+ end
+ if d == false then
+ return false
+ else
+ return d or default
+ end
+ end
+ end
+end
+
+states.set_by_tag = set_by_tag
+states.get_by_tag = get_by_tag
+
+function states.set(key,value,default,persistent)
+ set_by_tag(states.tag,key,value,default,persistent)
+end
+
+function states.get(key,default)
+ return get_by_tag(states.tag,key,default)
+end
+
+--~ data.update = {
+--~ ["version"] = {
+--~ ["major"] = 0,
+--~ ["minor"] = 1,
+--~ },
+--~ ["rsync"] = {
+--~ ["server"] = "contextgarden.net",
+--~ ["module"] = "minimals",
+--~ ["repository"] = "current",
+--~ ["flags"] = "-rpztlv --stats",
+--~ },
+--~ ["tasks"] = {
+--~ ["update"] = true,
+--~ ["make"] = true,
+--~ ["delete"] = false,
+--~ },
+--~ ["platform"] = {
+--~ ["host"] = true,
+--~ ["other"] = {
+--~ ["mswin"] = false,
+--~ ["linux"] = false,
+--~ ["linux-64"] = false,
+--~ ["osx-intel"] = false,
+--~ ["osx-ppc"] = false,
+--~ ["sun"] = false,
+--~ },
+--~ },
+--~ ["context"] = {
+--~ ["available"] = {"current", "beta", "alpha", "experimental"},
+--~ ["selected"] = "current",
+--~ },
+--~ ["formats"] = {
+--~ ["cont-en"] = true,
+--~ ["cont-nl"] = true,
+--~ ["cont-de"] = false,
+--~ ["cont-cz"] = false,
+--~ ["cont-fr"] = false,
+--~ ["cont-ro"] = false,
+--~ },
+--~ ["engine"] = {
+--~ ["pdftex"] = {
+--~ ["install"] = true,
+--~ ["formats"] = {
+--~ ["pdftex"] = true,
+--~ },
+--~ },
+--~ ["luatex"] = {
+--~ ["install"] = true,
+--~ ["formats"] = {
+--~ },
+--~ },
+--~ ["xetex"] = {
+--~ ["install"] = true,
+--~ ["formats"] = {
+--~ ["xetex"] = false,
+--~ },
+--~ },
+--~ ["metapost"] = {
+--~ ["install"] = true,
+--~ ["formats"] = {
+--~ ["mpost"] = true,
+--~ ["metafun"] = true,
+--~ },
+--~ },
+--~ },
+--~ ["fonts"] = {
+--~ },
+--~ ["doc"] = {
+--~ },
+--~ ["modules"] = {
+--~ ["f-urwgaramond"] = false,
+--~ ["f-urwgothic"] = false,
+--~ ["t-bnf"] = false,
+--~ ["t-chromato"] = false,
+--~ ["t-cmscbf"] = false,
+--~ ["t-cmttbf"] = false,
+--~ ["t-construction-plan"] = false,
+--~ ["t-degrade"] = false,
+--~ ["t-french"] = false,
+--~ ["t-lettrine"] = false,
+--~ ["t-lilypond"] = false,
+--~ ["t-mathsets"] = false,
+--~ ["t-tikz"] = false,
+--~ ["t-typearea"] = false,
+--~ ["t-vim"] = false,
+--~ },
+--~ }
+
+--~ states.save("teststate", "update")
+--~ states.load("teststate", "update")
+
+--~ print(states.get_by_tag("update","rsync.server","unknown"))
+--~ states.set_by_tag("update","rsync.server","oeps")
+--~ print(states.get_by_tag("update","rsync.server","unknown"))
+--~ states.save("teststate", "update")
+--~ states.load("teststate", "update")
+--~ print(states.get_by_tag("update","rsync.server","unknown"))
diff --git a/tex/context/base/luat-sto.lua b/tex/context/base/luat-sto.lua
index 7a11b7f5e..da2467708 100644
--- a/tex/context/base/luat-sto.lua
+++ b/tex/context/base/luat-sto.lua
@@ -1,169 +1,169 @@
-if not modules then modules = { } end modules ['luat-sto'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- we could nil some function in the productionrun
-
-local type, next, setmetatable, getmetatable, collectgarbage = type, next, setmetatable, getmetatable, collectgarbage
-local gmatch, format = string.gmatch, string.format
-local serialize, concat, sortedhash = table.serialize, table.concat, table.sortedhash
-local bytecode = lua.bytecode
-local strippedloadstring = utilities.lua.strippedloadstring
-
-local trace_storage = false
-local report_storage = logs.reporter("system","storage")
-
-storage = storage or { }
-local storage = storage
-
-local data = { }
-storage.data = data
-
-storage.min = 0 -- 500
-storage.max = storage.min - 1
-storage.noftables = storage.noftables or 0
-storage.nofmodules = storage.nofmodules or 0
-
-storage.mark = utilities.storage.mark
-storage.allocate = utilities.storage.allocate
-storage.marked = utilities.storage.marked
-storage.strip = false
-
-directives.register("system.compile.strip", function(v) storage.strip = v end)
-
-function storage.register(...)
- local t = { ... }
- local d = t[2]
- if d then
- storage.mark(d)
- else
- report_storage("fatal error: invalid storage %a",t[1])
- os.exit()
- end
- data[#data+1] = t
- return t
-end
-
-local n = 0
-local function dump()
- local max = storage.max
- for i=1,#data do
- local d = data[i]
- local message, original, target = d[1], d[2] ,d[3]
- local c, code, name = 0, { }, nil
- -- we have a nice definer for this
- for str in gmatch(target,"([^%.]+)") do
- if name then
- name = name .. "." .. str
- else
- name = str
- end
- c = c + 1 ; code[c] = format("%s = %s or { }",name,name)
- end
- max = max + 1
- if trace_storage then
- c = c + 1 ; code[c] = format("print('restoring %s from slot %s')",message,max)
- end
- c = c + 1 ; code[c] = serialize(original,name)
- if trace_storage then
- report_storage('saving %a in slot %a, size %s',message,max,#code[c])
- end
- -- we don't need tracing in such tables
- bytecode[max] = strippedloadstring(concat(code,"\n"),storage.strip,format("slot %s (%s)",max,name))
- collectgarbage("step")
- end
- storage.max = max
-end
-
-lua.registerfinalizer(dump,"dump storage")
-
--- to be tested with otf caching:
-
-function lua.collectgarbage(threshold)
- local current = collectgarbage("count")
- local threshold = threshold or 256 * 1024
- while true do
- collectgarbage("collect")
- local previous = collectgarbage("count")
- if current - previous < threshold then
- break
- else
- current = previous
- end
- end
-end
-
--- -- we also need to count at generation time (nicer for message)
---
--- if lua.bytecode then -- from 0 upwards
--- local i, b = storage.min, lua.bytecode
--- while b[i] do
--- storage.noftables = i
--- b[i]()
--- b[i] = nil
--- i = i + 1
--- end
--- end
-
-statistics.register("stored bytecode data", function()
- local nofmodules = (storage.nofmodules > 0 and storage.nofmodules) or (status.luabytecodes - lua.firstbytecode - 1)
- local nofdumps = (storage.noftables > 0 and storage.noftables ) or storage.max-storage.min + 1
- local tofmodules = storage.tofmodules or 0
- local tofdumps = storage.toftables or 0
- if environment.initex then
- local luautilities = utilities.lua
- local nofstrippedbytes = luautilities.nofstrippedbytes
- local nofstrippedchunks = luautilities.nofstrippedchunks
- if nofstrippedbytes > 0 then
- return format("%s modules, %s tables, %s chunks, %s chunks stripped (%s bytes)",
- nofmodules,
- nofdumps,
- nofmodules + nofdumps,
- nofstrippedchunks,
- nofstrippedbytes
- )
- elseif nofstrippedchunks > 0 then
- return format("%s modules, %s tables, %s chunks, %s chunks stripped",
- nofmodules,
- nofdumps,
- nofmodules + nofdumps,
- nofstrippedchunks
- )
- else
- return format("%s modules, %s tables, %s chunks",
- nofmodules,
- nofdumps,
- nofmodules + nofdumps
- )
- end
- else
- return format("%s modules (%0.3f sec), %s tables (%0.3f sec), %s chunks (%0.3f sec)",
- nofmodules, tofmodules,
- nofdumps, tofdumps,
- nofmodules + nofdumps, tofmodules + tofdumps
- )
- end
-end)
-
-if lua.bytedata then
- storage.register("lua/bytedata",lua.bytedata,"lua.bytedata")
-end
-
--- Because the storage mechanism assumes tables, we define a table for storing
--- (non table) values.
-
-storage.shared = storage.shared or { }
-
-storage.register("storage/shared", storage.shared, "storage.shared")
-
-local mark = storage.mark
-
-if string.patterns then mark(string.patterns) end
-if lpeg.patterns then mark(lpeg.patterns) end
-if os.env then mark(os.env) end
-if number.dimenfactors then mark(number.dimenfactors) end
-if libraries then for k,v in next, libraries do mark(v) end end
+if not modules then modules = { } end modules ['luat-sto'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- we could nil some functions in the production run
+
+local type, next, setmetatable, getmetatable, collectgarbage = type, next, setmetatable, getmetatable, collectgarbage
+local gmatch, format = string.gmatch, string.format
+local serialize, concat, sortedhash = table.serialize, table.concat, table.sortedhash
+local bytecode = lua.bytecode
+local strippedloadstring = utilities.lua.strippedloadstring
+
+local trace_storage = false
+local report_storage = logs.reporter("system","storage")
+
+storage = storage or { }
+local storage = storage
+
+local data = { }
+storage.data = data
+
+storage.min = 0 -- 500
+storage.max = storage.min - 1
+storage.noftables = storage.noftables or 0
+storage.nofmodules = storage.nofmodules or 0
+
+storage.mark = utilities.storage.mark
+storage.allocate = utilities.storage.allocate
+storage.marked = utilities.storage.marked
+storage.strip = false
+
+directives.register("system.compile.strip", function(v) storage.strip = v end)
+
+function storage.register(...)
+ local t = { ... }
+ local d = t[2]
+ if d then
+ storage.mark(d)
+ else
+ report_storage("fatal error: invalid storage %a",t[1])
+ os.exit()
+ end
+ data[#data+1] = t
+ return t
+end
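+
+-- Usage sketch (the names are made up): mark a global table so that it ends up
+-- in the format and is restored on the next run.
+--
+-- mydata = mydata or { }
+-- storage.register("whatever/mydata", mydata, "mydata")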
+
+local n = 0
+local function dump()
+ local max = storage.max
+ for i=1,#data do
+ local d = data[i]
+ local message, original, target = d[1], d[2] ,d[3]
+ local c, code, name = 0, { }, nil
+ -- we have a nice definer for this
+ for str in gmatch(target,"([^%.]+)") do
+ if name then
+ name = name .. "." .. str
+ else
+ name = str
+ end
+ c = c + 1 ; code[c] = format("%s = %s or { }",name,name)
+ end
+ max = max + 1
+ if trace_storage then
+ c = c + 1 ; code[c] = format("print('restoring %s from slot %s')",message,max)
+ end
+ c = c + 1 ; code[c] = serialize(original,name)
+ if trace_storage then
+ report_storage('saving %a in slot %a, size %s',message,max,#code[c])
+ end
+ -- we don't need tracing in such tables
+ bytecode[max] = strippedloadstring(concat(code,"\n"),storage.strip,format("slot %s (%s)",max,name))
+ collectgarbage("step")
+ end
+ storage.max = max
+end
+
+lua.registerfinalizer(dump,"dump storage")
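+
+-- For an entry like ("storage/shared", storage.shared, "storage.shared") the
+-- generated chunk looks roughly like this (a sketch, serialized content left out):
+--
+-- storage = storage or { }
+-- storage.shared = storage.shared or { }
+-- storage.shared={
+--  ...
+-- }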
+
+-- to be tested with otf caching:
+
+function lua.collectgarbage(threshold)
+ local current = collectgarbage("count")
+ local threshold = threshold or 256 * 1024
+ while true do
+ collectgarbage("collect")
+ local previous = collectgarbage("count")
+ if current - previous < threshold then
+ break
+ else
+ current = previous
+ end
+ end
+end
+
+-- -- we also need to count at generation time (nicer for message)
+--
+-- if lua.bytecode then -- from 0 upwards
+-- local i, b = storage.min, lua.bytecode
+-- while b[i] do
+-- storage.noftables = i
+-- b[i]()
+-- b[i] = nil
+-- i = i + 1
+-- end
+-- end
+
+statistics.register("stored bytecode data", function()
+ local nofmodules = (storage.nofmodules > 0 and storage.nofmodules) or (status.luabytecodes - lua.firstbytecode - 1)
+ local nofdumps = (storage.noftables > 0 and storage.noftables ) or storage.max-storage.min + 1
+ local tofmodules = storage.tofmodules or 0
+ local tofdumps = storage.toftables or 0
+ if environment.initex then
+ local luautilities = utilities.lua
+ local nofstrippedbytes = luautilities.nofstrippedbytes
+ local nofstrippedchunks = luautilities.nofstrippedchunks
+ if nofstrippedbytes > 0 then
+ return format("%s modules, %s tables, %s chunks, %s chunks stripped (%s bytes)",
+ nofmodules,
+ nofdumps,
+ nofmodules + nofdumps,
+ nofstrippedchunks,
+ nofstrippedbytes
+ )
+ elseif nofstrippedchunks > 0 then
+ return format("%s modules, %s tables, %s chunks, %s chunks stripped",
+ nofmodules,
+ nofdumps,
+ nofmodules + nofdumps,
+ nofstrippedchunks
+ )
+ else
+ return format("%s modules, %s tables, %s chunks",
+ nofmodules,
+ nofdumps,
+ nofmodules + nofdumps
+ )
+ end
+ else
+ return format("%s modules (%0.3f sec), %s tables (%0.3f sec), %s chunks (%0.3f sec)",
+ nofmodules, tofmodules,
+ nofdumps, tofdumps,
+ nofmodules + nofdumps, tofmodules + tofdumps
+ )
+ end
+end)
+
+if lua.bytedata then
+ storage.register("lua/bytedata",lua.bytedata,"lua.bytedata")
+end
+
+-- Because the storage mechanism assumes tables, we define a table for storing
+-- (non table) values.
+
+storage.shared = storage.shared or { }
+
+storage.register("storage/shared", storage.shared, "storage.shared")
+
+local mark = storage.mark
+
+if string.patterns then mark(string.patterns) end
+if lpeg.patterns then mark(lpeg.patterns) end
+if os.env then mark(os.env) end
+if number.dimenfactors then mark(number.dimenfactors) end
+if libraries then for k,v in next, libraries do mark(v) end end
diff --git a/tex/context/base/lxml-aux.lua b/tex/context/base/lxml-aux.lua
index 0fffe261a..812b14d50 100644
--- a/tex/context/base/lxml-aux.lua
+++ b/tex/context/base/lxml-aux.lua
@@ -1,811 +1,811 @@
-if not modules then modules = { } end modules ['lxml-aux'] = {
- version = 1.001,
- comment = "this module is the basis for the lxml-* ones",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- not all functions here make sense anymore but we keep them for
--- compatibility reasons
-
-local trace_manipulations = false trackers.register("lxml.manipulations", function(v) trace_manipulations = v end)
-
-local report_xml = logs.reporter("xml")
-
-local xml = xml
-
-local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
-local xmlinheritedconvert = xml.inheritedconvert
-local xmlapplylpath = xml.applylpath
-local xmlfilter = xml.filter
-
-local type, setmetatable, getmetatable = type, setmetatable, getmetatable
-local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
-local gmatch, gsub, format, find, strip = string.gmatch, string.gsub, string.format, string.find, string.strip
-local utfbyte = utf.byte
-
-local function report(what,pattern,c,e)
- report_xml("%s element %a, root %a, position %a, index %a, pattern %a",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
-end
-
-local function withelements(e,handle,depth)
- if e and handle then
- local edt = e.dt
- if edt then
- depth = depth or 0
- for i=1,#edt do
- local e = edt[i]
- if type(e) == "table" then
- handle(e,depth)
- withelements(e,handle,depth+1)
- end
- end
- end
- end
-end
-
-xml.withelements = withelements
-
-function xml.withelement(e,n,handle) -- slow
- if e and n ~= 0 and handle then
- local edt = e.dt
- if edt then
- if n > 0 then
- for i=1,#edt do
- local ei = edt[i]
- if type(ei) == "table" then
- if n == 1 then
- handle(ei)
- return
- else
- n = n - 1
- end
- end
- end
- elseif n < 0 then
- for i=#edt,1,-1 do
- local ei = edt[i]
- if type(ei) == "table" then
- if n == -1 then
- handle(ei)
- return
- else
- n = n + 1
- end
- end
- end
- end
- end
- end
-end
-
-function xml.each(root,pattern,handle,reverse)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- if reverse then
- for c=#collected,1,-1 do
- handle(collected[c])
- end
- else
- for c=1,#collected do
- handle(collected[c])
- end
- end
- return collected
- end
-end
-
-function xml.processattributes(root,pattern,handle)
- local collected = xmlapplylpath(root,pattern)
- if collected and handle then
- for c=1,#collected do
- handle(collected[c].at)
- end
- end
- return collected
-end
-
---[[ldx--
-The following functions collect elements and texts.
---ldx]]--
-
--- are these still needed -> lxml-cmp.lua
-
-function xml.collect(root, pattern)
- return xmlapplylpath(root,pattern)
-end
-
-function xml.collecttexts(root, pattern, flatten) -- todo: variant with handle
- local collected = xmlapplylpath(root,pattern)
- if collected and flatten then
- local xmltostring = xml.tostring
- for c=1,#collected do
- collected[c] = xmltostring(collected[c].dt)
- end
- end
- return collected or { }
-end
-
-function xml.collect_tags(root, pattern, nonamespace)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- local t, n = { }, 0
- for c=1,#collected do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace then
- t[n] = tg
- elseif ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
- end
- return t
- end
-end
-
---[[ldx--
-We've now arrived at the functions that manipulate the tree.
---ldx]]--
-
-local no_root = { no_root = true }
-
-local function redo_ni(d)
- for k=1,#d do
- local dk = d[k]
- if type(dk) == "table" then
- dk.ni = k
- end
- end
-end
-
-local function xmltoelement(whatever,root)
- if not whatever then
- return nil
- end
- local element
- if type(whatever) == "string" then
- element = xmlinheritedconvert(whatever,root) -- beware, not really a root
- else
- element = whatever -- we assume a table
- end
- if element.error then
- return whatever -- string
- end
- if element then
- --~ if element.ri then
- --~ element = element.dt[element.ri].dt
- --~ else
- --~ element = element.dt
- --~ end
- end
- return element
-end
-
-xml.toelement = xmltoelement
-
-local function copiedelement(element,newparent)
- if type(element) == "string" then
- return element
- else
- element = xmlcopy(element).dt
- if newparent and type(element) == "table" then
- element.__p__ = newparent
- end
- return element
- end
-end
-
-function xml.delete(root,pattern)
- if not pattern or pattern == "" then
- local p = root.__p__
- if p then
- if trace_manipulations then
- report('deleting',"--",c,root)
- end
- local d = p.dt
- remove(d,root.ni)
- redo_ni(d) -- can be made faster and inlined
- end
- else
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('deleting',pattern,c,e)
- end
- local d = p.dt
- remove(d,e.ni)
- redo_ni(d) -- can be made faster and inlined
- end
- end
- end
- end
-end
-
-function xml.replace(root,pattern,whatever)
- local element = root and xmltoelement(whatever,root)
- local collected = element and xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local p = e.__p__
- if p then
- if trace_manipulations then
- report('replacing',pattern,c,e)
- end
- local d = p.dt
- d[e.ni] = copiedelement(element,p)
- redo_ni(d) -- probably not needed
- end
- end
- end
-end
-
-local function wrap(e,wrapper)
- local t = {
- rn = e.rn,
- tg = e.tg,
- ns = e.ns,
- at = e.at,
- dt = e.dt,
- __p__ = e,
- }
- setmetatable(t,getmetatable(e))
- e.rn = wrapper.rn or e.rn or ""
- e.tg = wrapper.tg or e.tg or ""
- e.ns = wrapper.ns or e.ns or ""
- e.at = fastcopy(wrapper.at)
- e.dt = { t }
-end
-
-function xml.wrap(root,pattern,whatever)
- if whatever then
- local wrapper = xmltoelement(whatever,root)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if trace_manipulations then
- report('wrapping',pattern,c,e)
- end
- wrap(e,wrapper)
- end
- end
- else
- wrap(root,xmltoelement(pattern))
- end
-end
-
-local function inject_element(root,pattern,whatever,prepend)
- local element = root and xmltoelement(whatever,root)
- local collected = element and xmlapplylpath(root,pattern)
- local function inject_e(e)
- local r = e.__p__
- local d, k, rri = r.dt, e.ni, r.ri
- local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
- if edt then
- local be, af
- local cp = copiedelement(element,e)
- if prepend then
- be, af = cp, edt
- else
- be, af = edt, cp
- end
- local bn = #be
- for i=1,#af do
- bn = bn + 1
- be[bn] = af[i]
- end
- if rri then
- r.dt[rri].dt = be
- else
- d[k].dt = be
- end
- redo_ni(d)
- end
- end
- if not collected then
- -- nothing
- elseif collected.tg then
- -- first or so
- inject_e(collected)
- else
- for c=1,#collected do
- inject_e(collected[c])
- end
- end
-end
-
-local function insert_element(root,pattern,whatever,before) -- todo: element als functie
- local element = root and xmltoelement(whatever,root)
- local collected = element and xmlapplylpath(root,pattern)
- local function insert_e(e)
- local r = e.__p__
- local d, k = r.dt, e.ni
- if not before then
- k = k + 1
- end
- insert(d,k,copiedelement(element,r))
- redo_ni(d)
- end
- if not collected then
- -- nothing
- elseif collected.tg then
- -- first or so
- insert_e(collected)
- else
- for c=1,#collected do
- insert_e(collected[c])
- end
- end
-end
-
-xml.insert_element = insert_element
-xml.insertafter = insert_element
-xml.insertbefore = function(r,p,e) insert_element(r,p,e,true) end
-xml.injectafter = inject_element
-xml.injectbefore = function(r,p,e) inject_element(r,p,e,true) end
-
-local function include(xmldata,pattern,attribute,recursive,loaddata)
- -- parse="text" (default: xml), encoding="" (todo)
- -- attribute = attribute or 'href'
- pattern = pattern or 'include'
- loaddata = loaddata or io.loaddata
- local collected = xmlapplylpath(xmldata,pattern)
- if collected then
- for c=1,#collected do
- local ek = collected[c]
- local name = nil
- local ekdt = ek.dt
- local ekat = ek.at
- local epdt = ek.__p__.dt
- if not attribute or attribute == "" then
- name = (type(ekdt) == "table" and ekdt[1]) or ekdt -- check, probably always tab or str
- end
- if not name then
- for a in gmatch(attribute or "href","([^|]+)") do
- name = ekat[a]
- if name then break end
- end
- end
- local data = (name and name ~= "" and loaddata(name)) or ""
- if data == "" then
- epdt[ek.ni] = "" -- xml.empty(d,k)
- elseif ekat["parse"] == "text" then
- -- for the moment hard coded
- epdt[ek.ni] = xml.escaped(data) -- d[k] = xml.escaped(data)
- else
---~ local settings = xmldata.settings
---~ settings.parent_root = xmldata -- to be tested
---~ local xi = xmlconvert(data,settings)
- local xi = xmlinheritedconvert(data,xmldata)
- if not xi then
- epdt[ek.ni] = "" -- xml.empty(d,k)
- else
- if recursive then
- include(xi,pattern,attribute,recursive,loaddata)
- end
- epdt[ek.ni] = xml.body(xi) -- xml.assign(d,k,xi)
- end
- end
- end
- end
-end
-
-xml.include = include
-
-local function stripelement(e,nolines,anywhere)
- local edt = e.dt
- if edt then
- if anywhere then
- local t, n = { }, 0
- for e=1,#edt do
- local str = edt[e]
- if type(str) ~= "string" then
- n = n + 1
- t[n] = str
- elseif str ~= "" then
- -- todo: lpeg for each case
- if nolines then
- str = gsub(str,"%s+"," ")
- end
- str = gsub(str,"^%s*(.-)%s*$","%1")
- if str ~= "" then
- n = n + 1
- t[n] = str
- end
- end
- end
- e.dt = t
- else
- -- we can assume a regular sparse xml table with no successive strings
- -- otherwise we should use a while loop
- if #edt > 0 then
- -- strip front
- local str = edt[1]
- if type(str) ~= "string" then
- -- nothing
- elseif str == "" then
- remove(edt,1)
- else
- if nolines then
- str = gsub(str,"%s+"," ")
- end
- str = gsub(str,"^%s+","")
- if str == "" then
- remove(edt,1)
- else
- edt[1] = str
- end
- end
- end
- local nedt = #edt
- if nedt > 0 then
- -- strip end
- local str = edt[nedt]
- if type(str) ~= "string" then
- -- nothing
- elseif str == "" then
- remove(edt)
- else
- if nolines then
- str = gsub(str,"%s+"," ")
- end
- str = gsub(str,"%s+$","")
- if str == "" then
- remove(edt)
- else
- edt[nedt] = str
- end
- end
- end
- end
- end
- return e -- convenient
-end
-
-xml.stripelement = stripelement
-
-function xml.strip(root,pattern,nolines,anywhere) -- strips all leading and trailing spacing
- local collected = xmlapplylpath(root,pattern) -- beware, indices no longer are valid now
- if collected then
- for i=1,#collected do
- stripelement(collected[i],nolines,anywhere)
- end
- end
-end
-
-local function renamespace(root, oldspace, newspace) -- fast variant
- local ndt = #root.dt
- for i=1,ndt or 0 do
- local e = root[i]
- if type(e) == "table" then
- if e.ns == oldspace then
- e.ns = newspace
- if e.rn then
- e.rn = newspace
- end
- end
- local edt = e.dt
- if edt then
- renamespace(edt, oldspace, newspace)
- end
- end
- end
-end
-
-xml.renamespace = renamespace
-
-function xml.remaptag(root, pattern, newtg)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- collected[c].tg = newtg
- end
- end
-end
-
-function xml.remapnamespace(root, pattern, newns)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- collected[c].ns = newns
- end
- end
-end
-
-function xml.checknamespace(root, pattern, newns)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- if (not e.rn or e.rn == "") and e.ns == "" then
- e.rn = newns
- end
- end
- end
-end
-
-function xml.remapname(root, pattern, newtg, newns, newrn)
- local collected = xmlapplylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- e.tg, e.ns, e.rn = newtg, newns, newrn
- end
- end
-end
-
---[[ldx--
-Helper (for q2p).
---ldx]]--
-
-function xml.cdatatotext(e)
- local dt = e.dt
- if #dt == 1 then
- local first = dt[1]
- if first.tg == "@cd@" then
- e.dt = first.dt
- end
- else
- -- maybe option
- end
-end
-
--- local x = xml.convert("<x><a>1<b>2</b>3</a></x>")
--- xml.texttocdata(xml.first(x,"a"))
--- print(x) -- <x><![CDATA[1<b>2</b>3]]></x>
-
-function xml.texttocdata(e) -- could be a finalizer
- local dt = e.dt
- local s = xml.tostring(dt) -- no shortcut?
- e.tg = "@cd@"
- e.special = true
- e.ns = ""
- e.rn = ""
- e.dt = { s }
- e.at = nil
-end
-
--- local x = xml.convert("<x><a>1<b>2</b>3</a></x>")
--- xml.tocdata(xml.first(x,"a"))
--- print(x) -- <x><![CDATA[<a>1<b>2</b>3</a>]]></x>
-
-function xml.elementtocdata(e) -- could be a finalizer
- local dt = e.dt
- local s = xml.tostring(e) -- no shortcut?
- e.tg = "@cd@"
- e.special = true
- e.ns = ""
- e.rn = ""
- e.dt = { s }
- e.at = nil
-end
-
-xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share
-
-local entities = characters and characters.entities or nil
-local builtinentities = xml.builtinentities
-
-function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex
- if not entities then
- require("char-ent")
- entities = characters.entities
- end
- if entities and root and root.tg == "@rt@" and root.statistics then
- local list = { }
- local hexify = option == "hexadecimal"
- for k, v in table.sortedhash(root.statistics.entities.names) do
- if not builtinentities[k] then
- local e = entities[k]
- if not e then
- e = format("[%s]",k)
- elseif hexify then
- e = format("%05X;",utfbyte(k))
- end
- list[#list+1] = format(" ",k,e)
- end
- end
- local dt = root.dt
- local n = dt[1].tg == "@pi@" and 2 or 1
- if #list > 0 then
- insert(dt, n, { "\n" })
- insert(dt, n, {
- tg = "@dt@", -- beware, doctype is unparsed
- dt = { format("Something [\n%s\n] ",concat(list)) },
- ns = "",
- special = true,
- })
- insert(dt, n, { "\n\n" })
- else
- -- insert(dt, n, { table.serialize(root.statistics) })
- end
- end
-end
-
--- local str = [==[
---
---
--- test test { test
---
---
--- ]==]
---
--- local x = xml.convert(str)
--- xml.addentitiesdoctype(x,"hexadecimal")
--- print(x)
-
---[[ldx--
-Here are a few synonyms.
---ldx]]--
-
-xml.all = xml.each
-xml.insert = xml.insertafter
-xml.inject = xml.injectafter
-xml.after = xml.insertafter
-xml.before = xml.insertbefore
-xml.process = xml.each
-
--- obsolete
-
-xml.obsolete = xml.obsolete or { }
-local obsolete = xml.obsolete
-
-xml.strip_whitespace = xml.strip obsolete.strip_whitespace = xml.strip
-xml.collect_elements = xml.collect obsolete.collect_elements = xml.collect
-xml.delete_element = xml.delete obsolete.delete_element = xml.delete
-xml.replace_element = xml.replace obsolete.replace_element = xml.replacet
-xml.each_element = xml.each obsolete.each_element = xml.each
-xml.process_elements = xml.process obsolete.process_elements = xml.process
-xml.insert_element_after = xml.insertafter obsolete.insert_element_after = xml.insertafter
-xml.insert_element_before = xml.insertbefore obsolete.insert_element_before = xml.insertbefore
-xml.inject_element_after = xml.injectafter obsolete.inject_element_after = xml.injectafter
-xml.inject_element_before = xml.injectbefore obsolete.inject_element_before = xml.injectbefore
-xml.process_attributes = xml.processattributes obsolete.process_attributes = xml.processattributes
-xml.collect_texts = xml.collecttexts obsolete.collect_texts = xml.collecttexts
-xml.inject_element = xml.inject obsolete.inject_element = xml.inject
-xml.remap_tag = xml.remaptag obsolete.remap_tag = xml.remaptag
-xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname
-xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace
-
--- new (probably ok)
-
-function xml.cdata(e)
- if e then
- local dt = e.dt
- if dt and #dt == 1 then
- local first = dt[1]
- return first.tg == "@cd@" and first.dt[1] or ""
- end
- end
- return ""
-end
-
-function xml.finalizers.xml.cdata(collected)
- if collected then
- local e = collected[1]
- if e then
- local dt = e.dt
- if dt and #dt == 1 then
- local first = dt[1]
- return first.tg == "@cd@" and first.dt[1] or ""
- end
- end
- end
- return ""
-end
-
-function xml.insertcomment(e,str,n) -- also insertcdata
- table.insert(e.dt,n or 1,{
- tg = "@cm@",
- ns = "",
- special = true,
- at = { },
- dt = { str },
- })
-end
-
-function xml.setcdata(e,str) -- also setcomment
- e.dt = { {
- tg = "@cd@",
- ns = "",
- special = true,
- at = { },
- dt = { str },
- } }
-end
-
--- maybe helpers like this will move to an autoloader
-
-function xml.separate(x,pattern)
- local collected = xmlapplylpath(x,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local d = e.dt
- if d == x then
- report_xml("warning: xml.separate changes root")
- x = d
- end
- local t, n = { "\n" }, 1
- local i, nd = 1, #d
- while i <= nd do
- while i <= nd do
- local di = d[i]
- if type(di) == "string" then
- if di == "\n" or find(di,"^%s+$") then -- first test is speedup
- i = i + 1
- else
- d[i] = strip(di)
- break
- end
- else
- break
- end
- end
- if i > nd then
- break
- end
- t[n+1] = "\n"
- t[n+2] = d[i]
- t[n+3] = "\n"
- n = n + 3
- i = i + 1
- end
- t[n+1] = "\n"
- setmetatable(t,getmetatable(d))
- e.dt = t
- end
- end
- return x
-end
-
---
-
-local helpers = xml.helpers or { }
-xml.helpers = helpers
-
-local function normal(e,action)
- local edt = e.dt
- if edt then
- for i=1,#edt do
- local str = edt[i]
- if type(str) == "string" and str ~= "" then
- edt[i] = action(str)
- end
- end
- end
-end
-
-local function recurse(e,action)
- local edt = e.dt
- if edt then
- for i=1,#edt do
- local str = edt[i]
- if type(str) ~= "string" then
- recurse(str,action,recursive)
- elseif str ~= "" then
- edt[i] = action(str)
- end
- end
- end
-end
-
-function helpers.recursetext(collected,action,recursive)
- if recursive then
- for i=1,#collected do
- recurse(collected[i],action)
- end
- else
- for i=1,#collected do
- normal(collected[i],action)
- end
- end
-end
+if not modules then modules = { } end modules ['lxml-aux'] = {
+ version = 1.001,
+ comment = "this module is the basis for the lxml-* ones",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- not all functions here make sense anymore but we keep them for
+-- compatibility reasons
+
+local trace_manipulations = false trackers.register("lxml.manipulations", function(v) trace_manipulations = v end)
+
+local report_xml = logs.reporter("xml")
+
+local xml = xml
+
+local xmlconvert, xmlcopy, xmlname = xml.convert, xml.copy, xml.name
+local xmlinheritedconvert = xml.inheritedconvert
+local xmlapplylpath = xml.applylpath
+local xmlfilter = xml.filter
+
+local type, setmetatable, getmetatable = type, setmetatable, getmetatable
+local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
+local gmatch, gsub, format, find, strip = string.gmatch, string.gsub, string.format, string.find, string.strip
+local utfbyte = utf.byte
+
+local function report(what,pattern,c,e)
+ report_xml("%s element %a, root %a, position %a, index %a, pattern %a",what,xmlname(e),xmlname(e.__p__),c,e.ni,pattern)
+end
+
+local function withelements(e,handle,depth)
+ if e and handle then
+ local edt = e.dt
+ if edt then
+ depth = depth or 0
+ for i=1,#edt do
+ local e = edt[i]
+ if type(e) == "table" then
+ handle(e,depth)
+ withelements(e,handle,depth+1)
+ end
+ end
+ end
+ end
+end
+
+xml.withelements = withelements
+
+function xml.withelement(e,n,handle) -- slow
+ if e and n ~= 0 and handle then
+ local edt = e.dt
+ if edt then
+ if n > 0 then
+ for i=1,#edt do
+ local ei = edt[i]
+ if type(ei) == "table" then
+ if n == 1 then
+ handle(ei)
+ return
+ else
+ n = n - 1
+ end
+ end
+ end
+ elseif n < 0 then
+ for i=#edt,1,-1 do
+ local ei = edt[i]
+ if type(ei) == "table" then
+ if n == -1 then
+ handle(ei)
+ return
+ else
+ n = n + 1
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
+function xml.each(root,pattern,handle,reverse)
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ if reverse then
+ for c=#collected,1,-1 do
+ handle(collected[c])
+ end
+ else
+ for c=1,#collected do
+ handle(collected[c])
+ end
+ end
+ return collected
+ end
+end
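+
+-- A minimal usage sketch, assuming a tree built with xml.convert; the handler
+-- receives each matched element (with fields like e.tg and e.dt as used above).
+--
+-- local root = xml.convert("<doc><item>one</item><item>two</item></doc>")
+-- xml.each(root,"item",function(e) print(e.tg,xml.tostring(e.dt)) end)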
+
+function xml.processattributes(root,pattern,handle)
+ local collected = xmlapplylpath(root,pattern)
+ if collected and handle then
+ for c=1,#collected do
+ handle(collected[c].at)
+ end
+ end
+ return collected
+end
+
+--[[ldx--
+The following functions collect elements and texts.
+--ldx]]--
+
+-- are these still needed -> lxml-cmp.lua
+
+function xml.collect(root, pattern)
+ return xmlapplylpath(root,pattern)
+end
+
+function xml.collecttexts(root, pattern, flatten) -- todo: variant with handle
+ local collected = xmlapplylpath(root,pattern)
+ if collected and flatten then
+ local xmltostring = xml.tostring
+ for c=1,#collected do
+ collected[c] = xmltostring(collected[c].dt)
+ end
+ end
+ return collected or { }
+end
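+
+-- Usage sketch: with flatten set, the collected elements are replaced by the
+-- stringified content of their dt tables (so roughly { "one", "two" } here).
+--
+-- local root  = xml.convert("<doc><item>one</item><item>two</item></doc>")
+-- local texts = xml.collecttexts(root,"item",true)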
+
+function xml.collect_tags(root, pattern, nonamespace)
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ local t, n = { }, 0
+ for c=1,#collected do
+ local e = collected[c]
+ local ns, tg = e.ns, e.tg
+ n = n + 1
+ if nonamespace then
+ t[n] = tg
+ elseif ns == "" then
+ t[n] = tg
+ else
+ t[n] = ns .. ":" .. tg
+ end
+ end
+ return t
+ end
+end
+
+--[[ldx--
+We've now arrived at the functions that manipulate the tree.
+--ldx]]--
+
+local no_root = { no_root = true }
+
+local function redo_ni(d)
+ for k=1,#d do
+ local dk = d[k]
+ if type(dk) == "table" then
+ dk.ni = k
+ end
+ end
+end
+
+local function xmltoelement(whatever,root)
+ if not whatever then
+ return nil
+ end
+ local element
+ if type(whatever) == "string" then
+ element = xmlinheritedconvert(whatever,root) -- beware, not really a root
+ else
+ element = whatever -- we assume a table
+ end
+ if element.error then
+ return whatever -- string
+ end
+ if element then
+ --~ if element.ri then
+ --~ element = element.dt[element.ri].dt
+ --~ else
+ --~ element = element.dt
+ --~ end
+ end
+ return element
+end
+
+xml.toelement = xmltoelement
+
+local function copiedelement(element,newparent)
+ if type(element) == "string" then
+ return element
+ else
+ element = xmlcopy(element).dt
+ if newparent and type(element) == "table" then
+ element.__p__ = newparent
+ end
+ return element
+ end
+end
+
+function xml.delete(root,pattern)
+ if not pattern or pattern == "" then
+ local p = root.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',"--",c,root)
+ end
+ local d = p.dt
+ remove(d,root.ni)
+ redo_ni(d) -- can be made faster and inlined
+ end
+ else
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local p = e.__p__
+ if p then
+ if trace_manipulations then
+ report('deleting',pattern,c,e)
+ end
+ local d = p.dt
+ remove(d,e.ni)
+ redo_ni(d) -- can be made faster and inlined
+ end
+ end
+ end
+ end
+end
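+
+-- Usage sketch: all elements matching the pattern are removed from their
+-- parents and the sibling indices are refreshed via redo_ni.
+--
+-- local root = xml.convert("<doc><a/><b/><a/></doc>")
+-- xml.delete(root,"a") -- roughly <doc><b/></doc> afterwards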
+
+function xml.replace(root,pattern,whatever)
+ local element = root and xmltoelement(whatever,root)
+ local collected = element and xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local p = e.__p__
+ if p then
+ if trace_manipulations then
+ report('replacing',pattern,c,e)
+ end
+ local d = p.dt
+ d[e.ni] = copiedelement(element,p)
+ redo_ni(d) -- probably not needed
+ end
+ end
+ end
+end
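+
+-- Usage sketch: the replacement can be given as a string, which is converted
+-- with xmlinheritedconvert before each matched element is overwritten.
+--
+-- local root = xml.convert("<doc><old>x</old></doc>")
+-- xml.replace(root,"old","<new>y</new>")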
+
+local function wrap(e,wrapper)
+ local t = {
+ rn = e.rn,
+ tg = e.tg,
+ ns = e.ns,
+ at = e.at,
+ dt = e.dt,
+ __p__ = e,
+ }
+ setmetatable(t,getmetatable(e))
+ e.rn = wrapper.rn or e.rn or ""
+ e.tg = wrapper.tg or e.tg or ""
+ e.ns = wrapper.ns or e.ns or ""
+ e.at = fastcopy(wrapper.at)
+ e.dt = { t }
+end
+
+function xml.wrap(root,pattern,whatever)
+ if whatever then
+ local wrapper = xmltoelement(whatever,root)
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ if trace_manipulations then
+ report('wrapping',pattern,c,e)
+ end
+ wrap(e,wrapper)
+ end
+ end
+ else
+ wrap(root,xmltoelement(pattern))
+ end
+end
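+
+-- Usage sketch: passing a plain table as wrapper avoids a string conversion;
+-- the matched element takes over the wrapper's name and keeps its old content
+-- as a single child.
+--
+-- local root = xml.convert("<doc><title>Hello</title></doc>")
+-- xml.wrap(root,"title",{ tg = "header", ns = "", at = { } })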
+
+local function inject_element(root,pattern,whatever,prepend)
+ local element = root and xmltoelement(whatever,root)
+ local collected = element and xmlapplylpath(root,pattern)
+ local function inject_e(e)
+ local r = e.__p__
+ local d, k, rri = r.dt, e.ni, r.ri
+ local edt = (rri and d[rri].dt) or (d and d[k] and d[k].dt)
+ if edt then
+ local be, af
+ local cp = copiedelement(element,e)
+ if prepend then
+ be, af = cp, edt
+ else
+ be, af = edt, cp
+ end
+ local bn = #be
+ for i=1,#af do
+ bn = bn + 1
+ be[bn] = af[i]
+ end
+ if rri then
+ r.dt[rri].dt = be
+ else
+ d[k].dt = be
+ end
+ redo_ni(d)
+ end
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ inject_e(collected)
+ else
+ for c=1,#collected do
+ inject_e(collected[c])
+ end
+ end
+end
+
+local function insert_element(root,pattern,whatever,before) -- todo: element as function
+ local element = root and xmltoelement(whatever,root)
+ local collected = element and xmlapplylpath(root,pattern)
+ local function insert_e(e)
+ local r = e.__p__
+ local d, k = r.dt, e.ni
+ if not before then
+ k = k + 1
+ end
+ insert(d,k,copiedelement(element,r))
+ redo_ni(d)
+ end
+ if not collected then
+ -- nothing
+ elseif collected.tg then
+ -- first or so
+ insert_e(collected)
+ else
+ for c=1,#collected do
+ insert_e(collected[c])
+ end
+ end
+end
+
+xml.insert_element = insert_element
+xml.insertafter = insert_element
+xml.insertbefore = function(r,p,e) insert_element(r,p,e,true) end
+xml.injectafter = inject_element
+xml.injectbefore = function(r,p,e) inject_element(r,p,e,true) end
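+
+-- Usage sketch: insertafter/insertbefore add a sibling next to each match,
+-- while injectafter/injectbefore append or prepend inside the match.
+--
+-- local root = xml.convert("<doc><a/><c/></doc>")
+-- xml.insertafter (root,"a","<b/>") -- roughly <doc><a/><b/><c/></doc>
+-- xml.injectbefore(root,"c","<d/>") -- <d/> ends up as the first child of <c/>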
+
+local function include(xmldata,pattern,attribute,recursive,loaddata)
+ -- parse="text" (default: xml), encoding="" (todo)
+ -- attribute = attribute or 'href'
+ pattern = pattern or 'include'
+ loaddata = loaddata or io.loaddata
+ local collected = xmlapplylpath(xmldata,pattern)
+ if collected then
+ for c=1,#collected do
+ local ek = collected[c]
+ local name = nil
+ local ekdt = ek.dt
+ local ekat = ek.at
+ local epdt = ek.__p__.dt
+ if not attribute or attribute == "" then
+ name = (type(ekdt) == "table" and ekdt[1]) or ekdt -- check, probably always tab or str
+ end
+ if not name then
+ for a in gmatch(attribute or "href","([^|]+)") do
+ name = ekat[a]
+ if name then break end
+ end
+ end
+ local data = (name and name ~= "" and loaddata(name)) or ""
+ if data == "" then
+ epdt[ek.ni] = "" -- xml.empty(d,k)
+ elseif ekat["parse"] == "text" then
+ -- for the moment hard coded
+ epdt[ek.ni] = xml.escaped(data) -- d[k] = xml.escaped(data)
+ else
+--~ local settings = xmldata.settings
+--~ settings.parent_root = xmldata -- to be tested
+--~ local xi = xmlconvert(data,settings)
+ local xi = xmlinheritedconvert(data,xmldata)
+ if not xi then
+ epdt[ek.ni] = "" -- xml.empty(d,k)
+ else
+ if recursive then
+ include(xi,pattern,attribute,recursive,loaddata)
+ end
+ epdt[ek.ni] = xml.body(xi) -- xml.assign(d,k,xi)
+ end
+ end
+ end
+ end
+end
+
+xml.include = include
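+
+-- Usage sketch: the loader argument makes this testable without touching the
+-- filesystem (io.loaddata is the default); href is the default attribute.
+--
+-- local sources = { ["chapter.xml"] = "<chapter>one</chapter>" }
+-- local root    = xml.convert('<doc><include href="chapter.xml"/></doc>')
+-- xml.include(root,"include","href",true,function(name) return sources[name] end)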
+
+local function stripelement(e,nolines,anywhere)
+ local edt = e.dt
+ if edt then
+ if anywhere then
+ local t, n = { }, 0
+ for e=1,#edt do
+ local str = edt[e]
+ if type(str) ~= "string" then
+ n = n + 1
+ t[n] = str
+ elseif str ~= "" then
+ -- todo: lpeg for each case
+ if nolines then
+ str = gsub(str,"%s+"," ")
+ end
+ str = gsub(str,"^%s*(.-)%s*$","%1")
+ if str ~= "" then
+ n = n + 1
+ t[n] = str
+ end
+ end
+ end
+ e.dt = t
+ else
+ -- we can assume a regular sparse xml table with no successive strings
+ -- otherwise we should use a while loop
+ if #edt > 0 then
+ -- strip front
+ local str = edt[1]
+ if type(str) ~= "string" then
+ -- nothing
+ elseif str == "" then
+ remove(edt,1)
+ else
+ if nolines then
+ str = gsub(str,"%s+"," ")
+ end
+ str = gsub(str,"^%s+","")
+ if str == "" then
+ remove(edt,1)
+ else
+ edt[1] = str
+ end
+ end
+ end
+ local nedt = #edt
+ if nedt > 0 then
+ -- strip end
+ local str = edt[nedt]
+ if type(str) ~= "string" then
+ -- nothing
+ elseif str == "" then
+ remove(edt)
+ else
+ if nolines then
+ str = gsub(str,"%s+"," ")
+ end
+ str = gsub(str,"%s+$","")
+ if str == "" then
+ remove(edt)
+ else
+ edt[nedt] = str
+ end
+ end
+ end
+ end
+ end
+ return e -- convenient
+end
+
+xml.stripelement = stripelement
+
+function xml.strip(root,pattern,nolines,anywhere) -- strips all leading and trailing spacing
+ local collected = xmlapplylpath(root,pattern) -- beware, indices no longer are valid now
+ if collected then
+ for i=1,#collected do
+ stripelement(collected[i],nolines,anywhere)
+ end
+ end
+end
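+
+-- Usage sketch: strips leading and trailing spacing of the matched elements;
+-- with nolines set, runs of whitespace inside the text collapse to one space.
+--
+-- local root = xml.convert("<doc><p>  hello   world  </p></doc>")
+-- xml.strip(root,"p",true)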
+
+local function renamespace(root, oldspace, newspace) -- fast variant
+ local ndt = #root.dt
+ for i=1,ndt or 0 do
+ local e = root[i]
+ if type(e) == "table" then
+ if e.ns == oldspace then
+ e.ns = newspace
+ if e.rn then
+ e.rn = newspace
+ end
+ end
+ local edt = e.dt
+ if edt then
+ renamespace(edt, oldspace, newspace)
+ end
+ end
+ end
+end
+
+xml.renamespace = renamespace
+
+function xml.remaptag(root, pattern, newtg)
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ collected[c].tg = newtg
+ end
+ end
+end
+
+function xml.remapnamespace(root, pattern, newns)
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ collected[c].ns = newns
+ end
+ end
+end
+
+function xml.checknamespace(root, pattern, newns)
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ if (not e.rn or e.rn == "") and e.ns == "" then
+ e.rn = newns
+ end
+ end
+ end
+end
+
+function xml.remapname(root, pattern, newtg, newns, newrn)
+ local collected = xmlapplylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ e.tg, e.ns, e.rn = newtg, newns, newrn
+ end
+ end
+end
+
+--[[ldx--
+Helper (for q2p).
+--ldx]]--
+
+function xml.cdatatotext(e)
+ local dt = e.dt
+ if #dt == 1 then
+ local first = dt[1]
+ if first.tg == "@cd@" then
+ e.dt = first.dt
+ end
+ else
+ -- maybe option
+ end
+end
+
+-- local x = xml.convert("<x><a>1<b>2</b>3</a></x>")
+-- xml.texttocdata(xml.first(x,"a"))
+-- print(x) -- <x><![CDATA[1<b>2</b>3]]></x>
+
+function xml.texttocdata(e) -- could be a finalizer
+ local dt = e.dt
+ local s = xml.tostring(dt) -- no shortcut?
+ e.tg = "@cd@"
+ e.special = true
+ e.ns = ""
+ e.rn = ""
+ e.dt = { s }
+ e.at = nil
+end
+
+-- local x = xml.convert("<x><a>1<b>2</b>3</a></x>")
+-- xml.tocdata(xml.first(x,"a"))
+-- print(x) -- <x><![CDATA[<a>1<b>2</b>3</a>]]></x>
+
+function xml.elementtocdata(e) -- could be a finalizer
+ local dt = e.dt
+ local s = xml.tostring(e) -- no shortcut?
+ e.tg = "@cd@"
+ e.special = true
+ e.ns = ""
+ e.rn = ""
+ e.dt = { s }
+ e.at = nil
+end
+
+xml.builtinentities = table.tohash { "amp", "quot", "apos", "lt", "gt" } -- used often so share
+
+local entities = characters and characters.entities or nil
+local builtinentities = xml.builtinentities
+
+function xml.addentitiesdoctype(root,option) -- we could also have a 'resolve' i.e. inline hex
+ if not entities then
+ require("char-ent")
+ entities = characters.entities
+ end
+ if entities and root and root.tg == "@rt@" and root.statistics then
+ local list = { }
+ local hexify = option == "hexadecimal"
+ for k, v in table.sortedhash(root.statistics.entities.names) do
+ if not builtinentities[k] then
+ local e = entities[k]
+ if not e then
+ e = format("[%s]",k)
+ elseif hexify then
+ e = format("%05X;",utfbyte(k))
+ end
+ list[#list+1] = format(" ",k,e)
+ end
+ end
+ local dt = root.dt
+ local n = dt[1].tg == "@pi@" and 2 or 1
+ if #list > 0 then
+ insert(dt, n, { "\n" })
+ insert(dt, n, {
+ tg = "@dt@", -- beware, doctype is unparsed
+ dt = { format("Something [\n%s\n] ",concat(list)) },
+ ns = "",
+ special = true,
+ })
+ insert(dt, n, { "\n\n" })
+ else
+ -- insert(dt, n, { table.serialize(root.statistics) })
+ end
+ end
+end
+
+-- local str = [==[
+--
+--
+-- test test { test
+--
+--
+-- ]==]
+--
+-- local x = xml.convert(str)
+-- xml.addentitiesdoctype(x,"hexadecimal")
+-- print(x)
+
+--[[ldx--
+Here are a few synonyms.
+--ldx]]--
+
+xml.all = xml.each
+xml.insert = xml.insertafter
+xml.inject = xml.injectafter
+xml.after = xml.insertafter
+xml.before = xml.insertbefore
+xml.process = xml.each
+
+-- obsolete
+
+xml.obsolete = xml.obsolete or { }
+local obsolete = xml.obsolete
+
+xml.strip_whitespace = xml.strip obsolete.strip_whitespace = xml.strip
+xml.collect_elements = xml.collect obsolete.collect_elements = xml.collect
+xml.delete_element = xml.delete obsolete.delete_element = xml.delete
+xml.replace_element = xml.replace obsolete.replace_element = xml.replace
+xml.each_element = xml.each obsolete.each_element = xml.each
+xml.process_elements = xml.process obsolete.process_elements = xml.process
+xml.insert_element_after = xml.insertafter obsolete.insert_element_after = xml.insertafter
+xml.insert_element_before = xml.insertbefore obsolete.insert_element_before = xml.insertbefore
+xml.inject_element_after = xml.injectafter obsolete.inject_element_after = xml.injectafter
+xml.inject_element_before = xml.injectbefore obsolete.inject_element_before = xml.injectbefore
+xml.process_attributes = xml.processattributes obsolete.process_attributes = xml.processattributes
+xml.collect_texts = xml.collecttexts obsolete.collect_texts = xml.collecttexts
+xml.inject_element = xml.inject obsolete.inject_element = xml.inject
+xml.remap_tag = xml.remaptag obsolete.remap_tag = xml.remaptag
+xml.remap_name = xml.remapname obsolete.remap_name = xml.remapname
+xml.remap_namespace = xml.remapnamespace obsolete.remap_namespace = xml.remapnamespace
+
+-- new (probably ok)
+
+function xml.cdata(e)
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ return ""
+end
+
+function xml.finalizers.xml.cdata(collected)
+ if collected then
+ local e = collected[1]
+ if e then
+ local dt = e.dt
+ if dt and #dt == 1 then
+ local first = dt[1]
+ return first.tg == "@cd@" and first.dt[1] or ""
+ end
+ end
+ end
+ return ""
+end
+
+function xml.insertcomment(e,str,n) -- also insertcdata
+ table.insert(e.dt,n or 1,{
+ tg = "@cm@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ })
+end
+
+function xml.setcdata(e,str) -- also setcomment
+ e.dt = { {
+ tg = "@cd@",
+ ns = "",
+ special = true,
+ at = { },
+ dt = { str },
+ } }
+end
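+
+-- Usage sketch: both helpers work on an element (or the converted root) and
+-- build the special @cm@/@cd@ nodes directly.
+--
+-- local root = xml.convert("<doc><code/></doc>")
+-- xml.insertcomment(root,"generated, do not edit",1)
+-- xml.setcdata(xml.first(root,"code"),"if a < b then print(a) end")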
+
+-- maybe helpers like this will move to an autoloader
+
+function xml.separate(x,pattern)
+ local collected = xmlapplylpath(x,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local d = e.dt
+ if d == x then
+ report_xml("warning: xml.separate changes root")
+ x = d
+ end
+ local t, n = { "\n" }, 1
+ local i, nd = 1, #d
+ while i <= nd do
+ while i <= nd do
+ local di = d[i]
+ if type(di) == "string" then
+ if di == "\n" or find(di,"^%s+$") then -- first test is speedup
+ i = i + 1
+ else
+ d[i] = strip(di)
+ break
+ end
+ else
+ break
+ end
+ end
+ if i > nd then
+ break
+ end
+ t[n+1] = "\n"
+ t[n+2] = d[i]
+ t[n+3] = "\n"
+ n = n + 3
+ i = i + 1
+ end
+ t[n+1] = "\n"
+ setmetatable(t,getmetatable(d))
+ e.dt = t
+ end
+ end
+ return x
+end
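+
+-- Usage sketch: puts each child element of the matched node on its own line
+-- by interleaving newlines, which gives nicer serialized output.
+--
+-- local root = xml.convert("<doc><a/><b/></doc>")
+-- xml.separate(root,"doc")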
+
+--
+
+local helpers = xml.helpers or { }
+xml.helpers = helpers
+
+local function normal(e,action)
+ local edt = e.dt
+ if edt then
+ for i=1,#edt do
+ local str = edt[i]
+ if type(str) == "string" and str ~= "" then
+ edt[i] = action(str)
+ end
+ end
+ end
+end
+
+local function recurse(e,action)
+ local edt = e.dt
+ if edt then
+ for i=1,#edt do
+ local str = edt[i]
+ if type(str) ~= "string" then
+ recurse(str,action,recursive)
+ elseif str ~= "" then
+ edt[i] = action(str)
+ end
+ end
+ end
+end
+
+function helpers.recursetext(collected,action,recursive)
+ if recursive then
+ for i=1,#collected do
+ recurse(collected[i],action)
+ end
+ else
+ for i=1,#collected do
+ normal(collected[i],action)
+ end
+ end
+end
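+
+-- Usage sketch: applies a string action to the text of collected elements,
+-- descending into nested elements when recursive is true.
+--
+-- local root = xml.convert("<doc><p>hello <em>world</em></p></doc>")
+-- xml.helpers.recursetext(xml.collect(root,"p") or { },string.upper,true)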
diff --git a/tex/context/base/lxml-css.lua b/tex/context/base/lxml-css.lua
index c5a85c2bd..f9542029f 100644
--- a/tex/context/base/lxml-css.lua
+++ b/tex/context/base/lxml-css.lua
@@ -1,158 +1,158 @@
-if not modules then modules = { } end modules ['lxml-css'] = {
- version = 1.001,
- comment = "companion to lxml-css.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local tonumber, rawset = tonumber, rawset
-local lower, format = string.lower, string.format
-local P, S, C, R, Cb, Cg, Carg, Ct, Cc, Cf = lpeg.P, lpeg.S, lpeg.C, lpeg.R, lpeg.Cb, lpeg.Cg, lpeg.Carg, lpeg.Ct, lpeg.Cc, lpeg.Cf
-local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-
-xml.css = xml.css or { }
-local css = xml.css
-
-if not number.dimenfactors then
- require("util-dim.lua")
-end
-
-local dimenfactors = number.dimenfactors
-local bpf = 1/dimenfactors.bp
-local cmf = 1/dimenfactors.cm
-local mmf = 1/dimenfactors.mm
-local inf = 1/dimenfactors["in"]
-
-local percentage, exheight, emwidth, pixels
-
-if tex then
-
- local exheights = fonts.hashes.exheights
- local emwidths = fonts.hashes.emwidths
-
- percentage = function(s,pcf) return tonumber(s) * (pcf or tex.hsize) end
- exheight = function(s,exf) return tonumber(s) * (exf or exheights[true]) end
- emwidth = function(s,emf) return tonumber(s) * (emf or emwidths[true]) end
- pixels = function(s,pxf) return tonumber(s) * (pxf or emwidths[true]/300) end
-
-else
-
- local function generic(s,unit) return tonumber(s) * unit end
-
- percentage = generic
- exheight = generic
- emwidth = generic
- pixels = generic
-
-end
-
-local validdimen = Cg(lpegpatterns.number,'a') * (
- Cb('a') * P("pt") / function(s) return tonumber(s) * bpf end
- + Cb('a') * P("cm") / function(s) return tonumber(s) * cmf end
- + Cb('a') * P("mm") / function(s) return tonumber(s) * mmf end
- + Cb('a') * P("in") / function(s) return tonumber(s) * inf end
- + Cb('a') * P("px") * Carg(1) / pixels
- + Cb('a') * P("%") * Carg(2) / percentage
- + Cb('a') * P("ex") * Carg(3) / exheight
- + Cb('a') * P("em") * Carg(4) / emwidth
- + Cb('a') * Carg(1) / pixels
- )
-
-local pattern = (validdimen * lpegpatterns.whitespace^0)^1
-
--- todo: default if ""
-
-local function dimension(str,pixel,percent,exheight,emwidth)
- return (lpegmatch(pattern,str,1,pixel,percent,exheight,emwidth))
-end
-
-local function padding(str,pixel,percent,exheight,emwidth)
- local top, bottom, left, right = lpegmatch(pattern,str,1,pixel,percent,exheight,emwidth)
- if not bottom then
- bottom, left, right = top, top, top
- elseif not left then
- bottom, left, right = top, bottom, bottom
- elseif not right then
- bottom, left, right = left, bottom, bottom
- end
- return top, bottom, left, right
-end
-
-css.dimension = dimension
-css.padding = padding
-
--- local hsize = 655360*100
--- local exheight = 65536*4
--- local emwidth = 65536*10
--- local pixel = emwidth/100
---
--- print(padding("10px",pixel,hsize,exheight,emwidth))
--- print(padding("10px 20px",pixel,hsize,exheight,emwidth))
--- print(padding("10px 20px 30px",pixel,hsize,exheight,emwidth))
--- print(padding("10px 20px 30px 40px",pixel,hsize,exheight,emwidth))
---
--- print(padding("10%",pixel,hsize,exheight,emwidth))
--- print(padding("10% 20%",pixel,hsize,exheight,emwidth))
--- print(padding("10% 20% 30%",pixel,hsize,exheight,emwidth))
--- print(padding("10% 20% 30% 40%",pixel,hsize,exheight,emwidth))
---
--- print(padding("10",pixel,hsize,exheight,emwidth))
--- print(padding("10 20",pixel,hsize,exheight,emwidth))
--- print(padding("10 20 30",pixel,hsize,exheight,emwidth))
--- print(padding("10 20 30 40",pixel,hsize,exheight,emwidth))
---
--- print(padding("10pt",pixel,hsize,exheight,emwidth))
--- print(padding("10pt 20pt",pixel,hsize,exheight,emwidth))
--- print(padding("10pt 20pt 30pt",pixel,hsize,exheight,emwidth))
--- print(padding("10pt 20pt 30pt 40pt",pixel,hsize,exheight,emwidth))
-
--- print(padding("0",pixel,hsize,exheight,emwidth))
-
--- local currentfont = font.current
--- local texdimen = tex.dimen
--- local hashes = fonts.hashes
--- local quads = hashes.quads
--- local xheights = hashes.xheights
---
--- local function padding(str)
--- local font = currentfont()
--- local exheight = xheights[font]
--- local emwidth = quads[font]
--- local hsize = texdimen.hsize/100
--- local pixel = emwidth/100
--- return padding(str,pixel,hsize,exheight,emwidth)
--- end
---
--- function css.simplepadding(str)
--- context("%ssp",padding(str,pixel,hsize,exheight,emwidth))
--- end
-
-local pattern = Cf( Ct("") * (
- Cg(
- Cc("style") * (
- C("italic")
- + C("oblique")
- + C("slanted") / "oblique"
- )
- + Cc("variant") * (
- (C("smallcaps") + C("caps")) / "small-caps"
- )
- + Cc("weight") *
- C("bold")
- + Cc("family") * (
- (C("mono") + C("type")) / "monospace" -- just ignore the "space(d)"
- + (C("sansserif") + C("sans")) / "sans-serif" -- match before serif
- + C("serif")
- )
- ) + P(1)
-)^0 , rawset)
-
-function css.fontspecification(str)
- return str and lpegmatch(pattern,lower(str))
-end
-
-function css.colorspecification(str)
- local c = str and attributes.colors.values[tonumber(str)]
- return c and format("rgb(%s%%,%s%%,%s%%)",c[3]*100,c[4]*100,c[5]*100)
-end
+if not modules then modules = { } end modules ['lxml-css'] = {
+ version = 1.001,
+ comment = "companion to lxml-css.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local tonumber, rawset = tonumber, rawset
+local lower, format = string.lower, string.format
+local P, S, C, R, Cb, Cg, Carg, Ct, Cc, Cf = lpeg.P, lpeg.S, lpeg.C, lpeg.R, lpeg.Cb, lpeg.Cg, lpeg.Carg, lpeg.Ct, lpeg.Cc, lpeg.Cf
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+
+xml.css = xml.css or { }
+local css = xml.css
+
+if not number.dimenfactors then
+ require("util-dim.lua")
+end
+
+local dimenfactors = number.dimenfactors
+local bpf = 1/dimenfactors.bp
+local cmf = 1/dimenfactors.cm
+local mmf = 1/dimenfactors.mm
+local inf = 1/dimenfactors["in"]
+
+local percentage, exheight, emwidth, pixels
+
+if tex then
+
+ local exheights = fonts.hashes.exheights
+ local emwidths = fonts.hashes.emwidths
+
+ percentage = function(s,pcf) return tonumber(s) * (pcf or tex.hsize) end
+ exheight = function(s,exf) return tonumber(s) * (exf or exheights[true]) end
+ emwidth = function(s,emf) return tonumber(s) * (emf or emwidths[true]) end
+ pixels = function(s,pxf) return tonumber(s) * (pxf or emwidths[true]/300) end
+
+else
+
+ local function generic(s,unit) return tonumber(s) * unit end
+
+ percentage = generic
+ exheight = generic
+ emwidth = generic
+ pixels = generic
+
+end
+
+local validdimen = Cg(lpegpatterns.number,'a') * (
+ Cb('a') * P("pt") / function(s) return tonumber(s) * bpf end
+ + Cb('a') * P("cm") / function(s) return tonumber(s) * cmf end
+ + Cb('a') * P("mm") / function(s) return tonumber(s) * mmf end
+ + Cb('a') * P("in") / function(s) return tonumber(s) * inf end
+ + Cb('a') * P("px") * Carg(1) / pixels
+ + Cb('a') * P("%") * Carg(2) / percentage
+ + Cb('a') * P("ex") * Carg(3) / exheight
+ + Cb('a') * P("em") * Carg(4) / emwidth
+ + Cb('a') * Carg(1) / pixels
+ )
+
+local pattern = (validdimen * lpegpatterns.whitespace^0)^1
+
+-- todo: default if ""
+
+local function dimension(str,pixel,percent,exheight,emwidth)
+ return (lpegmatch(pattern,str,1,pixel,percent,exheight,emwidth))
+end
+
+local function padding(str,pixel,percent,exheight,emwidth)
+ local top, bottom, left, right = lpegmatch(pattern,str,1,pixel,percent,exheight,emwidth)
+ if not bottom then
+ bottom, left, right = top, top, top
+ elseif not left then
+ bottom, left, right = top, bottom, bottom
+ elseif not right then
+ bottom, left, right = left, bottom, bottom
+ end
+ return top, bottom, left, right
+end
+
+css.dimension = dimension
+css.padding = padding
+
+-- local hsize = 655360*100
+-- local exheight = 65536*4
+-- local emwidth = 65536*10
+-- local pixel = emwidth/100
+--
+-- print(padding("10px",pixel,hsize,exheight,emwidth))
+-- print(padding("10px 20px",pixel,hsize,exheight,emwidth))
+-- print(padding("10px 20px 30px",pixel,hsize,exheight,emwidth))
+-- print(padding("10px 20px 30px 40px",pixel,hsize,exheight,emwidth))
+--
+-- print(padding("10%",pixel,hsize,exheight,emwidth))
+-- print(padding("10% 20%",pixel,hsize,exheight,emwidth))
+-- print(padding("10% 20% 30%",pixel,hsize,exheight,emwidth))
+-- print(padding("10% 20% 30% 40%",pixel,hsize,exheight,emwidth))
+--
+-- print(padding("10",pixel,hsize,exheight,emwidth))
+-- print(padding("10 20",pixel,hsize,exheight,emwidth))
+-- print(padding("10 20 30",pixel,hsize,exheight,emwidth))
+-- print(padding("10 20 30 40",pixel,hsize,exheight,emwidth))
+--
+-- print(padding("10pt",pixel,hsize,exheight,emwidth))
+-- print(padding("10pt 20pt",pixel,hsize,exheight,emwidth))
+-- print(padding("10pt 20pt 30pt",pixel,hsize,exheight,emwidth))
+-- print(padding("10pt 20pt 30pt 40pt",pixel,hsize,exheight,emwidth))
+
+-- print(padding("0",pixel,hsize,exheight,emwidth))
+
+-- local currentfont = font.current
+-- local texdimen = tex.dimen
+-- local hashes = fonts.hashes
+-- local quads = hashes.quads
+-- local xheights = hashes.xheights
+--
+-- local function padding(str)
+-- local font = currentfont()
+-- local exheight = xheights[font]
+-- local emwidth = quads[font]
+-- local hsize = texdimen.hsize/100
+-- local pixel = emwidth/100
+-- return padding(str,pixel,hsize,exheight,emwidth)
+-- end
+--
+-- function css.simplepadding(str)
+-- context("%ssp",padding(str,pixel,hsize,exheight,emwidth))
+-- end
+
+local pattern = Cf( Ct("") * (
+ Cg(
+ Cc("style") * (
+ C("italic")
+ + C("oblique")
+ + C("slanted") / "oblique"
+ )
+ + Cc("variant") * (
+ (C("smallcaps") + C("caps")) / "small-caps"
+ )
+ + Cc("weight") *
+ C("bold")
+ + Cc("family") * (
+ (C("mono") + C("type")) / "monospace" -- just ignore the "space(d)"
+ + (C("sansserif") + C("sans")) / "sans-serif" -- match before serif
+ + C("serif")
+ )
+ ) + P(1)
+)^0 , rawset)
+
+function css.fontspecification(str)
+ return str and lpegmatch(pattern,lower(str))
+end
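+
+-- Usage sketch: the parser maps loose keywords onto css-like keys, so the
+-- result here is roughly { weight = "bold", style = "italic", family = "sans-serif" }.
+--
+-- local spec = xml.css.fontspecification("bold italic sans")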
+
+function css.colorspecification(str)
+ local c = str and attributes.colors.values[tonumber(str)]
+ return c and format("rgb(%s%%,%s%%,%s%%)",c[3]*100,c[4]*100,c[5]*100)
+end
diff --git a/tex/context/base/lxml-ctx.lua b/tex/context/base/lxml-ctx.lua
index 968dbda71..2694839dd 100644
--- a/tex/context/base/lxml-ctx.lua
+++ b/tex/context/base/lxml-ctx.lua
@@ -1,135 +1,135 @@
-if not modules then modules = { } end modules ['lxml-ctx'] = {
- version = 1.001,
- comment = "companion to lxml-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- is this still used?
-
-local format, find = string.format, string.find
-
-local xml = xml
-
-xml.ctx = { }
-xml.ctx.enhancers = { }
-
--- hashen
-
-function xml.ctx.enhancers.compound(root,lpath,before,tokens,after) -- todo lpeg
- local before = before or "[%a%d][%a%d][%a%d]"
- local tokens = tokens or "[%/%-]"
- local after = after or "[%a%d][%a%d][%a%d]"
- local pattern = "(" .. before .. ")(" .. tokens .. ")(" .. after .. ")"
- local action = function(a,b,c)
- return a .. "" .. c -- formatters["%s%s"](a,b,c)
- end
- xml.enhance(root,lpath,pattern,action) -- still present?
-end
-
-local loaded = { }
-
-local nodesettostring = xml.nodesettostring
-
--- maybe use detokenize instead of \type
-
-function xml.ctx.tshow(specification)
- local pattern = specification.pattern
- local xmlroot = specification.xmlroot
- local attribute = specification.attribute
- if context then
- local xmlpattern = pattern
- if not find(xmlpattern,"^[%a]+://") then
- xmlpattern = "xml://" .. pattern
- end
- local parsed = xml.lpath(xmlpattern)
- local titlecommand = specification.title or "type"
- if parsed.state then
- context[titlecommand]("pattern: " .. pattern .. " (".. parsed.state .. ")")
- else
- context[titlecommand]("pattern: " .. pattern)
- end
- context.starttabulate({ "|Tr|Tl|Tp|" } )
- if specification.warning then
- local comment = parsed.comment
- if comment then
- for k=1,#comment do
- context.NC()
- context("!")
- context.NC()
- context.rlap(comment[k])
- context.NR()
- end
- context.TB()
- end
- end
- for p=1,#parsed do
- local pp = parsed[p]
- local kind = pp.kind
- context.NC()
- context(p)
- context.NC()
- context(kind)
- context.NC()
- if kind == "axis" then
- context(pp.axis)
- elseif kind == "nodes" then
- context(nodesettostring(pp.nodes,pp.nodetest))
- elseif kind == "expression" then
---~ context("%s => %s",pp.expression,pp.converted)
- context(pp.expression)
- elseif kind == "finalizer" then
- context("%s(%s)",pp.name,pp.arguments)
- elseif kind == "error" and pp.error then
- context(pp.error)
- end
- context.NC()
- context.NR()
- end
- context.stoptabulate()
- if xmlroot and xmlroot ~= "" then
- if not loaded[xmlroot] then
- loaded[xmlroot] = xml.convert(buffers.getcontent(xmlroot))
- end
- local collected = xml.filter(loaded[xmlroot],xmlpattern)
- if collected then
- local tc = type(collected)
- if not tc then
- -- skip
- else
- context.blank()
- context.type("result : ")
- if tc == "string" then
- context.type(collected)
- elseif tc == "table" then
- if collected.tg then
- collected = { collected }
- end
- for c=1,#collected do
- local cc = collected[c]
- if attribute and attribute ~= "" then
- local ccat = cc.at
- local a = ccat and ccat[attribute]
- if a and a ~= "" then
- context.type(a)
- context.type(">")
- end
- end
- local ccns = cc.ns
- if ccns == "" then
- context.type(cc.tg)
- else
- context.type(ccns .. ":" .. cc.tg)
- end
- context.space()
- end
- else
- context.type(tostring(tc))
- end
- context.blank()
- end
- end
- end
- end
-end
+if not modules then modules = { } end modules ['lxml-ctx'] = {
+ version = 1.001,
+ comment = "companion to lxml-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- is this still used?
+
+local format, find = string.format, string.find
+
+local xml = xml
+
+xml.ctx = { }
+xml.ctx.enhancers = { }
+
+-- hashen
+
+function xml.ctx.enhancers.compound(root,lpath,before,tokens,after) -- todo lpeg
+ local before = before or "[%a%d][%a%d][%a%d]"
+ local tokens = tokens or "[%/%-]"
+ local after = after or "[%a%d][%a%d][%a%d]"
+ local pattern = "(" .. before .. ")(" .. tokens .. ")(" .. after .. ")"
+ local action = function(a,b,c)
+ return a .. "" .. c -- formatters["%s%s"](a,b,c)
+ end
+ xml.enhance(root,lpath,pattern,action) -- still present?
+end
+
+local loaded = { }
+
+local nodesettostring = xml.nodesettostring
+
+-- maybe use detokenize instead of \type
+
+function xml.ctx.tshow(specification)
+ local pattern = specification.pattern
+ local xmlroot = specification.xmlroot
+ local attribute = specification.attribute
+ if context then
+ local xmlpattern = pattern
+ if not find(xmlpattern,"^[%a]+://") then
+ xmlpattern = "xml://" .. pattern
+ end
+ local parsed = xml.lpath(xmlpattern)
+ local titlecommand = specification.title or "type"
+ if parsed.state then
+ context[titlecommand]("pattern: " .. pattern .. " (".. parsed.state .. ")")
+ else
+ context[titlecommand]("pattern: " .. pattern)
+ end
+ context.starttabulate({ "|Tr|Tl|Tp|" } )
+ if specification.warning then
+ local comment = parsed.comment
+ if comment then
+ for k=1,#comment do
+ context.NC()
+ context("!")
+ context.NC()
+ context.rlap(comment[k])
+ context.NR()
+ end
+ context.TB()
+ end
+ end
+ for p=1,#parsed do
+ local pp = parsed[p]
+ local kind = pp.kind
+ context.NC()
+ context(p)
+ context.NC()
+ context(kind)
+ context.NC()
+ if kind == "axis" then
+ context(pp.axis)
+ elseif kind == "nodes" then
+ context(nodesettostring(pp.nodes,pp.nodetest))
+ elseif kind == "expression" then
+--~ context("%s => %s",pp.expression,pp.converted)
+ context(pp.expression)
+ elseif kind == "finalizer" then
+ context("%s(%s)",pp.name,pp.arguments)
+ elseif kind == "error" and pp.error then
+ context(pp.error)
+ end
+ context.NC()
+ context.NR()
+ end
+ context.stoptabulate()
+ if xmlroot and xmlroot ~= "" then
+ if not loaded[xmlroot] then
+ loaded[xmlroot] = xml.convert(buffers.getcontent(xmlroot))
+ end
+ local collected = xml.filter(loaded[xmlroot],xmlpattern)
+ if collected then
+ local tc = type(collected)
+ if not tc then
+ -- skip
+ else
+ context.blank()
+ context.type("result : ")
+ if tc == "string" then
+ context.type(collected)
+ elseif tc == "table" then
+ if collected.tg then
+ collected = { collected }
+ end
+ for c=1,#collected do
+ local cc = collected[c]
+ if attribute and attribute ~= "" then
+ local ccat = cc.at
+ local a = ccat and ccat[attribute]
+ if a and a ~= "" then
+ context.type(a)
+ context.type(">")
+ end
+ end
+ local ccns = cc.ns
+ if ccns == "" then
+ context.type(cc.tg)
+ else
+ context.type(ccns .. ":" .. cc.tg)
+ end
+ context.space()
+ end
+ else
+ context.type(tostring(tc))
+ end
+ context.blank()
+ end
+ end
+ end
+ end
+end
diff --git a/tex/context/base/lxml-dir.lua b/tex/context/base/lxml-dir.lua
index 3c68664ae..4f0f61b71 100644
--- a/tex/context/base/lxml-dir.lua
+++ b/tex/context/base/lxml-dir.lua
@@ -1,114 +1,114 @@
-if not modules then modules = { } end modules ['lxml-dir'] = {
- version = 1.001,
- comment = "this module is the basis for the lxml-* ones",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local gsub = string.gsub
-local formatters = string.formatters
-
---
---
---
---
---
---
---
-
-local lxml, context = lxml, context
-
-local getid = lxml.getid
-
-lxml.directives = lxml.directives or { }
-local directives = lxml.directives
-
-local report_lxml = logs.reporter("xml","tex")
-
-local data = {
- setup = { },
- before = { },
- after = { }
-}
-
-local function load_setup(filename)
- local fullname = resolvers.findtexfile(filename) or ""
- if fullname ~= "" then
- filename = fullname
- end
- local collection = xml.applylpath({ getid(xml.load(filename)) },"directive") -- is { } needed ?
- if collection then
- local valid = 0
- for i=1,#collection do
- local at = collection[i].at
- local attribute, value, element = at.attribute or "", at.value or "", at.element or '*'
- local setup, before, after = at.setup or "", at.before or "", at.after or ""
- if attribute ~= "" and value ~= "" then
- local key = formatters["%s::%s::%s"](element,attribute,value)
- local t = data[key] or { }
- if setup ~= "" then t.setup = setup end
- if before ~= "" then t.before = before end
- if after ~= "" then t.after = after end
- data[key] = t
- valid = valid + 1
- end
- end
- report_lxml("%s directives found in %a, valid %s",#collection,filename,valid)
- else
- report_lxml("no directives found in %a",filename)
- end
-end
-
-local function handle_setup(category,root,attribute,element)
- root = getid(root)
- if attribute then
- local value = root.at[attribute]
- if value then
- if not element then
- local ns, tg = root.rn or root.ns, root.tg
- if ns == "" then
- element = tg
- else
- element = ns .. ':' .. tg
- end
- end
- local setup = data[formatters["%s::%s::%s"](element,attribute,value)]
- if setup then
- setup = setup[category]
- end
- if setup then
- context.directsetup(setup)
- else
- setup = data[formatters["%s::%s::*"](element,attribute)]
- if setup then
- setup = setup[category]
- end
- if setup then
- setup = gsub(setup,'%*',value)
- context.directsetup(setup)
- end
- end
- end
- end
-end
-
-directives.load = load_setup
-directives.handle = handle_setup
-
-function directives.setup(root,attribute,element)
- handle_setup('setup',root,attribute,element)
-end
-function directives.before(root,attribute,element)
- handle_setup('before',root,attribute,element)
-end
-function directives.after(root,attribute,element)
- handle_setup('after',root,attribute,element)
-end
+if not modules then modules = { } end modules ['lxml-dir'] = {
+ version = 1.001,
+ comment = "this module is the basis for the lxml-* ones",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local gsub = string.gsub
+local formatters = string.formatters
+
+--
+--
+--
+--
+--
+--
+--
+
+local lxml, context = lxml, context
+
+local getid = lxml.getid
+
+lxml.directives = lxml.directives or { }
+local directives = lxml.directives
+
+local report_lxml = logs.reporter("xml","tex")
+
+local data = {
+ setup = { },
+ before = { },
+ after = { }
+}
+
+local function load_setup(filename)
+ local fullname = resolvers.findtexfile(filename) or ""
+ if fullname ~= "" then
+ filename = fullname
+ end
+ local collection = xml.applylpath({ getid(xml.load(filename)) },"directive") -- is { } needed ?
+ if collection then
+ local valid = 0
+ for i=1,#collection do
+ local at = collection[i].at
+ local attribute, value, element = at.attribute or "", at.value or "", at.element or '*'
+ local setup, before, after = at.setup or "", at.before or "", at.after or ""
+ if attribute ~= "" and value ~= "" then
+ local key = formatters["%s::%s::%s"](element,attribute,value)
+ local t = data[key] or { }
+ if setup ~= "" then t.setup = setup end
+ if before ~= "" then t.before = before end
+ if after ~= "" then t.after = after end
+ data[key] = t
+ valid = valid + 1
+ end
+ end
+ report_lxml("%s directives found in %a, valid %s",#collection,filename,valid)
+ else
+ report_lxml("no directives found in %a",filename)
+ end
+end
+
+local function handle_setup(category,root,attribute,element)
+ root = getid(root)
+ if attribute then
+ local value = root.at[attribute]
+ if value then
+ if not element then
+ local ns, tg = root.rn or root.ns, root.tg
+ if ns == "" then
+ element = tg
+ else
+ element = ns .. ':' .. tg
+ end
+ end
+ local setup = data[formatters["%s::%s::%s"](element,attribute,value)]
+ if setup then
+ setup = setup[category]
+ end
+ if setup then
+ context.directsetup(setup)
+ else
+ setup = data[formatters["%s::%s::*"](element,attribute)]
+ if setup then
+ setup = setup[category]
+ end
+ if setup then
+ setup = gsub(setup,'%*',value)
+ context.directsetup(setup)
+ end
+ end
+ end
+ end
+end
+
+directives.load = load_setup
+directives.handle = handle_setup
+
+function directives.setup(root,attribute,element)
+ handle_setup('setup',root,attribute,element)
+end
+function directives.before(root,attribute,element)
+ handle_setup('before',root,attribute,element)
+end
+function directives.after(root,attribute,element)
+ handle_setup('after',root,attribute,element)
+end
diff --git a/tex/context/base/lxml-ent.lua b/tex/context/base/lxml-ent.lua
index a5c5bc389..e9fb0e2b8 100644
--- a/tex/context/base/lxml-ent.lua
+++ b/tex/context/base/lxml-ent.lua
@@ -1,57 +1,57 @@
-if not modules then modules = { } end modules ['lxml-ent'] = {
- version = 1.001,
- comment = "this module is the basis for the lxml-* ones",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local type, next, tonumber = type, next, tonumber
-local byte, format = string.byte, string.format
-local utfchar = utf.char
-local lpegmatch = lpeg.match
-
---[[ldx--
-We provide (at least here) two entity handlers. The more extensive
-resolver consults a hash first, tries to convert to UTF next,
-and finally calls a handler when defined. When this all fails, the
-original entity is returned.
-
-We do things differently now but it's still somewhat experimental.
---ldx]]--
-
-local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
-
-local report_xml = logs.reporter("xml")
-
-local xml = xml
-
-xml.entities = xml.entities or { }
-
-storage.register("xml/entities", xml.entities, "xml.entities" )
-
-local entities = xml.entities -- maybe some day properties
-
-function xml.registerentity(key,value)
- entities[key] = value
- if trace_entities then
- report_xml("registering entity %a as %a",key,value)
- end
-end
-
-if characters and characters.entities then
-
- function characters.registerentities(forcecopy)
- if forcecopy then
- table.setmetatableindex(entities,nil)
- for name, value in next, characters.entities do
- if not entities[name] then
- entities[name] = value
- end
- end
- else
- table.setmetatableindex(entities,characters.entities)
- end
- end
-
-end
+if not modules then modules = { } end modules ['lxml-ent'] = {
+ version = 1.001,
+ comment = "this module is the basis for the lxml-* ones",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local type, next, tonumber = type, next, tonumber
+local byte, format = string.byte, string.format
+local utfchar = utf.char
+local lpegmatch = lpeg.match
+
+--[[ldx--
+We provide (at least here) two entity handlers. The more extensive
+resolver consults a hash first, tries to convert to UTF next,
+and finally calls a handler when defined. When this all fails, the
+original entity is returned.
+
+We do things differently now but it's still somewhat experimental.
+--ldx]]--
+
+local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
+
+local report_xml = logs.reporter("xml")
+
+local xml = xml
+
+xml.entities = xml.entities or { }
+
+storage.register("xml/entities", xml.entities, "xml.entities" )
+
+local entities = xml.entities -- maybe some day properties
+
+function xml.registerentity(key,value)
+ entities[key] = value
+ if trace_entities then
+ report_xml("registering entity %a as %a",key,value)
+ end
+end
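+
+-- Usage sketch: registered entities end up in the shared hash (stored via
+-- storage.register above), so later lookups resolve them directly.
+--
+-- xml.registerentity("tex","TeX")
+-- xml.registerentity("nbsp",utf.char(0xA0))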
+
+if characters and characters.entities then
+
+ function characters.registerentities(forcecopy)
+ if forcecopy then
+ table.setmetatableindex(entities,nil)
+ for name, value in next, characters.entities do
+ if not entities[name] then
+ entities[name] = value
+ end
+ end
+ else
+ table.setmetatableindex(entities,characters.entities)
+ end
+ end
+
+end
diff --git a/tex/context/base/lxml-inf.lua b/tex/context/base/lxml-inf.lua
index 8f1157c7d..2c130791a 100644
--- a/tex/context/base/lxml-inf.lua
+++ b/tex/context/base/lxml-inf.lua
@@ -1,58 +1,58 @@
-if not modules then modules = { } end modules ['lxml-inf'] = {
- version = 1.001,
- comment = "this module is the basis for the lxml-* ones",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This file will be loaded runtime by x-pending.tex.
-
-local concat = table.concat
-
-local xmlwithelements = xml.withelements
-local getid = lxml.getid
-
-local status, stack
-
-local function get(e,d)
- local ns, tg = e.ns, e.tg
- local name = tg
- if ns ~= "" then name = ns .. ":" .. tg end
- stack[d] = name
- local ec = e.command
- if ec == true then
- ec = "system: text"
- elseif ec == false then
- ec = "system: skip"
- elseif ec == nil then
- ec = "system: not set"
- elseif type(ec) == "string" then
- ec = "setup: " .. ec
- else -- function
- ec = tostring(ec)
- end
- local tag = concat(stack," => ",1,d)
- local s = status[tag]
- if not s then
- s = { }
- status[tag] = s
- end
- s[ec] = (s[ec] or 0) + 1
-end
-
-local function get_command_status(id)
- status, stack = {}, {}
- if id then
- xmlwithelements(getid(id),get)
- return status
- else
- local t = { }
- for id, _ in next, loaded do
- t[id] = get_command_status(id)
- end
- return t
- end
-end
-
-lxml.get_command_status = get_command_status
+if not modules then modules = { } end modules ['lxml-inf'] = {
+ version = 1.001,
+ comment = "this module is the basis for the lxml-* ones",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This file will be loaded runtime by x-pending.tex.
+
+local concat = table.concat
+
+local xmlwithelements = xml.withelements
+local getid = lxml.getid
+
+local status, stack
+
+local function get(e,d)
+ local ns, tg = e.ns, e.tg
+ local name = tg
+ if ns ~= "" then name = ns .. ":" .. tg end
+ stack[d] = name
+ local ec = e.command
+ if ec == true then
+ ec = "system: text"
+ elseif ec == false then
+ ec = "system: skip"
+ elseif ec == nil then
+ ec = "system: not set"
+ elseif type(ec) == "string" then
+ ec = "setup: " .. ec
+ else -- function
+ ec = tostring(ec)
+ end
+ local tag = concat(stack," => ",1,d)
+ local s = status[tag]
+ if not s then
+ s = { }
+ status[tag] = s
+ end
+ s[ec] = (s[ec] or 0) + 1
+end
+
+local function get_command_status(id)
+ status, stack = {}, {}
+ if id then
+ xmlwithelements(getid(id),get)
+ return status
+ else
+ local t = { }
+ for id, _ in next, loaded do
+ t[id] = get_command_status(id)
+ end
+ return t
+ end
+end
+
+lxml.get_command_status = get_command_status
diff --git a/tex/context/base/lxml-lpt.lua b/tex/context/base/lxml-lpt.lua
index 51ab321b9..2f57ced5b 100644
--- a/tex/context/base/lxml-lpt.lua
+++ b/tex/context/base/lxml-lpt.lua
@@ -1,1466 +1,1466 @@
-if not modules then modules = { } end modules ['lxml-lpt'] = {
- version = 1.001,
- comment = "this module is the basis for the lxml-* ones",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- e.ni is only valid after a filter run
--- todo: B/C/[get first match]
-
-local concat, remove, insert = table.concat, table.remove, table.insert
-local type, next, tonumber, tostring, setmetatable, load, select = type, next, tonumber, tostring, setmetatable, load, select
-local format, upper, lower, gmatch, gsub, find, rep = string.format, string.upper, string.lower, string.gmatch, string.gsub, string.find, string.rep
-local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-
-local setmetatableindex = table.setmetatableindex
-local formatters = string.formatters -- no need (yet) as paths are cached anyway
-
--- beware, this is not xpath ... e.g. position is different (currently) and
--- we have reverse-sibling as reversed preceding sibling
-
---[[ldx--
-This module can be used stand alone but also inside MkIV, in
-which case it hooks into the tracker code. Therefore we provide a few
-functions that set the tracers. Here we overload a previously defined
-function.
-
-If I can get in the mood I will make a variant that is XSLT compliant
-but I wonder if it makes sense.
---ldx]]--
-
---[[ldx--
-Especially the lpath code is experimental, we will support some of xpath, but
-only things that make sense for us; as compensation it is possible to hook in your
-own functions. Apart from preprocessing content for ConTeXt we also need
-this module for process management, like handling CTX and RLX
-files.
-
-
-a/b/c /*/c
-a/b/c/first() a/b/c/last() a/b/c/index(n) a/b/c/index(-n)
-a/b/c/text() a/b/c/text(1) a/b/c/text(-1) a/b/c/text(n)
-
---ldx]]--
-
-local trace_lpath = false if trackers then trackers.register("xml.path", function(v) trace_lpath = v end) end
-local trace_lparse = false if trackers then trackers.register("xml.parse", function(v) trace_lparse = v end) end
-local trace_lprofile = false if trackers then trackers.register("xml.profile", function(v) trace_lpath = v trace_lparse = v trace_lprofile = v end) end
-
-local report_lpath = logs.reporter("xml","lpath")
-
---[[ldx--
-We've now arrived at an interesting part: accessing the tree using a subset
-of XPath and since we're not compatible we call it lpath. We
-will explain more about its usage in other documents.
---ldx]]--
-
-local xml = xml
-
-local lpathcalls = 0 function xml.lpathcalls () return lpathcalls end
-local lpathcached = 0 function xml.lpathcached() return lpathcached end
-
-xml.functions = xml.functions or { } -- internal
-local functions = xml.functions
-
-xml.expressions = xml.expressions or { } -- in expressions
-local expressions = xml.expressions
-
-xml.finalizers = xml.finalizers or { } -- fast do-with ... (with return value other than collection)
-local finalizers = xml.finalizers
-
-xml.specialhandler = xml.specialhandler or { }
-local specialhandler = xml.specialhandler
-
-lpegpatterns.xml = lpegpatterns.xml or { }
-local xmlpatterns = lpegpatterns.xml
-
-finalizers.xml = finalizers.xml or { }
-finalizers.tex = finalizers.tex or { }
-
-local function fallback (t, name)
- local fn = finalizers[name]
- if fn then
- t[name] = fn
- else
- report_lpath("unknown sub finalizer %a",name)
- fn = function() end
- end
- return fn
-end
-
-setmetatableindex(finalizers.xml, fallback)
-setmetatableindex(finalizers.tex, fallback)
-
-xml.defaultprotocol = "xml"
-
--- as xsl does not follow xpath completely here we will also
--- be more liberal especially with regards to the use of | and
--- the rootpath:
---
--- test : all 'test' under current
--- /test : 'test' relative to current
--- a|b|c : set of names
--- (a|b|c) : idem
--- ! : not
---
--- after all, we're not doing transformations but filtering. in
--- addition we provide filter functions (last bit)
---
--- todo: optimizer
---
--- .. : parent
--- * : all kids
--- / : anchor here
--- // : /**/
--- ** : all in between
---
--- so far we had (more practical as we don't transform)
---
--- {/test} : kids 'test' under current node
--- {test} : any kid with tag 'test'
--- {//test} : same as above
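---
--- a minimal usage sketch, assuming a tree loaded with xml.convert or xml.load;
--- patterns like the ones above are passed to xml.filter (defined further down):
---
--- local root = xml.convert("<a><b><c>one</c><c>two</c></b></a>")
--- local cs   = xml.filter(root,"a/b/c") -- both 'c' elements
--- local bs   = xml.filter(root,"b")     -- any 'b' below the current node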
-
--- evaluator (needs to be redone, for the moment copied)
-
--- todo: apply_axis(list,notable) and collection vs single
-
-local apply_axis = { }
-
-apply_axis['root'] = function(list)
- local collected = { }
- for l=1,#list do
- local ll = list[l]
- local rt = ll
- while ll do
- ll = ll.__p__
- if ll then
- rt = ll
- end
- end
- collected[l] = rt
- end
- return collected
-end
-
-apply_axis['self'] = function(list)
---~ local collected = { }
---~ for l=1,#list do
---~ collected[l] = list[l]
---~ end
---~ return collected
- return list
-end
-
-apply_axis['child'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- local dt = ll.dt
- if dt then -- weird that this is needed
- local en = 0
- for k=1,#dt do
- local dk = dt[k]
- if dk.tg then
- c = c + 1
- collected[c] = dk
- dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
- end
- end
- ll.en = en
- end
- end
- return collected
-end
-
-local function collect(list,collected,c)
- local dt = list.dt
- if dt then
- local en = 0
- for k=1,#dt do
- local dk = dt[k]
- if dk.tg then
- c = c + 1
- collected[c] = dk
- dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
- c = collect(dk,collected,c)
- end
- end
- list.en = en
- end
- return c
-end
-
-apply_axis['descendant'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- c = collect(list[l],collected,c)
- end
- return collected
-end
-
-local function collect(list,collected,c)
- local dt = list.dt
- if dt then
- local en = 0
- for k=1,#dt do
- local dk = dt[k]
- if dk.tg then
- c = c + 1
- collected[c] = dk
- dk.ni = k -- refresh
- en = en + 1
- dk.ei = en
- c = collect(dk,collected,c)
- end
- end
- list.en = en
- end
- return c
-end
-apply_axis['descendant-or-self'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- if ll.special ~= true then -- catch double root
- c = c + 1
- collected[c] = ll
- end
- c = collect(ll,collected,c)
- end
- return collected
-end
-
-apply_axis['ancestor'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- while ll do
- ll = ll.__p__
- if ll then
- c = c + 1
- collected[c] = ll
- end
- end
- end
- return collected
-end
-
-apply_axis['ancestor-or-self'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- c = c + 1
- collected[c] = ll
- while ll do
- ll = ll.__p__
- if ll then
- c = c + 1
- collected[c] = ll
- end
- end
- end
- return collected
-end
-
-apply_axis['parent'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local pl = list[l].__p__
- if pl then
- c = c + 1
- collected[c] = pl
- end
- end
- return collected
-end
-
-apply_axis['attribute'] = function(list)
- return { }
-end
-
-apply_axis['namespace'] = function(list)
- return { }
-end
-
-apply_axis['following'] = function(list) -- incomplete
---~ local collected, c = { }, 0
---~ for l=1,#list do
---~ local ll = list[l]
---~ local p = ll.__p__
---~ local d = p.dt
---~ for i=ll.ni+1,#d do
---~ local di = d[i]
---~ if type(di) == "table" then
---~ c = c + 1
---~ collected[c] = di
---~ break
---~ end
---~ end
---~ end
---~ return collected
- return { }
-end
-
-apply_axis['preceding'] = function(list) -- incomplete
---~ local collected, c = { }, 0
---~ for l=1,#list do
---~ local ll = list[l]
---~ local p = ll.__p__
---~ local d = p.dt
---~ for i=ll.ni-1,1,-1 do
---~ local di = d[i]
---~ if type(di) == "table" then
---~ c = c + 1
---~ collected[c] = di
---~ break
---~ end
---~ end
---~ end
---~ return collected
- return { }
-end
-
-apply_axis['following-sibling'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- local p = ll.__p__
- local d = p.dt
- for i=ll.ni+1,#d do
- local di = d[i]
- if type(di) == "table" then
- c = c + 1
- collected[c] = di
- end
- end
- end
- return collected
-end
-
-apply_axis['preceding-sibling'] = function(list)
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- local p = ll.__p__
- local d = p.dt
- for i=1,ll.ni-1 do
- local di = d[i]
- if type(di) == "table" then
- c = c + 1
- collected[c] = di
- end
- end
- end
- return collected
-end
-
-apply_axis['reverse-sibling'] = function(list) -- reverse preceding
- local collected, c = { }, 0
- for l=1,#list do
- local ll = list[l]
- local p = ll.__p__
- local d = p.dt
- for i=ll.ni-1,1,-1 do
- local di = d[i]
- if type(di) == "table" then
- c = c + 1
- collected[c] = di
- end
- end
- end
- return collected
-end
-
-apply_axis['auto-descendant-or-self'] = apply_axis['descendant-or-self']
-apply_axis['auto-descendant'] = apply_axis['descendant']
-apply_axis['auto-child'] = apply_axis['child']
-apply_axis['auto-self'] = apply_axis['self']
-apply_axis['initial-child'] = apply_axis['child']
-
-local function apply_nodes(list,directive,nodes)
- -- todo: nodes[1] etc ... negated node name in set ... when needed
- -- ... currently ignored
- local maxn = #nodes
- if maxn == 3 then --optimized loop
- local nns, ntg = nodes[2], nodes[3]
- if not nns and not ntg then -- wildcard
- if directive then
- return list
- else
- return { }
- end
- else
- local collected, c, m, p = { }, 0, 0, nil
- if not nns then -- only check tag
- for l=1,#list do
- local ll = list[l]
- local ltg = ll.tg
- if ltg then
- if directive then
- if ntg == ltg then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- elseif ntg ~= ltg then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- end
- end
- elseif not ntg then -- only check namespace
- for l=1,#list do
- local ll = list[l]
- local lns = ll.rn or ll.ns
- if lns then
- if directive then
- if lns == nns then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- elseif lns ~= nns then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- end
- end
- else -- check both
- for l=1,#list do
- local ll = list[l]
- local ltg = ll.tg
- if ltg then
- local lns = ll.rn or ll.ns
- local ok = ltg == ntg and lns == nns
- if directive then
- if ok then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- elseif not ok then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- end
- end
- end
- return collected
- end
- else
- local collected, c, m, p = { }, 0, 0, nil
- for l=1,#list do
- local ll = list[l]
- local ltg = ll.tg
- if ltg then
- local lns = ll.rn or ll.ns
- local ok = false
- for n=1,maxn,3 do
- local nns, ntg = nodes[n+1], nodes[n+2]
- ok = (not ntg or ltg == ntg) and (not nns or lns == nns)
- if ok then
- break
- end
- end
- if directive then
- if ok then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- elseif not ok then
- local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
- c = c + 1
- collected[c], ll.mi = ll, m
- end
- end
- end
- return collected
- end
-end
-
-local quit_expression = false
-
-local function apply_expression(list,expression,order)
- local collected, c = { }, 0
- quit_expression = false
- for l=1,#list do
- local ll = list[l]
- if expression(list,ll,l,order) then -- nasty, order is only valid when n=1
- c = c + 1
- collected[c] = ll
- end
- if quit_expression then
- break
- end
- end
- return collected
-end
-
-local P, V, C, Cs, Cc, Ct, R, S, Cg, Cb = lpeg.P, lpeg.V, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.R, lpeg.S, lpeg.Cg, lpeg.Cb
-
-local spaces = S(" \n\r\t\f")^0
-local lp_space = S(" \n\r\t\f")
-local lp_any = P(1)
-local lp_noequal = P("!=") / "~=" + P("<=") + P(">=") + P("==")
-local lp_doequal = P("=") / "=="
-local lp_or = P("|") / " or "
-local lp_and = P("&") / " and "
-
-local lp_builtin = P (
- P("text") / "(ll.dt[1] or '')" + -- fragile
- P("content") / "ll.dt" +
- -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
- P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
- P("tag") / "ll.tg" +
- P("position") / "l" + -- is element in finalizer
- P("firstindex") / "1" +
- P("lastindex") / "(#ll.__p__.dt or 1)" +
- P("firstelement") / "1" +
- P("lastelement") / "(ll.__p__.en or 1)" +
- P("first") / "1" +
- P("last") / "#list" +
- P("rootposition") / "order" +
- P("order") / "order" +
- P("element") / "(ll.ei or 1)" +
- P("index") / "(ll.ni or 1)" +
- P("match") / "(ll.mi or 1)" +
- -- P("namespace") / "ll.ns" +
- P("ns") / "ll.ns"
- ) * ((spaces * P("(") * spaces * P(")"))/"")
-
--- for the moment we keep namespaces with attributes
-
-local lp_attribute = (P("@") + P("attribute::")) / "" * Cc("(ll.at and ll.at['") * ((R("az","AZ") + S("-_:"))^1) * Cc("'])")
-
--- lp_fastpos_p = (P("+")^0 * R("09")^1 * P(-1)) / function(s) return "l==" .. s end
--- lp_fastpos_n = (P("-") * R("09")^1 * P(-1)) / function(s) return "(" .. s .. "<0 and (#list+".. s .. "==l))" end
-
-lp_fastpos_p = P("+")^0 * R("09")^1 * P(-1) / "l==%0"
-lp_fastpos_n = P("-") * R("09")^1 * P(-1) / "(%0<0 and (#list+%0==l))"
-
-local lp_fastpos = lp_fastpos_n + lp_fastpos_p
-
-local lp_reserved = C("and") + C("or") + C("not") + C("div") + C("mod") + C("true") + C("false")
-
--- local lp_lua_function = C(R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / function(t) -- todo: better . handling
--- return t .. "("
--- end
-
--- local lp_lua_function = (R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / "%0("
-local lp_lua_function = Cs((R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(")) / "%0"
-
-local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: better . handling
- if expressions[t] then
- return "expr." .. t .. "("
- else
- return "expr.error("
- end
-end
-
-local lparent = P("(")
-local rparent = P(")")
-local noparent = 1 - (lparent+rparent)
-local nested = P{lparent * (noparent + V(1))^0 * rparent}
-local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"}
-
-local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')")
-local lp_number = S("+-") * R("09")^1
-local lp_string = Cc("'") * R("az","AZ","--","__")^1 * Cc("'")
-local lp_content = (P("'") * (1-P("'"))^0 * P("'") + P('"') * (1-P('"'))^0 * P('"'))
-
-local cleaner
-
-local lp_special = (C(P("name")+P("text")+P("tag")+P("count")+P("child"))) * value / function(t,s)
- if expressions[t] then
- s = s and s ~= "" and lpegmatch(cleaner,s)
- if s and s ~= "" then
- return "expr." .. t .. "(ll," .. s ..")"
- else
- return "expr." .. t .. "(ll)"
- end
- else
- return "expr.error(" .. t .. ")"
- end
-end
-
-local content =
- lp_builtin +
- lp_attribute +
- lp_special +
- lp_noequal + lp_doequal +
- lp_or + lp_and +
- lp_reserved +
- lp_lua_function + lp_function +
- lp_content + -- too fragile
- lp_child +
- lp_any
-
-local converter = Cs (
- lp_fastpos + (P { lparent * (V(1))^0 * rparent + content } )^0
-)
-
-cleaner = Cs ( (
- -- lp_fastpos +
- lp_reserved +
- lp_number +
- lp_string +
-1 )^1 )
-
-local template_e = [[
- local expr = xml.expressions
- return function(list,ll,l,order)
- return %s
- end
-]]
-
-local template_f_y = [[
- local finalizer = xml.finalizers['%s']['%s']
- return function(collection)
- return finalizer(collection,%s)
- end
-]]
-
-local template_f_n = [[
- return xml.finalizers['%s']['%s']
-]]
-
---
-
-local register_self = { kind = "axis", axis = "self" } -- , apply = apply_axis["self"] }
-local register_parent = { kind = "axis", axis = "parent" } -- , apply = apply_axis["parent"] }
-local register_descendant = { kind = "axis", axis = "descendant" } -- , apply = apply_axis["descendant"] }
-local register_child = { kind = "axis", axis = "child" } -- , apply = apply_axis["child"] }
-local register_descendant_or_self = { kind = "axis", axis = "descendant-or-self" } -- , apply = apply_axis["descendant-or-self"] }
-local register_root = { kind = "axis", axis = "root" } -- , apply = apply_axis["root"] }
-local register_ancestor = { kind = "axis", axis = "ancestor" } -- , apply = apply_axis["ancestor"] }
-local register_ancestor_or_self = { kind = "axis", axis = "ancestor-or-self" } -- , apply = apply_axis["ancestor-or-self"] }
-local register_attribute = { kind = "axis", axis = "attribute" } -- , apply = apply_axis["attribute"] }
-local register_namespace = { kind = "axis", axis = "namespace" } -- , apply = apply_axis["namespace"] }
-local register_following = { kind = "axis", axis = "following" } -- , apply = apply_axis["following"] }
-local register_following_sibling = { kind = "axis", axis = "following-sibling" } -- , apply = apply_axis["following-sibling"] }
-local register_preceding = { kind = "axis", axis = "preceding" } -- , apply = apply_axis["preceding"] }
-local register_preceding_sibling = { kind = "axis", axis = "preceding-sibling" } -- , apply = apply_axis["preceding-sibling"] }
-local register_reverse_sibling = { kind = "axis", axis = "reverse-sibling" } -- , apply = apply_axis["reverse-sibling"] }
-
-local register_auto_descendant_or_self = { kind = "axis", axis = "auto-descendant-or-self" } -- , apply = apply_axis["auto-descendant-or-self"] }
-local register_auto_descendant = { kind = "axis", axis = "auto-descendant" } -- , apply = apply_axis["auto-descendant"] }
-local register_auto_self = { kind = "axis", axis = "auto-self" } -- , apply = apply_axis["auto-self"] }
-local register_auto_child = { kind = "axis", axis = "auto-child" } -- , apply = apply_axis["auto-child"] }
-
-local register_initial_child = { kind = "axis", axis = "initial-child" } -- , apply = apply_axis["initial-child"] }
-
-local register_all_nodes = { kind = "nodes", nodetest = true, nodes = { true, false, false } }
-
-local skip = { }
-
-local function errorrunner_e(str,cnv)
- if not skip[str] then
- report_lpath("error in expression: %s => %s",str,cnv)
- skip[str] = cnv or str
- end
- return false
-end
-
-local function errorrunner_f(str,arg)
- report_lpath("error in finalizer: %s(%s)",str,arg or "")
- return false
-end
-
-local function register_nodes(nodetest,nodes)
- return { kind = "nodes", nodetest = nodetest, nodes = nodes }
-end
-
-local function register_expression(expression)
- local converted = lpegmatch(converter,expression)
- local runner = load(format(template_e,converted))
- runner = (runner and runner()) or function() errorrunner_e(expression,converted) end
- return { kind = "expression", expression = expression, converted = converted, evaluator = runner }
-end
-
-local function register_finalizer(protocol,name,arguments)
- local runner
- if arguments and arguments ~= "" then
- runner = load(format(template_f_y,protocol or xml.defaultprotocol,name,arguments))
- else
- runner = load(format(template_f_n,protocol or xml.defaultprotocol,name))
- end
- runner = (runner and runner()) or function() errorrunner_f(name,arguments) end
- return { kind = "finalizer", name = name, arguments = arguments, finalizer = runner }
-end
-
-local expression = P { "ex",
- ex = "[" * C((V("sq") + V("dq") + (1 - S("[]")) + V("ex"))^0) * "]",
- sq = "'" * (1 - S("'"))^0 * "'",
- dq = '"' * (1 - S('"'))^0 * '"',
-}
-
-local arguments = P { "ar",
- ar = "(" * Cs((V("sq") + V("dq") + V("nq") + P(1-P(")")))^0) * ")",
- nq = ((1 - S("),'\""))^1) / function(s) return format("%q",s) end,
- sq = P("'") * (1 - P("'"))^0 * P("'"),
- dq = P('"') * (1 - P('"'))^0 * P('"'),
-}
-
--- todo: better arg parser
-
-local function register_error(str)
- return { kind = "error", error = format("unparsed: %s",str) }
-end
-
--- there is a difference in * and /*/ and so we need to catch a few special cases
-
-local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes) -- last one not needed
-local special_2 = P("/") * Cc(register_auto_self)
-local special_3 = P("") * Cc(register_auto_self)
-
-local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1)
-local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1)
-
-local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside
-
- patterns = spaces * V("protocol") * spaces * (
- ( V("special") * spaces * P(-1) ) +
- ( V("initial") * spaces * V("step") * spaces * (P("/") * spaces * V("step") * spaces)^0 )
- ),
-
- protocol = Cg(V("letters"),"protocol") * P("://") + Cg(Cc(nil),"protocol"),
-
- -- the / is needed for // as descendant or self is somewhat special
- -- step = (V("shortcuts") + V("axis") * spaces * V("nodes")^0 + V("error")) * spaces * V("expressions")^0 * spaces * V("finalizer")^0,
- step = ((V("shortcuts") + P("/") + V("axis")) * spaces * V("nodes")^0 + V("error")) * spaces * V("expressions")^0 * spaces * V("finalizer")^0,
-
- axis = V("descendant") + V("child") + V("parent") + V("self") + V("root") + V("ancestor") +
- V("descendant_or_self") + V("following_sibling") + V("following") +
- V("reverse_sibling") + V("preceding_sibling") + V("preceding") + V("ancestor_or_self") +
- #(1-P(-1)) * Cc(register_auto_child),
-
- special = special_1 + special_2 + special_3,
-
- initial = (P("/") * spaces * Cc(register_initial_child))^-1,
-
- error = (P(1)^1) / register_error,
-
- shortcuts_a = V("s_descendant_or_self") + V("s_descendant") + V("s_child") + V("s_parent") + V("s_self") + V("s_root") + V("s_ancestor"),
-
- shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0,
-
- s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus
- s_descendant = P("**") * Cc(register_descendant),
- s_child = P("*") * no_nextcolon * Cc(register_child ),
- s_parent = P("..") * Cc(register_parent ),
- s_self = P("." ) * Cc(register_self ),
- s_root = P("^^") * Cc(register_root ),
- s_ancestor = P("^") * Cc(register_ancestor ),
-
- descendant = P("descendant::") * Cc(register_descendant ),
- child = P("child::") * Cc(register_child ),
- parent = P("parent::") * Cc(register_parent ),
- self = P("self::") * Cc(register_self ),
- root = P('root::') * Cc(register_root ),
- ancestor = P('ancestor::') * Cc(register_ancestor ),
- descendant_or_self = P('descendant-or-self::') * Cc(register_descendant_or_self ),
- ancestor_or_self = P('ancestor-or-self::') * Cc(register_ancestor_or_self ),
- -- attribute = P('attribute::') * Cc(register_attribute ),
- -- namespace = P('namespace::') * Cc(register_namespace ),
- following = P('following::') * Cc(register_following ),
- following_sibling = P('following-sibling::') * Cc(register_following_sibling ),
- preceding = P('preceding::') * Cc(register_preceding ),
- preceding_sibling = P('preceding-sibling::') * Cc(register_preceding_sibling ),
- reverse_sibling = P('reverse-sibling::') * Cc(register_reverse_sibling ),
-
- nodes = (V("nodefunction") * spaces * P("(") * V("nodeset") * P(")") + V("nodetest") * V("nodeset")) / register_nodes,
-
- expressions = expression / register_expression,
-
- letters = R("az")^1,
- name = (1-S("/[]()|:*!"))^1, -- make inline
- negate = P("!") * Cc(false),
-
- nodefunction = V("negate") + P("not") * Cc(false) + Cc(true),
- nodetest = V("negate") + Cc(true),
- nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))),
- wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent,
- nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces,
-
- finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer,
-
-}
-
-xmlpatterns.pathparser = pathparser
-
-local cache = { }
-
-local function nodesettostring(set,nodetest)
- local t = { }
- for i=1,#set,3 do
- local directive, ns, tg = set[i], set[i+1], set[i+2]
- if not ns or ns == "" then ns = "*" end
- if not tg or tg == "" then tg = "*" end
- tg = (tg == "@rt@" and "[root]") or format("%s:%s",ns,tg)
- t[i] = (directive and tg) or format("not(%s)",tg)
- end
- if nodetest == false then
- return format("not(%s)",concat(t,"|"))
- else
- return concat(t,"|")
- end
-end
-
-local function tagstostring(list)
- if #list == 0 then
- return "no elements"
- else
- local t = { }
- for i=1, #list do
- local li = list[i]
- local ns, tg = li.ns, li.tg
- if not ns or ns == "" then ns = "*" end
- if not tg or tg == "" then tg = "*" end
- t[i] = (tg == "@rt@" and "[root]") or format("%s:%s",ns,tg)
- end
- return concat(t," ")
- end
-end
-
-xml.nodesettostring = nodesettostring
-
-local lpath -- we have a harmless kind of circular reference
-
-local lshowoptions = { functions = false }
-
-local function lshow(parsed)
- if type(parsed) == "string" then
- parsed = lpath(parsed)
- end
- report_lpath("%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,
- table.serialize(parsed,false,lshowoptions))
-end
-
-xml.lshow = lshow
-
-local function add_comment(p,str)
- local pc = p.comment
- if not pc then
- p.comment = { str }
- else
- pc[#pc+1] = str
- end
-end
-
-lpath = function (pattern) -- the gain of caching is rather minimal
- lpathcalls = lpathcalls + 1
- if type(pattern) == "table" then
- return pattern
- else
- local parsed = cache[pattern]
- if parsed then
- lpathcached = lpathcached + 1
- else
- parsed = lpegmatch(pathparser,pattern)
- if parsed then
- parsed.pattern = pattern
- local np = #parsed
- if np == 0 then
- parsed = { pattern = pattern, register_self, state = "parsing error" }
- report_lpath("parsing error in pattern: %s",pattern)
- lshow(parsed)
- else
- -- we could have done this with a more complex parser but this
- -- is cleaner
- local pi = parsed[1]
- if pi.axis == "auto-child" then
- if false then
- add_comment(parsed, "auto-child replaced by auto-descendant-or-self")
- parsed[1] = register_auto_descendant_or_self
- else
- add_comment(parsed, "auto-child replaced by auto-descendant")
- parsed[1] = register_auto_descendant
- end
- elseif pi.axis == "initial-child" and np > 1 and parsed[2].axis then
- add_comment(parsed, "initial-child removed") -- we could also make it a auto-self
- remove(parsed,1)
- end
- local np = #parsed -- can have changed
- if np > 1 then
- local pnp = parsed[np]
- if pnp.kind == "nodes" and pnp.nodetest == true then
- local nodes = pnp.nodes
- if nodes[1] == true and nodes[2] == false and nodes[3] == false then
- add_comment(parsed, "redundant final wildcard filter removed")
- remove(parsed,np)
- end
- end
- end
- end
- else
- parsed = { pattern = pattern }
- end
- cache[pattern] = parsed
- if trace_lparse and not trace_lprofile then
- lshow(parsed)
- end
- end
- return parsed
- end
-end
-
-xml.lpath = lpath
-
--- we can move all calls inline and then merge the trace back
--- technically we can combine axis and the next nodes which is
--- what we did before, but this is a bit cleaner (although slower too);
--- interestingly it's not that much faster when we
--- go inline
---
--- beware: we need to return a collection even when we filter
--- else the (simple) cache gets messed up
-
--- caching found lookups does not save that much (max .1 sec on an 8 sec run)
--- and it also messes up finalizers
-
--- watch out: when there is a finalizer, it's always called as there
--- can be cases that a finalizer returns (or does) something in case
--- there is no match; an example of this is count()
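---
--- for instance (a sketch, assuming a count finalizer is registered for the
--- xml protocol):
---
--- xml.filter(root,"a/b/whatever/count()") -- yields 0 rather than nil when nothing matches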
-
-local profiled = { } xml.profiled = profiled
-
-local function profiled_apply(list,parsed,nofparsed,order)
- local p = profiled[parsed.pattern]
- if p then
- p.tested = p.tested + 1
- else
- p = { tested = 1, matched = 0, finalized = 0 }
- profiled[parsed.pattern] = p
- end
- local collected = list
- for i=1,nofparsed do
- local pi = parsed[i]
- local kind = pi.kind
- if kind == "axis" then
- collected = apply_axis[pi.axis](collected)
- elseif kind == "nodes" then
- collected = apply_nodes(collected,pi.nodetest,pi.nodes)
- elseif kind == "expression" then
- collected = apply_expression(collected,pi.evaluator,order)
- elseif kind == "finalizer" then
- collected = pi.finalizer(collected) -- no check on # here
- p.matched = p.matched + 1
- p.finalized = p.finalized + 1
- return collected
- end
- if not collected or #collected == 0 then
- local pn = i < nofparsed and parsed[nofparsed]
- if pn and pn.kind == "finalizer" then
- collected = pn.finalizer(collected)
- p.finalized = p.finalized + 1
- return collected
- end
- return nil
- end
- end
- if collected then
- p.matched = p.matched + 1
- end
- return collected
-end
-
-local function traced_apply(list,parsed,nofparsed,order)
- if trace_lparse then
- lshow(parsed)
- end
- report_lpath("collecting: %s",parsed.pattern)
- report_lpath("root tags : %s",tagstostring(list))
- report_lpath("order : %s",order or "unset")
- local collected = list
- for i=1,nofparsed do
- local pi = parsed[i]
- local kind = pi.kind
- if kind == "axis" then
- collected = apply_axis[pi.axis](collected)
- report_lpath("% 10i : ax : %s",(collected and #collected) or 0,pi.axis)
- elseif kind == "nodes" then
- collected = apply_nodes(collected,pi.nodetest,pi.nodes)
- report_lpath("% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest))
- elseif kind == "expression" then
- collected = apply_expression(collected,pi.evaluator,order)
- report_lpath("% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted)
- elseif kind == "finalizer" then
- collected = pi.finalizer(collected)
- report_lpath("% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
- return collected
- end
- if not collected or #collected == 0 then
- local pn = i < nofparsed and parsed[nofparsed]
- if pn and pn.kind == "finalizer" then
- collected = pn.finalizer(collected)
- report_lpath("% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "")
- return collected
- end
- return nil
- end
- end
- return collected
-end
-
-local function normal_apply(list,parsed,nofparsed,order)
- local collected = list
- for i=1,nofparsed do
- local pi = parsed[i]
- local kind = pi.kind
- if kind == "axis" then
- local axis = pi.axis
- if axis ~= "self" then
- collected = apply_axis[axis](collected)
- end
- elseif kind == "nodes" then
- collected = apply_nodes(collected,pi.nodetest,pi.nodes)
- elseif kind == "expression" then
- collected = apply_expression(collected,pi.evaluator,order)
- elseif kind == "finalizer" then
- return pi.finalizer(collected)
- end
- if not collected or #collected == 0 then
- local pf = i < nofparsed and parsed[nofparsed].finalizer
- if pf then
- return pf(collected) -- can be anything
- end
- return nil
- end
- end
- return collected
-end
-
---~ local function applylpath(list,pattern)
---~ -- we avoid an extra call
---~ local parsed = cache[pattern]
---~ if parsed then
---~ lpathcalls = lpathcalls + 1
---~ lpathcached = lpathcached + 1
---~ elseif type(pattern) == "table" then
---~ lpathcalls = lpathcalls + 1
---~ parsed = pattern
---~ else
---~ parsed = lpath(pattern) or pattern
---~ end
---~ if not parsed then
---~ return
---~ end
---~ local nofparsed = #parsed
---~ if nofparsed == 0 then
---~ return -- something is wrong
---~ end
---~ local one = list[1] -- we could have a third argument: isroot and list or list[1] or whatever we like ... todo
---~ if not one then
---~ return -- something is wrong
---~ elseif not trace_lpath then
---~ return normal_apply(list,parsed,nofparsed,one.mi)
---~ elseif trace_lprofile then
---~ return profiled_apply(list,parsed,nofparsed,one.mi)
---~ else
---~ return traced_apply(list,parsed,nofparsed,one.mi)
---~ end
---~ end
-
-local function applylpath(list,pattern)
- if not list then
- return
- end
- local parsed = cache[pattern]
- if parsed then
- lpathcalls = lpathcalls + 1
- lpathcached = lpathcached + 1
- elseif type(pattern) == "table" then
- lpathcalls = lpathcalls + 1
- parsed = pattern
- else
- parsed = lpath(pattern) or pattern
- end
- if not parsed then
- return
- end
- local nofparsed = #parsed
- if nofparsed == 0 then
- return -- something is wrong
- end
- if not trace_lpath then
- return normal_apply ({ list },parsed,nofparsed,list.mi)
- elseif trace_lprofile then
- return profiled_apply({ list },parsed,nofparsed,list.mi)
- else
- return traced_apply ({ list },parsed,nofparsed,list.mi)
- end
-end
-
-xml.applylpath = applylpath -- takes a table as first argument, which is what xml.filter will do
-
---[[ldx--
-
-This is the main filter function. It returns whatever is asked for.
---ldx]]--
-
-function xml.filter(root,pattern) -- no longer funny attribute handling here
- return applylpath(root,pattern)
-end
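-
--- typical calls (a sketch with made-up element names; expressions between [ ]
--- are converted by the lpeg above, so @foo reads an attribute and = becomes ==):
---
--- xml.filter(root,"section/title")
--- xml.filter(root,"section[@level='2']/title")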
-
--- internal (parsed)
-
-expressions.child = function(e,pattern)
- return applylpath(e,pattern) -- todo: cache
-end
-
-expressions.count = function(e,pattern) -- what if pattern == empty or nil
- local collected = applylpath(e,pattern) -- todo: cache
- return pattern and (collected and #collected) or 0
-end
-
--- external
-
--- expressions.oneof = function(s,...)
--- local t = {...}
--- for i=1,#t do
--- if s == t[i] then
--- return true
--- end
--- end
--- return false
--- end
-
-expressions.oneof = function(s,...)
- for i=1,select("#",...) do
- if s == select(i,...) then
- return true
- end
- end
- return false
-end
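-
--- usage sketch (made-up element and attribute names): inside an lpath
--- expression this is called as expr.oneof, so one can write:
---
--- xml.filter(root,"item[oneof(@kind,'small','large')]")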
-
-expressions.error = function(str)
- xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
- return false
-end
-
-expressions.undefined = function(s)
- return s == nil
-end
-
-expressions.quit = function(s)
- if s or s == nil then
- quit_expression = true
- end
- return true
-end
-
-expressions.print = function(...)
- print(...)
- return true
-end
-
-expressions.contains = find
-expressions.find = find
-expressions.upper = upper
-expressions.lower = lower
-expressions.number = tonumber
-expressions.boolean = toboolean
-
-function expressions.contains(str,pattern)
- local t = type(str)
- if t == "string" then
- if find(str,pattern) then
- return true
- end
- elseif t == "table" then
- for i=1,#str do
- local d = str[i]
- if type(d) == "string" and find(d,pattern) then
- return true
- end
- end
- end
- return false
-end
-
--- user interface
-
-local function traverse(root,pattern,handle)
- -- report_lpath("use 'xml.selection' instead for pattern: %s",pattern)
- local collected = applylpath(root,pattern)
- if collected then
- for c=1,#collected do
- local e = collected[c]
- local r = e.__p__
- handle(r,r.dt,e.ni)
- end
- end
-end
-
-local function selection(root,pattern,handle)
- local collected = applylpath(root,pattern)
- if collected then
- if handle then
- for c=1,#collected do
- handle(collected[c])
- end
- else
- return collected
- end
- end
-end
-
-xml.traverse = traverse -- old method, r, d, k
-xml.selection = selection -- new method, simple handle
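-
--- usage sketch (made-up pattern): with a handler every match is passed to it,
--- without one the collection itself is returned:
---
--- xml.selection(root,"section/title",function(e) print(xml.tostring(e)) end)
--- local titles = xml.selection(root,"section/title")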
-
---~ function xml.cachedpatterns()
---~ return cache
---~ end
-
--- generic function finalizer (independent namespace)
-
-local function dofunction(collected,fnc,...)
- if collected then
- local f = functions[fnc]
- if f then
- for c=1,#collected do
- f(collected[c],...)
- end
- else
- report_lpath("unknown function %a",fnc)
- end
- end
-end
-
-finalizers.xml["function"] = dofunction
-finalizers.tex["function"] = dofunction
-
--- functions
-
-expressions.text = function(e,n)
- local rdt = e.__p__.dt
- return rdt and rdt[n] or ""
-end
-
-expressions.name = function(e,n) -- ns + tg
- local found = false
- n = tonumber(n) or 0
- if n == 0 then
- found = type(e) == "table" and e
- elseif n < 0 then
- local d, k = e.__p__.dt, e.ni
- for i=k-1,1,-1 do
- local di = d[i]
- if type(di) == "table" then
- if n == -1 then
- found = di
- break
- else
- n = n + 1
- end
- end
- end
- else
- local d, k = e.__p__.dt, e.ni
- for i=k+1,#d,1 do
- local di = d[i]
- if type(di) == "table" then
- if n == 1 then
- found = di
- break
- else
- n = n - 1
- end
- end
- end
- end
- if found then
- local ns, tg = found.rn or found.ns or "", found.tg
- if ns ~= "" then
- return ns .. ":" .. tg
- else
- return tg
- end
- else
- return ""
- end
-end
-
-expressions.tag = function(e,n) -- only tg
- if not e then
- return ""
- else
- local found = false
- n = tonumber(n) or 0
- if n == 0 then
- found = (type(e) == "table") and e -- seems to fail
- elseif n < 0 then
- local d, k = e.__p__.dt, e.ni
- for i=k-1,1,-1 do
- local di = d[i]
- if type(di) == "table" then
- if n == -1 then
- found = di
- break
- else
- n = n + 1
- end
- end
- end
- else
- local d, k = e.__p__.dt, e.ni
- for i=k+1,#d,1 do
- local di = d[i]
- if type(di) == "table" then
- if n == 1 then
- found = di
- break
- else
- n = n - 1
- end
- end
- end
- end
- return (found and found.tg) or ""
- end
-end
-
---[[ldx--
-
-Often using an iterator looks nicer in the code than passing handler
-functions. The Lua book describes how to use coroutines for that
-purpose. This permits code like:
-
-
-for r, d, k in xml.elements(xml.load('text.xml'),"title") do
- print(d[k]) -- old method
-end
-for e in xml.collected(xml.load('text.xml'),"title") do
- print(e) -- new one
-end
-
---ldx]]--
-
--- local wrap, yield = coroutine.wrap, coroutine.yield
--- local dummy = function() end
---
--- function xml.elements(root,pattern,reverse) -- r, d, k
--- local collected = applylpath(root,pattern)
--- if collected then
--- if reverse then
--- return wrap(function() for c=#collected,1,-1 do
--- local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni)
--- end end)
--- else
--- return wrap(function() for c=1,#collected do
--- local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni)
--- end end)
--- end
--- end
--- return wrap(dummy)
--- end
---
--- function xml.collected(root,pattern,reverse) -- e
--- local collected = applylpath(root,pattern)
--- if collected then
--- if reverse then
--- return wrap(function() for c=#collected,1,-1 do yield(collected[c]) end end)
--- else
--- return wrap(function() for c=1,#collected do yield(collected[c]) end end)
--- end
--- end
--- return wrap(dummy)
--- end
-
--- faster:
-
-local dummy = function() end
-
-function xml.elements(root,pattern,reverse) -- r, d, k
- local collected = applylpath(root,pattern)
- if not collected then
- return dummy
- elseif reverse then
- local c = #collected + 1
- return function()
- if c > 1 then
- c = c - 1
- local e = collected[c]
- local r = e.__p__
- return r, r.dt, e.ni
- end
- end
- else
- local n, c = #collected, 0
- return function()
- if c < n then
- c = c + 1
- local e = collected[c]
- local r = e.__p__
- return r, r.dt, e.ni
- end
- end
- end
-end
-
-function xml.collected(root,pattern,reverse) -- e
- local collected = applylpath(root,pattern)
- if not collected then
- return dummy
- elseif reverse then
- local c = #collected + 1
- return function()
- if c > 1 then
- c = c - 1
- return collected[c]
- end
- end
- else
- local n, c = #collected, 0
- return function()
- if c < n then
- c = c + 1
- return collected[c]
- end
- end
- end
-end
-
--- handy
-
-function xml.inspect(collection,pattern)
- pattern = pattern or "."
- for e in xml.collected(collection,pattern or ".") do
- report_lpath("pattern: %s\n\n%s\n",pattern,xml.tostring(e))
- end
-end
-
--- texy (see xfdf):
-
-local function split(e)
- local dt = e.dt
- if dt then
- for i=1,#dt do
- local dti = dt[i]
- if type(dti) == "string" then
- dti = gsub(dti,"^[\n\r]*(.-)[\n\r]*","%1")
- dti = gsub(dti,"[\n\r]+","\n\n")
- dt[i] = dti
- else
- split(dti)
- end
- end
- end
- return e
-end
-
-function xml.finalizers.paragraphs(c)
- for i=1,#c do
- split(c[i])
- end
- return c
-end
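-
--- usage sketch (made-up element name): this finalizer can end a path, e.g.
---
--- xml.filter(root,"description/paragraphs()")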
+if not modules then modules = { } end modules ['lxml-lpt'] = {
+ version = 1.001,
+ comment = "this module is the basis for the lxml-* ones",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- e.ni is only valid after a filter run
+-- todo: B/C/[get first match]
+
+local concat, remove, insert = table.concat, table.remove, table.insert
+local type, next, tonumber, tostring, setmetatable, load, select = type, next, tonumber, tostring, setmetatable, load, select
+local format, upper, lower, gmatch, gsub, find, rep = string.format, string.upper, string.lower, string.gmatch, string.gsub, string.find, string.rep
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+
+local setmetatableindex = table.setmetatableindex
+local formatters = string.formatters -- no need (yet) as paths are cached anyway
+
+-- beware, this is not xpath ... e.g. position is different (currently) and
+-- we have reverse-sibling as reversed preceding sibling
+
+--[[ldx--
+
+This module can be used stand alone but also inside ConTeXt MkIV, in
+which case it hooks into the tracker code. Therefore we provide a few
+functions that set the tracers. Here we overload a previously defined
+function.
+
+If I can get in the mood I will make a variant that is XSLT compliant
+but I wonder if it makes sense.
+--ldx]]--
+
+--[[ldx--
+
+Especially the lpath code is experimental: we will support some of xpath, but
+only things that make sense for us; as compensation it is possible to hook in your
+own functions. Apart from preprocessing content for ConTeXt we also need
+this module for process management and related file handling.
+
+
+a/b/c /*/c
+a/b/c/first() a/b/c/last() a/b/c/index(n) a/b/c/index(-n)
+a/b/c/text() a/b/c/text(1) a/b/c/text(-1) a/b/c/text(n)
+
+--ldx]]--
+
+local trace_lpath = false if trackers then trackers.register("xml.path", function(v) trace_lpath = v end) end
+local trace_lparse = false if trackers then trackers.register("xml.parse", function(v) trace_lparse = v end) end
+local trace_lprofile = false if trackers then trackers.register("xml.profile", function(v) trace_lpath = v trace_lparse = v trace_lprofile = v end) end
+
+local report_lpath = logs.reporter("xml","lpath")
+
+--[[ldx--
+
+We've now arrived at an interesting part: accessing the tree using a subset
+of xpath, and since we're not compatible we call it lpath. We
+will explain more about its usage in other documents.
+--ldx]]--
+
+local xml = xml
+
+local lpathcalls = 0 function xml.lpathcalls () return lpathcalls end
+local lpathcached = 0 function xml.lpathcached() return lpathcached end
+
+xml.functions = xml.functions or { } -- internal
+local functions = xml.functions
+
+xml.expressions = xml.expressions or { } -- in expressions
+local expressions = xml.expressions
+
+xml.finalizers = xml.finalizers or { } -- fast do-with ... (with return value other than collection)
+local finalizers = xml.finalizers
+
+xml.specialhandler = xml.specialhandler or { }
+local specialhandler = xml.specialhandler
+
+lpegpatterns.xml = lpegpatterns.xml or { }
+local xmlpatterns = lpegpatterns.xml
+
+finalizers.xml = finalizers.xml or { }
+finalizers.tex = finalizers.tex or { }
+
+local function fallback (t, name)
+ local fn = finalizers[name]
+ if fn then
+ t[name] = fn
+ else
+ report_lpath("unknown sub finalizer %a",name)
+ fn = function() end
+ end
+ return fn
+end
+
+setmetatableindex(finalizers.xml, fallback)
+setmetatableindex(finalizers.tex, fallback)
+
+xml.defaultprotocol = "xml"
+
+-- as xsl does not follow xpath completely here we will also
+-- be more liberal especially with regards to the use of | and
+-- the rootpath:
+--
+-- test : all 'test' under current
+-- /test : 'test' relative to current
+-- a|b|c : set of names
+-- (a|b|c) : idem
+-- ! : not
+--
+-- after all, we're not doing transformations but filtering. in
+-- addition we provide filter functions (last bit)
+--
+-- todo: optimizer
+--
+-- .. : parent
+-- * : all kids
+-- / : anchor here
+-- // : /**/
+-- ** : all in between
+--
+-- so far we had (more practical as we don't transform)
+--
+-- {/test} : kids 'test' under current node
+-- {test} : any kid with tag 'test'
+-- {//test} : same as above
+
+-- evaluator (needs to be redone, for the moment copied)
+
+-- todo: apply_axis(list,notable) and collection vs single
+
+local apply_axis = { }
+
+apply_axis['root'] = function(list)
+ local collected = { }
+ for l=1,#list do
+ local ll = list[l]
+ local rt = ll
+ while ll do
+ ll = ll.__p__
+ if ll then
+ rt = ll
+ end
+ end
+ collected[l] = rt
+ end
+ return collected
+end
+
+apply_axis['self'] = function(list)
+--~ local collected = { }
+--~ for l=1,#list do
+--~ collected[l] = list[l]
+--~ end
+--~ return collected
+ return list
+end
+
+apply_axis['child'] = function(list)
+ local collected, c = { }, 0
+ for l=1,#list do
+ local ll = list[l]
+ local dt = ll.dt
+ if dt then -- weird that this is needed
+ local en = 0
+ for k=1,#dt do
+ local dk = dt[k]
+ if dk.tg then
+ c = c + 1
+ collected[c] = dk
+ dk.ni = k -- refresh
+ en = en + 1
+ dk.ei = en
+ end
+ end
+ ll.en = en
+ end
+ end
+ return collected
+end
+
+local function collect(list,collected,c)
+ local dt = list.dt
+ if dt then
+ local en = 0
+ for k=1,#dt do
+ local dk = dt[k]
+ if dk.tg then
+ c = c + 1
+ collected[c] = dk
+ dk.ni = k -- refresh
+ en = en + 1
+ dk.ei = en
+ c = collect(dk,collected,c)
+ end
+ end
+ list.en = en
+ end
+ return c
+end
+
+apply_axis['descendant'] = function(list)
+ local collected, c = { }, 0
+ for l=1,#list do
+ c = collect(list[l],collected,c)
+ end
+ return collected
+end
+
+local function collect(list,collected,c)
+ local dt = list.dt
+ if dt then
+ local en = 0
+ for k=1,#dt do
+ local dk = dt[k]
+ if dk.tg then
+ c = c + 1
+ collected[c] = dk
+ dk.ni = k -- refresh
+ en = en + 1
+ dk.ei = en
+ c = collect(dk,collected,c)
+ end
+ end
+ list.en = en
+ end
+ return c
+end
+apply_axis['descendant-or-self'] = function(list)
+ local collected, c = { }, 0
+ for l=1,#list do
+ local ll = list[l]
+ if ll.special ~= true then -- catch double root
+ c = c + 1
+ collected[c] = ll
+ end
+ c = collect(ll,collected,c)
+ end
+ return collected
+end
+
+apply_axis['ancestor'] = function(list)
+ local collected, c = { }, 0
+ for l=1,#list do
+ local ll = list[l]
+ while ll do
+ ll = ll.__p__
+ if ll then
+ c = c + 1
+ collected[c] = ll
+ end
+ end
+ end
+ return collected
+end
+
+apply_axis['ancestor-or-self'] = function(list)
+ local collected, c = { }, 0
+ for l=1,#list do
+ local ll = list[l]
+ c = c + 1
+ collected[c] = ll
+ while ll do
+ ll = ll.__p__
+ if ll then
+ c = c + 1
+ collected[c] = ll
+ end
+ end
+ end
+ return collected
+end
+
+apply_axis['parent'] = function(list)
+ local collected, c = { }, 0
+ for l=1,#list do
+ local pl = list[l].__p__
+ if pl then
+ c = c + 1
+ collected[c] = pl
+ end
+ end
+ return collected
+end
+
+apply_axis['attribute'] = function(list)
+ return { }
+end
+
+apply_axis['namespace'] = function(list)
+ return { }
+end
+
+apply_axis['following'] = function(list) -- incomplete
+--~ local collected, c = { }, 0
+--~ for l=1,#list do
+--~ local ll = list[l]
+--~ local p = ll.__p__
+--~ local d = p.dt
+--~ for i=ll.ni+1,#d do
+--~ local di = d[i]
+--~ if type(di) == "table" then
+--~ c = c + 1
+--~ collected[c] = di
+--~ break
+--~ end
+--~ end
+--~ end
+--~ return collected
+ return { }
+end
+
+apply_axis['preceding'] = function(list) -- incomplete
+--~ local collected, c = { }, 0
+--~ for l=1,#list do
+--~ local ll = list[l]
+--~ local p = ll.__p__
+--~ local d = p.dt
+--~ for i=ll.ni-1,1,-1 do
+--~ local di = d[i]
+--~ if type(di) == "table" then
+--~ c = c + 1
+--~ collected[c] = di
+--~ break
+--~ end
+--~ end
+--~ end
+--~ return collected
+ return { }
+end
+
+apply_axis['following-sibling'] = function(list)
+ local collected, c = { }, 0
+ for l=1,#list do
+ local ll = list[l]
+ local p = ll.__p__
+ local d = p.dt
+ for i=ll.ni+1,#d do
+ local di = d[i]
+ if type(di) == "table" then
+ c = c + 1
+ collected[c] = di
+ end
+ end
+ end
+ return collected
+end
+
+apply_axis['preceding-sibling'] = function(list)
+ local collected, c = { }, 0
+ for l=1,#list do
+ local ll = list[l]
+ local p = ll.__p__
+ local d = p.dt
+ for i=1,ll.ni-1 do
+ local di = d[i]
+ if type(di) == "table" then
+ c = c + 1
+ collected[c] = di
+ end
+ end
+ end
+ return collected
+end
+
+apply_axis['reverse-sibling'] = function(list) -- reverse preceding
+ local collected, c = { }, 0
+ for l=1,#list do
+ local ll = list[l]
+ local p = ll.__p__
+ local d = p.dt
+ for i=ll.ni-1,1,-1 do
+ local di = d[i]
+ if type(di) == "table" then
+ c = c + 1
+ collected[c] = di
+ end
+ end
+ end
+ return collected
+end
+
+apply_axis['auto-descendant-or-self'] = apply_axis['descendant-or-self']
+apply_axis['auto-descendant'] = apply_axis['descendant']
+apply_axis['auto-child'] = apply_axis['child']
+apply_axis['auto-self'] = apply_axis['self']
+apply_axis['initial-child'] = apply_axis['child']
+
+local function apply_nodes(list,directive,nodes)
+ -- todo: nodes[1] etc ... negated node name in set ... when needed
+ -- ... currently ignored
+ local maxn = #nodes
+ if maxn == 3 then --optimized loop
+ local nns, ntg = nodes[2], nodes[3]
+ if not nns and not ntg then -- wildcard
+ if directive then
+ return list
+ else
+ return { }
+ end
+ else
+ local collected, c, m, p = { }, 0, 0, nil
+ if not nns then -- only check tag
+ for l=1,#list do
+ local ll = list[l]
+ local ltg = ll.tg
+ if ltg then
+ if directive then
+ if ntg == ltg then
+ local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
+ c = c + 1
+ collected[c], ll.mi = ll, m
+ end
+ elseif ntg ~= ltg then
+ local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
+ c = c + 1
+ collected[c], ll.mi = ll, m
+ end
+ end
+ end
+ elseif not ntg then -- only check namespace
+ for l=1,#list do
+ local ll = list[l]
+ local lns = ll.rn or ll.ns
+ if lns then
+ if directive then
+ if lns == nns then
+ local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
+ c = c + 1
+ collected[c], ll.mi = ll, m
+ end
+ elseif lns ~= nns then
+ local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
+ c = c + 1
+ collected[c], ll.mi = ll, m
+ end
+ end
+ end
+ else -- check both
+ for l=1,#list do
+ local ll = list[l]
+ local ltg = ll.tg
+ if ltg then
+ local lns = ll.rn or ll.ns
+ local ok = ltg == ntg and lns == nns
+ if directive then
+ if ok then
+ local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
+ c = c + 1
+ collected[c], ll.mi = ll, m
+ end
+ elseif not ok then
+ local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
+ c = c + 1
+ collected[c], ll.mi = ll, m
+ end
+ end
+ end
+ end
+ return collected
+ end
+ else
+ local collected, c, m, p = { }, 0, 0, nil
+ for l=1,#list do
+ local ll = list[l]
+ local ltg = ll.tg
+ if ltg then
+ local lns = ll.rn or ll.ns
+ local ok = false
+ for n=1,maxn,3 do
+ local nns, ntg = nodes[n+1], nodes[n+2]
+ ok = (not ntg or ltg == ntg) and (not nns or lns == nns)
+ if ok then
+ break
+ end
+ end
+ if directive then
+ if ok then
+ local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
+ c = c + 1
+ collected[c], ll.mi = ll, m
+ end
+ elseif not ok then
+ local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
+ c = c + 1
+ collected[c], ll.mi = ll, m
+ end
+ end
+ end
+ return collected
+ end
+end
+
+local quit_expression = false
+
+local function apply_expression(list,expression,order)
+ local collected, c = { }, 0
+ quit_expression = false
+ for l=1,#list do
+ local ll = list[l]
+ if expression(list,ll,l,order) then -- nasty, order is only valid when n=1
+ c = c + 1
+ collected[c] = ll
+ end
+ if quit_expression then
+ break
+ end
+ end
+ return collected
+end
+
+local P, V, C, Cs, Cc, Ct, R, S, Cg, Cb = lpeg.P, lpeg.V, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.R, lpeg.S, lpeg.Cg, lpeg.Cb
+
+local spaces = S(" \n\r\t\f")^0
+local lp_space = S(" \n\r\t\f")
+local lp_any = P(1)
+local lp_noequal = P("!=") / "~=" + P("<=") + P(">=") + P("==")
+local lp_doequal = P("=") / "=="
+local lp_or = P("|") / " or "
+local lp_and = P("&") / " and "
+
+local lp_builtin = P (
+ P("text") / "(ll.dt[1] or '')" + -- fragile
+ P("content") / "ll.dt" +
+ -- P("name") / "(ll.ns~='' and ll.ns..':'..ll.tg)" +
+ P("name") / "((ll.ns~='' and ll.ns..':'..ll.tg) or ll.tg)" +
+ P("tag") / "ll.tg" +
+ P("position") / "l" + -- is element in finalizer
+ P("firstindex") / "1" +
+ P("lastindex") / "(#ll.__p__.dt or 1)" +
+ P("firstelement") / "1" +
+ P("lastelement") / "(ll.__p__.en or 1)" +
+ P("first") / "1" +
+ P("last") / "#list" +
+ P("rootposition") / "order" +
+ P("order") / "order" +
+ P("element") / "(ll.ei or 1)" +
+ P("index") / "(ll.ni or 1)" +
+ P("match") / "(ll.mi or 1)" +
+ -- P("namespace") / "ll.ns" +
+ P("ns") / "ll.ns"
+ ) * ((spaces * P("(") * spaces * P(")"))/"")
+
+-- for the moment we keep namespaces with attributes
+
+local lp_attribute = (P("@") + P("attribute::")) / "" * Cc("(ll.at and ll.at['") * ((R("az","AZ") + S("-_:"))^1) * Cc("'])")
+
+-- lp_fastpos_p = (P("+")^0 * R("09")^1 * P(-1)) / function(s) return "l==" .. s end
+-- lp_fastpos_n = (P("-") * R("09")^1 * P(-1)) / function(s) return "(" .. s .. "<0 and (#list+".. s .. "==l))" end
+
+lp_fastpos_p = P("+")^0 * R("09")^1 * P(-1) / "l==%0"
+lp_fastpos_n = P("-") * R("09")^1 * P(-1) / "(%0<0 and (#list+%0==l))"
+
+local lp_fastpos = lp_fastpos_n + lp_fastpos_p
+
+local lp_reserved = C("and") + C("or") + C("not") + C("div") + C("mod") + C("true") + C("false")
+
+-- local lp_lua_function = C(R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / function(t) -- todo: better . handling
+-- return t .. "("
+-- end
+
+-- local lp_lua_function = (R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(") / "%0("
+local lp_lua_function = Cs((R("az","AZ","__")^1 * (P(".") * R("az","AZ","__")^1)^1) * ("(")) / "%0"
+
+local lp_function = C(R("az","AZ","__")^1) * P("(") / function(t) -- todo: better . handling
+ if expressions[t] then
+ return "expr." .. t .. "("
+ else
+ return "expr.error("
+ end
+end
+
+local lparent = P("(")
+local rparent = P(")")
+local noparent = 1 - (lparent+rparent)
+local nested = P{lparent * (noparent + V(1))^0 * rparent}
+local value = P(lparent * C((noparent + nested)^0) * rparent) -- P{"("*C(((1-S("()"))+V(1))^0)*")"}
+
+local lp_child = Cc("expr.child(ll,'") * R("az","AZ","--","__")^1 * Cc("')")
+local lp_number = S("+-") * R("09")^1
+local lp_string = Cc("'") * R("az","AZ","--","__")^1 * Cc("'")
+local lp_content = (P("'") * (1-P("'"))^0 * P("'") + P('"') * (1-P('"'))^0 * P('"'))
+
+local cleaner
+
+local lp_special = (C(P("name")+P("text")+P("tag")+P("count")+P("child"))) * value / function(t,s)
+ if expressions[t] then
+ s = s and s ~= "" and lpegmatch(cleaner,s)
+ if s and s ~= "" then
+ return "expr." .. t .. "(ll," .. s ..")"
+ else
+ return "expr." .. t .. "(ll)"
+ end
+ else
+ return "expr.error(" .. t .. ")"
+ end
+end
+
+local content =
+ lp_builtin +
+ lp_attribute +
+ lp_special +
+ lp_noequal + lp_doequal +
+ lp_or + lp_and +
+ lp_reserved +
+ lp_lua_function + lp_function +
+ lp_content + -- too fragile
+ lp_child +
+ lp_any
+
+local converter = Cs (
+ lp_fastpos + (P { lparent * (V(1))^0 * rparent + content } )^0
+)
+
+cleaner = Cs ( (
+ -- lp_fastpos +
+ lp_reserved +
+ lp_number +
+ lp_string +
+1 )^1 )
+
+local template_e = [[
+ local expr = xml.expressions
+ return function(list,ll,l,order)
+ return %s
+ end
+]]
+
+local template_f_y = [[
+ local finalizer = xml.finalizers['%s']['%s']
+ return function(collection)
+ return finalizer(collection,%s)
+ end
+]]
+
+local template_f_n = [[
+ return xml.finalizers['%s']['%s']
+]]
+
+--
+
+local register_self = { kind = "axis", axis = "self" } -- , apply = apply_axis["self"] }
+local register_parent = { kind = "axis", axis = "parent" } -- , apply = apply_axis["parent"] }
+local register_descendant = { kind = "axis", axis = "descendant" } -- , apply = apply_axis["descendant"] }
+local register_child = { kind = "axis", axis = "child" } -- , apply = apply_axis["child"] }
+local register_descendant_or_self = { kind = "axis", axis = "descendant-or-self" } -- , apply = apply_axis["descendant-or-self"] }
+local register_root = { kind = "axis", axis = "root" } -- , apply = apply_axis["root"] }
+local register_ancestor = { kind = "axis", axis = "ancestor" } -- , apply = apply_axis["ancestor"] }
+local register_ancestor_or_self = { kind = "axis", axis = "ancestor-or-self" } -- , apply = apply_axis["ancestor-or-self"] }
+local register_attribute = { kind = "axis", axis = "attribute" } -- , apply = apply_axis["attribute"] }
+local register_namespace = { kind = "axis", axis = "namespace" } -- , apply = apply_axis["namespace"] }
+local register_following = { kind = "axis", axis = "following" } -- , apply = apply_axis["following"] }
+local register_following_sibling = { kind = "axis", axis = "following-sibling" } -- , apply = apply_axis["following-sibling"] }
+local register_preceding = { kind = "axis", axis = "preceding" } -- , apply = apply_axis["preceding"] }
+local register_preceding_sibling = { kind = "axis", axis = "preceding-sibling" } -- , apply = apply_axis["preceding-sibling"] }
+local register_reverse_sibling = { kind = "axis", axis = "reverse-sibling" } -- , apply = apply_axis["reverse-sibling"] }
+
+local register_auto_descendant_or_self = { kind = "axis", axis = "auto-descendant-or-self" } -- , apply = apply_axis["auto-descendant-or-self"] }
+local register_auto_descendant = { kind = "axis", axis = "auto-descendant" } -- , apply = apply_axis["auto-descendant"] }
+local register_auto_self = { kind = "axis", axis = "auto-self" } -- , apply = apply_axis["auto-self"] }
+local register_auto_child = { kind = "axis", axis = "auto-child" } -- , apply = apply_axis["auto-child"] }
+
+local register_initial_child = { kind = "axis", axis = "initial-child" } -- , apply = apply_axis["initial-child"] }
+
+local register_all_nodes = { kind = "nodes", nodetest = true, nodes = { true, false, false } }
+
+local skip = { }
+
+local function errorrunner_e(str,cnv)
+ if not skip[str] then
+ report_lpath("error in expression: %s => %s",str,cnv)
+ skip[str] = cnv or str
+ end
+ return false
+end
+
+local function errorrunner_f(str,arg)
+ report_lpath("error in finalizer: %s(%s)",str,arg or "")
+ return false
+end
+
+local function register_nodes(nodetest,nodes)
+ return { kind = "nodes", nodetest = nodetest, nodes = nodes }
+end
+
+local function register_expression(expression)
+ local converted = lpegmatch(converter,expression)
+ local runner = load(format(template_e,converted))
+ runner = (runner and runner()) or function() errorrunner_e(expression,converted) end
+ return { kind = "expression", expression = expression, converted = converted, evaluator = runner }
+end
+
+local function register_finalizer(protocol,name,arguments)
+ local runner
+ if arguments and arguments ~= "" then
+ runner = load(format(template_f_y,protocol or xml.defaultprotocol,name,arguments))
+ else
+ runner = load(format(template_f_n,protocol or xml.defaultprotocol,name))
+ end
+ runner = (runner and runner()) or function() errorrunner_f(name,arguments) end
+ return { kind = "finalizer", name = name, arguments = arguments, finalizer = runner }
+end
+
+local expression = P { "ex",
+ ex = "[" * C((V("sq") + V("dq") + (1 - S("[]")) + V("ex"))^0) * "]",
+ sq = "'" * (1 - S("'"))^0 * "'",
+ dq = '"' * (1 - S('"'))^0 * '"',
+}
+
+local arguments = P { "ar",
+ ar = "(" * Cs((V("sq") + V("dq") + V("nq") + P(1-P(")")))^0) * ")",
+ nq = ((1 - S("),'\""))^1) / function(s) return format("%q",s) end,
+ sq = P("'") * (1 - P("'"))^0 * P("'"),
+ dq = P('"') * (1 - P('"'))^0 * P('"'),
+}
+
+-- todo: better arg parser
+
+local function register_error(str)
+ return { kind = "error", error = format("unparsed: %s",str) }
+end
+
+-- there is a difference in * and /*/ and so we need to catch a few special cases
+
+local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes) -- last one not needed
+local special_2 = P("/") * Cc(register_auto_self)
+local special_3 = P("") * Cc(register_auto_self)
+
+local no_nextcolon = P(-1) + #(1-P(":")) -- newer lpeg needs the P(-1)
+local no_nextlparent = P(-1) + #(1-P("(")) -- newer lpeg needs the P(-1)
+
+local pathparser = Ct { "patterns", -- can be made a bit faster by moving some patterns outside
+
+ patterns = spaces * V("protocol") * spaces * (
+ ( V("special") * spaces * P(-1) ) +
+ ( V("initial") * spaces * V("step") * spaces * (P("/") * spaces * V("step") * spaces)^0 )
+ ),
+
+ protocol = Cg(V("letters"),"protocol") * P("://") + Cg(Cc(nil),"protocol"),
+
+ -- the / is needed for // as descendant or self is somewhat special
+ -- step = (V("shortcuts") + V("axis") * spaces * V("nodes")^0 + V("error")) * spaces * V("expressions")^0 * spaces * V("finalizer")^0,
+ step = ((V("shortcuts") + P("/") + V("axis")) * spaces * V("nodes")^0 + V("error")) * spaces * V("expressions")^0 * spaces * V("finalizer")^0,
+
+ axis = V("descendant") + V("child") + V("parent") + V("self") + V("root") + V("ancestor") +
+ V("descendant_or_self") + V("following_sibling") + V("following") +
+ V("reverse_sibling") + V("preceding_sibling") + V("preceding") + V("ancestor_or_self") +
+ #(1-P(-1)) * Cc(register_auto_child),
+
+ special = special_1 + special_2 + special_3,
+
+ initial = (P("/") * spaces * Cc(register_initial_child))^-1,
+
+ error = (P(1)^1) / register_error,
+
+ shortcuts_a = V("s_descendant_or_self") + V("s_descendant") + V("s_child") + V("s_parent") + V("s_self") + V("s_root") + V("s_ancestor"),
+
+ shortcuts = V("shortcuts_a") * (spaces * "/" * spaces * V("shortcuts_a"))^0,
+
+ s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), --- *** is a bonus
+ s_descendant = P("**") * Cc(register_descendant),
+ s_child = P("*") * no_nextcolon * Cc(register_child ),
+ s_parent = P("..") * Cc(register_parent ),
+ s_self = P("." ) * Cc(register_self ),
+ s_root = P("^^") * Cc(register_root ),
+ s_ancestor = P("^") * Cc(register_ancestor ),
+
+ descendant = P("descendant::") * Cc(register_descendant ),
+ child = P("child::") * Cc(register_child ),
+ parent = P("parent::") * Cc(register_parent ),
+ self = P("self::") * Cc(register_self ),
+ root = P('root::') * Cc(register_root ),
+ ancestor = P('ancestor::') * Cc(register_ancestor ),
+ descendant_or_self = P('descendant-or-self::') * Cc(register_descendant_or_self ),
+ ancestor_or_self = P('ancestor-or-self::') * Cc(register_ancestor_or_self ),
+ -- attribute = P('attribute::') * Cc(register_attribute ),
+ -- namespace = P('namespace::') * Cc(register_namespace ),
+ following = P('following::') * Cc(register_following ),
+ following_sibling = P('following-sibling::') * Cc(register_following_sibling ),
+ preceding = P('preceding::') * Cc(register_preceding ),
+ preceding_sibling = P('preceding-sibling::') * Cc(register_preceding_sibling ),
+ reverse_sibling = P('reverse-sibling::') * Cc(register_reverse_sibling ),
+
+ nodes = (V("nodefunction") * spaces * P("(") * V("nodeset") * P(")") + V("nodetest") * V("nodeset")) / register_nodes,
+
+ expressions = expression / register_expression,
+
+ letters = R("az")^1,
+ name = (1-S("/[]()|:*!"))^1, -- make inline
+ negate = P("!") * Cc(false),
+
+ nodefunction = V("negate") + P("not") * Cc(false) + Cc(true),
+ nodetest = V("negate") + Cc(true),
+ nodename = (V("negate") + Cc(true)) * spaces * ((V("wildnodename") * P(":") * V("wildnodename")) + (Cc(false) * V("wildnodename"))),
+ wildnodename = (C(V("name")) + P("*") * Cc(false)) * no_nextlparent,
+ nodeset = spaces * Ct(V("nodename") * (spaces * P("|") * spaces * V("nodename"))^0) * spaces,
+
+ finalizer = (Cb("protocol") * P("/")^-1 * C(V("name")) * arguments * P(-1)) / register_finalizer,
+
+}
+
+xmlpatterns.pathparser = pathparser
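+-- Not part of the original file: a hedged illustration of the path shapes the
+-- grammar above accepts -- shortcut steps, explicit axes, bracketed expressions
+-- (compiled by the converter defined earlier) and a trailing finalizer call:
+--
+--   lpegmatch(pathparser,"a/**/b[@n='x']/child::c/count()")
+--
+-- which, if it parses, yields a table of steps (axis, nodes, expression, finalizer).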
+
+local cache = { }
+
+local function nodesettostring(set,nodetest)
+ local t = { }
+ for i=1,#set,3 do
+ local directive, ns, tg = set[i], set[i+1], set[i+2]
+ if not ns or ns == "" then ns = "*" end
+ if not tg or tg == "" then tg = "*" end
+ tg = (tg == "@rt@" and "[root]") or format("%s:%s",ns,tg)
+ t[i] = (directive and tg) or format("not(%s)",tg)
+ end
+ if nodetest == false then
+ return format("not(%s)",concat(t,"|"))
+ else
+ return concat(t,"|")
+ end
+end
+
+local function tagstostring(list)
+ if #list == 0 then
+ return "no elements"
+ else
+ local t = { }
+ for i=1, #list do
+ local li = list[i]
+ local ns, tg = li.ns, li.tg
+ if not ns or ns == "" then ns = "*" end
+ if not tg or tg == "" then tg = "*" end
+ t[i] = (tg == "@rt@" and "[root]") or format("%s:%s",ns,tg)
+ end
+ return concat(t," ")
+ end
+end
+
+xml.nodesettostring = nodesettostring
+
+local lpath -- we have a harmless kind of circular reference
+
+local lshowoptions = { functions = false }
+
+local function lshow(parsed)
+ if type(parsed) == "string" then
+ parsed = lpath(parsed)
+ end
+ report_lpath("%s://%s => %s",parsed.protocol or xml.defaultprotocol,parsed.pattern,
+ table.serialize(parsed,false,lshowoptions))
+end
+
+xml.lshow = lshow
+
+local function add_comment(p,str)
+ local pc = p.comment
+ if not pc then
+ p.comment = { str }
+ else
+ pc[#pc+1] = str
+ end
+end
+
+lpath = function (pattern) -- the gain of caching is rather minimal
+ lpathcalls = lpathcalls + 1
+ if type(pattern) == "table" then
+ return pattern
+ else
+ local parsed = cache[pattern]
+ if parsed then
+ lpathcached = lpathcached + 1
+ else
+ parsed = lpegmatch(pathparser,pattern)
+ if parsed then
+ parsed.pattern = pattern
+ local np = #parsed
+ if np == 0 then
+ parsed = { pattern = pattern, register_self, state = "parsing error" }
+ report_lpath("parsing error in pattern: %s",pattern)
+ lshow(parsed)
+ else
+ -- we could have done this with a more complex parser but this
+ -- is cleaner
+ local pi = parsed[1]
+ if pi.axis == "auto-child" then
+ if false then
+ add_comment(parsed, "auto-child replaced by auto-descendant-or-self")
+ parsed[1] = register_auto_descendant_or_self
+ else
+ add_comment(parsed, "auto-child replaced by auto-descendant")
+ parsed[1] = register_auto_descendant
+ end
+ elseif pi.axis == "initial-child" and np > 1 and parsed[2].axis then
+ add_comment(parsed, "initial-child removed") -- we could also make it a auto-self
+ remove(parsed,1)
+ end
+ local np = #parsed -- can have changed
+ if np > 1 then
+ local pnp = parsed[np]
+ if pnp.kind == "nodes" and pnp.nodetest == true then
+ local nodes = pnp.nodes
+ if nodes[1] == true and nodes[2] == false and nodes[3] == false then
+ add_comment(parsed, "redundant final wildcard filter removed")
+ remove(parsed,np)
+ end
+ end
+ end
+ end
+ else
+ parsed = { pattern = pattern }
+ end
+ cache[pattern] = parsed
+ if trace_lparse and not trace_lprofile then
+ lshow(parsed)
+ end
+ end
+ return parsed
+ end
+end
+
+xml.lpath = lpath
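+-- A hedged sketch (not in the original) of what the cache holds: parse a
+-- pattern once and reuse the parsed table afterwards.
+--
+--   local parsed = xml.lpath("/a/b[@id='x']")
+--   -- parsed.pattern is the original string, parsed[1..n] are the steps,
+--   -- and a second call with the same string returns the cached table.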
+
+-- we can move all calls inline and then merge the trace back;
+-- technically we can combine axis and the next nodes, which is
+-- what we did before, but this is a bit cleaner (although slower);
+-- interestingly, going inline does not gain that much speed anyway
+--
+-- beware: we need to return a collection even when we filter
+-- else the (simple) cache gets messed up
+
+-- caching found lookups saves not that much (max .1 sec on a 8 sec run)
+-- and it also messes up finalizers
+
+-- watch out: when there is a finalizer, it's always called as there
+-- can be cases that a finalizer returns (or does) something in case
+-- there is no match; an example of this is count()
+
+local profiled = { } xml.profiled = profiled
+
+local function profiled_apply(list,parsed,nofparsed,order)
+ local p = profiled[parsed.pattern]
+ if p then
+ p.tested = p.tested + 1
+ else
+ p = { tested = 1, matched = 0, finalized = 0 }
+ profiled[parsed.pattern] = p
+ end
+ local collected = list
+ for i=1,nofparsed do
+ local pi = parsed[i]
+ local kind = pi.kind
+ if kind == "axis" then
+ collected = apply_axis[pi.axis](collected)
+ elseif kind == "nodes" then
+ collected = apply_nodes(collected,pi.nodetest,pi.nodes)
+ elseif kind == "expression" then
+ collected = apply_expression(collected,pi.evaluator,order)
+ elseif kind == "finalizer" then
+ collected = pi.finalizer(collected) -- no check on # here
+ p.matched = p.matched + 1
+ p.finalized = p.finalized + 1
+ return collected
+ end
+ if not collected or #collected == 0 then
+ local pn = i < nofparsed and parsed[nofparsed]
+ if pn and pn.kind == "finalizer" then
+ collected = pn.finalizer(collected)
+ p.finalized = p.finalized + 1
+ return collected
+ end
+ return nil
+ end
+ end
+ if collected then
+ p.matched = p.matched + 1
+ end
+ return collected
+end
+
+local function traced_apply(list,parsed,nofparsed,order)
+ if trace_lparse then
+ lshow(parsed)
+ end
+ report_lpath("collecting: %s",parsed.pattern)
+ report_lpath("root tags : %s",tagstostring(list))
+ report_lpath("order : %s",order or "unset")
+ local collected = list
+ for i=1,nofparsed do
+ local pi = parsed[i]
+ local kind = pi.kind
+ if kind == "axis" then
+ collected = apply_axis[pi.axis](collected)
+ report_lpath("% 10i : ax : %s",(collected and #collected) or 0,pi.axis)
+ elseif kind == "nodes" then
+ collected = apply_nodes(collected,pi.nodetest,pi.nodes)
+ report_lpath("% 10i : ns : %s",(collected and #collected) or 0,nodesettostring(pi.nodes,pi.nodetest))
+ elseif kind == "expression" then
+ collected = apply_expression(collected,pi.evaluator,order)
+ report_lpath("% 10i : ex : %s -> %s",(collected and #collected) or 0,pi.expression,pi.converted)
+ elseif kind == "finalizer" then
+ collected = pi.finalizer(collected)
+ report_lpath("% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pi.name,pi.arguments or "")
+ return collected
+ end
+ if not collected or #collected == 0 then
+ local pn = i < nofparsed and parsed[nofparsed]
+ if pn and pn.kind == "finalizer" then
+ collected = pn.finalizer(collected)
+ report_lpath("% 10i : fi : %s : %s(%s)",(type(collected) == "table" and #collected) or 0,parsed.protocol or xml.defaultprotocol,pn.name,pn.arguments or "")
+ return collected
+ end
+ return nil
+ end
+ end
+ return collected
+end
+
+local function normal_apply(list,parsed,nofparsed,order)
+ local collected = list
+ for i=1,nofparsed do
+ local pi = parsed[i]
+ local kind = pi.kind
+ if kind == "axis" then
+ local axis = pi.axis
+ if axis ~= "self" then
+ collected = apply_axis[axis](collected)
+ end
+ elseif kind == "nodes" then
+ collected = apply_nodes(collected,pi.nodetest,pi.nodes)
+ elseif kind == "expression" then
+ collected = apply_expression(collected,pi.evaluator,order)
+ elseif kind == "finalizer" then
+ return pi.finalizer(collected)
+ end
+ if not collected or #collected == 0 then
+ local pf = i < nofparsed and parsed[nofparsed].finalizer
+ if pf then
+ return pf(collected) -- can be anything
+ end
+ return nil
+ end
+ end
+ return collected
+end
+
+--~ local function applylpath(list,pattern)
+--~ -- we avoid an extra call
+--~ local parsed = cache[pattern]
+--~ if parsed then
+--~ lpathcalls = lpathcalls + 1
+--~ lpathcached = lpathcached + 1
+--~ elseif type(pattern) == "table" then
+--~ lpathcalls = lpathcalls + 1
+--~ parsed = pattern
+--~ else
+--~ parsed = lpath(pattern) or pattern
+--~ end
+--~ if not parsed then
+--~ return
+--~ end
+--~ local nofparsed = #parsed
+--~ if nofparsed == 0 then
+--~ return -- something is wrong
+--~ end
+--~ local one = list[1] -- we could have a third argument: isroot and list or list[1] or whatever we like ... todo
+--~ if not one then
+--~ return -- something is wrong
+--~ elseif not trace_lpath then
+--~ return normal_apply(list,parsed,nofparsed,one.mi)
+--~ elseif trace_lprofile then
+--~ return profiled_apply(list,parsed,nofparsed,one.mi)
+--~ else
+--~ return traced_apply(list,parsed,nofparsed,one.mi)
+--~ end
+--~ end
+
+local function applylpath(list,pattern)
+ if not list then
+ return
+ end
+ local parsed = cache[pattern]
+ if parsed then
+ lpathcalls = lpathcalls + 1
+ lpathcached = lpathcached + 1
+ elseif type(pattern) == "table" then
+ lpathcalls = lpathcalls + 1
+ parsed = pattern
+ else
+ parsed = lpath(pattern) or pattern
+ end
+ if not parsed then
+ return
+ end
+ local nofparsed = #parsed
+ if nofparsed == 0 then
+ return -- something is wrong
+ end
+ if not trace_lpath then
+ return normal_apply ({ list },parsed,nofparsed,list.mi)
+ elseif trace_lprofile then
+ return profiled_apply({ list },parsed,nofparsed,list.mi)
+ else
+ return traced_apply ({ list },parsed,nofparsed,list.mi)
+ end
+end
+
+xml.applylpath = applylpath -- takes a table as first argument, which is what xml.filter will do
+
+--[[ldx--
+
+This is the main filter function. It returns whatever is asked for.
+--ldx]]--
+
+function xml.filter(root,pattern) -- no longer funny attribute handling here
+ return applylpath(root,pattern)
+end
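+-- Illustrative only (document and tags are made up): xml.filter returns
+-- whatever the path asks for, a collection of elements or, with a trailing
+-- finalizer, the finalizer's result.
+--
+--   local root  = xml.convert("<doc><item>one</item><item>two</item></doc>")
+--   local items = xml.filter(root,"/doc/item")         -- collection of elements
+--   local n     = xml.filter(root,"/doc/item/count()") -- finalizer result (if such a finalizer is defined)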
+
+-- internal (parsed)
+
+expressions.child = function(e,pattern)
+ return applylpath(e,pattern) -- todo: cache
+end
+
+expressions.count = function(e,pattern) -- what if pattern == empty or nil
+ local collected = applylpath(e,pattern) -- todo: cache
+ return pattern and (collected and #collected) or 0
+end
+
+-- external
+
+-- expressions.oneof = function(s,...)
+-- local t = {...}
+-- for i=1,#t do
+-- if s == t[i] then
+-- return true
+-- end
+-- end
+-- return false
+-- end
+
+expressions.oneof = function(s,...)
+ for i=1,select("#",...) do
+ if s == select(i,...) then
+ return true
+ end
+ end
+ return false
+end
+
+expressions.error = function(str)
+ xml.errorhandler(format("unknown function in lpath expression: %s",tostring(str or "?")))
+ return false
+end
+
+expressions.undefined = function(s)
+ return s == nil
+end
+
+expressions.quit = function(s)
+ if s or s == nil then
+ quit_expression = true
+ end
+ return true
+end
+
+expressions.print = function(...)
+ print(...)
+ return true
+end
+
+expressions.contains = find
+expressions.find = find
+expressions.upper = upper
+expressions.lower = lower
+expressions.number = tonumber
+expressions.boolean = toboolean
+
+function expressions.contains(str,pattern)
+ local t = type(str)
+ if t == "string" then
+ if find(str,pattern) then
+ return true
+ end
+ elseif t == "table" then
+ for i=1,#str do
+ local d = str[i]
+ if type(d) == "string" and find(d,pattern) then
+ return true
+ end
+ end
+ end
+ return false
+end
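+-- These helpers are meant to be called from inside [ ] expressions; hedged
+-- examples with made-up element and attribute names:
+--
+--   xml.filter(root,"/doc/item[contains(@label,'foo')]")
+--   xml.filter(root,"/doc/item[oneof(@kind,'a','b')]")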
+
+-- user interface
+
+local function traverse(root,pattern,handle)
+ -- report_lpath("use 'xml.selection' instead for pattern: %s",pattern)
+ local collected = applylpath(root,pattern)
+ if collected then
+ for c=1,#collected do
+ local e = collected[c]
+ local r = e.__p__
+ handle(r,r.dt,e.ni)
+ end
+ end
+end
+
+local function selection(root,pattern,handle)
+ local collected = applylpath(root,pattern)
+ if collected then
+ if handle then
+ for c=1,#collected do
+ handle(collected[c])
+ end
+ else
+ return collected
+ end
+ end
+end
+
+xml.traverse = traverse -- old method, r, d, k
+xml.selection = selection -- new method, simple handle
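+-- A sketch of both calling styles (illustrative, not from the original file):
+--
+--   xml.selection(root,"/doc/item",function(e) print(xml.tostring(e)) end)
+--   local collected = xml.selection(root,"/doc/item") -- no handler: returns the collection
+--
+--   xml.traverse(root,"/doc/item",function(r,d,k) print(d[k]) end) -- old style: root, data, index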
+
+--~ function xml.cachedpatterns()
+--~ return cache
+--~ end
+
+-- generic function finalizer (independent namespace)
+
+local function dofunction(collected,fnc,...)
+ if collected then
+ local f = functions[fnc]
+ if f then
+ for c=1,#collected do
+ f(collected[c],...)
+ end
+ else
+ report_lpath("unknown function %a",fnc)
+ end
+ end
+end
+
+finalizers.xml["function"] = dofunction
+finalizers.tex["function"] = dofunction
+
+-- functions
+
+expressions.text = function(e,n)
+ local rdt = e.__p__.dt
+ return rdt and rdt[n] or ""
+end
+
+expressions.name = function(e,n) -- ns + tg
+ local found = false
+ n = tonumber(n) or 0
+ if n == 0 then
+ found = type(e) == "table" and e
+ elseif n < 0 then
+ local d, k = e.__p__.dt, e.ni
+ for i=k-1,1,-1 do
+ local di = d[i]
+ if type(di) == "table" then
+ if n == -1 then
+ found = di
+ break
+ else
+ n = n + 1
+ end
+ end
+ end
+ else
+ local d, k = e.__p__.dt, e.ni
+ for i=k+1,#d,1 do
+ local di = d[i]
+ if type(di) == "table" then
+ if n == 1 then
+ found = di
+ break
+ else
+ n = n - 1
+ end
+ end
+ end
+ end
+ if found then
+ local ns, tg = found.rn or found.ns or "", found.tg
+ if ns ~= "" then
+ return ns .. ":" .. tg
+ else
+ return tg
+ end
+ else
+ return ""
+ end
+end
+
+expressions.tag = function(e,n) -- only tg
+ if not e then
+ return ""
+ else
+ local found = false
+ n = tonumber(n) or 0
+ if n == 0 then
+ found = (type(e) == "table") and e -- seems to fail
+ elseif n < 0 then
+ local d, k = e.__p__.dt, e.ni
+ for i=k-1,1,-1 do
+ local di = d[i]
+ if type(di) == "table" then
+ if n == -1 then
+ found = di
+ break
+ else
+ n = n + 1
+ end
+ end
+ end
+ else
+ local d, k = e.__p__.dt, e.ni
+ for i=k+1,#d,1 do
+ local di = d[i]
+ if type(di) == "table" then
+ if n == 1 then
+ found = di
+ break
+ else
+ n = n - 1
+ end
+ end
+ end
+ end
+ return (found and found.tg) or ""
+ end
+end
+
+--[[ldx--
+
+Often using an iterator looks nicer in the code than passing handler
+functions. The Lua book describes how to use coroutines for that
+purpose. This permits code like:
+
+
+for r, d, k in xml.elements(xml.load('text.xml'),"title") do
+ print(d[k]) -- old method
+end
+for e in xml.collected(xml.load('text.xml'),"title") do
+ print(e) -- new one
+end
+
+--ldx]]--
+
+-- local wrap, yield = coroutine.wrap, coroutine.yield
+-- local dummy = function() end
+--
+-- function xml.elements(root,pattern,reverse) -- r, d, k
+-- local collected = applylpath(root,pattern)
+-- if collected then
+-- if reverse then
+-- return wrap(function() for c=#collected,1,-1 do
+-- local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni)
+-- end end)
+-- else
+-- return wrap(function() for c=1,#collected do
+-- local e = collected[c] local r = e.__p__ yield(r,r.dt,e.ni)
+-- end end)
+-- end
+-- end
+-- return wrap(dummy)
+-- end
+--
+-- function xml.collected(root,pattern,reverse) -- e
+-- local collected = applylpath(root,pattern)
+-- if collected then
+-- if reverse then
+-- return wrap(function() for c=#collected,1,-1 do yield(collected[c]) end end)
+-- else
+-- return wrap(function() for c=1,#collected do yield(collected[c]) end end)
+-- end
+-- end
+-- return wrap(dummy)
+-- end
+
+-- faster:
+
+local dummy = function() end
+
+function xml.elements(root,pattern,reverse) -- r, d, k
+ local collected = applylpath(root,pattern)
+ if not collected then
+ return dummy
+ elseif reverse then
+ local c = #collected + 1
+ return function()
+ if c > 1 then
+ c = c - 1
+ local e = collected[c]
+ local r = e.__p__
+ return r, r.dt, e.ni
+ end
+ end
+ else
+ local n, c = #collected, 0
+ return function()
+ if c < n then
+ c = c + 1
+ local e = collected[c]
+ local r = e.__p__
+ return r, r.dt, e.ni
+ end
+ end
+ end
+end
+
+function xml.collected(root,pattern,reverse) -- e
+ local collected = applylpath(root,pattern)
+ if not collected then
+ return dummy
+ elseif reverse then
+ local c = #collected + 1
+ return function()
+ if c > 1 then
+ c = c - 1
+ return collected[c]
+ end
+ end
+ else
+ local n, c = #collected, 0
+ return function()
+ if c < n then
+ c = c + 1
+ return collected[c]
+ end
+ end
+ end
+end
+
+-- handy
+
+function xml.inspect(collection,pattern)
+ pattern = pattern or "."
+ for e in xml.collected(collection,pattern or ".") do
+ report_lpath("pattern: %s\n\n%s\n",pattern,xml.tostring(e))
+ end
+end
+
+-- texy (see xfdf):
+
+local function split(e)
+ local dt = e.dt
+ if dt then
+ for i=1,#dt do
+ local dti = dt[i]
+ if type(dti) == "string" then
+ dti = gsub(dti,"^[\n\r]*(.-)[\n\r]*","%1")
+ dti = gsub(dti,"[\n\r]+","\n\n")
+ dt[i] = dti
+ else
+ split(dti)
+ end
+ end
+ end
+ return e
+end
+
+function xml.finalizers.paragraphs(c)
+ for i=1,#c do
+ split(c[i])
+ end
+ return c
+end
diff --git a/tex/context/base/lxml-mis.lua b/tex/context/base/lxml-mis.lua
index 94a26b974..6afc45002 100644
--- a/tex/context/base/lxml-mis.lua
+++ b/tex/context/base/lxml-mis.lua
@@ -1,103 +1,103 @@
-if not modules then modules = { } end modules ['lxml-mis'] = {
- version = 1.001,
- comment = "this module is the basis for the lxml-* ones",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local xml, lpeg, string = xml, lpeg, string
-
-local concat = table.concat
-local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring
-local format, gsub, match = string.format, string.gsub, string.match
-local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-local P, S, R, C, V, Cc, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Cs
-
-lpegpatterns.xml = lpegpatterns.xml or { }
-local xmlpatterns = lpegpatterns.xml
-
---[[ldx--
-
-The following helper functions best belong to the lxml-ini
-module. Some are here because we need them in the mk
-document and other manuals, others came up when playing with
-this module. Since this module is also used in other places we've
-put them here instead of loading more modules there than needed.
---ldx]]--
-
-local function xmlgsub(t,old,new) -- will be replaced
- local dt = t.dt
- if dt then
- for k=1,#dt do
- local v = dt[k]
- if type(v) == "string" then
- dt[k] = gsub(v,old,new)
- else
- xmlgsub(v,old,new)
- end
- end
- end
-end
-
---~ xml.gsub = xmlgsub
-
-function xml.stripleadingspaces(dk,d,k) -- cosmetic, for manual
- if d and k then
- local dkm = d[k-1]
- if dkm and type(dkm) == "string" then
- local s = match(dkm,"\n(%s+)")
- xmlgsub(dk,"\n"..rep(" ",#s),"\n")
- end
- end
-end
-
---~ xml.escapes = { ['&'] = '&amp;', ['<'] = '&lt;', ['>'] = '&gt;', ['"'] = '&quot;' }
---~ xml.unescapes = { } for k,v in next, xml.escapes do xml.unescapes[v] = k end
-
---~ function xml.escaped (str) return (gsub(str,"(.)" , xml.escapes )) end
---~ function xml.unescaped(str) return (gsub(str,"(&.-;)", xml.unescapes)) end
---~ function xml.cleansed (str) return (gsub(str,"<.->" , '' )) end -- "%b<>"
-
--- 100 * 2500 * "oeps< oeps> oeps&" : gsub:lpeg|lpeg|lpeg
---
--- 1021:0335:0287:0247
-
--- 10 * 1000 * "oeps< oeps> oeps& asfjhalskfjh alskfjh alskfjh alskfjh ;al J;LSFDJ"
---
--- 1559:0257:0288:0190 (last one suggested by roberto)
-
--- escaped = Cs((S("<&>") / xml.escapes + 1)^0)
--- escaped = Cs((S("<")/"<" + S(">")/">" + S("&")/"&" + 1)^0)
-local normal = (1 - S("<&>"))^0
-local special = P("<")/"&lt;" + P(">")/"&gt;" + P("&")/"&amp;"
-local escaped = Cs(normal * (special * normal)^0)
-
--- 100 * 1000 * "oeps< oeps> oeps&" : gsub:lpeg == 0153:0280:0151:0080 (last one by roberto)
-
-local normal = (1 - S"&")^0
-local special = P("<")/"<" + P(">")/">" + P("&")/"&"
-local unescaped = Cs(normal * (special * normal)^0)
-
--- 100 * 5000 * "oeps oeps oeps " : gsub:lpeg == 623:501 msec (short tags, less difference)
-
-local cleansed = Cs(((P("<") * (1-P(">"))^0 * P(">"))/"" + 1)^0)
-
-xmlpatterns.escaped = escaped
-xmlpatterns.unescaped = unescaped
-xmlpatterns.cleansed = cleansed
-
-function xml.escaped (str) return lpegmatch(escaped,str) end
-function xml.unescaped(str) return lpegmatch(unescaped,str) end
-function xml.cleansed (str) return lpegmatch(cleansed,str) end
-
--- this might move
-
-function xml.fillin(root,pattern,str,check)
- local e = xml.first(root,pattern)
- if e then
- local n = #e.dt
- if not check or n == 0 or (n == 1 and e.dt[1] == "") then
- e.dt = { str }
- end
- end
-end
+if not modules then modules = { } end modules ['lxml-mis'] = {
+ version = 1.001,
+ comment = "this module is the basis for the lxml-* ones",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local xml, lpeg, string = xml, lpeg, string
+
+local concat = table.concat
+local type, next, tonumber, tostring, setmetatable, loadstring = type, next, tonumber, tostring, setmetatable, loadstring
+local format, gsub, match = string.format, string.gsub, string.match
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local P, S, R, C, V, Cc, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.Cc, lpeg.Cs
+
+lpegpatterns.xml = lpegpatterns.xml or { }
+local xmlpatterns = lpegpatterns.xml
+
+--[[ldx--
+
+The following helper functions best belong to the lxml-ini
+module. Some are here because we need them in the mk
+document and other manuals, others came up when playing with
+this module. Since this module is also used in other places we've
+put them here instead of loading more modules there than needed.
+--ldx]]--
+
+local function xmlgsub(t,old,new) -- will be replaced
+ local dt = t.dt
+ if dt then
+ for k=1,#dt do
+ local v = dt[k]
+ if type(v) == "string" then
+ dt[k] = gsub(v,old,new)
+ else
+ xmlgsub(v,old,new)
+ end
+ end
+ end
+end
+
+--~ xml.gsub = xmlgsub
+
+function xml.stripleadingspaces(dk,d,k) -- cosmetic, for manual
+ if d and k then
+ local dkm = d[k-1]
+ if dkm and type(dkm) == "string" then
+ local s = match(dkm,"\n(%s+)")
+ xmlgsub(dk,"\n"..rep(" ",#s),"\n")
+ end
+ end
+end
+
+--~ xml.escapes = { ['&'] = '&amp;', ['<'] = '&lt;', ['>'] = '&gt;', ['"'] = '&quot;' }
+--~ xml.unescapes = { } for k,v in next, xml.escapes do xml.unescapes[v] = k end
+
+--~ function xml.escaped (str) return (gsub(str,"(.)" , xml.escapes )) end
+--~ function xml.unescaped(str) return (gsub(str,"(&.-;)", xml.unescapes)) end
+--~ function xml.cleansed (str) return (gsub(str,"<.->" , '' )) end -- "%b<>"
+
+-- 100 * 2500 * "oeps< oeps> oeps&" : gsub:lpeg|lpeg|lpeg
+--
+-- 1021:0335:0287:0247
+
+-- 10 * 1000 * "oeps< oeps> oeps& asfjhalskfjh alskfjh alskfjh alskfjh ;al J;LSFDJ"
+--
+-- 1559:0257:0288:0190 (last one suggested by roberto)
+
+-- escaped = Cs((S("<&>") / xml.escapes + 1)^0)
+-- escaped = Cs((S("<")/"<" + S(">")/">" + S("&")/"&" + 1)^0)
+local normal = (1 - S("<&>"))^0
+local special = P("<")/"<" + P(">")/">" + P("&")/"&"
+local escaped = Cs(normal * (special * normal)^0)
+
+-- 100 * 1000 * "oeps< oeps> oeps&" : gsub:lpeg == 0153:0280:0151:0080 (last one by roberto)
+
+local normal = (1 - S"&")^0
+local special = P("<")/"<" + P(">")/">" + P("&")/"&"
+local unescaped = Cs(normal * (special * normal)^0)
+
+-- 100 * 5000 * "oeps oeps oeps " : gsub:lpeg == 623:501 msec (short tags, less difference)
+
+local cleansed = Cs(((P("<") * (1-P(">"))^0 * P(">"))/"" + 1)^0)
+
+xmlpatterns.escaped = escaped
+xmlpatterns.unescaped = unescaped
+xmlpatterns.cleansed = cleansed
+
+function xml.escaped (str) return lpegmatch(escaped,str) end
+function xml.unescaped(str) return lpegmatch(unescaped,str) end
+function xml.cleansed (str) return lpegmatch(cleansed,str) end
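+-- Quick illustration of the three helpers (not in the original file):
+--
+--   xml.escaped ("1 < 2 & 3")    -- "1 &lt; 2 &amp; 3"
+--   xml.unescaped("1 &lt; 2")    -- "1 < 2"
+--   xml.cleansed ("<b>bold</b>") -- "bold"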
+
+-- this might move
+
+function xml.fillin(root,pattern,str,check)
+ local e = xml.first(root,pattern)
+ if e then
+ local n = #e.dt
+ if not check or n == 0 or (n == 1 and e.dt[1] == "") then
+ e.dt = { str }
+ end
+ end
+end
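+-- Hedged usage sketch (xml.first is defined elsewhere in the lxml code): with
+-- check set, fillin only replaces content that is still empty.
+--
+--   local root = xml.convert("<doc><title></title></doc>")
+--   xml.fillin(root,"/doc/title","fallback title",true)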
diff --git a/tex/context/base/lxml-sor.lua b/tex/context/base/lxml-sor.lua
index 951017bcd..a31d0ebb8 100644
--- a/tex/context/base/lxml-sor.lua
+++ b/tex/context/base/lxml-sor.lua
@@ -1,159 +1,159 @@
-if not modules then modules = { } end modules ['lxml-sor'] = {
- version = 1.001,
- comment = "companion to lxml-sor.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format, concat, rep = string.format, table.concat, string.rep
-local lpegmatch = lpeg.match
-
-local xml, lxml = xml, lxml
-
-lxml.sorters = lxml.sorters or { }
-
-if not lxml.splitid then
- local splitter = lpeg.C((1-lpeg.P(":"))^1) * lpeg.P("::") * lpeg.C(lpeg.P(1)^1)
- function lxml.splitid(id)
- local d, i = lpegmatch(splitter,id)
- if d then
- return d, i
- else
- return "", id
- end
- end
-end
-
-local lists = { }
-
-function lxml.sorters.reset(name)
- lists[name] = {
- sorted = false,
- entries = { },
- reverse = { },
- results = { },
- }
-end
-
-function lxml.sorters.add(name,n,key)
- local list = lists[name]
- if list.sorted then
- -- reverse is messed up, we could regenerate it and go on
- else
- local entries = list and list.entries
- if entries then
- local reverse = list.reverse
- local e = reverse[n]
- if e then
- local keys = entries[e][2]
- keys[#keys+1] = key
- else
- entries[#entries+1] = { n, { key } }
- reverse[n] = #entries
- end
- end
- end
-end
-
-function lxml.sorters.show(name)
- local list = lists[name]
- local entries = list and list.entries
- local NC, NR, bold = context.NC, context.NR, context.bold -- somehow bold is not working
- if entries then
- local maxn = 1
- for i=1,#entries do
- if #entries[i][2] > maxn then maxn = #entries[i][2] end
- end
- context.starttabulate { "|Tr|Tr|" .. rep("Tlp|",maxn) }
- NC() bold("n")
- NC() bold("id")
- if maxn > 1 then
- for i=1,maxn do
- NC() bold("entry " .. i)
- end
- else
- NC() bold("entry")
- end
- NC() NR()
- context.HL()
- for i=1,#entries do
- local entry = entries[i]
- local document, node = lxml.splitid(entry[1])
- NC() context(i)
- NC() context(node)
- local e = entry[2]
- for i=1,#e do
- NC() context.detokenize(e[i])
- end
- NC() NR()
- end
- context.stoptabulate()
- end
-end
-
-lxml.sorters.compare = sorters.comparers.basic -- (a,b)
-
-function lxml.sorters.sort(name)
- local list = lists[name]
- local entries = list and list.entries
- if entries then
- -- filtering
- local results = { }
- list.results = results
- for i=1,#entries do
- local entry = entries[i]
- results[i] = {
- entry = entry[1],
- key = concat(entry[2], " "),
- }
- end
- -- preparation
- local strip = sorters.strip
- local splitter = sorters.splitters.utf
- local firstofsplit = sorters.firstofsplit
- for i=1, #results do
- local r = results[i]
- r.split = splitter(strip(r.key))
- end
- -- sorting
- sorters.sort(results,lxml.sorters.compare)
- -- finalizing
- list.nofsorted = #results
- local split = { }
- for k=1,#results do -- rather generic so maybe we need a function
- local v = results[k]
- local entry, tag = firstofsplit(v)
- local s = split[entry] -- keeps track of change
- if not s then
- s = { tag = tag, data = { } }
- split[entry] = s
- end
- s.data[#s.data+1] = v
- end
- list.results = split
- -- done
- list.sorted = true
- end
-end
-
-function lxml.sorters.flush(name,setup)
- local list = lists[name]
- local results = list and list.results
- local xmlw = context.xmlw
- if results and next(results) then
- for key, result in next, results do
- local tag, data = result.tag, result.data
- for d=1,#data do
- xmlw(setup,data[d].entry)
- end
- end
- else
- local entries = list and list.entries
- if entries then
- for i=1,#entries do
- xmlw(setup,entries[i][1])
- end
- end
- end
-end
+if not modules then modules = { } end modules ['lxml-sor'] = {
+ version = 1.001,
+ comment = "companion to lxml-sor.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format, concat, rep = string.format, table.concat, string.rep
+local lpegmatch = lpeg.match
+
+local xml, lxml = xml, lxml
+
+lxml.sorters = lxml.sorters or { }
+
+if not lxml.splitid then
+ local splitter = lpeg.C((1-lpeg.P(":"))^1) * lpeg.P("::") * lpeg.C(lpeg.P(1)^1)
+ function lxml.splitid(id)
+ local d, i = lpegmatch(splitter,id)
+ if d then
+ return d, i
+ else
+ return "", id
+ end
+ end
+end
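+-- Illustrative only:
+--
+--   local doc, rest = lxml.splitid("main::/a/b") -- "main", "/a/b"
+--   local doc, rest = lxml.splitid("justanid")   -- "",     "justanid"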
+
+local lists = { }
+
+function lxml.sorters.reset(name)
+ lists[name] = {
+ sorted = false,
+ entries = { },
+ reverse = { },
+ results = { },
+ }
+end
+
+function lxml.sorters.add(name,n,key)
+ local list = lists[name]
+ if list.sorted then
+ -- reverse is messed up, we could regenerate it and go on
+ else
+ local entries = list and list.entries
+ if entries then
+ local reverse = list.reverse
+ local e = reverse[n]
+ if e then
+ local keys = entries[e][2]
+ keys[#keys+1] = key
+ else
+ entries[#entries+1] = { n, { key } }
+ reverse[n] = #entries
+ end
+ end
+ end
+end
+
+function lxml.sorters.show(name)
+ local list = lists[name]
+ local entries = list and list.entries
+ local NC, NR, bold = context.NC, context.NR, context.bold -- somehow bold is not working
+ if entries then
+ local maxn = 1
+ for i=1,#entries do
+ if #entries[i][2] > maxn then maxn = #entries[i][2] end
+ end
+ context.starttabulate { "|Tr|Tr|" .. rep("Tlp|",maxn) }
+ NC() bold("n")
+ NC() bold("id")
+ if maxn > 1 then
+ for i=1,maxn do
+ NC() bold("entry " .. i)
+ end
+ else
+ NC() bold("entry")
+ end
+ NC() NR()
+ context.HL()
+ for i=1,#entries do
+ local entry = entries[i]
+ local document, node = lxml.splitid(entry[1])
+ NC() context(i)
+ NC() context(node)
+ local e = entry[2]
+ for i=1,#e do
+ NC() context.detokenize(e[i])
+ end
+ NC() NR()
+ end
+ context.stoptabulate()
+ end
+end
+
+lxml.sorters.compare = sorters.comparers.basic -- (a,b)
+
+function lxml.sorters.sort(name)
+ local list = lists[name]
+ local entries = list and list.entries
+ if entries then
+ -- filtering
+ local results = { }
+ list.results = results
+ for i=1,#entries do
+ local entry = entries[i]
+ results[i] = {
+ entry = entry[1],
+ key = concat(entry[2], " "),
+ }
+ end
+ -- preparation
+ local strip = sorters.strip
+ local splitter = sorters.splitters.utf
+ local firstofsplit = sorters.firstofsplit
+ for i=1, #results do
+ local r = results[i]
+ r.split = splitter(strip(r.key))
+ end
+ -- sorting
+ sorters.sort(results,lxml.sorters.compare)
+ -- finalizing
+ list.nofsorted = #results
+ local split = { }
+ for k=1,#results do -- rather generic so maybe we need a function
+ local v = results[k]
+ local entry, tag = firstofsplit(v)
+ local s = split[entry] -- keeps track of change
+ if not s then
+ s = { tag = tag, data = { } }
+ split[entry] = s
+ end
+ s.data[#s.data+1] = v
+ end
+ list.results = split
+ -- done
+ list.sorted = true
+ end
+end
+
+function lxml.sorters.flush(name,setup)
+ local list = lists[name]
+ local results = list and list.results
+ local xmlw = context.xmlw
+ if results and next(results) then
+ for key, result in next, results do
+ local tag, data = result.tag, result.data
+ for d=1,#data do
+ xmlw(setup,data[d].entry)
+ end
+ end
+ else
+ local entries = list and list.entries
+ if entries then
+ for i=1,#entries do
+ xmlw(setup,entries[i][1])
+ end
+ end
+ end
+end
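+-- A hedged sketch of the intended lifecycle (names and the setup are made up):
+--
+--   lxml.sorters.reset("index")
+--   lxml.sorters.add("index","doc::12","alpha")
+--   lxml.sorters.add("index","doc::15","beta")
+--   lxml.sorters.sort("index")
+--   lxml.sorters.flush("index","xml:index:entry") -- setup name passed to context.xmlw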
diff --git a/tex/context/base/lxml-tab.lua b/tex/context/base/lxml-tab.lua
index 2bb5844fc..b6c2b1b13 100644
--- a/tex/context/base/lxml-tab.lua
+++ b/tex/context/base/lxml-tab.lua
@@ -1,1367 +1,1367 @@
-if not modules then modules = { } end modules ['lxml-tab'] = {
- version = 1.001,
- comment = "this module is the basis for the lxml-* ones",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- this module needs a cleanup: check latest lpeg, passing args, (sub)grammar, etc etc
--- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the
--- trouble
-
--- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua)
--- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit
--- of work so we delay this till we cleanup
-
-local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
-
-local report_xml = logs and logs.reporter("xml","core") or function(...) print(string.format(...)) end
-
---[[ldx--
-
-The parser used here is inspired by the variant discussed in the Lua book, but
-handles comments and processing instructions, has a different structure, and provides
-parent access; a first version used different trickery but was less optimized so we
-went this route. First we had a find based parser, now we have an lpeg based one.
-The find based parser can be found in l-xml-edu.lua along with other older code.
-
-
-Beware, the interface may change. For instance at, ns, tg, dt may get more
-verbose names. Once the code is stable we will also remove some tracing and
-optimize the code.
-
-
-I might even decide to reimplement the parser using the latest lpeg trickery
-as the current variant was written when lpeg showed up and it's easier now to
-build tables in one go.
-
-First a hack to enable namespace resolving. A namespace is characterized by
-a URL. The following function associates a namespace prefix with a
-pattern. We use lpeg, which in this case is more than twice as fast as a
-find based solution where we loop over an array of patterns. Less code and
-much cleaner.
-
-The next function associates a namespace prefix with a URL. This
-normally happens independent of parsing.
-
-
-xml.registerns("mml","mathml")
-
---ldx]]--
-
-function xml.registerns(namespace, pattern) -- pattern can be an lpeg
- check = check + C(P(lower(pattern))) / namespace
- parse = P { P(check) + 1 * V(1) }
-end
-
---[[ldx--
-
-The next function also registers a namespace, but this time we map a
-given namespace prefix onto a registered one, using the given
-URL. This is used for attributes like xmlns:m.
-
-
-xml.checkns("m","http://www.w3.org/mathml")
-
---ldx]]--
-
-function xml.checkns(namespace,url)
- local ns = lpegmatch(parse,lower(url))
- if ns and namespace ~= ns then
- xml.xmlns[namespace] = ns
- end
-end
-
---[[ldx--
-
Next we provide a way to turn an into a registered
-namespace. This used for the xmlns attribute.
A namespace in an element can be remapped onto the registered
-one efficiently by using the xml.xmlns table.
---ldx]]--
-
---[[ldx--
-
-This version uses lpeg. We follow the same approach as before, stack and top and
-such. This version is about twice as fast, which is mostly due to the fact that
-we don't have to prepare the stream for cdata, doctype etc. This variant is
-dedicated to Luigi Scarso, who challenged me with 40 megabyte files that
-took 12.5 seconds to load (1.5 for file io and the rest for tree building). With
-the lpeg implementation we got that down to less than 7.3 seconds. Loading the 14
-interface definition files (2.6 meg) went down from 1.05 seconds to 0.55.
-
-
-Next comes the parser. The rather messy doctype definition comes in many
-disguises so it is no surprise that later on we have to dedicate quite some
-code to it.
-
-
-
-
-
-
-
-
-
-
-
-The code may look a bit complex but this is mostly due to the fact that we
-resolve namespaces and attach metatables. There is only one public function:
-
-
-local x = xml.convert(somestring)
-
-
-
-An optional second boolean argument tells this function not to create a root
-element.
-
-
-Valid entities are:
-
-
-
-
-
-
---ldx]]--
-
--- not just one big nested table capture (lpeg overflow)
-
-local nsremap, resolvens = xml.xmlns, xml.resolvens
-
-local stack = { }
-local top = { }
-local dt = { }
-local at = { }
-local xmlns = { }
-local errorstr = nil
-local entities = { }
-local strip = false
-local cleanup = false
-local utfize = false
-local resolve_predefined = false
-local unify_predefined = false
-
-local dcache = { }
-local hcache = { }
-local acache = { }
-
-local mt = { }
-
-local function initialize_mt(root)
- mt = { __index = root } -- will be redefined later
-end
-
-function xml.setproperty(root,k,v)
- getmetatable(root).__index[k] = v
-end
-
-function xml.checkerror(top,toclose)
- return "" -- can be set
-end
-
-local function add_attribute(namespace,tag,value)
- if cleanup and #value > 0 then
- value = cleanup(value) -- new
- end
- if tag == "xmlns" then
- xmlns[#xmlns+1] = resolvens(value)
- at[tag] = value
- elseif namespace == "" then
- at[tag] = value
- elseif namespace == "xmlns" then
- xml.checkns(tag,value)
- at["xmlns:" .. tag] = value
- else
- -- for the moment this way:
- at[namespace .. ":" .. tag] = value
- end
-end
-
-local function add_empty(spacing, namespace, tag)
- if #spacing > 0 then
- dt[#dt+1] = spacing
- end
- local resolved = namespace == "" and xmlns[#xmlns] or nsremap[namespace] or namespace
- top = stack[#stack]
- dt = top.dt
- local t = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = top }
- dt[#dt+1] = t
- setmetatable(t, mt)
- if at.xmlns then
- remove(xmlns)
- end
- at = { }
-end
-
-local function add_begin(spacing, namespace, tag)
- if #spacing > 0 then
- dt[#dt+1] = spacing
- end
- local resolved = namespace == "" and xmlns[#xmlns] or nsremap[namespace] or namespace
- top = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = stack[#stack] }
- setmetatable(top, mt)
- dt = top.dt
- stack[#stack+1] = top
- at = { }
-end
-
-local function add_end(spacing, namespace, tag)
- if #spacing > 0 then
- dt[#dt+1] = spacing
- end
- local toclose = remove(stack)
- top = stack[#stack]
- if #stack < 1 then
- errorstr = formatters["unable to close %s %s"](tag,xml.checkerror(top,toclose) or "")
- elseif toclose.tg ~= tag then -- no namespace check
- errorstr = formatters["unable to close %s with %s %s"](toclose.tg,tag,xml.checkerror(top,toclose) or "")
- end
- dt = top.dt
- dt[#dt+1] = toclose
- -- dt[0] = top -- nasty circular reference when serializing table
- if toclose.at.xmlns then
- remove(xmlns)
- end
-end
-
-local function add_text(text)
- if cleanup and #text > 0 then
- dt[#dt+1] = cleanup(text)
- else
- dt[#dt+1] = text
- end
-end
-
-local function add_special(what, spacing, text)
- if #spacing > 0 then
- dt[#dt+1] = spacing
- end
- if strip and (what == "@cm@" or what == "@dt@") then
- -- forget it
- else
- dt[#dt+1] = { special=true, ns="", tg=what, dt={ text } }
- end
-end
-
-local function set_message(txt)
- errorstr = "garbage at the end of the file: " .. gsub(txt,"([ \n\r\t]*)","")
-end
-
-local reported_attribute_errors = { }
-
-local function attribute_value_error(str)
- if not reported_attribute_errors[str] then
- report_xml("invalid attribute value %a",str)
- reported_attribute_errors[str] = true
- at._error_ = str
- end
- return str
-end
-
-local function attribute_specification_error(str)
- if not reported_attribute_errors[str] then
- report_xml("invalid attribute specification %a",str)
- reported_attribute_errors[str] = true
- at._error_ = str
- end
- return str
-end
-
-xml.placeholders = {
- unknown_dec_entity = function(str) return str == "" and "&error;" or formatters["&#%s;"](str) end,
- unknown_hex_entity = function(str) return formatters["&#x%s;"](str) end,
- unknown_any_entity = function(str) return formatters["&#x%s;"](str) end,
-}
-
-local placeholders = xml.placeholders
-
-local function fromhex(s)
- local n = tonumber(s,16)
- if n then
- return utfchar(n)
- else
- return formatters["h:%s"](s), true
- end
-end
-
-local function fromdec(s)
- local n = tonumber(s)
- if n then
- return utfchar(n)
- else
- return formatters["d:%s"](s), true
- end
-end
-
--- one level expansion (simple case), no checking done
-
-local rest = (1-P(";"))^0
-local many = P(1)^0
-
-local parsedentity =
- P("&") * (P("#x")*(rest/fromhex) + P("#")*(rest/fromdec)) * P(";") * P(-1) +
- (P("#x")*(many/fromhex) + P("#")*(many/fromdec))
-
--- parsing in the xml file
-
-local predefined_unified = {
- [38] = "&amp;",
- [42] = "&quot;",
- [47] = "&apos;",
- [74] = "&lt;",
- [76] = "&gt;",
-}
-
-local predefined_simplified = {
- [38] = "&", amp = "&",
- [42] = '"', quot = '"',
- [47] = "'", apos = "'",
- [74] = "<", lt = "<",
- [76] = ">", gt = ">",
-}
-
-local nofprivates = 0xF0000 -- shared but seldom used
-
-local privates_u = { -- unescaped
- [ [[&]] ] = "&",
- [ [["]] ] = """,
- [ [[']] ] = "'",
- [ [[<]] ] = "<",
- [ [[>]] ] = ">",
-}
-
-local privates_p = {
-}
-
-local privates_n = {
- -- keeps track of defined ones
-}
-
-local escaped = utf.remapper(privates_u)
-
-local function unescaped(s)
- local p = privates_n[s]
- if not p then
- nofprivates = nofprivates + 1
- p = utfchar(nofprivates)
- privates_n[s] = p
- s = "&" .. s .. ";" -- todo: use char-ent to map to hex
- privates_u[p] = s
- privates_p[p] = s
- end
- return p
-end
-
-local unprivatized = utf.remapper(privates_p)
-
-xml.privatetoken = unescaped
-xml.unprivatized = unprivatized
-xml.privatecodes = privates_n
-
-local function handle_hex_entity(str)
- local h = hcache[str]
- if not h then
- local n = tonumber(str,16)
- h = unify_predefined and predefined_unified[n]
- if h then
- if trace_entities then
- report_xml("utfize, converting hex entity %s; into %a",str,h)
- end
- elseif utfize then
- h = (n and utfchar(n)) or xml.unknown_hex_entity(str) or ""
- if not n then
- report_xml("utfize, ignoring hex entity %s;",str)
- elseif trace_entities then
- report_xml("utfize, converting hex entity %s; into %a",str,h)
- end
- else
- if trace_entities then
- report_xml("found entity %s;",str)
- end
- h = "" .. str .. ";"
- end
- hcache[str] = h
- end
- return h
-end
-
-local function handle_dec_entity(str)
- local d = dcache[str]
- if not d then
- local n = tonumber(str)
- d = unify_predefined and predefined_unified[n]
- if d then
- if trace_entities then
- report_xml("utfize, converting dec entity %s; into %a",str,d)
- end
- elseif utfize then
- d = (n and utfchar(n)) or placeholders.unknown_dec_entity(str) or ""
- if not n then
- report_xml("utfize, ignoring dec entity %s;",str)
- elseif trace_entities then
- report_xml("utfize, converting dec entity %s; into %a",str,d)
- end
- else
- if trace_entities then
- report_xml("found entity %s;",str)
- end
- d = "" .. str .. ";"
- end
- dcache[str] = d
- end
- return d
-end
-
-xml.parsedentitylpeg = parsedentity
-
-local function handle_any_entity(str)
- if resolve then
- local a = acache[str] -- per instance ! todo
- if not a then
- a = resolve_predefined and predefined_simplified[str]
- if a then
- if trace_entities then
- report_xml("resolving entity &%s; to predefined %a",str,a)
- end
- else
- if type(resolve) == "function" then
- a = resolve(str) or entities[str]
- else
- a = entities[str]
- end
- if a then
- if type(a) == "function" then
- if trace_entities then
- report_xml("expanding entity &%s; to function call",str)
- end
- a = a(str) or ""
- end
- a = lpegmatch(parsedentity,a) or a -- for nested
- if trace_entities then
- report_xml("resolving entity &%s; to internal %a",str,a)
- end
- else
- local unknown_any_entity = placeholders.unknown_any_entity
- if unknown_any_entity then
- a = unknown_any_entity(str) or ""
- end
- if a then
- if trace_entities then
- report_xml("resolving entity &%s; to external %s",str,a)
- end
- else
- if trace_entities then
- report_xml("keeping entity &%s;",str)
- end
- if str == "" then
- a = "&error;"
- else
- a = "&" .. str .. ";"
- end
- end
- end
- end
- acache[str] = a
- elseif trace_entities then
- if not acache[str] then
- report_xml("converting entity &%s; to %a",str,a)
- acache[str] = a
- end
- end
- return a
- else
- local a = acache[str]
- if not a then
- a = resolve_predefined and predefined_simplified[str]
- if a then
- -- one of the predefined
- acache[str] = a
- if trace_entities then
- report_xml("entity &%s; becomes %a",str,a)
- end
- elseif str == "" then
- if trace_entities then
- report_xml("invalid entity &%s;",str)
- end
- a = "&error;"
- acache[str] = a
- else
- if trace_entities then
- report_xml("entity &%s; is made private",str)
- end
- -- a = "&" .. str .. ";"
- a = unescaped(str)
- acache[str] = a
- end
- end
- return a
- end
-end
-
-local function handle_end_entity(chr)
- report_xml("error in entity, %a found instead of %a",chr,";")
-end
-
-local space = S(' \r\n\t')
-local open = P('<')
-local close = P('>')
-local squote = S("'")
-local dquote = S('"')
-local equal = P('=')
-local slash = P('/')
-local colon = P(':')
-local semicolon = P(';')
-local ampersand = P('&')
-local valid = R('az', 'AZ', '09') + S('_-.')
-local name_yes = C(valid^1) * colon * C(valid^1)
-local name_nop = C(P(true)) * C(valid^1)
-local name = name_yes + name_nop
-local utfbom = lpeg.patterns.utfbom -- no capture
-local spacing = C(space^0)
-
------ entitycontent = (1-open-semicolon)^0
-local anyentitycontent = (1-open-semicolon-space-close)^0
-local hexentitycontent = R("AF","af","09")^0
-local decentitycontent = R("09")^0
-local parsedentity = P("#")/"" * (
- P("x")/"" * (hexentitycontent/handle_hex_entity) +
- (decentitycontent/handle_dec_entity)
- ) + (anyentitycontent/handle_any_entity)
-local entity = ampersand/"" * parsedentity * ( (semicolon/"") + #(P(1)/handle_end_entity))
-
-local text_unparsed = C((1-open)^1)
-local text_parsed = Cs(((1-open-ampersand)^1 + entity)^1)
-
-local somespace = space^1
-local optionalspace = space^0
-
------ value = (squote * C((1 - squote)^0) * squote) + (dquote * C((1 - dquote)^0) * dquote) -- ampersand and < also invalid in value
-local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dquote * Cs((entity + (1 - dquote))^0) * dquote) -- ampersand and < also invalid in value
-
-local endofattributes = slash * close + close -- recovery of flaky html
-local whatever = space * name * optionalspace * equal
------ wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
------ wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
------ wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
-local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
-
-local attributevalue = value + wrongvalue
-
-local attribute = (somespace * name * optionalspace * equal * optionalspace * attributevalue) / add_attribute
------ attributes = (attribute)^0
-
-local attributes = (attribute + somespace^-1 * (((1-endofattributes)^1)/attribute_specification_error))^0
-
-local parsedtext = text_parsed / add_text
-local unparsedtext = text_unparsed / add_text
-local balanced = P { "[" * ((1 - S"[]") + V(1))^0 * "]" } -- taken from lpeg manual, () example
-
-local emptyelement = (spacing * open * name * attributes * optionalspace * slash * close) / add_empty
-local beginelement = (spacing * open * name * attributes * optionalspace * close) / add_begin
-local endelement = (spacing * open * slash * name * optionalspace * close) / add_end
-
-local begincomment = open * P("!--")
-local endcomment = P("--") * close
-local begininstruction = open * P("?")
-local endinstruction = P("?") * close
-local begincdata = open * P("![CDATA[")
-local endcdata = P("]]") * close
-
-local someinstruction = C((1 - endinstruction)^0)
-local somecomment = C((1 - endcomment )^0)
-local somecdata = C((1 - endcdata )^0)
-
-local function normalentity(k,v ) entities[k] = v end
-local function systementity(k,v,n) entities[k] = v end
-local function publicentity(k,v,n) entities[k] = v end
-
--- todo: separate dtd parser
-
-local begindoctype = open * P("!DOCTYPE")
-local enddoctype = close
-local beginset = P("[")
-local endset = P("]")
-local doctypename = C((1-somespace-close)^0)
-local elementdoctype = optionalspace * P(" &
- cleanup = settings.text_cleanup
- entities = settings.entities or { }
- --
- if utfize == nil then
- settings.utfize_entities = true
- utfize = true
- end
- if resolve_predefined == nil then
- settings.resolve_predefined_entities = true
- resolve_predefined = true
- end
- --
- stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
- acache, hcache, dcache = { }, { }, { } -- not stored
- reported_attribute_errors = { }
- if settings.parent_root then
- mt = getmetatable(settings.parent_root)
- else
- initialize_mt(top)
- end
- stack[#stack+1] = top
- top.dt = { }
- dt = top.dt
- if not data or data == "" then
- errorstr = "empty xml file"
- elseif utfize or resolve then
- if lpegmatch(grammar_parsed_text,data) then
- errorstr = ""
- else
- errorstr = "invalid xml file - parsed text"
- end
- elseif type(data) == "string" then
- if lpegmatch(grammar_unparsed_text,data) then
- errorstr = ""
- else
- errorstr = "invalid xml file - unparsed text"
- end
- else
- errorstr = "invalid xml file - no text at all"
- end
- local result
- if errorstr and errorstr ~= "" then
- result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={ }, er = true } } }
- setmetatable(stack, mt)
- local errorhandler = settings.error_handler
- if errorhandler == false then
- -- no error message
- else
- errorhandler = errorhandler or xml.errorhandler
- if errorhandler then
- local currentresource = settings.currentresource
- if currentresource and currentresource ~= "" then
- xml.errorhandler(formatters["load error in [%s]: %s"](currentresource,errorstr))
- else
- xml.errorhandler(formatters["load error: %s"](errorstr))
- end
- end
- end
- else
- result = stack[1]
- end
- if not settings.no_root then
- result = { special = true, ns = "", tg = '@rt@', dt = result.dt, at={ }, entities = entities, settings = settings }
- setmetatable(result, mt)
- local rdt = result.dt
- for k=1,#rdt do
- local v = rdt[k]
- if type(v) == "table" and not v.special then -- always table -)
- result.ri = k -- rootindex
- v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
- break
- end
- end
- end
- if errorstr and errorstr ~= "" then
- result.error = true
- end
- result.statistics = {
- entities = {
- decimals = dcache,
- hexadecimals = hcache,
- names = acache,
- }
- }
- strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
- unify_predefined, cleanup, entities = nil, nil, nil
- stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
- acache, hcache, dcache = nil, nil, nil
- reported_attribute_errors, mt, errorhandler = nil, nil, nil
- return result
-end
-
--- Because we can have a crash (stack issues) with faulty xml, we wrap this one
--- in a protector:
-
-function xmlconvert(data,settings)
- local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
- if ok then
- return result
- else
- return _xmlconvert_("",settings)
- end
-end
-
-xml.convert = xmlconvert
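--- A hedged usage sketch, using only settings keys that appear in the code above:
---
---   local root = xml.convert("<a><b>text</b></a>")
---   local frag = xml.convert("<b>text</b>",{ no_root = true, utfize_entities = true })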
-
-function xml.inheritedconvert(data,xmldata) -- xmldata is parent
- local settings = xmldata.settings
- if settings then
- settings.parent_root = xmldata -- to be tested
- end
- -- settings.no_root = true
- local xc = xmlconvert(data,settings) -- hm, we might need to locate settings
- -- xc.settings = nil
- -- xc.entities = nil
- -- xc.special = nil
- -- xc.ri = nil
- -- print(xc.tg)
- return xc
-end
-
---[[ldx--
-
-Packaging data in an xml like table is done with the following
-function. Maybe it will go away (when not used).
---ldx]]--
-
-function xml.is_valid(root)
- return root and root.dt and root.dt[1] and type(root.dt[1]) == "table" and not root.dt[1].er
-end
-
-function xml.package(tag,attributes,data)
- local ns, tg = match(tag,"^(.-):?([^:]+)$")
- local t = { ns = ns, tg = tg, dt = data or "", at = attributes or {} }
- setmetatable(t, mt)
- return t
-end
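--- Illustrative only:
---
---   local t = xml.package("m:mrow",{ class = "x" },{ "1 + 2" })
---   -- t.ns == "m", t.tg == "mrow", t.dt == { "1 + 2" }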
-
-function xml.is_valid(root)
- return root and not root.error
-end
-
-xml.errorhandler = report_xml
-
---[[ldx--
-
-We cannot run the lpeg parser on a filehandle so we need to load
-the whole file first. The function accepts a string representing
-a filename or a file handle.
---ldx]]--
-
-function xml.load(filename,settings)
- local data = ""
- if type(filename) == "string" then
- -- local data = io.loaddata(filename) - -todo: check type in io.loaddata
- local f = io.open(filename,'r') -- why not 'rb'
- if f then
- data = f:read("*all") -- io.readall(f) ... only makes sense for large files
- f:close()
- end
- elseif filename then -- filehandle
- data = filename:read("*all") -- io.readall(f) ... only makes sense for large files
- end
- if settings then
- settings.currentresource = filename
- local result = xmlconvert(data,settings)
- settings.currentresource = nil
- return result
- else
- return xmlconvert(data,{ currentresource = filename })
- end
-end
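--- Hedged examples (filenames are made up); both a filename and an open file
--- handle are accepted:
---
---   local root = xml.load("somefile.xml")
---   local utf  = xml.load("somefile.xml",{ utfize_entities = true })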
-
---[[ldx--
-
-When we inject new elements, we need to convert strings to
-valid trees, which is what the next function does.
---ldx]]--
-
-local no_root = { no_root = true }
-
-function xml.toxml(data)
- if type(data) == "string" then
- local root = { xmlconvert(data,no_root) }
- return (#root > 1 and root) or root[1]
- else
- return data
- end
-end
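--- Sketch: a string is converted to a tree (without adding an extra root
--- element), anything that is already a table is returned as is.
---
---   local tree = xml.toxml("<b>bold</b>")
---   local same = xml.toxml(tree)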
-
---[[ldx--
-
-For copying a tree we use a dedicated function instead of the
-generic table copier. Since we know what we're dealing with we
-can speed up things a bit. The second argument is not to be used!
---ldx]]--
-
-local function copy(old,tables)
- if old then
- tables = tables or { }
- local new = { }
- if not tables[old] then
- tables[old] = new
- end
- for k,v in next, old do
- new[k] = (type(v) == "table" and (tables[v] or copy(v, tables))) or v
- end
- local mt = getmetatable(old)
- if mt then
- setmetatable(new,mt)
- end
- return new
- else
- return { }
- end
-end
-
-xml.copy = copy
-
---[[ldx--
-
-Serializing the tree or parts of the tree is a major
-activity which is why the following function is pretty optimized, resulting
-in a few more lines of code than needed. The variant that uses the formatting
-function for all components is about 15% slower than the concatenating
-alternative.
---ldx]]--
-
--- todo: add when not present
-
-function xml.checkbom(root) -- can be made faster
- if root.ri then
- local dt = root.dt
- for k=1,#dt do
- local v = dt[k]
- if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then
- return
- end
- end
- insert(dt, 1, { special = true, ns = "", tg = "@pi@", dt = { "xml version='1.0' standalone='yes'" } } )
- insert(dt, 2, "\n" )
- end
-end
-
---[[ldx--
-
-At the cost of some 25% runtime overhead you can first convert the tree to a string
-and then handle the lot.
---ldx]]--
-
--- new experimental reorganized serialize
-
-local function verbose_element(e,handlers) -- options
- local handle = handlers.handle
- local serialize = handlers.serialize
- local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
- local ats = eat and next(eat) and { }
- if ats then
- for k,v in next, eat do
- ats[#ats+1] = formatters['%s=%q'](k,escaped(v))
- end
- end
- if ern and trace_entities and ern ~= ens then
- ens = ern
- end
- if ens ~= "" then
- if edt and #edt > 0 then
- if ats then
- handle("<",ens,":",etg," ",concat(ats," "),">")
- else
- handle("<",ens,":",etg,">")
- end
- for i=1,#edt do
- local e = edt[i]
- if type(e) == "string" then
- handle(escaped(e))
- else
- serialize(e,handlers)
- end
- end
- handle("",ens,":",etg,">")
- else
- if ats then
- handle("<",ens,":",etg," ",concat(ats," "),"/>")
- else
- handle("<",ens,":",etg,"/>")
- end
- end
- else
- if edt and #edt > 0 then
- if ats then
- handle("<",etg," ",concat(ats," "),">")
- else
- handle("<",etg,">")
- end
- for i=1,#edt do
- local e = edt[i]
- if type(e) == "string" then
- handle(escaped(e)) -- option: hexify escaped entities
- else
- serialize(e,handlers)
- end
- end
- handle("",etg,">")
- else
- if ats then
- handle("<",etg," ",concat(ats," "),"/>")
- else
- handle("<",etg,"/>")
- end
- end
- end
-end
-
-local function verbose_pi(e,handlers)
- handlers.handle("",e.dt[1],"?>")
-end
-
-local function verbose_comment(e,handlers)
- handlers.handle("")
-end
-
-local function verbose_cdata(e,handlers)
- handlers.handle("")
-end
-
-local function verbose_doctype(e,handlers)
- handlers.handle("")
-end
-
-local function verbose_root(e,handlers)
- handlers.serialize(e.dt,handlers)
-end
-
-local function verbose_text(e,handlers)
- handlers.handle(escaped(e))
-end
-
-local function verbose_document(e,handlers)
- local serialize = handlers.serialize
- local functions = handlers.functions
- for i=1,#e do
- local ei = e[i]
- if type(ei) == "string" then
- functions["@tx@"](ei,handlers)
- else
- serialize(ei,handlers)
- end
- end
-end
-
-local function serialize(e,handlers,...)
- local initialize = handlers.initialize
- local finalize = handlers.finalize
- local functions = handlers.functions
- if initialize then
- local state = initialize(...)
- if not state == true then
- return state
- end
- end
- local etg = e.tg
- if etg then
- (functions[etg] or functions["@el@"])(e,handlers)
- -- elseif type(e) == "string" then
- -- functions["@tx@"](e,handlers)
- else
- functions["@dc@"](e,handlers) -- dc ?
- end
- if finalize then
- return finalize()
- end
-end
-
-local function xserialize(e,handlers)
- local functions = handlers.functions
- local etg = e.tg
- if etg then
- (functions[etg] or functions["@el@"])(e,handlers)
- -- elseif type(e) == "string" then
- -- functions["@tx@"](e,handlers)
- else
- functions["@dc@"](e,handlers)
- end
-end
-
-local handlers = { }
-
-local function newhandlers(settings)
- local t = table.copy(handlers[settings and settings.parent or "verbose"] or { }) -- merge
- if settings then
- for k,v in next, settings do
- if type(v) == "table" then
- local tk = t[k] if not tk then tk = { } t[k] = tk end
- for kk,vv in next, v do
- tk[kk] = vv
- end
- else
- t[k] = v
- end
- end
- if settings.name then
- handlers[settings.name] = t
- end
- end
- utilities.storage.mark(t)
- return t
-end
-
-local nofunction = function() end
-
-function xml.sethandlersfunction(handler,name,fnc)
- handler.functions[name] = fnc or nofunction
-end
-
-function xml.gethandlersfunction(handler,name)
- return handler.functions[name]
-end
-
-function xml.gethandlers(name)
- return handlers[name]
-end
-
-newhandlers {
- name = "verbose",
- initialize = false, -- faster than nil and mt lookup
- finalize = false, -- faster than nil and mt lookup
- serialize = xserialize,
- handle = print,
- functions = {
- ["@dc@"] = verbose_document,
- ["@dt@"] = verbose_doctype,
- ["@rt@"] = verbose_root,
- ["@el@"] = verbose_element,
- ["@pi@"] = verbose_pi,
- ["@cm@"] = verbose_comment,
- ["@cd@"] = verbose_cdata,
- ["@tx@"] = verbose_text,
- }
-}
-
---[[ldx--
-How you deal with saving data depends on your preferences. For a 40 MB database
-file the timings on a 2.3 GHz Core Duo are as follows (time in seconds):
-
-1.3 : load data from file to string
-6.1 : convert string into tree
-5.3 : saving in file using xmlsave
-6.8 : converting to string using xml.tostring
-3.6 : saving converted string in file
-
-Beware, these were timings with the old routine but the measurements will not be that
-much different I guess.
---ldx]]--
-
--- maybe this will move to lxml-xml
-
-local result
-
-local xmlfilehandler = newhandlers {
- name = "file",
- initialize = function(name)
- result = io.open(name,"wb")
- return result
- end,
- finalize = function()
- result:close()
- return true
- end,
- handle = function(...)
- result:write(...)
- end,
-}
-
--- no checking on writeability here but not faster either
---
--- local xmlfilehandler = newhandlers {
--- initialize = function(name)
--- io.output(name,"wb")
--- return true
--- end,
--- finalize = function()
--- io.close()
--- return true
--- end,
--- handle = io.write,
--- }
-
-function xml.save(root,name)
- serialize(root,xmlfilehandler,name)
-end
-
-local result
-
-local xmlstringhandler = newhandlers {
- name = "string",
- initialize = function()
- result = { }
- return result
- end,
- finalize = function()
- return concat(result)
- end,
- handle = function(...)
- result[#result+1] = concat { ... }
- end,
-}
-
-local function xmltostring(root) -- 25% overhead due to collecting
- if not root then
- return ""
- elseif type(root) == "string" then
- return root
- else -- if next(root) then -- next is faster than type (and >0 test)
- return serialize(root,xmlstringhandler) or ""
- end
-end
-
-local function __tostring(root) -- inline
- return (root and xmltostring(root)) or ""
-end
-
-initialize_mt = function(root) -- redefinition
- mt = { __tostring = __tostring, __index = root }
-end
-
-xml.defaulthandlers = handlers
-xml.newhandlers = newhandlers
-xml.serialize = serialize
-xml.tostring = xmltostring
-
---[[ldx--
-The next function operates on the content only and needs a handle function
-that accepts a string.
---ldx]]--
-
-local function xmlstring(e,handle)
- if not handle or (e.special and e.tg ~= "@rt@") then
- -- nothing
- elseif e.tg then
- local edt = e.dt
- if edt then
- for i=1,#edt do
- xmlstring(edt[i],handle)
- end
- end
- else
- handle(e)
- end
-end
-
-xml.string = xmlstring
-
---[[ldx--
-A few helpers:
---ldx]]--
-
---~ xmlsetproperty(root,"settings",settings)
-
-function xml.settings(e)
- while e do
- local s = e.settings
- if s then
- return s
- else
- e = e.__p__
- end
- end
- return nil
-end
-
-function xml.root(e)
- local r = e
- while e do
- e = e.__p__
- if e then
- r = e
- end
- end
- return r
-end
-
-function xml.parent(root)
- return root.__p__
-end
-
-function xml.body(root)
- return root.ri and root.dt[root.ri] or root -- not ok yet
-end
-
-function xml.name(root)
- if not root then
- return ""
- end
- local ns = root.ns
- local tg = root.tg
- if ns == "" then
- return tg
- else
- return ns .. ":" .. tg
- end
-end
-
---[[ldx--
-The next helper erases an element but keeps the table as it is,
-and since empty strings are effectively not serialized it does
-no harm. Copying the table would take more time. Usage:
---ldx]]--
-
-function xml.erase(dt,k)
- if dt then
- if k then
- dt[k] = ""
- else for k=1,#dt do
- dt[1] = { "" }
- end end
- end
-end
-
---[[ldx--
-The next helper assigns a tree (or string). Usage:
-
-dt[k] = xml.assign(root) or xml.assign(dt,k,root)
---ldx]]--
-
-function xml.assign(dt,k,root)
- if dt and k then
- dt[k] = type(root) == "table" and xml.body(root) or root
- return dt[k]
- else
- return xml.body(root)
- end
-end
-
--- the following helpers may move
-
---[[ldx--
-The next helper wraps the content of an element in a cdata section. Usage:
-
-xml.tocdata(e)
-xml.tocdata(e,"error")
---ldx]]--
-
-function xml.tocdata(e,wrapper) -- a few more in the aux module
- local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
- if wrapper then
- whatever = formatters["<%s>%s</%s>"](wrapper,whatever,wrapper)
- end
- local t = { special = true, ns = "", tg = "@cd@", at = { }, rn = "", dt = { whatever }, __p__ = e }
- setmetatable(t,getmetatable(e))
- e.dt = { t }
-end
-
-function xml.makestandalone(root)
- if root.ri then
- local dt = root.dt
- for k=1,#dt do
- local v = dt[k]
- if type(v) == "table" and v.special and v.tg == "@pi@" then
- local txt = v.dt[1]
- if find(txt,"xml.*version=") then
- v.dt[1] = txt .. " standalone='yes'"
- break
- end
- end
- end
- end
- return root
-end
-
-function xml.kind(e)
- local dt = e and e.dt
- if dt then
- local n = #dt
- if n == 1 then
- local d = dt[1]
- if d.special then
- local tg = d.tg
- if tg == "@cd@" then
- return "cdata"
- elseif tg == "@cm" then
- return "comment"
- elseif tg == "@pi@" then
- return "instruction"
- elseif tg == "@dt@" then
- return "declaration"
- end
- elseif type(d) == "string" then
- return "text"
- end
- return "element"
- elseif n > 0 then
- return "mixed"
- end
- end
- return "empty"
-end
+if not modules then modules = { } end modules ['lxml-tab'] = {
+ version = 1.001,
+ comment = "this module is the basis for the lxml-* ones",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- this module needs a cleanup: check latest lpeg, passing args, (sub)grammar, etc etc
+-- stripping spaces from e.g. cont-en.xml saves .2 sec runtime so it's not worth the
+-- trouble
+
+-- todo: when serializing optionally remap named entities to hex (if known in char-ent.lua)
+-- maybe when letter -> utf, else name .. then we need an option to the serializer .. a bit
+-- of work so we delay this till we cleanup
+
+local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
+
+local report_xml = logs and logs.reporter("xml","core") or function(...) print(string.format(...)) end
+
+--[[ldx--
+The parser used here is inspired by the variant discussed in the lua book, but
+handles comments and processing instructions, has a different structure, provides
+parent access; a first version used different trickery but was less optimized so we
+went this route. First we had a find based parser, now we have an lpeg based one.
+The find based parser can be found in l-xml-edu.lua along with other older code.
+
+
+Beware, the interface may change. For instance at, ns, tg, dt may get more
+verbose names. Once the code is stable we will also remove some tracing and
+optimize the code.
+
+
+I might even decide to reimplement the parser using the latest lpeg trickery
+as the current variant was written when lpeg showed up and it's easier now to
+build tables in one go.
+
+First a hack to enable namespace resolving. A namespace is characterized by
+a url. The following function associates a namespace prefix with a
+pattern. We use lpeg, which in this case is more than twice as fast as a
+find based solution where we loop over an array of patterns. Less code and
+much cleaner.
+
+The next function associates a namespace prefix with a url. This
+normally happens independent of parsing.
+
+
+xml.registerns("mml","mathml")
+
+--ldx]]--
+
+function xml.registerns(namespace, pattern) -- pattern can be an lpeg
+ check = check + C(P(lower(pattern))) / namespace
+ parse = P { P(check) + 1 * V(1) }
+end
+
+--[[ldx--
+The next function also registers a namespace, but this time we map a
+given namespace prefix onto a registered one, using the given
+url. This is used for attributes like xmlns:m.
+
+
+xml.checkns("m","http://www.w3.org/mathml")
+
+--ldx]]--
+
+function xml.checkns(namespace,url)
+ local ns = lpegmatch(parse,lower(url))
+ if ns and namespace ~= ns then
+ xml.xmlns[namespace] = ns
+ end
+end
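+
+-- A small usage sketch: register a pattern for the MathML namespace and then
+-- remap a document prefix onto it (the url below is just an illustrative value):
+--
+-- xml.registerns("mml","mathml")
+-- xml.checkns("m","http://www.w3.org/1998/Math/MathML")
+-- print(xml.xmlns.m) -- presumably "mml", since the lowercased url contains "mathml"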
+
+--[[ldx--
+Next we provide a way to turn a url into a registered
+namespace. This is used for the xmlns attribute.
+
+A namespace in an element can be remapped onto the registered
+one efficiently by using the xml.xmlns table.
+--ldx]]--
+
+--[[ldx--
+This version uses lpeg. We follow the same approach as before, stack and top and
+such. This version is about twice as fast which is mostly due to the fact that
+we don't have to prepare the stream for cdata, doctype etc etc. This variant
+is dedicated to Luigi Scarso, who challenged me with 40 megabyte files that
+took 12.5 seconds to load (1.5 for file io and the rest for tree building). With
+the lpeg implementation we got that down to less than 7.3 seconds. Loading the 14
+ConTeXt interface definition files (2.6 meg) went down from 1.05 seconds to 0.55.
+
+Next comes the parser. The rather messy doctype definition comes in many
+disguises so it is no surprise that later on we have to dedicate quite some
+lpeg code to it.
+
+
+The code may look a bit complex but this is mostly due to the fact that we
+resolve namespaces and attach metatables. There is only one public function:
+
+
+local x = xml.convert(somestring)
+
+
+
+An optional second argument (a settings table, for instance with no_root set)
+tells this function not to create a root element.
+
+
+Valid entities are named entities (&name;), decimal ones (&#...;) and
+hexadecimal ones (&#x...;).
+
+--ldx]]--
+
+-- not just one big nested table capture (lpeg overflow)
+
+local nsremap, resolvens = xml.xmlns, xml.resolvens
+
+local stack = { }
+local top = { }
+local dt = { }
+local at = { }
+local xmlns = { }
+local errorstr = nil
+local entities = { }
+local strip = false
+local cleanup = false
+local utfize = false
+local resolve_predefined = false
+local unify_predefined = false
+
+local dcache = { }
+local hcache = { }
+local acache = { }
+
+local mt = { }
+
+local function initialize_mt(root)
+ mt = { __index = root } -- will be redefined later
+end
+
+function xml.setproperty(root,k,v)
+ getmetatable(root).__index[k] = v
+end
+
+function xml.checkerror(top,toclose)
+ return "" -- can be set
+end
+
+local function add_attribute(namespace,tag,value)
+ if cleanup and #value > 0 then
+ value = cleanup(value) -- new
+ end
+ if tag == "xmlns" then
+ xmlns[#xmlns+1] = resolvens(value)
+ at[tag] = value
+ elseif namespace == "" then
+ at[tag] = value
+ elseif namespace == "xmlns" then
+ xml.checkns(tag,value)
+ at["xmlns:" .. tag] = value
+ else
+ -- for the moment this way:
+ at[namespace .. ":" .. tag] = value
+ end
+end
+
+local function add_empty(spacing, namespace, tag)
+ if #spacing > 0 then
+ dt[#dt+1] = spacing
+ end
+ local resolved = namespace == "" and xmlns[#xmlns] or nsremap[namespace] or namespace
+ top = stack[#stack]
+ dt = top.dt
+ local t = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = top }
+ dt[#dt+1] = t
+ setmetatable(t, mt)
+ if at.xmlns then
+ remove(xmlns)
+ end
+ at = { }
+end
+
+local function add_begin(spacing, namespace, tag)
+ if #spacing > 0 then
+ dt[#dt+1] = spacing
+ end
+ local resolved = namespace == "" and xmlns[#xmlns] or nsremap[namespace] or namespace
+ top = { ns=namespace or "", rn=resolved, tg=tag, at=at, dt={}, __p__ = stack[#stack] }
+ setmetatable(top, mt)
+ dt = top.dt
+ stack[#stack+1] = top
+ at = { }
+end
+
+local function add_end(spacing, namespace, tag)
+ if #spacing > 0 then
+ dt[#dt+1] = spacing
+ end
+ local toclose = remove(stack)
+ top = stack[#stack]
+ if #stack < 1 then
+ errorstr = formatters["unable to close %s %s"](tag,xml.checkerror(top,toclose) or "")
+ elseif toclose.tg ~= tag then -- no namespace check
+ errorstr = formatters["unable to close %s with %s %s"](toclose.tg,tag,xml.checkerror(top,toclose) or "")
+ end
+ dt = top.dt
+ dt[#dt+1] = toclose
+ -- dt[0] = top -- nasty circular reference when serializing table
+ if toclose.at.xmlns then
+ remove(xmlns)
+ end
+end
+
+local function add_text(text)
+ if cleanup and #text > 0 then
+ dt[#dt+1] = cleanup(text)
+ else
+ dt[#dt+1] = text
+ end
+end
+
+local function add_special(what, spacing, text)
+ if #spacing > 0 then
+ dt[#dt+1] = spacing
+ end
+ if strip and (what == "@cm@" or what == "@dt@") then
+ -- forget it
+ else
+ dt[#dt+1] = { special=true, ns="", tg=what, dt={ text } }
+ end
+end
+
+local function set_message(txt)
+ errorstr = "garbage at the end of the file: " .. gsub(txt,"([ \n\r\t]*)","")
+end
+
+local reported_attribute_errors = { }
+
+local function attribute_value_error(str)
+ if not reported_attribute_errors[str] then
+ report_xml("invalid attribute value %a",str)
+ reported_attribute_errors[str] = true
+ at._error_ = str
+ end
+ return str
+end
+
+local function attribute_specification_error(str)
+ if not reported_attribute_errors[str] then
+ report_xml("invalid attribute specification %a",str)
+ reported_attribute_errors[str] = true
+ at._error_ = str
+ end
+ return str
+end
+
+xml.placeholders = {
+ unknown_dec_entity = function(str) return str == "" and "&error;" or formatters["&%s;"](str) end,
+ unknown_hex_entity = function(str) return formatters["&#x%s;"](str) end,
+ unknown_any_entity = function(str) return formatters["&#x%s;"](str) end,
+}
+
+local placeholders = xml.placeholders
+
+local function fromhex(s)
+ local n = tonumber(s,16)
+ if n then
+ return utfchar(n)
+ else
+ return formatters["h:%s"](s), true
+ end
+end
+
+local function fromdec(s)
+ local n = tonumber(s)
+ if n then
+ return utfchar(n)
+ else
+ return formatters["d:%s"](s), true
+ end
+end
+
+-- one level expansion (simple case), no checking done
+
+local rest = (1-P(";"))^0
+local many = P(1)^0
+
+local parsedentity =
+ P("&") * (P("#x")*(rest/fromhex) + P("#")*(rest/fromdec)) * P(";") * P(-1) +
+ (P("#x")*(many/fromhex) + P("#")*(many/fromdec))
+
+-- parsing in the xml file
+
+local predefined_unified = {
+ [38] = "&",
+ [42] = """,
+ [47] = "'",
+ [74] = "<",
+ [76] = ">",
+}
+
+local predefined_simplified = {
+ [38] = "&", amp = "&",
+ [42] = '"', quot = '"',
+ [47] = "'", apos = "'",
+ [74] = "<", lt = "<",
+ [76] = ">", gt = ">",
+}
+
+local nofprivates = 0xF0000 -- shared but seldom used
+
+local privates_u = { -- unescaped
+ [ [[&]] ] = "&",
+ [ [["]] ] = """,
+ [ [[']] ] = "'",
+ [ [[<]] ] = "<",
+ [ [[>]] ] = ">",
+}
+
+local privates_p = {
+}
+
+local privates_n = {
+ -- keeps track of defined ones
+}
+
+local escaped = utf.remapper(privates_u)
+
+local function unescaped(s)
+ local p = privates_n[s]
+ if not p then
+ nofprivates = nofprivates + 1
+ p = utfchar(nofprivates)
+ privates_n[s] = p
+ s = "&" .. s .. ";" -- todo: use char-ent to map to hex
+ privates_u[p] = s
+ privates_p[p] = s
+ end
+ return p
+end
+
+local unprivatized = utf.remapper(privates_p)
+
+xml.privatetoken = unescaped
+xml.unprivatized = unprivatized
+xml.privatecodes = privates_n
+
+local function handle_hex_entity(str)
+ local h = hcache[str]
+ if not h then
+ local n = tonumber(str,16)
+ h = unify_predefined and predefined_unified[n]
+ if h then
+ if trace_entities then
+ report_xml("utfize, converting hex entity %s; into %a",str,h)
+ end
+ elseif utfize then
+ h = (n and utfchar(n)) or xml.unknown_hex_entity(str) or ""
+ if not n then
+ report_xml("utfize, ignoring hex entity %s;",str)
+ elseif trace_entities then
+ report_xml("utfize, converting hex entity %s; into %a",str,h)
+ end
+ else
+ if trace_entities then
+ report_xml("found entity %s;",str)
+ end
+ h = "" .. str .. ";"
+ end
+ hcache[str] = h
+ end
+ return h
+end
+
+local function handle_dec_entity(str)
+ local d = dcache[str]
+ if not d then
+ local n = tonumber(str)
+ d = unify_predefined and predefined_unified[n]
+ if d then
+ if trace_entities then
+ report_xml("utfize, converting dec entity %s; into %a",str,d)
+ end
+ elseif utfize then
+ d = (n and utfchar(n)) or placeholders.unknown_dec_entity(str) or ""
+ if not n then
+ report_xml("utfize, ignoring dec entity %s;",str)
+ elseif trace_entities then
+ report_xml("utfize, converting dec entity %s; into %a",str,d)
+ end
+ else
+ if trace_entities then
+ report_xml("found entity %s;",str)
+ end
+ d = "" .. str .. ";"
+ end
+ dcache[str] = d
+ end
+ return d
+end
+
+xml.parsedentitylpeg = parsedentity
+
+local function handle_any_entity(str)
+ if resolve then
+ local a = acache[str] -- per instance ! todo
+ if not a then
+ a = resolve_predefined and predefined_simplified[str]
+ if a then
+ if trace_entities then
+ report_xml("resolving entity &%s; to predefined %a",str,a)
+ end
+ else
+ if type(resolve) == "function" then
+ a = resolve(str) or entities[str]
+ else
+ a = entities[str]
+ end
+ if a then
+ if type(a) == "function" then
+ if trace_entities then
+ report_xml("expanding entity &%s; to function call",str)
+ end
+ a = a(str) or ""
+ end
+ a = lpegmatch(parsedentity,a) or a -- for nested
+ if trace_entities then
+ report_xml("resolving entity &%s; to internal %a",str,a)
+ end
+ else
+ local unknown_any_entity = placeholders.unknown_any_entity
+ if unknown_any_entity then
+ a = unknown_any_entity(str) or ""
+ end
+ if a then
+ if trace_entities then
+ report_xml("resolving entity &%s; to external %s",str,a)
+ end
+ else
+ if trace_entities then
+ report_xml("keeping entity &%s;",str)
+ end
+ if str == "" then
+ a = "&error;"
+ else
+ a = "&" .. str .. ";"
+ end
+ end
+ end
+ end
+ acache[str] = a
+ elseif trace_entities then
+ if not acache[str] then
+ report_xml("converting entity &%s; to %a",str,a)
+ acache[str] = a
+ end
+ end
+ return a
+ else
+ local a = acache[str]
+ if not a then
+ a = resolve_predefined and predefined_simplified[str]
+ if a then
+ -- one of the predefined
+ acache[str] = a
+ if trace_entities then
+ report_xml("entity &%s; becomes %a",str,a)
+ end
+ elseif str == "" then
+ if trace_entities then
+ report_xml("invalid entity &%s;",str)
+ end
+ a = "&error;"
+ acache[str] = a
+ else
+ if trace_entities then
+ report_xml("entity &%s; is made private",str)
+ end
+ -- a = "&" .. str .. ";"
+ a = unescaped(str)
+ acache[str] = a
+ end
+ end
+ return a
+ end
+end
+
+local function handle_end_entity(chr)
+ report_xml("error in entity, %a found instead of %a",chr,";")
+end
+
+local space = S(' \r\n\t')
+local open = P('<')
+local close = P('>')
+local squote = S("'")
+local dquote = S('"')
+local equal = P('=')
+local slash = P('/')
+local colon = P(':')
+local semicolon = P(';')
+local ampersand = P('&')
+local valid = R('az', 'AZ', '09') + S('_-.')
+local name_yes = C(valid^1) * colon * C(valid^1)
+local name_nop = C(P(true)) * C(valid^1)
+local name = name_yes + name_nop
+local utfbom = lpeg.patterns.utfbom -- no capture
+local spacing = C(space^0)
+
+----- entitycontent = (1-open-semicolon)^0
+local anyentitycontent = (1-open-semicolon-space-close)^0
+local hexentitycontent = R("AF","af","09")^0
+local decentitycontent = R("09")^0
+local parsedentity = P("#")/"" * (
+ P("x")/"" * (hexentitycontent/handle_hex_entity) +
+ (decentitycontent/handle_dec_entity)
+ ) + (anyentitycontent/handle_any_entity)
+local entity = ampersand/"" * parsedentity * ( (semicolon/"") + #(P(1)/handle_end_entity))
+
+local text_unparsed = C((1-open)^1)
+local text_parsed = Cs(((1-open-ampersand)^1 + entity)^1)
+
+local somespace = space^1
+local optionalspace = space^0
+
+----- value = (squote * C((1 - squote)^0) * squote) + (dquote * C((1 - dquote)^0) * dquote) -- ampersand and < also invalid in value
+local value = (squote * Cs((entity + (1 - squote))^0) * squote) + (dquote * Cs((entity + (1 - dquote))^0) * dquote) -- ampersand and < also invalid in value
+
+local endofattributes = slash * close + close -- recovery of flaky html
+local whatever = space * name * optionalspace * equal
+----- wrongvalue = C(P(1-whatever-close)^1 + P(1-close)^1) / attribute_value_error
+----- wrongvalue = C(P(1-whatever-endofattributes)^1 + P(1-endofattributes)^1) / attribute_value_error
+----- wrongvalue = C(P(1-space-endofattributes)^1) / attribute_value_error
+local wrongvalue = Cs(P(entity + (1-space-endofattributes))^1) / attribute_value_error
+
+local attributevalue = value + wrongvalue
+
+local attribute = (somespace * name * optionalspace * equal * optionalspace * attributevalue) / add_attribute
+----- attributes = (attribute)^0
+
+local attributes = (attribute + somespace^-1 * (((1-endofattributes)^1)/attribute_specification_error))^0
+
+local parsedtext = text_parsed / add_text
+local unparsedtext = text_unparsed / add_text
+local balanced = P { "[" * ((1 - S"[]") + V(1))^0 * "]" } -- taken from lpeg manual, () example
+
+local emptyelement = (spacing * open * name * attributes * optionalspace * slash * close) / add_empty
+local beginelement = (spacing * open * name * attributes * optionalspace * close) / add_begin
+local endelement = (spacing * open * slash * name * optionalspace * close) / add_end
+
+local begincomment = open * P("!--")
+local endcomment = P("--") * close
+local begininstruction = open * P("?")
+local endinstruction = P("?") * close
+local begincdata = open * P("![CDATA[")
+local endcdata = P("]]") * close
+
+local someinstruction = C((1 - endinstruction)^0)
+local somecomment = C((1 - endcomment )^0)
+local somecdata = C((1 - endcdata )^0)
+
+local function normalentity(k,v ) entities[k] = v end
+local function systementity(k,v,n) entities[k] = v end
+local function publicentity(k,v,n) entities[k] = v end
+
+-- todo: separate dtd parser
+
+local begindoctype = open * P("!DOCTYPE")
+local enddoctype = close
+local beginset = P("[")
+local endset = P("]")
+local doctypename = C((1-somespace-close)^0)
+local elementdoctype = optionalspace * P("<!ELEMENT") * (1-close)^0 * close
+
+-- (the remaining doctype and entity declaration patterns and the two document
+-- grammars, grammar_parsed_text and grammar_unparsed_text, are built here)
+
+local function _xmlconvert_(data,settings)
+ settings = settings or { } -- no_root strip_cm_and_dt utfize_entities ...
+ strip = settings.strip_cm_and_dt
+ utfize = settings.utfize_entities
+ resolve = settings.resolve_entities
+ resolve_predefined = settings.resolve_predefined_entities
+ unify_predefined = settings.unify_predefined_entities
+ cleanup = settings.text_cleanup
+ entities = settings.entities or { }
+ --
+ if utfize == nil then
+ settings.utfize_entities = true
+ utfize = true
+ end
+ if resolve_predefined == nil then
+ settings.resolve_predefined_entities = true
+ resolve_predefined = true
+ end
+ --
+ stack, top, at, xmlns, errorstr = { }, { }, { }, { }, nil
+ acache, hcache, dcache = { }, { }, { } -- not stored
+ reported_attribute_errors = { }
+ if settings.parent_root then
+ mt = getmetatable(settings.parent_root)
+ else
+ initialize_mt(top)
+ end
+ stack[#stack+1] = top
+ top.dt = { }
+ dt = top.dt
+ if not data or data == "" then
+ errorstr = "empty xml file"
+ elseif utfize or resolve then
+ if lpegmatch(grammar_parsed_text,data) then
+ errorstr = ""
+ else
+ errorstr = "invalid xml file - parsed text"
+ end
+ elseif type(data) == "string" then
+ if lpegmatch(grammar_unparsed_text,data) then
+ errorstr = ""
+ else
+ errorstr = "invalid xml file - unparsed text"
+ end
+ else
+ errorstr = "invalid xml file - no text at all"
+ end
+ local result
+ if errorstr and errorstr ~= "" then
+ result = { dt = { { ns = "", tg = "error", dt = { errorstr }, at={ }, er = true } } }
+ setmetatable(stack, mt)
+ local errorhandler = settings.error_handler
+ if errorhandler == false then
+ -- no error message
+ else
+ errorhandler = errorhandler or xml.errorhandler
+ if errorhandler then
+ local currentresource = settings.currentresource
+ if currentresource and currentresource ~= "" then
+ xml.errorhandler(formatters["load error in [%s]: %s"](currentresource,errorstr))
+ else
+ xml.errorhandler(formatters["load error: %s"](errorstr))
+ end
+ end
+ end
+ else
+ result = stack[1]
+ end
+ if not settings.no_root then
+ result = { special = true, ns = "", tg = '@rt@', dt = result.dt, at={ }, entities = entities, settings = settings }
+ setmetatable(result, mt)
+ local rdt = result.dt
+ for k=1,#rdt do
+ local v = rdt[k]
+ if type(v) == "table" and not v.special then -- always table -)
+ result.ri = k -- rootindex
+ v.__p__ = result -- new, experiment, else we cannot go back to settings, we need to test this !
+ break
+ end
+ end
+ end
+ if errorstr and errorstr ~= "" then
+ result.error = true
+ end
+ result.statistics = {
+ entities = {
+ decimals = dcache,
+ hexadecimals = hcache,
+ names = acache,
+ }
+ }
+ strip, utfize, resolve, resolve_predefined = nil, nil, nil, nil
+ unify_predefined, cleanup, entities = nil, nil, nil
+ stack, top, at, xmlns, errorstr = nil, nil, nil, nil, nil
+ acache, hcache, dcache = nil, nil, nil
+ reported_attribute_errors, mt, errorhandler = nil, nil, nil
+ return result
+end
+
+-- Because we can have a crash (stack issues) with faulty xml, we wrap this one
+-- in a protector:
+
+function xmlconvert(data,settings)
+ local ok, result = pcall(function() return _xmlconvert_(data,settings) end)
+ if ok then
+ return result
+ else
+ return _xmlconvert_("",settings)
+ end
+end
+
+xml.convert = xmlconvert
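+
+-- Usage sketch: convert a string and inspect the outcome; xml.is_valid and
+-- xml.tostring are defined further down in this file.
+--
+-- local root = xml.convert("<a><b>text</b></a>")
+-- if xml.is_valid(root) then
+--     print(xml.tostring(root))
+-- end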
+
+function xml.inheritedconvert(data,xmldata) -- xmldata is parent
+ local settings = xmldata.settings
+ if settings then
+ settings.parent_root = xmldata -- to be tested
+ end
+ -- settings.no_root = true
+ local xc = xmlconvert(data,settings) -- hm, we might need to locate settings
+ -- xc.settings = nil
+ -- xc.entities = nil
+ -- xc.special = nil
+ -- xc.ri = nil
+ -- print(xc.tg)
+ return xc
+end
+
+--[[ldx--
+Packaging data in an xml like table is done with the following
+function. Maybe it will go away (when not used).
+--ldx]]--
+
+function xml.is_valid(root)
+ return root and root.dt and root.dt[1] and type(root.dt[1]) == "table" and not root.dt[1].er
+end
+
+function xml.package(tag,attributes,data)
+ local ns, tg = match(tag,"^(.-):?([^:]+)$")
+ local t = { ns = ns, tg = tg, dt = data or "", at = attributes or {} }
+ setmetatable(t, mt)
+ return t
+end
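+
+-- Usage sketch (the tag and attribute values are hypothetical):
+--
+-- local e = xml.package("m:math",{ display = "block" },{ })
+-- print(e.ns,e.tg) -- m math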
+
+function xml.is_valid(root)
+ return root and not root.error
+end
+
+xml.errorhandler = report_xml
+
+--[[ldx--
+We cannot load an xml file from a filehandle so we need to load
+the whole file first. The function accepts a string representing
+a filename or a file handle.
+--ldx]]--
+
+function xml.load(filename,settings)
+ local data = ""
+ if type(filename) == "string" then
+ -- local data = io.loaddata(filename) - -todo: check type in io.loaddata
+ local f = io.open(filename,'r') -- why not 'rb'
+ if f then
+ data = f:read("*all") -- io.readall(f) ... only makes sense for large files
+ f:close()
+ end
+ elseif filename then -- filehandle
+ data = filename:read("*all") -- io.readall(f) ... only makes sense for large files
+ end
+ if settings then
+ settings.currentresource = filename
+ local result = xmlconvert(data,settings)
+ settings.currentresource = nil
+ return result
+ else
+ return xmlconvert(data,{ currentresource = filename })
+ end
+end
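+
+-- Usage sketch (the filename is hypothetical):
+--
+-- local root = xml.load("somefile.xml")
+-- print(xml.name(xml.body(root))) -- xml.body and xml.name are defined later in this file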
+
+--[[ldx--
+When we inject new elements, we need to convert strings to
+valid trees, which is what the next function does.
+--ldx]]--
+
+local no_root = { no_root = true }
+
+function xml.toxml(data)
+ if type(data) == "string" then
+ local root = { xmlconvert(data,no_root) }
+ return (#root > 1 and root) or root[1]
+ else
+ return data
+ end
+end
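+
+-- Usage sketch: strings are converted (without an extra root element), tables
+-- are passed through unchanged.
+--
+-- local e = xml.toxml("<extra>injected</extra>")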
+
+--[[ldx--
+For copying a tree we use a dedicated function instead of the
+generic table copier. Since we know what we're dealing with we
+can speed up things a bit. The second argument is not to be used!
+--ldx]]--
+
+local function copy(old,tables)
+ if old then
+ tables = tables or { }
+ local new = { }
+ if not tables[old] then
+ tables[old] = new
+ end
+ for k,v in next, old do
+ new[k] = (type(v) == "table" and (tables[v] or copy(v, tables))) or v
+ end
+ local mt = getmetatable(old)
+ if mt then
+ setmetatable(new,mt)
+ end
+ return new
+ else
+ return { }
+ end
+end
+
+xml.copy = copy
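+
+-- Usage sketch: a recursive copy that preserves metatables; the second argument
+-- is internal bookkeeping (already visited tables) and should not be passed.
+--
+-- local duplicate = xml.copy(root)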
+
+--[[ldx--
+Serializing the tree or parts of the tree is a major
+activity which is why the following function is pretty optimized, resulting
+in a few more lines of code than needed. The variant that uses the formatting
+function for all components is about 15% slower than the concatenating
+alternative.
+--ldx]]--
+
+-- todo: add when not present
+
+function xml.checkbom(root) -- can be made faster
+ if root.ri then
+ local dt = root.dt
+ for k=1,#dt do
+ local v = dt[k]
+ if type(v) == "table" and v.special and v.tg == "@pi@" and find(v.dt[1],"xml.*version=") then
+ return
+ end
+ end
+ insert(dt, 1, { special = true, ns = "", tg = "@pi@", dt = { "xml version='1.0' standalone='yes'" } } )
+ insert(dt, 2, "\n" )
+ end
+end
+
+--[[ldx--
+At the cost of some 25% runtime overhead you can first convert the tree to a string
+and then handle the lot.
+--ldx]]--
+
+-- new experimental reorganized serialize
+
+local function verbose_element(e,handlers) -- options
+ local handle = handlers.handle
+ local serialize = handlers.serialize
+ local ens, etg, eat, edt, ern = e.ns, e.tg, e.at, e.dt, e.rn
+ local ats = eat and next(eat) and { }
+ if ats then
+ for k,v in next, eat do
+ ats[#ats+1] = formatters['%s=%q'](k,escaped(v))
+ end
+ end
+ if ern and trace_entities and ern ~= ens then
+ ens = ern
+ end
+ if ens ~= "" then
+ if edt and #edt > 0 then
+ if ats then
+ handle("<",ens,":",etg," ",concat(ats," "),">")
+ else
+ handle("<",ens,":",etg,">")
+ end
+ for i=1,#edt do
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(escaped(e))
+ else
+ serialize(e,handlers)
+ end
+ end
+ handle("",ens,":",etg,">")
+ else
+ if ats then
+ handle("<",ens,":",etg," ",concat(ats," "),"/>")
+ else
+ handle("<",ens,":",etg,"/>")
+ end
+ end
+ else
+ if edt and #edt > 0 then
+ if ats then
+ handle("<",etg," ",concat(ats," "),">")
+ else
+ handle("<",etg,">")
+ end
+ for i=1,#edt do
+ local e = edt[i]
+ if type(e) == "string" then
+ handle(escaped(e)) -- option: hexify escaped entities
+ else
+ serialize(e,handlers)
+ end
+ end
+ handle("",etg,">")
+ else
+ if ats then
+ handle("<",etg," ",concat(ats," "),"/>")
+ else
+ handle("<",etg,"/>")
+ end
+ end
+ end
+end
+
+local function verbose_pi(e,handlers)
+ handlers.handle("",e.dt[1],"?>")
+end
+
+local function verbose_comment(e,handlers)
+ handlers.handle("")
+end
+
+local function verbose_cdata(e,handlers)
+ handlers.handle("")
+end
+
+local function verbose_doctype(e,handlers)
+ handlers.handle("")
+end
+
+local function verbose_root(e,handlers)
+ handlers.serialize(e.dt,handlers)
+end
+
+local function verbose_text(e,handlers)
+ handlers.handle(escaped(e))
+end
+
+local function verbose_document(e,handlers)
+ local serialize = handlers.serialize
+ local functions = handlers.functions
+ for i=1,#e do
+ local ei = e[i]
+ if type(ei) == "string" then
+ functions["@tx@"](ei,handlers)
+ else
+ serialize(ei,handlers)
+ end
+ end
+end
+
+local function serialize(e,handlers,...)
+ local initialize = handlers.initialize
+ local finalize = handlers.finalize
+ local functions = handlers.functions
+ if initialize then
+ local state = initialize(...)
+ if not state == true then
+ return state
+ end
+ end
+ local etg = e.tg
+ if etg then
+ (functions[etg] or functions["@el@"])(e,handlers)
+ -- elseif type(e) == "string" then
+ -- functions["@tx@"](e,handlers)
+ else
+ functions["@dc@"](e,handlers) -- dc ?
+ end
+ if finalize then
+ return finalize()
+ end
+end
+
+local function xserialize(e,handlers)
+ local functions = handlers.functions
+ local etg = e.tg
+ if etg then
+ (functions[etg] or functions["@el@"])(e,handlers)
+ -- elseif type(e) == "string" then
+ -- functions["@tx@"](e,handlers)
+ else
+ functions["@dc@"](e,handlers)
+ end
+end
+
+local handlers = { }
+
+local function newhandlers(settings)
+ local t = table.copy(handlers[settings and settings.parent or "verbose"] or { }) -- merge
+ if settings then
+ for k,v in next, settings do
+ if type(v) == "table" then
+ local tk = t[k] if not tk then tk = { } t[k] = tk end
+ for kk,vv in next, v do
+ tk[kk] = vv
+ end
+ else
+ t[k] = v
+ end
+ end
+ if settings.name then
+ handlers[settings.name] = t
+ end
+ end
+ utilities.storage.mark(t)
+ return t
+end
+
+local nofunction = function() end
+
+function xml.sethandlersfunction(handler,name,fnc)
+ handler.functions[name] = fnc or nofunction
+end
+
+function xml.gethandlersfunction(handler,name)
+ return handler.functions[name]
+end
+
+function xml.gethandlers(name)
+ return handlers[name]
+end
+
+newhandlers {
+ name = "verbose",
+ initialize = false, -- faster than nil and mt lookup
+ finalize = false, -- faster than nil and mt lookup
+ serialize = xserialize,
+ handle = print,
+ functions = {
+ ["@dc@"] = verbose_document,
+ ["@dt@"] = verbose_doctype,
+ ["@rt@"] = verbose_root,
+ ["@el@"] = verbose_element,
+ ["@pi@"] = verbose_pi,
+ ["@cm@"] = verbose_comment,
+ ["@cd@"] = verbose_cdata,
+ ["@tx@"] = verbose_text,
+ }
+}
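+
+-- Sketch of a derived handler set: inherit from "verbose" but collect the
+-- output in a table instead of printing it (someroot stands for any converted
+-- xml tree).
+--
+-- local collected = { }
+-- local collector = newhandlers {
+--     name   = "collector",
+--     parent = "verbose",
+--     handle = function(...) collected[#collected+1] = concat { ... } end,
+-- }
+-- serialize(someroot,collector)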
+
+--[[ldx--
+How you deal with saving data depends on your preferences. For a 40 MB database
+file the timings on a 2.3 GHz Core Duo are as follows (time in seconds):
+
+1.3 : load data from file to string
+6.1 : convert string into tree
+5.3 : saving in file using xmlsave
+6.8 : converting to string using xml.tostring
+3.6 : saving converted string in file
+
+Beware, these were timings with the old routine but the measurements will not be that
+much different I guess.
+--ldx]]--
+
+-- maybe this will move to lxml-xml
+
+local result
+
+local xmlfilehandler = newhandlers {
+ name = "file",
+ initialize = function(name)
+ result = io.open(name,"wb")
+ return result
+ end,
+ finalize = function()
+ result:close()
+ return true
+ end,
+ handle = function(...)
+ result:write(...)
+ end,
+}
+
+-- no checking on writeability here but not faster either
+--
+-- local xmlfilehandler = newhandlers {
+-- initialize = function(name)
+-- io.output(name,"wb")
+-- return true
+-- end,
+-- finalize = function()
+-- io.close()
+-- return true
+-- end,
+-- handle = io.write,
+-- }
+
+function xml.save(root,name)
+ serialize(root,xmlfilehandler,name)
+end
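+
+-- Usage sketch (hypothetical filename): serialize a tree straight to a file.
+--
+-- xml.save(root,"export.xml")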
+
+local result
+
+local xmlstringhandler = newhandlers {
+ name = "string",
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
+}
+
+local function xmltostring(root) -- 25% overhead due to collecting
+ if not root then
+ return ""
+ elseif type(root) == "string" then
+ return root
+ else -- if next(root) then -- next is faster than type (and >0 test)
+ return serialize(root,xmlstringhandler) or ""
+ end
+end
+
+local function __tostring(root) -- inline
+ return (root and xmltostring(root)) or ""
+end
+
+initialize_mt = function(root) -- redefinition
+ mt = { __tostring = __tostring, __index = root }
+end
+
+xml.defaulthandlers = handlers
+xml.newhandlers = newhandlers
+xml.serialize = serialize
+xml.tostring = xmltostring
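+
+-- Usage sketch: for a converted tree both calls below give the same string,
+-- because the metatable installed by initialize_mt routes tostring here.
+--
+-- local s = xml.tostring(root)
+-- local t = tostring(root)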
+
+--[[ldx--
+The next function operates on the content only and needs a handle function
+that accepts a string.
+--ldx]]--
+
+local function xmlstring(e,handle)
+ if not handle or (e.special and e.tg ~= "@rt@") then
+ -- nothing
+ elseif e.tg then
+ local edt = e.dt
+ if edt then
+ for i=1,#edt do
+ xmlstring(edt[i],handle)
+ end
+ end
+ else
+ handle(e)
+ end
+end
+
+xml.string = xmlstring
+
+--[[ldx--
+A few helpers:
+--ldx]]--
+
+--~ xmlsetproperty(root,"settings",settings)
+
+function xml.settings(e)
+ while e do
+ local s = e.settings
+ if s then
+ return s
+ else
+ e = e.__p__
+ end
+ end
+ return nil
+end
+
+function xml.root(e)
+ local r = e
+ while e do
+ e = e.__p__
+ if e then
+ r = e
+ end
+ end
+ return r
+end
+
+function xml.parent(root)
+ return root.__p__
+end
+
+function xml.body(root)
+ return root.ri and root.dt[root.ri] or root -- not ok yet
+end
+
+function xml.name(root)
+ if not root then
+ return ""
+ end
+ local ns = root.ns
+ local tg = root.tg
+ if ns == "" then
+ return tg
+ else
+ return ns .. ":" .. tg
+ end
+end
+
+--[[ldx--
+The next helper erases an element but keeps the table as it is,
+and since empty strings are effectively not serialized it does
+no harm. Copying the table would take more time. Usage:
+--ldx]]--
+
+function xml.erase(dt,k)
+ if dt then
+ if k then
+ dt[k] = ""
+ else for k=1,#dt do
+ dt[1] = { "" }
+ end end
+ end
+end
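+
+-- Usage sketch: erase one entry or all of them; the element table itself stays
+-- around so existing references remain valid.
+--
+-- xml.erase(e.dt,3) -- blank out the third entry
+-- xml.erase(e.dt)   -- blank out all entries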
+
+--[[ldx--
+The next helper assigns a tree (or string). Usage:
+
+dt[k] = xml.assign(root) or xml.assign(dt,k,root)
+--ldx]]--
+
+function xml.assign(dt,k,root)
+ if dt and k then
+ dt[k] = type(root) == "table" and xml.body(root) or root
+ return dt[k]
+ else
+ return xml.body(root)
+ end
+end
+
+-- the following helpers may move
+
+--[[ldx--
+The next helper wraps the content of an element in a cdata section. Usage:
+
+xml.tocdata(e)
+xml.tocdata(e,"error")
+--ldx]]--
+
+function xml.tocdata(e,wrapper) -- a few more in the aux module
+ local whatever = type(e) == "table" and xmltostring(e.dt) or e or ""
+ if wrapper then
+ whatever = formatters["<%s>%s</%s>"](wrapper,whatever,wrapper)
+ end
+ local t = { special = true, ns = "", tg = "@cd@", at = { }, rn = "", dt = { whatever }, __p__ = e }
+ setmetatable(t,getmetatable(e))
+ e.dt = { t }
+end
+
+function xml.makestandalone(root)
+ if root.ri then
+ local dt = root.dt
+ for k=1,#dt do
+ local v = dt[k]
+ if type(v) == "table" and v.special and v.tg == "@pi@" then
+ local txt = v.dt[1]
+ if find(txt,"xml.*version=") then
+ v.dt[1] = txt .. " standalone='yes'"
+ break
+ end
+ end
+ end
+ end
+ return root
+end
+
+function xml.kind(e)
+ local dt = e and e.dt
+ if dt then
+ local n = #dt
+ if n == 1 then
+ local d = dt[1]
+ if d.special then
+ local tg = d.tg
+ if tg == "@cd@" then
+ return "cdata"
+ elseif tg == "@cm" then
+ return "comment"
+ elseif tg == "@pi@" then
+ return "instruction"
+ elseif tg == "@dt@" then
+ return "declaration"
+ end
+ elseif type(d) == "string" then
+ return "text"
+ end
+ return "element"
+ elseif n > 0 then
+ return "mixed"
+ end
+ end
+ return "empty"
+end
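+
+-- Usage sketch: classify the content of an element.
+--
+-- local k = xml.kind(e) -- one of "empty", "text", "cdata", "comment",
+--                       -- "instruction", "declaration", "element" or "mixed"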
diff --git a/tex/context/base/lxml-tex.lua b/tex/context/base/lxml-tex.lua
index 112f62751..936a96041 100644
--- a/tex/context/base/lxml-tex.lua
+++ b/tex/context/base/lxml-tex.lua
@@ -1,1686 +1,1686 @@
-if not modules then modules = { } end modules ['lxml-tex'] = {
- version = 1.001,
- comment = "companion to lxml-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- Because we split and resolve entities we use the direct printing
--- interface and not the context one. If we ever do that there will
--- be a cldf-xml helper library.
-
-local utfchar = utf.char
-local concat, insert, remove = table.concat, table.insert, table.remove
-local format, sub, gsub, find, gmatch, match = string.format, string.sub, string.gsub, string.find, string.gmatch, string.match
-local type, next, tonumber, tostring, select = type, next, tonumber, tostring, select
-local lpegmatch = lpeg.match
-local P, S, C, Cc = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc
-
-local tex, xml = tex, xml
-local lowerchars, upperchars, lettered = characters.lower, characters.upper, characters.lettered
-
-lxml = lxml or { }
-local lxml = lxml
-
-local catcodenumbers = catcodes.numbers
-local ctxcatcodes = catcodenumbers.ctxcatcodes -- todo: use different method
-local notcatcodes = catcodenumbers.notcatcodes -- todo: use different method
-
-local context = context
-local contextsprint = context.sprint -- with catcodes (here we use fast variants, but with option for tracing)
-
-local xmlelements, xmlcollected, xmlsetproperty = xml.elements, xml.collected, xml.setproperty
-local xmlwithelements = xml.withelements
-local xmlserialize, xmlcollect, xmltext, xmltostring = xml.serialize, xml.collect, xml.text, xml.tostring
-local xmlapplylpath = xml.applylpath
-local xmlunprivatized, xmlprivatetoken, xmlprivatecodes = xml.unprivatized, xml.privatetoken, xml.privatecodes
-
-local variables = (interfaces and interfaces.variables) or { }
-
-local insertbeforevalue, insertaftervalue = utilities.tables.insertbeforevalue, utilities.tables.insertaftervalue
-
-local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming
-
-local trace_setups = false trackers.register("lxml.setups", function(v) trace_setups = v end)
-local trace_loading = false trackers.register("lxml.loading", function(v) trace_loading = v end)
-local trace_access = false trackers.register("lxml.access", function(v) trace_access = v end)
-local trace_comments = false trackers.register("lxml.comments", function(v) trace_comments = v end)
-local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
-
-local report_lxml = logs.reporter("xml","tex")
-local report_xml = logs.reporter("xml","tex")
-
-local forceraw, rawroot = false, nil
-
--- tex entities
---
--- todo: unprivatize attributes
-
-lxml.entities = lxml.entities or { }
-
-storage.register("lxml/entities",lxml.entities,"lxml.entities")
-
---~ xml.placeholders.unknown_any_entity = nil -- has to be per xml
-
-local xmlentities = xml.entities
-local texentities = lxml.entities
-local parsedentity = xml.parsedentitylpeg
-
-function lxml.registerentity(key,value)
- texentities[key] = value
- if trace_entities then
- report_xml("registering tex entity %a as %a",key,value)
- end
-end
-
-function lxml.resolvedentity(str)
- if forceraw then
- if trace_entities then
- report_xml("passing entity %a as &%s;",str,str)
- end
- context("&%s;",str)
- else
- local e = texentities[str]
- if e then
- local te = type(e)
- if te == "function" then
- if trace_entities then
- report_xml("passing entity %a using function",str)
- end
- e(str)
- elseif e then
- if trace_entities then
- report_xml("passing entity %a as %a using %a",str,e,"ctxcatcodes")
- end
- context(e)
- end
- return
- end
- local e = xmlentities[str]
- if e then
- local te = type(e)
- if te == "function" then
- e = e(str)
- end
- if e then
- if trace_entities then
- report_xml("passing entity %a as %a using %a",str,e,"notcatcodes")
- end
- contextsprint(notcatcodes,e)
- return
- end
- end
- -- resolve hex and dec, todo: escape # & etc for ctxcatcodes
- -- normally this is already solved while loading the file
- local chr, err = lpegmatch(parsedentity,str)
- if chr then
- if trace_entities then
- report_xml("passing entity %a as %a using %a",str,chr,"ctxcatcodes")
- end
- context(chr)
- elseif err then
- if trace_entities then
- report_xml("passing faulty entity %a as %a",str,err)
- end
- context(err)
- else
- local tag = upperchars(str)
- if trace_entities then
- report_xml("passing entity %a to \\xmle using tag %a",str,tag)
- end
- context.xmle(str,tag) -- we need to use our own upper
- end
- end
-end
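-
--- A small usage sketch (hypothetical entity name): couple an entity to a
--- ConTeXt command so that &dash; in the input ends up as \endash in the output.
---
--- lxml.registerentity("dash","\\endash")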
-
--- tex interface
-
-lxml.loaded = lxml.loaded or { }
-local loaded = lxml.loaded
-
--- print(contextdirective("context-mathml-directive function reduction yes "))
--- print(contextdirective("context-mathml-directive function "))
-
-xml.defaultprotocol = "tex"
-
-local finalizers = xml.finalizers
-
-finalizers.xml = finalizers.xml or { }
-finalizers.tex = finalizers.tex or { }
-
-local xmlfinalizers = finalizers.xml
-local texfinalizers = finalizers.tex
-
--- serialization with entity handling
-
-local ampersand = P("&")
-local semicolon = P(";")
-local entity = ampersand * C((1-semicolon)^1) * semicolon / lxml.resolvedentity -- context.bold
-
-local _, xmltextcapture = context.newtexthandler {
- exception = entity,
- catcodes = notcatcodes,
-}
-
-local _, xmlspacecapture = context.newtexthandler {
- endofline = context.xmlcdataobeyedline,
- emptyline = context.xmlcdataobeyedline,
- simpleline = context.xmlcdataobeyedline,
- space = context.xmlcdataobeyedspace,
- exception = entity,
- catcodes = notcatcodes,
-}
-
-local _, xmllinecapture = context.newtexthandler {
- endofline = context.xmlcdataobeyedline,
- emptyline = context.xmlcdataobeyedline,
- simpleline = context.xmlcdataobeyedline,
- exception = entity,
- catcodes = notcatcodes,
-}
-
-local _, ctxtextcapture = context.newtexthandler {
- exception = entity,
- catcodes = ctxcatcodes,
-}
-
--- cdata
-
-local toverbatim = context.newverbosehandler {
- line = context.xmlcdataobeyedline,
- space = context.xmlcdataobeyedspace,
- before = context.xmlcdatabefore,
- after = context.xmlcdataafter,
-}
-
-lxml.toverbatim = context.newverbosehandler {
- line = context.xmlcdataobeyedline,
- space = context.xmlcdataobeyedspace,
- before = context.xmlcdatabefore,
- after = context.xmlcdataafter,
- strip = true,
-}
-
--- raw flushing
-
-function lxml.startraw()
- forceraw = true
-end
-
-function lxml.stopraw()
- forceraw = false
-end
-
-function lxml.rawroot()
- return rawroot
-end
-
--- storage
-
-function lxml.store(id,root,filename)
- loaded[id] = root
- xmlsetproperty(root,"name",id)
- if filename then
- xmlsetproperty(root,"filename",filename)
- end
-end
-
-local splitter = lpeg.splitat("::")
-
-lxml.idsplitter = splitter
-
-function lxml.splitid(id)
- local d, i = lpegmatch(splitter,id)
- if d then
- return d, i
- else
- return "", id
- end
-end
-
-local function getid(id, qualified)
- if id then
- local lid = loaded[id]
- if lid then
- return lid
- elseif type(id) == "table" then
- return id
- else
- local d, i = lpegmatch(splitter,id)
- if d then
- local ld = loaded[d]
- if ld then
- local ldi = ld.index
- if ldi then
- local root = ldi[tonumber(i)]
- if root then
- if qualified then -- we need this else two args that confuse others
- return root, d
- else
- return root
- end
- elseif trace_access then
- report_lxml("%a has no index entry %a",d,i)
- end
- elseif trace_access then
- report_lxml("%a has no index",d)
- end
- elseif trace_access then
- report_lxml("%a is not loaded",d)
- end
- elseif trace_access then
- report_lxml("%a is not loaded",i)
- end
- end
- elseif trace_access then
- report_lxml("invalid id (nil)")
- end
-end
-
-lxml.id = getid -- we provide two names as locals can already use such
-lxml.getid = getid -- names and we don't want clashes
-
-function lxml.root(id)
- return loaded[id]
-end
-
--- index
-
-local nofindices = 0
-
-local function addindex(name,check_sum,force)
- local root = getid(name)
- if root and (not root.index or force) then -- weird, only called once
- local n, index, maxindex, check = 0, root.index or { }, root.maxindex or 0, root.check or { }
- local function nest(root)
- local dt = root.dt
- if not root.ix then
- maxindex = maxindex + 1
- root.ix = maxindex
- check[maxindex] = root.tg -- still needed ?
- index[maxindex] = root
- n = n + 1
- end
- if dt then
- for k=1,#dt do
- local dk = dt[k]
- if type(dk) == "table" then
- nest(dk)
- end
- end
- end
- end
- nest(root)
- nofindices = nofindices + n
- --
- if type(name) ~= "string" then
- name = "unknown"
- end
- root.index = index
- root.maxindex = maxindex
- if trace_access then
- report_lxml("indexed entries %a, found nodes %a",tostring(name),maxindex)
- end
- end
-end
-
-lxml.addindex = addindex
-
--- another cache
-
-local function lxmlapplylpath(id,pattern) -- better inline, saves call
- return xmlapplylpath(getid(id),pattern)
-end
-
-lxml.filter = lxmlapplylpath
-
-function lxml.filterlist(list,pattern)
- for s in gmatch(list,"[^, ]+") do -- we could cache a table
- xmlapplylpath(getid(s),pattern)
- end
-end
-
-function lxml.applyfunction(id,name)
- local f = xml.functions[name]
- return f and f(getid(id))
-end
-
--- rather new, indexed storage (backward refs), maybe i will merge this
-
-function lxml.checkindex(name)
- local root = getid(name)
- return (root and root.index) or 0
-end
-
-function lxml.withindex(name,n,command) -- will change as name is always there now
- local i, p = lpegmatch(splitter,n)
- if p then
- contextsprint(ctxcatcodes,"\\xmlw{",command,"}{",n,"}")
- else
- contextsprint(ctxcatcodes,"\\xmlw{",command,"}{",name,"::",n,"}")
- end
-end
-
-function lxml.getindex(name,n) -- will change as name is always there now
- local i, p = lpegmatch(splitter,n)
- if p then
- contextsprint(ctxcatcodes,n)
- else
- contextsprint(ctxcatcodes,name,"::",n)
- end
-end
-
--- loading (to be redone, no overload) .. best use different methods and
--- keep raw xml (at least as option)
-
-xml.originalload = xml.originalload or xml.load
-
-local noffiles, nofconverted = 0, 0
-
-function xml.load(filename,settings)
- noffiles, nofconverted = noffiles + 1, nofconverted + 1
- starttiming(xml)
- local ok, data = resolvers.loadbinfile(filename)
- settings = settings or { }
- settings.currentresource = filename
- local xmltable = xml.convert((ok and data) or "",settings)
- settings.currentresource = nil
- stoptiming(xml)
- return xmltable
-end
-
-local function entityconverter(id,str)
- return xmlentities[str] or xmlprivatetoken(str) or "" -- roundtrip handler
-end
-
-function lxml.convert(id,data,entities,compress,currentresource)
- local settings = { -- we're now roundtrip anyway
- unify_predefined_entities = true,
- utfize_entities = true,
- resolve_predefined_entities = true,
- resolve_entities = function(str) return entityconverter(id,str) end, -- needed for mathml
- currentresource = tostring(currentresource or id),
- }
- if compress and compress == variables.yes then
- settings.strip_cm_and_dt = true
- end
- -- if entities and entities == variables.yes then
- -- settings.utfize_entities = true
- -- -- settings.resolve_entities = function (str) return entityconverter(id,str) end
- -- end
- return xml.convert(data,settings)
-end
-
-function lxml.load(id,filename,compress,entities)
- filename = commands.preparedfile(filename) -- not commands!
- if trace_loading then
- report_lxml("loading file %a as %a",filename,id)
- end
- noffiles, nofconverted = noffiles + 1, nofconverted + 1
- -- local xmltable = xml.load(filename)
- starttiming(xml)
- local ok, data = resolvers.loadbinfile(filename)
- local xmltable = lxml.convert(id,(ok and data) or "",compress,entities,format("id: %s, file: %s",id,filename))
- stoptiming(xml)
- lxml.store(id,xmltable,filename)
- return xmltable, filename
-end
-
-function lxml.register(id,xmltable,filename)
- lxml.store(id,xmltable,filename)
- return xmltable
-end
-
-function lxml.include(id,pattern,attribute,recurse)
- starttiming(xml)
- local root = getid(id)
- xml.include(root,pattern,attribute,recurse,function(filename)
- if filename then
- filename = commands.preparedfile(filename)
- if file.dirname(filename) == "" and root.filename then
- local dn = file.dirname(root.filename)
- if dn ~= "" then
- filename = file.join(dn,filename)
- end
- end
- if trace_loading then
- report_lxml("including file %a",filename)
- end
- noffiles, nofconverted = noffiles + 1, nofconverted + 1
- return resolvers.loadtexfile(filename) or ""
- else
- return ""
- end
- end)
- stoptiming(xml)
-end
-
-function xml.getbuffer(name,compress,entities) -- we need to make sure that commands are processed
- if not name or name == "" then
- name = tex.jobname
- end
- nofconverted = nofconverted + 1
- local data = buffers.getcontent(name)
- xmltostring(lxml.convert(name,data,compress,entities,format("buffer: %s",tostring(name or "?")))) -- one buffer
-end
-
-function lxml.loadbuffer(id,name,compress,entities)
- starttiming(xml)
- nofconverted = nofconverted + 1
- local data = buffers.collectcontent(name or id) -- name can be list
- local xmltable = lxml.convert(id,data,compress,entities,format("buffer: %s",tostring(name or id or "?")))
- lxml.store(id,xmltable)
- stoptiming(xml)
- return xmltable, name or id
-end
-
-function lxml.loaddata(id,str,compress,entities)
- starttiming(xml)
- nofconverted = nofconverted + 1
- local xmltable = lxml.convert(id,str or "",compress,entities,format("id: %s",id))
- lxml.store(id,xmltable)
- stoptiming(xml)
- return xmltable, id
-end
-
-function lxml.loadregistered(id)
- return loaded[id], id
-end
-
--- e.command:
---
--- string : setup
--- true : text (no setup, content is flushed directly)
--- false : ignore
--- function : call
-
-local function tex_doctype(e,handlers)
- -- ignore
-end
-
-local function tex_comment(e,handlers)
- if trace_comments then
- report_lxml("comment %a",e.dt[1])
- end
-end
-
-local default_element_handler = xml.gethandlers("verbose").functions["@el@"]
-
-local function tex_element(e,handlers)
- local command = e.command
- if command == nil then
- default_element_handler(e,handlers)
- elseif command == true then
- -- text (no setup, flush the content directly) / so, no mkii fallback then
- handlers.serialize(e.dt,handlers)
- elseif command == false then
- -- ignore
- else
- local tc = type(command)
- if tc == "string" then
- local rootname, ix = e.name, e.ix
- if rootname then
- if not ix then
- addindex(rootname,false,true)
- ix = e.ix
- end
- -- faster than context.xmlw
- contextsprint(ctxcatcodes,"\\xmlw{",command,"}{",rootname,"::",ix,"}")
- else
- report_lxml("fatal error: no index for %a",command)
- contextsprint(ctxcatcodes,"\\xmlw{",command,"}{",ix or 0,"}")
- end
- elseif tc == "function" then
- command(e)
- end
- end
-end
-
-local pihandlers = { } xml.pihandlers = pihandlers
-
-local category = P("context-") * C((1-P("-"))^1) * P("-directive")
-local space = S(" \n\r")
-local spaces = space^0
-local class = C((1-space)^0)
-local key = class
-local value = C(P(1-(space * -1))^0)
-
-local parser = category * spaces * class * spaces * key * spaces * value
-
-pihandlers[#pihandlers+1] = function(str)
- if str then
- local a, b, c, d = lpegmatch(parser,str)
- if d then
- contextsprint(ctxcatcodes,"\\xmlcontextdirective{",a,"}{",b,"}{",c,"}{",d,"}")
- end
- end
-end
-
-local function tex_pi(e,handlers)
- local str = e.dt[1]
- for i=1,#pihandlers do
- pihandlers[i](str)
- end
-end
-
-local obeycdata = true
-
-function lxml.setcdata()
- obeycdata = true
-end
-
-function lxml.resetcdata()
- obeycdata = false
-end
-
-local function tex_cdata(e,handlers)
- if obeycdata then
- toverbatim(e.dt[1])
- end
-end
-
-local function tex_text(e)
- e = xmlunprivatized(e)
- lpegmatch(xmltextcapture,e)
-end
-
-local function ctx_text(e) -- can be just context(e) as we split there
- lpegmatch(ctxtextcapture,e)
-end
-
-local function tex_handle(...)
- contextsprint(ctxcatcodes,...) -- notcatcodes is active anyway
-end
-
-local xmltexhandler = xml.newhandlers {
- name = "tex",
- handle = tex_handle,
- functions = {
- -- ["@dc@"] = tex_document,
- ["@dt@"] = tex_doctype,
- -- ["@rt@"] = tex_root,
- ["@el@"] = tex_element,
- ["@pi@"] = tex_pi,
- ["@cm@"] = tex_comment,
- ["@cd@"] = tex_cdata,
- ["@tx@"] = tex_text,
- }
-}
-
-lxml.xmltexhandler = xmltexhandler
-
--- begin of test
-
-local function tex_space(e)
- e = xmlunprivatized(e)
- lpegmatch(xmlspacecapture,e)
-end
-
-local xmltexspacehandler = xml.newhandlers {
- name = "texspace",
- handle = tex_handle,
- functions = {
- ["@dt@"] = tex_doctype,
- ["@el@"] = tex_element,
- ["@pi@"] = tex_pi,
- ["@cm@"] = tex_comment,
- ["@cd@"] = tex_cdata,
- ["@tx@"] = tex_space,
- }
-}
-
-local function tex_line(e)
- e = xmlunprivatized(e)
- lpegmatch(xmllinecapture,e)
-end
-
-local xmltexlinehandler = xml.newhandlers {
- name = "texline",
- handle = tex_handle,
- functions = {
- ["@dt@"] = tex_doctype,
- ["@el@"] = tex_element,
- ["@pi@"] = tex_pi,
- ["@cm@"] = tex_comment,
- ["@cd@"] = tex_cdata,
- ["@tx@"] = tex_line,
- }
-}
-
-function lxml.flushspacewise(id) -- keeps spaces and lines
- id = getid(id)
- local dt = id and id.dt
- if dt then
- xmlserialize(dt,xmltexspacehandler)
- end
-end
-
-function lxml.flushlinewise(id) -- keeps lines
- id = getid(id)
- local dt = id and id.dt
- if dt then
- xmlserialize(dt,xmltexlinehandler)
- end
-end
-
--- end of test
-
-function lxml.serialize(root)
- xmlserialize(root,xmltexhandler)
-end
-
-function lxml.setaction(id,pattern,action)
- local collected = xmlapplylpath(getid(id),pattern)
- if collected then
- local nc = #collected
- if nc > 0 then
- for c=1,nc do
- collected[c].command = action
- end
- end
- end
-end
-
-local function sprint(root) -- check rawroot usage
- if root then
- local tr = type(root)
- if tr == "string" then -- can also be result of lpath
- -- rawroot = false -- ?
- root = xmlunprivatized(root)
- lpegmatch(xmltextcapture,root)
- elseif tr == "table" then
- if forceraw then
- rawroot = root
- -- contextsprint(ctxcatcodes,xmltostring(root)) -- goes wrong with % etc
- root = xmlunprivatized(xmltostring(root))
- lpegmatch(xmltextcapture,root) -- goes to toc
- else
- xmlserialize(root,xmltexhandler)
- end
- end
- end
-end
-
-local function tprint(root) -- we can move sprint inline
- local tr = type(root)
- if tr == "table" then
- local n = #root
- if n == 0 then
- -- skip
- else
- for i=1,n do
- sprint(root[i])
- end
- end
- elseif tr == "string" then
- root = xmlunprivatized(root)
- lpegmatch(xmltextcapture,root)
- end
-end
-
-local function cprint(root) -- content
- if not root then
- -- rawroot = false
- -- quit
- elseif type(root) == 'string' then
- -- rawroot = false
- root = xmlunprivatized(root)
- lpegmatch(xmltextcapture,root)
- else
- local rootdt = root.dt
- if forceraw then
- rawroot = root
- -- contextsprint(ctxcatcodes,xmltostring(rootdt or root))
- root = xmlunprivatized(xmltostring(root))
- lpegmatch(xmltextcapture,root) -- goes to toc
- else
- xmlserialize(rootdt or root,xmltexhandler)
- end
- end
-end
-
-xml.sprint = sprint local xmlsprint = sprint -- calls ct mathml -> will be replaced
-xml.tprint = tprint local xmltprint = tprint -- only used here
-xml.cprint = cprint local xmlcprint = cprint -- calls ct mathml -> will be replaced
-
--- now we can flush
-
-function lxml.main(id)
- xmlserialize(getid(id),xmltexhandler) -- the real root (@rt@)
-end
-
--- -- lines (untested)
---
--- local buffer = { }
---
--- local xmllinescapture = (
--- newline^2 / function() buffer[#buffer+1] = "" end +
--- newline / function() buffer[#buffer] = buffer[#buffer] .. " " end +
--- content / function(s) buffer[#buffer] = buffer[#buffer] .. s end
--- )^0
---
--- local xmllineshandler = table.copy(xmltexhandler)
---
--- xmllineshandler.handle = function(...) lpegmatch(xmllinescapture,concat{ ... }) end
---
--- function lines(root)
--- if not root then
--- -- rawroot = false
--- -- quit
--- elseif type(root) == 'string' then
--- -- rawroot = false
--- lpegmatch(xmllinescapture,root)
--- elseif next(root) then -- tr == 'table'
--- xmlserialize(root,xmllineshandler)
--- end
--- end
---
--- function xml.lines(root) -- used at all?
--- buffer = { "" }
--- lines(root)
--- return result
--- end
-
-local function to_text(e)
- if e.command == nil then
- local etg = e.tg
- if etg and e.special and etg ~= "@rt@" then
- e.command = false -- i.e. skip
- else
-            e.command = true -- i.e. no <self></self>
- end
- end
-end
-
-local function to_none(e)
- if e.command == nil then
- e.command = false -- i.e. skip
- end
-end
-
--- setups
-
-local setups = { }
-
-function lxml.setcommandtotext(id)
- xmlwithelements(getid(id),to_text)
-end
-
-function lxml.setcommandtonone(id)
- xmlwithelements(getid(id),to_none)
-end
-
-function lxml.installsetup(what,document,setup,where)
- document = document or "*"
- local sd = setups[document]
- if not sd then sd = { } setups[document] = sd end
- for k=1,#sd do
- if sd[k] == setup then sd[k] = nil break end
- end
- if what == 1 then
- if trace_loading then
- report_lxml("prepending setup %a for %a",setup,document)
- end
- insert(sd,1,setup)
- elseif what == 2 then
- if trace_loading then
- report_lxml("appending setup %a for %a",setup,document)
- end
- insert(sd,setup)
- elseif what == 3 then
- if trace_loading then
- report_lxml("inserting setup %a for %a before %a",setup,document,where)
- end
- insertbeforevalue(sd,setup,where)
- elseif what == 4 then
- if trace_loading then
- report_lxml("inserting setup %a for %a after %a",setup,document,where)
- end
- insertaftervalue(sd,setup,where)
- end
-end
-
-function lxml.flushsetups(id,...)
- local done = { }
- for i=1,select("#",...) do
- local document = select(i,...)
- local sd = setups[document]
- if sd then
- for k=1,#sd do
- local v= sd[k]
- if not done[v] then
- if trace_loading then
- report_lxml("applying setup %02i : %a to %a",k,v,document)
- end
- contextsprint(ctxcatcodes,"\\xmlsetup{",id,"}{",v,"}")
- done[v] = true
- end
- end
- elseif trace_loading then
- report_lxml("no setups for %a",document)
- end
- end
-end
-
-function lxml.resetsetups(document)
- if trace_loading then
- report_lxml("resetting all setups for %a",document)
- end
- setups[document] = { }
-end
-
-function lxml.removesetup(document,setup)
- local s = setups[document]
- if s then
- for i=1,#s do
- if s[i] == setup then
- if trace_loading then
- report_lxml("removing setup %a for %a",setup,document)
- end
- remove(t,i)
- break
- end
- end
- end
-end
-
-function lxml.setsetup(id,pattern,setup)
- if not setup or setup == "" or setup == "*" or setup == "-" or setup == "+" then
- local collected = xmlapplylpath(getid(id),pattern)
- if collected then
- local nc = #collected
- if nc > 0 then
- if trace_setups then
- for c=1,nc do
- local e = collected[c]
- local ix = e.ix or 0
- if setup == "-" then
- e.command = false
- report_lxml("lpath matched (a) %5i: %s = %s -> skipped",c,ix,setup)
- elseif setup == "+" then
- e.command = true
- report_lxml("lpath matched (b) %5i: %s = %s -> text",c,ix,setup)
- else
- local tg = e.tg
- if tg then -- to be sure
- e.command = tg
- local ns = e.rn or e.ns
- if ns == "" then
- report_lxml("lpath matched (c) %5i: %s = %s -> %s",c,ix,tg,tg)
- else
- report_lxml("lpath matched (d) %5i: %s = %s:%s -> %s",c,ix,ns,tg,tg)
- end
- end
- end
- end
- else
- for c=1,nc do
- local e = collected[c]
- if setup == "-" then
- e.command = false
- elseif setup == "+" then
- e.command = true
- else
- e.command = e.tg
- end
- end
- end
- elseif trace_setups then
- report_lxml("%s lpath matches for pattern: %s","zero",pattern)
- end
- elseif trace_setups then
- report_lxml("%s lpath matches for pattern: %s","no",pattern)
- end
- else
- local a, b = match(setup,"^(.+:)([%*%-])$")
- if a and b then
- local collected = xmlapplylpath(getid(id),pattern)
- if collected then
- local nc = #collected
- if nc > 0 then
- if trace_setups then
- for c=1,nc do
- local e = collected[c]
- local ns, tg, ix = e.rn or e.ns, e.tg, e.ix or 0
- if b == "-" then
- e.command = false
- if ns == "" then
- report_lxml("lpath matched (e) %5i: %s = %s -> skipped",c,ix,tg)
- else
- report_lxml("lpath matched (f) %5i: %s = %s:%s -> skipped",c,ix,ns,tg)
- end
- elseif b == "+" then
- e.command = true
- if ns == "" then
- report_lxml("lpath matched (g) %5i: %s = %s -> text",c,ix,tg)
- else
- report_lxml("lpath matched (h) %5i: %s = %s:%s -> text",c,ix,ns,tg)
- end
- else
- e.command = a .. tg
- if ns == "" then
- report_lxml("lpath matched (i) %5i: %s = %s -> %s",c,ix,tg,e.command)
- else
- report_lxml("lpath matched (j) %5i: %s = %s:%s -> %s",c,ix,ns,tg,e.command)
- end
- end
- end
- else
- for c=1,nc do
- local e = collected[c]
- if b == "-" then
- e.command = false
- elseif b == "+" then
- e.command = true
- else
- e.command = a .. e.tg
- end
- end
- end
- elseif trace_setups then
- report_lxml("%s lpath matches for pattern: %s","zero",pattern)
- end
- elseif trace_setups then
- report_lxml("%s lpath matches for pattern: %s","no",pattern)
- end
- else
- local collected = xmlapplylpath(getid(id),pattern)
- if collected then
- local nc = #collected
- if nc > 0 then
- if trace_setups then
- for c=1,nc do
- local e = collected[c]
- e.command = setup
- local ns, tg, ix = e.rn or e.ns, e.tg, e.ix or 0
- if ns == "" then
- report_lxml("lpath matched (k) %5i: %s = %s -> %s",c,ix,tg,setup)
- else
- report_lxml("lpath matched (l) %5i: %s = %s:%s -> %s",c,ix,ns,tg,setup)
- end
- end
- else
- for c=1,nc do
- collected[c].command = setup
- end
- end
- elseif trace_setups then
- report_lxml("%s lpath matches for pattern: %s","zero",pattern)
- end
- elseif trace_setups then
- report_lxml("%s lpath matches for pattern: %s","no",pattern)
- end
- end
- end
-end
-
--- finalizers
-
-local function first(collected)
- if collected and #collected > 0 then
- xmlsprint(collected[1])
- end
-end
-
-local function last(collected)
- if collected then
- local nc = #collected
- if nc > 0 then
- xmlsprint(collected[nc])
- end
- end
-end
-
-local function all(collected)
- if collected then
- local nc = #collected
- if nc > 0 then
- for c=1,nc do
- xmlsprint(collected[c])
- end
- end
- end
-end
-
-local function reverse(collected)
- if collected then
- local nc = #collected
- if nc >0 then
- for c=nc,1,-1 do
- xmlsprint(collected[c])
- end
- end
- end
-end
-
-local function count(collected)
- contextsprint(ctxcatcodes,(collected and #collected) or 0) -- why ctxcatcodes
-end
-
-local function position(collected,n)
- -- todo: if not n then == match
- if collected then
- local nc = #collected
- if nc > 0 then
- n = tonumber(n) or 0
- if n < 0 then
- n = nc + n + 1
- end
- if n > 0 then
- local cn = collected[n]
- if cn then
- xmlsprint(cn)
- return
- end
- end
- end
- end
-end
-
-local function match(collected) -- is match in preceding collected, never change, see bibxml
- local m = collected and collected[1]
- contextsprint(ctxcatcodes,m and m.mi or 0) -- why ctxcatcodes
-end
-
-local function index(collected,n)
- if collected then
- local nc = #collected
- if nc > 0 then
- n = tonumber(n) or 0
- if n < 0 then
- n = nc + n + 1 -- brrr
- end
- if n > 0 then
- local cn = collected[n]
- if cn then
- contextsprint(ctxcatcodes,cn.ni or 0) -- why ctxcatcodes
- return
- end
- end
- end
- end
- contextsprint(ctxcatcodes,0) -- why ctxcatcodes
-end
-
-local function command(collected,cmd,otherwise)
- local n = collected and #collected
- if n and n > 0 then
- local wildcard = find(cmd,"%*")
- for c=1,n do -- maybe optimize for n=1
- local e = collected[c]
- local ix = e.ix
- local name = e.name
- if not ix then
- lxml.addindex(name,false,true)
- ix = e.ix
- end
- if wildcard then
- contextsprint(ctxcatcodes,"\\xmlw{",(gsub(cmd,"%*",e.tg)),"}{",name,"::",ix,"}")
- else
- contextsprint(ctxcatcodes,"\\xmlw{",cmd,"}{",name,"::",ix,"}")
- end
- end
- elseif otherwise then
- contextsprint(ctxcatcodes,"\\xmlw{",otherwise,"}{#1}")
- end
-end
-
-local function attribute(collected,a,default)
- if collected and #collected > 0 then
- local at = collected[1].at
- local str = (at and at[a]) or default
- if str and str ~= "" then
- contextsprint(notcatcodes,str)
- end
- elseif default then
- contextsprint(notcatcodes,default)
- end
-end
-
-local function chainattribute(collected,arguments) -- todo: optional levels
- if collected and #collected > 0 then
- local e = collected[1]
- while e do
- local at = e.at
- if at then
- local a = at[arguments]
- if a then
- contextsprint(notcatcodes,a)
- end
- else
- break -- error
- end
- e = e.__p__
- end
- end
-end
-
-local function text(collected)
- if collected then
- local nc = #collected
- if nc == 0 then
- -- nothing
- elseif nc == 1 then -- hardly any gain so this will go
- cprint(collected[1])
- else for c=1,nc do
- cprint(collected[c])
- end end
- end
-end
-
-local function ctxtext(collected)
- if collected then
- local nc = #collected
- if nc > 0 then
- for c=1,nc do
- contextsprint(ctxcatcodes,collected[c].dt)
- end
- end
- end
-end
-
-local function stripped(collected) -- tricky as we strip in place
- if collected then
- local nc = #collected
- if nc > 0 then
- for c=1,nc do
- cprint(xml.stripelement(collected[c]))
- end
- end
- end
-end
-
-local function lower(collected)
- if not collected then
- local nc = #collected
- if nc > 0 then
- for c=1,nc do
- contextsprint(ctxcatcodes,lowerchars(collected[c].dt[1]))
- end
- end
- end
-end
-
-local function upper(collected)
- if collected then
- local nc = #collected
- if nc > 0 then
- for c=1,nc do
- contextsprint(ctxcatcodes,upperchars(collected[c].dt[1]))
- end
- end
- end
-end
-
-local function number(collected)
- local nc = collected and #collected or 0
- local n = 0
- if nc > 0 then
- for c=1,nc do
- n = n + tonumber(collected[c].dt[1] or 0)
- end
- end
- contextsprint(ctxcatcodes,n)
-end
-
-local function concatrange(collected,start,stop,separator,lastseparator,textonly) -- test this on mml
- if collected then
- local nofcollected = #collected
- if nofcollected > 0 then
- local separator = separator or ""
- local lastseparator = lastseparator or separator or ""
- start, stop = (start == "" and 1) or tonumber(start) or 1, (stop == "" and nofcollected) or tonumber(stop) or nofcollected
- if stop < 0 then stop = nofcollected + stop end -- -1 == last-1
- for i=start,stop do
- if textonly then
- xmlcprint(collected[i])
- else
- xmlsprint(collected[i])
- end
- if i == nofcollected then
- -- nothing
- elseif i == nofcollected-1 and lastseparator ~= "" then
- contextsprint(ctxcatcodes,lastseparator)
- elseif separator ~= "" then
- contextsprint(ctxcatcodes,separator)
- end
- end
- end
- end
-end
-
-local function concat(collected,separator,lastseparator,textonly) -- test this on mml
- concatrange(collected,false,false,separator,lastseparator,textonly)
-end
-
-texfinalizers.first = first
-texfinalizers.last = last
-texfinalizers.all = all
-texfinalizers.reverse = reverse
-texfinalizers.count = count
-texfinalizers.command = command
-texfinalizers.attribute = attribute
-texfinalizers.text = text
-texfinalizers.stripped = stripped
-texfinalizers.lower = lower
-texfinalizers.upper = upper
-texfinalizers.ctxtext = ctxtext
-texfinalizers.context = ctxtext
-texfinalizers.position = position
-texfinalizers.match = match
-texfinalizers.index = index
-texfinalizers.concat = concat
-texfinalizers.concatrange = concatrange
-texfinalizers.chainattribute = chainattribute
-texfinalizers.default = all -- !!
-
-local concat = table.concat
-
-function texfinalizers.tag(collected,n)
- if collected then
- local nc = #collected
- if nc > 0 then
- n = tonumber(n) or 0
- local c
- if n == 0 then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[nc-n+1]
- end
- if c then
- contextsprint(ctxcatcodes,c.tg)
- end
- end
- end
-end
-
-function texfinalizers.name(collected,n)
- if collected then
- local nc = #collected
- if nc > 0 then
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[nc-n+1]
- end
- if c then
- if c.ns == "" then
- contextsprint(ctxcatcodes,c.tg)
- else
- contextsprint(ctxcatcodes,c.ns,":",c.tg)
- end
- end
- end
- end
-end
-
-function texfinalizers.tags(collected,nonamespace)
- if collected then
- local nc = #collected
- if nc > 0 then
- for c=1,nc do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- if nonamespace or ns == "" then
- contextsprint(ctxcatcodes,tg)
- else
- contextsprint(ctxcatcodes,ns,":",tg)
- end
- end
- end
- end
-end
-
---
-
-local function verbatim(id,before,after)
- local root = getid(id)
- if root then
- if before then contextsprint(ctxcatcodes,before,"[",root.tg or "?","]") end
- lxml.toverbatim(xmltostring(root.dt))
---~ lxml.toverbatim(xml.totext(root.dt))
- if after then contextsprint(ctxcatcodes,after) end
- end
-end
-
-function lxml.inlineverbatim(id)
- verbatim(id,"\\startxmlinlineverbatim","\\stopxmlinlineverbatim")
-end
-
-function lxml.displayverbatim(id)
- verbatim(id,"\\startxmldisplayverbatim","\\stopxmldisplayverbatim")
-end
-
-lxml.verbatim = verbatim
-
--- helpers
-
-function lxml.first(id,pattern)
- local collected = xmlapplylpath(getid(id),pattern)
- if collected then
- first(collected)
- end
-end
-
-function lxml.last(id,pattern)
- local collected = xmlapplylpath(getid(id),pattern)
- if collected then
- last(collected)
- end
-end
-
-function lxml.all(id,pattern)
- local collected = xmlapplylpath(getid(id),pattern)
- if collected then
- all(collected)
- end
-end
-
-function lxml.count(id,pattern)
- -- always needs to produce a result so no test here
- count(xmlapplylpath(getid(id),pattern))
-end
-
-function lxml.attribute(id,pattern,a,default)
- local collected = xmlapplylpath(getid(id),pattern)
- if collected then
- attribute(collected,a,default)
- end
-end
-
-function lxml.raw(id,pattern) -- the content, untouched by commands
- local collected = (pattern and xmlapplylpath(getid(id),pattern)) or getid(id)
- if collected and #collected > 0 then
- contextsprint(notcatcodes,xmltostring(collected[1].dt))
- end
-end
-
-function lxml.context(id,pattern) -- the content, untouched by commands
- if pattern then
- local collected = xmlapplylpath(getid(id),pattern) or getid(id)
- if collected and #collected > 0 then
- contextsprint(ctxcatcodes,collected[1].dt)
- end
- else
- local collected = getid(id)
- if collected then
- local dt = collected.dt
- if #dt > 0 then
- ctx_text(dt[1])
- end
- end
- end
-end
-
-function lxml.text(id,pattern)
- local collected = (pattern and xmlapplylpath(getid(id),pattern)) or getid(id)
- if collected and #collected > 0 then
- text(collected)
- end
-end
-
-lxml.content = text
-
-function lxml.position(id,pattern,n)
- position(xmlapplylpath(getid(id),pattern),n)
-end
-
-function lxml.chainattribute(id,pattern,a,default)
- chainattribute(xmlapplylpath(getid(id),pattern),a,default)
-end
-
-function lxml.concatrange(id,pattern,start,stop,separator,lastseparator,textonly) -- test this on mml
- concatrange(xmlapplylpath(getid(id),pattern),start,stop,separator,lastseparator,textonly)
-end
-
-function lxml.concat(id,pattern,separator,lastseparator,textonly)
- concatrange(xmlapplylpath(getid(id),pattern),false,false,separator,lastseparator,textonly)
-end
-
-function lxml.element(id,n)
- position(xmlapplylpath(getid(id),"/*"),n)
-end
-
-lxml.index = lxml.position
-
-function lxml.pos(id)
- local root = getid(id)
- contextsprint(ctxcatcodes,(root and root.ni) or 0)
-end
-
-function lxml.att(id,a,default)
- local root = getid(id)
- if root then
- local at = root.at
- local str = (at and at[a]) or default
- if str and str ~= "" then
- contextsprint(notcatcodes,str)
- end
- elseif default then
- contextsprint(notcatcodes,default)
- end
-end
-
-function lxml.name(id) -- or remapped name? -> lxml.info, combine
- local r = getid(id)
- local ns = r.rn or r.ns or ""
- if ns ~= "" then
- contextsprint(ctxcatcodes,ns,":",r.tg)
- else
- contextsprint(ctxcatcodes,r.tg)
- end
-end
-
-function lxml.match(id) -- or remapped name? -> lxml.info, combine
- contextsprint(ctxcatcodes,getid(id).mi or 0)
-end
-
-function lxml.tag(id) -- tag vs name -> also in l-xml tag->name
- contextsprint(ctxcatcodes,getid(id).tg or "")
-end
-
-function lxml.namespace(id) -- or remapped name?
- local root = getid(id)
- contextsprint(ctxcatcodes,root.rn or root.ns or "")
-end
-
-function lxml.flush(id)
- id = getid(id)
- local dt = id and id.dt
- if dt then
- xmlsprint(dt)
- end
-end
-
-function lxml.snippet(id,i)
- local e = getid(id)
- if e then
- local edt = e.dt
- if edt then
- xmlsprint(edt[i])
- end
- end
-end
-
-function lxml.direct(id)
- xmlsprint(getid(id))
-end
-
-function lxml.command(id,pattern,cmd)
- local i, p = getid(id,true)
- local collected = xmlapplylpath(getid(i),pattern)
- if collected then
- local nc = #collected
- if nc > 0 then
- local rootname = p or i.name
- for c=1,nc do
- local e = collected[c]
- local ix = e.ix
- if not ix then
- addindex(rootname,false,true)
- ix = e.ix
- end
- contextsprint(ctxcatcodes,"\\xmlw{",cmd,"}{",rootname,"::",ix,"}")
- end
- end
- end
-end
-
--- loops
-
-function lxml.collected(id,pattern,reverse)
- return xmlcollected(getid(id),pattern,reverse)
-end
-
-function lxml.elements(id,pattern,reverse)
- return xmlelements(getid(id),pattern,reverse)
-end
-
--- obscure ones
-
-lxml.info = lxml.name
-
--- testers
-
-local found, empty = xml.found, xml.empty
-
-local doif, doifnot, doifelse = commands.doif, commands.doifnot, commands.doifelse
-
-function lxml.doif (id,pattern) doif (found(getid(id),pattern)) end
-function lxml.doifnot (id,pattern) doifnot (found(getid(id),pattern)) end
-function lxml.doifelse (id,pattern) doifelse(found(getid(id),pattern)) end
-function lxml.doiftext (id,pattern) doif (not empty(getid(id),pattern)) end
-function lxml.doifnottext (id,pattern) doifnot (not empty(getid(id),pattern)) end
-function lxml.doifelsetext (id,pattern) doifelse(not empty(getid(id),pattern)) end
-
--- special case: "*" and "" -> self else lpath lookup
-
---~ function lxml.doifelseempty(id,pattern) doifelse(isempty(getid(id),pattern ~= "" and pattern ~= nil)) end -- not yet done, pattern
-
--- status info
-
-statistics.register("xml load time", function()
- if noffiles > 0 or nofconverted > 0 then
- return format("%s seconds, %s files, %s converted", statistics.elapsedtime(xml), noffiles, nofconverted)
- else
- return nil
- end
-end)
-
-statistics.register("lxml preparation time", function()
- local calls, cached = xml.lpathcalls(), xml.lpathcached()
- if calls > 0 or cached > 0 then
- return format("%s seconds, %s nodes, %s lpath calls, %s cached calls",
- statistics.elapsedtime(lxml), nofindices, calls, cached)
- else
- return nil
- end
-end)
-
-statistics.register("lxml lpath profile", function()
- local p = xml.profiled
- if p and next(p) then
- local s = table.sortedkeys(p)
- local tested, matched, finalized = 0, 0, 0
- logs.pushtarget("logfile")
- logs.writer("\nbegin of lxml profile\n")
- logs.writer("\n tested matched finalized pattern\n\n")
- for i=1,#s do
- local pattern = s[i]
- local pp = p[pattern]
- local t, m, f = pp.tested, pp.matched, pp.finalized
- tested, matched, finalized = tested + t, matched + m, finalized + f
- logs.writer(format("%9i %9i %9i %s",t,m,f,pattern))
- end
- logs.writer("\nend of lxml profile\n")
- logs.poptarget()
- return format("%s patterns, %s tested, %s matched, %s finalized (see log for details)",#s,tested,matched,finalized)
- else
- return nil
- end
-end)
-
--- misc
-
-function lxml.nonspace(id,pattern) -- slow, todo loop
- xmltprint(xmlcollect(getid(id),pattern,true))
-end
-
-function lxml.strip(id,pattern,nolines,anywhere)
- xml.strip(getid(id),pattern,nolines,anywhere)
-end
-
-function lxml.stripped(id,pattern,nolines)
- local str = xmltext(getid(id),pattern) or ""
- str = gsub(str,"^%s*(.-)%s*$","%1")
- if nolines then
- str = gsub(str,"%s+"," ")
- end
- xmlsprint(str)
-end
-
-function lxml.delete(id,pattern)
- xml.delete(getid(id),pattern)
-end
-
-lxml.obsolete = { }
-
-lxml.get_id = getid lxml.obsolete.get_id = getid
-
--- goodies:
-
-function texfinalizers.lettered(collected)
- if collected then
- local nc = #collected
- if nc > 0 then
- for c=1,nc do
- contextsprint(ctxcatcodes,lettered(collected[c].dt[1]))
- end
- end
- end
-end
-
---~ function texfinalizers.apply(collected,what) -- to be tested
---~ if collected then
---~ for c=1,#collected do
---~ contextsprint(ctxcatcodes,what(collected[c].dt[1]))
---~ end
---~ end
---~ end
-
-function lxml.toparameters(id)
- local e = getid(id)
- if e then
- local a = e.at
- if a and next(a) then
- local setups, s = { }, 0
- for k, v in next, a do
- s = s + 1
- setups[s] = k .. "=" .. v
- end
- setups = concat(setups,",")
- -- tracing
- context(setups)
- end
- end
-end
-
-local template = '<?xml version="1.0" ?>\n\n<!-- %s -->\n\n%s'
-
-function lxml.tofile(id,pattern,filename,comment)
- local collected = xmlapplylpath(getid(id),pattern)
- if collected then
- io.savedata(filename,format(template,comment or "exported fragment",tostring(collected[1])))
- else
- os.remove(filename) -- get rid of old content
- end
-end
-
-texfinalizers.upperall = xmlfinalizers.upperall
-texfinalizers.lowerall = xmlfinalizers.lowerall
+if not modules then modules = { } end modules ['lxml-tex'] = {
+ version = 1.001,
+ comment = "companion to lxml-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- Because we split and resolve entities, we use the direct printing
+-- interface and not the context one. If we ever do that, there will
+-- be a cldf-xml helper library.
+
+local utfchar = utf.char
+local concat, insert, remove = table.concat, table.insert, table.remove
+local format, sub, gsub, find, gmatch, match = string.format, string.sub, string.gsub, string.find, string.gmatch, string.match
+local type, next, tonumber, tostring, select = type, next, tonumber, tostring, select
+local lpegmatch = lpeg.match
+local P, S, C, Cc = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc
+
+local tex, xml = tex, xml
+local lowerchars, upperchars, lettered = characters.lower, characters.upper, characters.lettered
+
+lxml = lxml or { }
+local lxml = lxml
+
+local catcodenumbers = catcodes.numbers
+local ctxcatcodes = catcodenumbers.ctxcatcodes -- todo: use different method
+local notcatcodes = catcodenumbers.notcatcodes -- todo: use different method
+
+local context = context
+local contextsprint = context.sprint -- with catcodes (here we use fast variants, but with option for tracing)
+
+local xmlelements, xmlcollected, xmlsetproperty = xml.elements, xml.collected, xml.setproperty
+local xmlwithelements = xml.withelements
+local xmlserialize, xmlcollect, xmltext, xmltostring = xml.serialize, xml.collect, xml.text, xml.tostring
+local xmlapplylpath = xml.applylpath
+local xmlunprivatized, xmlprivatetoken, xmlprivatecodes = xml.unprivatized, xml.privatetoken, xml.privatecodes
+
+local variables = (interfaces and interfaces.variables) or { }
+
+local insertbeforevalue, insertaftervalue = utilities.tables.insertbeforevalue, utilities.tables.insertaftervalue
+
+local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming
+
+local trace_setups = false trackers.register("lxml.setups", function(v) trace_setups = v end)
+local trace_loading = false trackers.register("lxml.loading", function(v) trace_loading = v end)
+local trace_access = false trackers.register("lxml.access", function(v) trace_access = v end)
+local trace_comments = false trackers.register("lxml.comments", function(v) trace_comments = v end)
+local trace_entities = false trackers.register("xml.entities", function(v) trace_entities = v end)
+
+local report_lxml = logs.reporter("xml","tex")
+local report_xml = logs.reporter("xml","tex")
+
+local forceraw, rawroot = false, nil
+
+-- tex entities
+--
+-- todo: unprivatize attributes
+
+lxml.entities = lxml.entities or { }
+
+storage.register("lxml/entities",lxml.entities,"lxml.entities")
+
+--~ xml.placeholders.unknown_any_entity = nil -- has to be per xml
+
+local xmlentities = xml.entities
+local texentities = lxml.entities
+local parsedentity = xml.parsedentitylpeg
+
+function lxml.registerentity(key,value)
+ texentities[key] = value
+ if trace_entities then
+ report_xml("registering tex entity %a as %a",key,value)
+ end
+end
+
+function lxml.resolvedentity(str)
+ if forceraw then
+ if trace_entities then
+ report_xml("passing entity %a as &%s;",str,str)
+ end
+ context("&%s;",str)
+ else
+ local e = texentities[str]
+ if e then
+ local te = type(e)
+ if te == "function" then
+ if trace_entities then
+ report_xml("passing entity %a using function",str)
+ end
+ e(str)
+ elseif e then
+ if trace_entities then
+ report_xml("passing entity %a as %a using %a",str,e,"ctxcatcodes")
+ end
+ context(e)
+ end
+ return
+ end
+ local e = xmlentities[str]
+ if e then
+ local te = type(e)
+ if te == "function" then
+ e = e(str)
+ end
+ if e then
+ if trace_entities then
+ report_xml("passing entity %a as %a using %a",str,e,"notcatcodes")
+ end
+ contextsprint(notcatcodes,e)
+ return
+ end
+ end
+ -- resolve hex and dec, todo: escape # & etc for ctxcatcodes
+ -- normally this is already solved while loading the file
+ local chr, err = lpegmatch(parsedentity,str)
+ if chr then
+ if trace_entities then
+ report_xml("passing entity %a as %a using %a",str,chr,"ctxcatcodes")
+ end
+ context(chr)
+ elseif err then
+ if trace_entities then
+ report_xml("passing faulty entity %a as %a",str,err)
+ end
+ context(err)
+ else
+ local tag = upperchars(str)
+ if trace_entities then
+ report_xml("passing entity %a to \\xmle using tag %a",str,tag)
+ end
+ context.xmle(str,tag) -- we need to use our own upper
+ end
+ end
+end
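+
+-- A small usage sketch (the entity name "dash" and its expansion are invented
+-- here): an entity registered this way takes precedence over the generic xml
+-- entity table, which in turn takes precedence over the numeric entity parser
+-- and the \xmle fallback.
+--
+-- lxml.registerentity("dash","--")
+-- lxml.registerentity("tex",function() context("TeX") end)
+--
+-- lxml.resolvedentity("dash") -- typesets -- instead of ending up in \xmle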
+
+-- tex interface
+
+lxml.loaded = lxml.loaded or { }
+local loaded = lxml.loaded
+
+-- print(contextdirective("context-mathml-directive function reduction yes "))
+-- print(contextdirective("context-mathml-directive function "))
+
+xml.defaultprotocol = "tex"
+
+local finalizers = xml.finalizers
+
+finalizers.xml = finalizers.xml or { }
+finalizers.tex = finalizers.tex or { }
+
+local xmlfinalizers = finalizers.xml
+local texfinalizers = finalizers.tex
+
+-- serialization with entity handling
+
+local ampersand = P("&")
+local semicolon = P(";")
+local entity = ampersand * C((1-semicolon)^1) * semicolon / lxml.resolvedentity -- context.bold
+
+local _, xmltextcapture = context.newtexthandler {
+ exception = entity,
+ catcodes = notcatcodes,
+}
+
+local _, xmlspacecapture = context.newtexthandler {
+ endofline = context.xmlcdataobeyedline,
+ emptyline = context.xmlcdataobeyedline,
+ simpleline = context.xmlcdataobeyedline,
+ space = context.xmlcdataobeyedspace,
+ exception = entity,
+ catcodes = notcatcodes,
+}
+
+local _, xmllinecapture = context.newtexthandler {
+ endofline = context.xmlcdataobeyedline,
+ emptyline = context.xmlcdataobeyedline,
+ simpleline = context.xmlcdataobeyedline,
+ exception = entity,
+ catcodes = notcatcodes,
+}
+
+local _, ctxtextcapture = context.newtexthandler {
+ exception = entity,
+ catcodes = ctxcatcodes,
+}
+
+-- cdata
+
+local toverbatim = context.newverbosehandler {
+ line = context.xmlcdataobeyedline,
+ space = context.xmlcdataobeyedspace,
+ before = context.xmlcdatabefore,
+ after = context.xmlcdataafter,
+}
+
+lxml.toverbatim = context.newverbosehandler {
+ line = context.xmlcdataobeyedline,
+ space = context.xmlcdataobeyedspace,
+ before = context.xmlcdatabefore,
+ after = context.xmlcdataafter,
+ strip = true,
+}
+
+-- raw flushing
+
+function lxml.startraw()
+ forceraw = true
+end
+
+function lxml.stopraw()
+ forceraw = false
+end
+
+function lxml.rawroot()
+ return rawroot
+end
+
+-- storage
+
+function lxml.store(id,root,filename)
+ loaded[id] = root
+ xmlsetproperty(root,"name",id)
+ if filename then
+ xmlsetproperty(root,"filename",filename)
+ end
+end
+
+local splitter = lpeg.splitat("::")
+
+lxml.idsplitter = splitter
+
+function lxml.splitid(id)
+ local d, i = lpegmatch(splitter,id)
+ if d then
+ return d, i
+ else
+ return "", id
+ end
+end
+
+local function getid(id, qualified)
+ if id then
+ local lid = loaded[id]
+ if lid then
+ return lid
+ elseif type(id) == "table" then
+ return id
+ else
+ local d, i = lpegmatch(splitter,id)
+ if d then
+ local ld = loaded[d]
+ if ld then
+ local ldi = ld.index
+ if ldi then
+ local root = ldi[tonumber(i)]
+ if root then
+ if qualified then -- we need this else two args that confuse others
+ return root, d
+ else
+ return root
+ end
+ elseif trace_access then
+ report_lxml("%a has no index entry %a",d,i)
+ end
+ elseif trace_access then
+ report_lxml("%a has no index",d)
+ end
+ elseif trace_access then
+ report_lxml("%a is not loaded",d)
+ end
+ elseif trace_access then
+ report_lxml("%a is not loaded",i)
+ end
+ end
+ elseif trace_access then
+ report_lxml("invalid id (nil)")
+ end
+end
+
+lxml.id = getid -- we provide two names as locals can already use such
+lxml.getid = getid -- names and we don't want clashes
+
+function lxml.root(id)
+ return loaded[id]
+end
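+
+-- A sketch of the id forms resolved above (the name "mydoc" is only an
+-- example): a name registered with lxml.store, a qualified "name::index"
+-- reference into the index built below, or a node table that is passed
+-- through as is.
+--
+-- lxml.getid("mydoc")     -- the stored root
+-- lxml.getid("mydoc::12") -- node 12 of that tree (needs the index from addindex)
+-- lxml.getid(somenode)    -- a node table is returned unchanged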
+
+-- index
+
+local nofindices = 0
+
+local function addindex(name,check_sum,force)
+ local root = getid(name)
+ if root and (not root.index or force) then -- weird, only called once
+ local n, index, maxindex, check = 0, root.index or { }, root.maxindex or 0, root.check or { }
+ local function nest(root)
+ local dt = root.dt
+ if not root.ix then
+ maxindex = maxindex + 1
+ root.ix = maxindex
+ check[maxindex] = root.tg -- still needed ?
+ index[maxindex] = root
+ n = n + 1
+ end
+ if dt then
+ for k=1,#dt do
+ local dk = dt[k]
+ if type(dk) == "table" then
+ nest(dk)
+ end
+ end
+ end
+ end
+ nest(root)
+ nofindices = nofindices + n
+ --
+ if type(name) ~= "string" then
+ name = "unknown"
+ end
+ root.index = index
+ root.maxindex = maxindex
+ if trace_access then
+ report_lxml("indexed entries %a, found nodes %a",tostring(name),maxindex)
+ end
+ end
+end
+
+lxml.addindex = addindex
+
+-- another cache
+
+local function lxmlapplylpath(id,pattern) -- better inline, saves call
+ return xmlapplylpath(getid(id),pattern)
+end
+
+lxml.filter = lxmlapplylpath
+
+function lxml.filterlist(list,pattern)
+ for s in gmatch(list,"[^, ]+") do -- we could cache a table
+ xmlapplylpath(getid(s),pattern)
+ end
+end
+
+function lxml.applyfunction(id,name)
+ local f = xml.functions[name]
+ return f and f(getid(id))
+end
+
+-- rather new, indexed storage (backward refs), maybe i will merge this
+
+function lxml.checkindex(name)
+ local root = getid(name)
+ return (root and root.index) or 0
+end
+
+function lxml.withindex(name,n,command) -- will change as name is always there now
+ local i, p = lpegmatch(splitter,n)
+ if p then
+ contextsprint(ctxcatcodes,"\\xmlw{",command,"}{",n,"}")
+ else
+ contextsprint(ctxcatcodes,"\\xmlw{",command,"}{",name,"::",n,"}")
+ end
+end
+
+function lxml.getindex(name,n) -- will change as name is always there now
+ local i, p = lpegmatch(splitter,n)
+ if p then
+ contextsprint(ctxcatcodes,n)
+ else
+ contextsprint(ctxcatcodes,name,"::",n)
+ end
+end
+
+-- loading (to be redone, no overload) .. best use different methods and
+-- keep raw xml (at least as option)
+
+xml.originalload = xml.originalload or xml.load
+
+local noffiles, nofconverted = 0, 0
+
+function xml.load(filename,settings)
+ noffiles, nofconverted = noffiles + 1, nofconverted + 1
+ starttiming(xml)
+ local ok, data = resolvers.loadbinfile(filename)
+ settings = settings or { }
+ settings.currentresource = filename
+ local xmltable = xml.convert((ok and data) or "",settings)
+ settings.currentresource = nil
+ stoptiming(xml)
+ return xmltable
+end
+
+local function entityconverter(id,str)
+ return xmlentities[str] or xmlprivatetoken(str) or "" -- roundtrip handler
+end
+
+function lxml.convert(id,data,compress,entities,currentresource) -- entities is currently unused
+ local settings = { -- we're now roundtrip anyway
+ unify_predefined_entities = true,
+ utfize_entities = true,
+ resolve_predefined_entities = true,
+ resolve_entities = function(str) return entityconverter(id,str) end, -- needed for mathml
+ currentresource = tostring(currentresource or id),
+ }
+ if compress and compress == variables.yes then
+ settings.strip_cm_and_dt = true
+ end
+ -- if entities and entities == variables.yes then
+ -- settings.utfize_entities = true
+ -- -- settings.resolve_entities = function (str) return entityconverter(id,str) end
+ -- end
+ return xml.convert(data,settings)
+end
+
+function lxml.load(id,filename,compress,entities)
+ filename = commands.preparedfile(filename) -- not commands!
+ if trace_loading then
+ report_lxml("loading file %a as %a",filename,id)
+ end
+ noffiles, nofconverted = noffiles + 1, nofconverted + 1
+ -- local xmltable = xml.load(filename)
+ starttiming(xml)
+ local ok, data = resolvers.loadbinfile(filename)
+ local xmltable = lxml.convert(id,(ok and data) or "",compress,entities,format("id: %s, file: %s",id,filename))
+ stoptiming(xml)
+ lxml.store(id,xmltable,filename)
+ return xmltable, filename
+end
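+
+-- Typical use inside a ConTeXt run (the symbolic name, file name and pattern
+-- are examples only): load a file once under a name and flush all or part of
+-- it later.
+--
+-- lxml.load("mydoc","somefile.xml")
+-- lxml.main("mydoc")                    -- serialize the whole tree
+-- lxml.first("mydoc","/document/title") -- or only a part of it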
+
+function lxml.register(id,xmltable,filename)
+ lxml.store(id,xmltable,filename)
+ return xmltable
+end
+
+function lxml.include(id,pattern,attribute,recurse)
+ starttiming(xml)
+ local root = getid(id)
+ xml.include(root,pattern,attribute,recurse,function(filename)
+ if filename then
+ filename = commands.preparedfile(filename)
+ if file.dirname(filename) == "" and root.filename then
+ local dn = file.dirname(root.filename)
+ if dn ~= "" then
+ filename = file.join(dn,filename)
+ end
+ end
+ if trace_loading then
+ report_lxml("including file %a",filename)
+ end
+ noffiles, nofconverted = noffiles + 1, nofconverted + 1
+ return resolvers.loadtexfile(filename) or ""
+ else
+ return ""
+ end
+ end)
+ stoptiming(xml)
+end
+
+function xml.getbuffer(name,compress,entities) -- we need to make sure that commands are processed
+ if not name or name == "" then
+ name = tex.jobname
+ end
+ nofconverted = nofconverted + 1
+ local data = buffers.getcontent(name)
+ xmltostring(lxml.convert(name,data,compress,entities,format("buffer: %s",tostring(name or "?")))) -- one buffer
+end
+
+function lxml.loadbuffer(id,name,compress,entities)
+ starttiming(xml)
+ nofconverted = nofconverted + 1
+ local data = buffers.collectcontent(name or id) -- name can be list
+ local xmltable = lxml.convert(id,data,compress,entities,format("buffer: %s",tostring(name or id or "?")))
+ lxml.store(id,xmltable)
+ stoptiming(xml)
+ return xmltable, name or id
+end
+
+function lxml.loaddata(id,str,compress,entities)
+ starttiming(xml)
+ nofconverted = nofconverted + 1
+ local xmltable = lxml.convert(id,str or "",compress,entities,format("id: %s",id))
+ lxml.store(id,xmltable)
+ stoptiming(xml)
+ return xmltable, id
+end
+
+function lxml.loadregistered(id)
+ return loaded[id], id
+end
+
+-- e.command:
+--
+-- string : setup
+-- true : text (no <self></self>)
+-- false : ignore
+-- function : call
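+--
+-- For instance (the setup name is illustrative), a handler or lxml.setsetup
+-- below can set:
+--
+--   e.command = "xml:title"         -- flushed as \xmlw{xml:title}{name::index}
+--   e.command = true                -- only the content (e.dt) is serialized
+--   e.command = false               -- the element is skipped
+--   e.command = function(e) ... end -- the function is called with the element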
+
+local function tex_doctype(e,handlers)
+ -- ignore
+end
+
+local function tex_comment(e,handlers)
+ if trace_comments then
+ report_lxml("comment %a",e.dt[1])
+ end
+end
+
+local default_element_handler = xml.gethandlers("verbose").functions["@el@"]
+
+local function tex_element(e,handlers)
+ local command = e.command
+ if command == nil then
+ default_element_handler(e,handlers)
+ elseif command == true then
+        -- text (no <self></self>) / so, no mkii fallback then
+ handlers.serialize(e.dt,handlers)
+ elseif command == false then
+ -- ignore
+ else
+ local tc = type(command)
+ if tc == "string" then
+ local rootname, ix = e.name, e.ix
+ if rootname then
+ if not ix then
+ addindex(rootname,false,true)
+ ix = e.ix
+ end
+ -- faster than context.xmlw
+ contextsprint(ctxcatcodes,"\\xmlw{",command,"}{",rootname,"::",ix,"}")
+ else
+ report_lxml("fatal error: no index for %a",command)
+ contextsprint(ctxcatcodes,"\\xmlw{",command,"}{",ix or 0,"}")
+ end
+ elseif tc == "function" then
+ command(e)
+ end
+ end
+end
+
+local pihandlers = { } xml.pihandlers = pihandlers
+
+local category = P("context-") * C((1-P("-"))^1) * P("-directive")
+local space = S(" \n\r")
+local spaces = space^0
+local class = C((1-space)^0)
+local key = class
+local value = C(P(1-(space * -1))^0)
+
+local parser = category * spaces * class * spaces * key * spaces * value
+
+pihandlers[#pihandlers+1] = function(str)
+ if str then
+ local a, b, c, d = lpegmatch(parser,str)
+ if d then
+ contextsprint(ctxcatcodes,"\\xmlcontextdirective{",a,"}{",b,"}{",c,"}{",d,"}")
+ end
+ end
+end
+
+local function tex_pi(e,handlers)
+ local str = e.dt[1]
+ for i=1,#pihandlers do
+ pihandlers[i](str)
+ end
+end
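+
+-- A worked example of the parser above (the values are taken from the
+-- commented print near the top of this file): a processing instruction like
+--
+--   <?context-mathml-directive function reduction yes ?>
+--
+-- reaches tex_pi as "context-mathml-directive function reduction yes " and
+-- is flushed as \xmlcontextdirective{mathml}{function}{reduction}{yes}.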
+
+local obeycdata = true
+
+function lxml.setcdata()
+ obeycdata = true
+end
+
+function lxml.resetcdata()
+ obeycdata = false
+end
+
+local function tex_cdata(e,handlers)
+ if obeycdata then
+ toverbatim(e.dt[1])
+ end
+end
+
+local function tex_text(e)
+ e = xmlunprivatized(e)
+ lpegmatch(xmltextcapture,e)
+end
+
+local function ctx_text(e) -- can be just context(e) as we split there
+ lpegmatch(ctxtextcapture,e)
+end
+
+local function tex_handle(...)
+ contextsprint(ctxcatcodes,...) -- notcatcodes is active anyway
+end
+
+local xmltexhandler = xml.newhandlers {
+ name = "tex",
+ handle = tex_handle,
+ functions = {
+ -- ["@dc@"] = tex_document,
+ ["@dt@"] = tex_doctype,
+ -- ["@rt@"] = tex_root,
+ ["@el@"] = tex_element,
+ ["@pi@"] = tex_pi,
+ ["@cm@"] = tex_comment,
+ ["@cd@"] = tex_cdata,
+ ["@tx@"] = tex_text,
+ }
+}
+
+lxml.xmltexhandler = xmltexhandler
+
+-- begin of test
+
+local function tex_space(e)
+ e = xmlunprivatized(e)
+ lpegmatch(xmlspacecapture,e)
+end
+
+local xmltexspacehandler = xml.newhandlers {
+ name = "texspace",
+ handle = tex_handle,
+ functions = {
+ ["@dt@"] = tex_doctype,
+ ["@el@"] = tex_element,
+ ["@pi@"] = tex_pi,
+ ["@cm@"] = tex_comment,
+ ["@cd@"] = tex_cdata,
+ ["@tx@"] = tex_space,
+ }
+}
+
+local function tex_line(e)
+ e = xmlunprivatized(e)
+ lpegmatch(xmllinecapture,e)
+end
+
+local xmltexlinehandler = xml.newhandlers {
+ name = "texline",
+ handle = tex_handle,
+ functions = {
+ ["@dt@"] = tex_doctype,
+ ["@el@"] = tex_element,
+ ["@pi@"] = tex_pi,
+ ["@cm@"] = tex_comment,
+ ["@cd@"] = tex_cdata,
+ ["@tx@"] = tex_line,
+ }
+}
+
+function lxml.flushspacewise(id) -- keeps spaces and lines
+ id = getid(id)
+ local dt = id and id.dt
+ if dt then
+ xmlserialize(dt,xmltexspacehandler)
+ end
+end
+
+function lxml.flushlinewise(id) -- keeps lines
+ id = getid(id)
+ local dt = id and id.dt
+ if dt then
+ xmlserialize(dt,xmltexlinehandler)
+ end
+end
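+
+-- Usage sketch (the identifier is an example): both expect a stored root or
+-- an indexed node and flush it with spacing kept significant.
+--
+-- lxml.flushspacewise("mydoc") -- obey spaces and newlines
+-- lxml.flushlinewise("mydoc")  -- obey newlines only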
+
+-- end of test
+
+function lxml.serialize(root)
+ xmlserialize(root,xmltexhandler)
+end
+
+function lxml.setaction(id,pattern,action)
+ local collected = xmlapplylpath(getid(id),pattern)
+ if collected then
+ local nc = #collected
+ if nc > 0 then
+ for c=1,nc do
+ collected[c].command = action
+ end
+ end
+ end
+end
+
+local function sprint(root) -- check rawroot usage
+ if root then
+ local tr = type(root)
+ if tr == "string" then -- can also be result of lpath
+ -- rawroot = false -- ?
+ root = xmlunprivatized(root)
+ lpegmatch(xmltextcapture,root)
+ elseif tr == "table" then
+ if forceraw then
+ rawroot = root
+ -- contextsprint(ctxcatcodes,xmltostring(root)) -- goes wrong with % etc
+ root = xmlunprivatized(xmltostring(root))
+ lpegmatch(xmltextcapture,root) -- goes to toc
+ else
+ xmlserialize(root,xmltexhandler)
+ end
+ end
+ end
+end
+
+local function tprint(root) -- we can move sprint inline
+ local tr = type(root)
+ if tr == "table" then
+ local n = #root
+ if n == 0 then
+ -- skip
+ else
+ for i=1,n do
+ sprint(root[i])
+ end
+ end
+ elseif tr == "string" then
+ root = xmlunprivatized(root)
+ lpegmatch(xmltextcapture,root)
+ end
+end
+
+local function cprint(root) -- content
+ if not root then
+ -- rawroot = false
+ -- quit
+ elseif type(root) == 'string' then
+ -- rawroot = false
+ root = xmlunprivatized(root)
+ lpegmatch(xmltextcapture,root)
+ else
+ local rootdt = root.dt
+ if forceraw then
+ rawroot = root
+ -- contextsprint(ctxcatcodes,xmltostring(rootdt or root))
+ root = xmlunprivatized(xmltostring(root))
+ lpegmatch(xmltextcapture,root) -- goes to toc
+ else
+ xmlserialize(rootdt or root,xmltexhandler)
+ end
+ end
+end
+
+xml.sprint = sprint local xmlsprint = sprint -- calls ct mathml -> will be replaced
+xml.tprint = tprint local xmltprint = tprint -- only used here
+xml.cprint = cprint local xmlcprint = cprint -- calls ct mathml -> will be replaced
+
+-- now we can flush
+
+function lxml.main(id)
+ xmlserialize(getid(id),xmltexhandler) -- the real root (@rt@)
+end
+
+-- -- lines (untested)
+--
+-- local buffer = { }
+--
+-- local xmllinescapture = (
+-- newline^2 / function() buffer[#buffer+1] = "" end +
+-- newline / function() buffer[#buffer] = buffer[#buffer] .. " " end +
+-- content / function(s) buffer[#buffer] = buffer[#buffer] .. s end
+-- )^0
+--
+-- local xmllineshandler = table.copy(xmltexhandler)
+--
+-- xmllineshandler.handle = function(...) lpegmatch(xmllinescapture,concat{ ... }) end
+--
+-- function lines(root)
+-- if not root then
+-- -- rawroot = false
+-- -- quit
+-- elseif type(root) == 'string' then
+-- -- rawroot = false
+-- lpegmatch(xmllinescapture,root)
+-- elseif next(root) then -- tr == 'table'
+-- xmlserialize(root,xmllineshandler)
+-- end
+-- end
+--
+-- function xml.lines(root) -- used at all?
+-- buffer = { "" }
+-- lines(root)
+-- return result
+-- end
+
+local function to_text(e)
+ if e.command == nil then
+ local etg = e.tg
+ if etg and e.special and etg ~= "@rt@" then
+ e.command = false -- i.e. skip
+ else
+            e.command = true -- i.e. no <self></self>
+ end
+ end
+end
+
+local function to_none(e)
+ if e.command == nil then
+ e.command = false -- i.e. skip
+ end
+end
+
+-- setups
+
+local setups = { }
+
+function lxml.setcommandtotext(id)
+ xmlwithelements(getid(id),to_text)
+end
+
+function lxml.setcommandtonone(id)
+ xmlwithelements(getid(id),to_none)
+end
+
+function lxml.installsetup(what,document,setup,where)
+ document = document or "*"
+ local sd = setups[document]
+ if not sd then sd = { } setups[document] = sd end
+ for k=1,#sd do
+ if sd[k] == setup then sd[k] = nil break end
+ end
+ if what == 1 then
+ if trace_loading then
+ report_lxml("prepending setup %a for %a",setup,document)
+ end
+ insert(sd,1,setup)
+ elseif what == 2 then
+ if trace_loading then
+ report_lxml("appending setup %a for %a",setup,document)
+ end
+ insert(sd,setup)
+ elseif what == 3 then
+ if trace_loading then
+ report_lxml("inserting setup %a for %a before %a",setup,document,where)
+ end
+ insertbeforevalue(sd,setup,where)
+ elseif what == 4 then
+ if trace_loading then
+ report_lxml("inserting setup %a for %a after %a",setup,document,where)
+ end
+ insertaftervalue(sd,setup,where)
+ end
+end
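+
+-- A sketch of the four insertion modes (the document key "*" and the setup
+-- names are made up):
+--
+-- lxml.installsetup(1,"*","xml:one")            -- prepend
+-- lxml.installsetup(2,"*","xml:two")            -- append
+-- lxml.installsetup(3,"*","xml:pre","xml:two")  -- insert before "xml:two"
+-- lxml.installsetup(4,"*","xml:post","xml:two") -- insert after "xml:two"
+--
+-- lxml.flushsetups("mydoc","*") -- expands each entry to \xmlsetup{mydoc}{...}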
+
+function lxml.flushsetups(id,...)
+ local done = { }
+ for i=1,select("#",...) do
+ local document = select(i,...)
+ local sd = setups[document]
+ if sd then
+ for k=1,#sd do
+                local v = sd[k]
+ if not done[v] then
+ if trace_loading then
+ report_lxml("applying setup %02i : %a to %a",k,v,document)
+ end
+ contextsprint(ctxcatcodes,"\\xmlsetup{",id,"}{",v,"}")
+ done[v] = true
+ end
+ end
+ elseif trace_loading then
+ report_lxml("no setups for %a",document)
+ end
+ end
+end
+
+function lxml.resetsetups(document)
+ if trace_loading then
+ report_lxml("resetting all setups for %a",document)
+ end
+ setups[document] = { }
+end
+
+function lxml.removesetup(document,setup)
+ local s = setups[document]
+ if s then
+ for i=1,#s do
+ if s[i] == setup then
+ if trace_loading then
+ report_lxml("removing setup %a for %a",setup,document)
+ end
+                remove(s,i)
+ break
+ end
+ end
+ end
+end
+
+function lxml.setsetup(id,pattern,setup)
+ if not setup or setup == "" or setup == "*" or setup == "-" or setup == "+" then
+ local collected = xmlapplylpath(getid(id),pattern)
+ if collected then
+ local nc = #collected
+ if nc > 0 then
+ if trace_setups then
+ for c=1,nc do
+ local e = collected[c]
+ local ix = e.ix or 0
+ if setup == "-" then
+ e.command = false
+ report_lxml("lpath matched (a) %5i: %s = %s -> skipped",c,ix,setup)
+ elseif setup == "+" then
+ e.command = true
+ report_lxml("lpath matched (b) %5i: %s = %s -> text",c,ix,setup)
+ else
+ local tg = e.tg
+ if tg then -- to be sure
+ e.command = tg
+ local ns = e.rn or e.ns
+ if ns == "" then
+ report_lxml("lpath matched (c) %5i: %s = %s -> %s",c,ix,tg,tg)
+ else
+ report_lxml("lpath matched (d) %5i: %s = %s:%s -> %s",c,ix,ns,tg,tg)
+ end
+ end
+ end
+ end
+ else
+ for c=1,nc do
+ local e = collected[c]
+ if setup == "-" then
+ e.command = false
+ elseif setup == "+" then
+ e.command = true
+ else
+ e.command = e.tg
+ end
+ end
+ end
+ elseif trace_setups then
+ report_lxml("%s lpath matches for pattern: %s","zero",pattern)
+ end
+ elseif trace_setups then
+ report_lxml("%s lpath matches for pattern: %s","no",pattern)
+ end
+ else
+ local a, b = match(setup,"^(.+:)([%*%-])$")
+ if a and b then
+ local collected = xmlapplylpath(getid(id),pattern)
+ if collected then
+ local nc = #collected
+ if nc > 0 then
+ if trace_setups then
+ for c=1,nc do
+ local e = collected[c]
+ local ns, tg, ix = e.rn or e.ns, e.tg, e.ix or 0
+ if b == "-" then
+ e.command = false
+ if ns == "" then
+ report_lxml("lpath matched (e) %5i: %s = %s -> skipped",c,ix,tg)
+ else
+ report_lxml("lpath matched (f) %5i: %s = %s:%s -> skipped",c,ix,ns,tg)
+ end
+ elseif b == "+" then
+ e.command = true
+ if ns == "" then
+ report_lxml("lpath matched (g) %5i: %s = %s -> text",c,ix,tg)
+ else
+ report_lxml("lpath matched (h) %5i: %s = %s:%s -> text",c,ix,ns,tg)
+ end
+ else
+ e.command = a .. tg
+ if ns == "" then
+ report_lxml("lpath matched (i) %5i: %s = %s -> %s",c,ix,tg,e.command)
+ else
+ report_lxml("lpath matched (j) %5i: %s = %s:%s -> %s",c,ix,ns,tg,e.command)
+ end
+ end
+ end
+ else
+ for c=1,nc do
+ local e = collected[c]
+ if b == "-" then
+ e.command = false
+ elseif b == "+" then
+ e.command = true
+ else
+ e.command = a .. e.tg
+ end
+ end
+ end
+ elseif trace_setups then
+ report_lxml("%s lpath matches for pattern: %s","zero",pattern)
+ end
+ elseif trace_setups then
+ report_lxml("%s lpath matches for pattern: %s","no",pattern)
+ end
+ else
+ local collected = xmlapplylpath(getid(id),pattern)
+ if collected then
+ local nc = #collected
+ if nc > 0 then
+ if trace_setups then
+ for c=1,nc do
+ local e = collected[c]
+ e.command = setup
+ local ns, tg, ix = e.rn or e.ns, e.tg, e.ix or 0
+ if ns == "" then
+ report_lxml("lpath matched (k) %5i: %s = %s -> %s",c,ix,tg,setup)
+ else
+ report_lxml("lpath matched (l) %5i: %s = %s:%s -> %s",c,ix,ns,tg,setup)
+ end
+ end
+ else
+ for c=1,nc do
+ collected[c].command = setup
+ end
+ end
+ elseif trace_setups then
+ report_lxml("%s lpath matches for pattern: %s","zero",pattern)
+ end
+ elseif trace_setups then
+ report_lxml("%s lpath matches for pattern: %s","no",pattern)
+ end
+ end
+ end
+end
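+
+-- Usage sketch (document, element and setup names are invented); the third
+-- argument decides what happens to the matched elements:
+--
+-- lxml.setsetup("mydoc","title|subject","xml:title") -- a named setup per match
+-- lxml.setsetup("mydoc","p","*")                     -- setup named after the tag
+-- lxml.setsetup("mydoc","metadata","-")              -- skip these elements
+-- lxml.setsetup("mydoc","b|i","+")                   -- flush the text only
+-- lxml.setsetup("mydoc","*","xml:*")                 -- prefix plus tag, e.g. xml:title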
+
+-- finalizers
+
+local function first(collected)
+ if collected and #collected > 0 then
+ xmlsprint(collected[1])
+ end
+end
+
+local function last(collected)
+ if collected then
+ local nc = #collected
+ if nc > 0 then
+ xmlsprint(collected[nc])
+ end
+ end
+end
+
+local function all(collected)
+ if collected then
+ local nc = #collected
+ if nc > 0 then
+ for c=1,nc do
+ xmlsprint(collected[c])
+ end
+ end
+ end
+end
+
+local function reverse(collected)
+ if collected then
+ local nc = #collected
+        if nc > 0 then
+ for c=nc,1,-1 do
+ xmlsprint(collected[c])
+ end
+ end
+ end
+end
+
+local function count(collected)
+ contextsprint(ctxcatcodes,(collected and #collected) or 0) -- why ctxcatcodes
+end
+
+local function position(collected,n)
+ -- todo: if not n then == match
+ if collected then
+ local nc = #collected
+ if nc > 0 then
+ n = tonumber(n) or 0
+ if n < 0 then
+ n = nc + n + 1
+ end
+ if n > 0 then
+ local cn = collected[n]
+ if cn then
+ xmlsprint(cn)
+ return
+ end
+ end
+ end
+ end
+end
+
+local function match(collected) -- is match in preceding collected, never change, see bibxml
+ local m = collected and collected[1]
+ contextsprint(ctxcatcodes,m and m.mi or 0) -- why ctxcatcodes
+end
+
+local function index(collected,n)
+ if collected then
+ local nc = #collected
+ if nc > 0 then
+ n = tonumber(n) or 0
+ if n < 0 then
+ n = nc + n + 1 -- brrr
+ end
+ if n > 0 then
+ local cn = collected[n]
+ if cn then
+ contextsprint(ctxcatcodes,cn.ni or 0) -- why ctxcatcodes
+ return
+ end
+ end
+ end
+ end
+ contextsprint(ctxcatcodes,0) -- why ctxcatcodes
+end
+
+local function command(collected,cmd,otherwise)
+ local n = collected and #collected
+ if n and n > 0 then
+ local wildcard = find(cmd,"%*")
+ for c=1,n do -- maybe optimize for n=1
+ local e = collected[c]
+ local ix = e.ix
+ local name = e.name
+ if not ix then
+ lxml.addindex(name,false,true)
+ ix = e.ix
+ end
+ if wildcard then
+ contextsprint(ctxcatcodes,"\\xmlw{",(gsub(cmd,"%*",e.tg)),"}{",name,"::",ix,"}")
+ else
+ contextsprint(ctxcatcodes,"\\xmlw{",cmd,"}{",name,"::",ix,"}")
+ end
+ end
+ elseif otherwise then
+ contextsprint(ctxcatcodes,"\\xmlw{",otherwise,"}{#1}")
+ end
+end
+
+local function attribute(collected,a,default)
+ if collected and #collected > 0 then
+ local at = collected[1].at
+ local str = (at and at[a]) or default
+ if str and str ~= "" then
+ contextsprint(notcatcodes,str)
+ end
+ elseif default then
+ contextsprint(notcatcodes,default)
+ end
+end
+
+local function chainattribute(collected,arguments) -- todo: optional levels
+ if collected and #collected > 0 then
+ local e = collected[1]
+ while e do
+ local at = e.at
+ if at then
+ local a = at[arguments]
+ if a then
+ contextsprint(notcatcodes,a)
+ end
+ else
+ break -- error
+ end
+ e = e.__p__
+ end
+ end
+end
+
+local function text(collected)
+ if collected then
+ local nc = #collected
+ if nc == 0 then
+ -- nothing
+ elseif nc == 1 then -- hardly any gain so this will go
+ cprint(collected[1])
+ else for c=1,nc do
+ cprint(collected[c])
+ end end
+ end
+end
+
+local function ctxtext(collected)
+ if collected then
+ local nc = #collected
+ if nc > 0 then
+ for c=1,nc do
+ contextsprint(ctxcatcodes,collected[c].dt)
+ end
+ end
+ end
+end
+
+local function stripped(collected) -- tricky as we strip in place
+ if collected then
+ local nc = #collected
+ if nc > 0 then
+ for c=1,nc do
+ cprint(xml.stripelement(collected[c]))
+ end
+ end
+ end
+end
+
+local function lower(collected)
+    if collected then
+ local nc = #collected
+ if nc > 0 then
+ for c=1,nc do
+ contextsprint(ctxcatcodes,lowerchars(collected[c].dt[1]))
+ end
+ end
+ end
+end
+
+local function upper(collected)
+ if collected then
+ local nc = #collected
+ if nc > 0 then
+ for c=1,nc do
+ contextsprint(ctxcatcodes,upperchars(collected[c].dt[1]))
+ end
+ end
+ end
+end
+
+local function number(collected)
+ local nc = collected and #collected or 0
+ local n = 0
+ if nc > 0 then
+ for c=1,nc do
+ n = n + tonumber(collected[c].dt[1] or 0)
+ end
+ end
+ contextsprint(ctxcatcodes,n)
+end
+
+local function concatrange(collected,start,stop,separator,lastseparator,textonly) -- test this on mml
+ if collected then
+ local nofcollected = #collected
+ if nofcollected > 0 then
+ local separator = separator or ""
+ local lastseparator = lastseparator or separator or ""
+ start, stop = (start == "" and 1) or tonumber(start) or 1, (stop == "" and nofcollected) or tonumber(stop) or nofcollected
+ if stop < 0 then stop = nofcollected + stop end -- -1 == last-1
+ for i=start,stop do
+ if textonly then
+ xmlcprint(collected[i])
+ else
+ xmlsprint(collected[i])
+ end
+ if i == nofcollected then
+ -- nothing
+ elseif i == nofcollected-1 and lastseparator ~= "" then
+ contextsprint(ctxcatcodes,lastseparator)
+ elseif separator ~= "" then
+ contextsprint(ctxcatcodes,separator)
+ end
+ end
+ end
+ end
+end
+
+local function concat(collected,separator,lastseparator,textonly) -- test this on mml
+ concatrange(collected,false,false,separator,lastseparator,textonly)
+end
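+
+-- For example, with three matched elements whose texts are "a", "b" and "c"
+-- (invented data) and textonly set:
+--
+-- concat(collected,", "," and ",true)      -- flushes: a, b and c
+-- concatrange(collected,2,3,", ",nil,true) -- flushes: b, c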
+
+texfinalizers.first = first
+texfinalizers.last = last
+texfinalizers.all = all
+texfinalizers.reverse = reverse
+texfinalizers.count = count
+texfinalizers.command = command
+texfinalizers.attribute = attribute
+texfinalizers.text = text
+texfinalizers.stripped = stripped
+texfinalizers.lower = lower
+texfinalizers.upper = upper
+texfinalizers.ctxtext = ctxtext
+texfinalizers.context = ctxtext
+texfinalizers.position = position
+texfinalizers.match = match
+texfinalizers.index = index
+texfinalizers.concat = concat
+texfinalizers.concatrange = concatrange
+texfinalizers.chainattribute = chainattribute
+texfinalizers.default = all -- !!
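+
+-- These finalizers are triggered from the tail of an lpath expression (the
+-- document name and paths below are just examples):
+--
+-- lxml.filter("mydoc","/document/title/text()")
+-- lxml.filter("mydoc","/document/section/all()")
+-- lxml.filter("mydoc","/document/section/count()")
+-- lxml.filter("mydoc","/document/section/command(xml:section)")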
+
+local concat = table.concat
+
+function texfinalizers.tag(collected,n)
+ if collected then
+ local nc = #collected
+ if nc > 0 then
+ n = tonumber(n) or 0
+ local c
+ if n == 0 then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ if c then
+ contextsprint(ctxcatcodes,c.tg)
+ end
+ end
+ end
+end
+
+function texfinalizers.name(collected,n)
+ if collected then
+ local nc = #collected
+ if nc > 0 then
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ if c then
+ if c.ns == "" then
+ contextsprint(ctxcatcodes,c.tg)
+ else
+ contextsprint(ctxcatcodes,c.ns,":",c.tg)
+ end
+ end
+ end
+ end
+end
+
+function texfinalizers.tags(collected,nonamespace)
+ if collected then
+ local nc = #collected
+ if nc > 0 then
+ for c=1,nc do
+ local e = collected[c]
+ local ns, tg = e.ns, e.tg
+ if nonamespace or ns == "" then
+ contextsprint(ctxcatcodes,tg)
+ else
+ contextsprint(ctxcatcodes,ns,":",tg)
+ end
+ end
+ end
+ end
+end
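+
+-- Hedged sketch, not part of the original file (element names and paths are
+-- invented): these finalizers print tag information for a collected node set,
+-- for instance when called directly at the Lua end:
+--
+-- texfinalizers.name(xmlapplylpath(getid("somexml"),"/section"))  -- "ns:section" or "section"
+-- texfinalizers.tags(xmlapplylpath(getid("somexml"),"/*"),true)   -- all tags, namespaces dropped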
+
+--
+
+local function verbatim(id,before,after)
+ local root = getid(id)
+ if root then
+ if before then contextsprint(ctxcatcodes,before,"[",root.tg or "?","]") end
+ lxml.toverbatim(xmltostring(root.dt))
+--~ lxml.toverbatim(xml.totext(root.dt))
+ if after then contextsprint(ctxcatcodes,after) end
+ end
+end
+
+function lxml.inlineverbatim(id)
+ verbatim(id,"\\startxmlinlineverbatim","\\stopxmlinlineverbatim")
+end
+
+function lxml.displayverbatim(id)
+ verbatim(id,"\\startxmldisplayverbatim","\\stopxmldisplayverbatim")
+end
+
+lxml.verbatim = verbatim
+
+-- helpers
+
+function lxml.first(id,pattern)
+ local collected = xmlapplylpath(getid(id),pattern)
+ if collected then
+ first(collected)
+ end
+end
+
+function lxml.last(id,pattern)
+ local collected = xmlapplylpath(getid(id),pattern)
+ if collected then
+ last(collected)
+ end
+end
+
+function lxml.all(id,pattern)
+ local collected = xmlapplylpath(getid(id),pattern)
+ if collected then
+ all(collected)
+ end
+end
+
+function lxml.count(id,pattern)
+ -- always needs to produce a result so no test here
+ count(xmlapplylpath(getid(id),pattern))
+end
+
+function lxml.attribute(id,pattern,a,default)
+ local collected = xmlapplylpath(getid(id),pattern)
+ if collected then
+ attribute(collected,a,default)
+ end
+end
+
+function lxml.raw(id,pattern) -- the content, untouched by commands
+ local collected = (pattern and xmlapplylpath(getid(id),pattern)) or getid(id)
+ if collected and #collected > 0 then
+ contextsprint(notcatcodes,xmltostring(collected[1].dt))
+ end
+end
+
+function lxml.context(id,pattern) -- the content, untouched by commands
+ if pattern then
+ local collected = xmlapplylpath(getid(id),pattern) or getid(id)
+ if collected and #collected > 0 then
+ contextsprint(ctxcatcodes,collected[1].dt)
+ end
+ else
+ local collected = getid(id)
+ if collected then
+ local dt = collected.dt
+ if #dt > 0 then
+ ctx_text(dt[1])
+ end
+ end
+ end
+end
+
+function lxml.text(id,pattern)
+ local collected = (pattern and xmlapplylpath(getid(id),pattern)) or getid(id)
+ if collected and #collected > 0 then
+ text(collected)
+ end
+end
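+
+-- Hedged sketch, not part of the original file (id and path are invented):
+--
+-- lxml.raw ("somexml","/title") -- serialized content, printed with notcatcodes
+-- lxml.text("somexml","/title") -- content flushed through the text finalizer above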
+
+lxml.content = text
+
+function lxml.position(id,pattern,n)
+ position(xmlapplylpath(getid(id),pattern),n)
+end
+
+function lxml.chainattribute(id,pattern,a,default)
+ chainattribute(xmlapplylpath(getid(id),pattern),a,default)
+end
+
+function lxml.concatrange(id,pattern,start,stop,separator,lastseparator,textonly) -- test this on mml
+ concatrange(xmlapplylpath(getid(id),pattern),start,stop,separator,lastseparator,textonly)
+end
+
+function lxml.concat(id,pattern,separator,lastseparator,textonly)
+ concatrange(xmlapplylpath(getid(id),pattern),false,false,separator,lastseparator,textonly)
+end
+
+function lxml.element(id,n)
+ position(xmlapplylpath(getid(id),"/*"),n)
+end
+
+lxml.index = lxml.position
+
+function lxml.pos(id)
+ local root = getid(id)
+ contextsprint(ctxcatcodes,(root and root.ni) or 0)
+end
+
+function lxml.att(id,a,default)
+ local root = getid(id)
+ if root then
+ local at = root.at
+ local str = (at and at[a]) or default
+ if str and str ~= "" then
+ contextsprint(notcatcodes,str)
+ end
+ elseif default then
+ contextsprint(notcatcodes,default)
+ end
+end
+
+function lxml.name(id) -- or remapped name? -> lxml.info, combine
+ local r = getid(id)
+ local ns = r.rn or r.ns or ""
+ if ns ~= "" then
+ contextsprint(ctxcatcodes,ns,":",r.tg)
+ else
+ contextsprint(ctxcatcodes,r.tg)
+ end
+end
+
+function lxml.match(id) -- or remapped name? -> lxml.info, combine
+ contextsprint(ctxcatcodes,getid(id).mi or 0)
+end
+
+function lxml.tag(id) -- tag vs name -> also in l-xml tag->name
+ contextsprint(ctxcatcodes,getid(id).tg or "")
+end
+
+function lxml.namespace(id) -- or remapped name?
+ local root = getid(id)
+ contextsprint(ctxcatcodes,root.rn or root.ns or "")
+end
+
+function lxml.flush(id)
+ id = getid(id)
+ local dt = id and id.dt
+ if dt then
+ xmlsprint(dt)
+ end
+end
+
+function lxml.snippet(id,i)
+ local e = getid(id)
+ if e then
+ local edt = e.dt
+ if edt then
+ xmlsprint(edt[i])
+ end
+ end
+end
+
+function lxml.direct(id)
+ xmlsprint(getid(id))
+end
+
+function lxml.command(id,pattern,cmd)
+ local i, p = getid(id,true)
+ local collected = xmlapplylpath(getid(i),pattern)
+ if collected then
+ local nc = #collected
+ if nc > 0 then
+ local rootname = p or i.name
+ for c=1,nc do
+ local e = collected[c]
+ local ix = e.ix
+ if not ix then
+ addindex(rootname,false,true)
+ ix = e.ix
+ end
+ contextsprint(ctxcatcodes,"\\xmlw{",cmd,"}{",rootname,"::",ix,"}")
+ end
+ end
+ end
+end
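+
+-- Hedged sketch, not part of the original file (id, path and setup name are
+-- invented): lxml.command does not expand anything itself, it writes one \xmlw
+-- indirection per match, so a call like
+--
+-- lxml.command("somexml","/chapter/title","xml:title")
+--
+-- puts something like \xmlw{xml:title}{somexml::12} into the TeX stream, where
+-- the number is the index that addindex assigned to the matched element.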
+
+-- loops
+
+function lxml.collected(id,pattern,reverse)
+ return xmlcollected(getid(id),pattern,reverse)
+end
+
+function lxml.elements(id,pattern,reverse)
+ return xmlelements(getid(id),pattern,reverse)
+end
+
+-- obscure ones
+
+lxml.info = lxml.name
+
+-- testers
+
+local found, empty = xml.found, xml.empty
+
+local doif, doifnot, doifelse = commands.doif, commands.doifnot, commands.doifelse
+
+function lxml.doif (id,pattern) doif (found(getid(id),pattern)) end
+function lxml.doifnot (id,pattern) doifnot (found(getid(id),pattern)) end
+function lxml.doifelse (id,pattern) doifelse(found(getid(id),pattern)) end
+function lxml.doiftext (id,pattern) doif (not empty(getid(id),pattern)) end
+function lxml.doifnottext (id,pattern) doifnot (not empty(getid(id),pattern)) end
+function lxml.doifelsetext (id,pattern) doifelse(not empty(getid(id),pattern)) end
+
+-- special case: "*" and "" -> self else lpath lookup
+
+--~ function lxml.doifelseempty(id,pattern) doifelse(isempty(getid(id),pattern ~= "" and pattern ~= nil)) end -- not yet done, pattern
+
+-- status info
+
+statistics.register("xml load time", function()
+ if noffiles > 0 or nofconverted > 0 then
+ return format("%s seconds, %s files, %s converted", statistics.elapsedtime(xml), noffiles, nofconverted)
+ else
+ return nil
+ end
+end)
+
+statistics.register("lxml preparation time", function()
+ local calls, cached = xml.lpathcalls(), xml.lpathcached()
+ if calls > 0 or cached > 0 then
+ return format("%s seconds, %s nodes, %s lpath calls, %s cached calls",
+ statistics.elapsedtime(lxml), nofindices, calls, cached)
+ else
+ return nil
+ end
+end)
+
+statistics.register("lxml lpath profile", function()
+ local p = xml.profiled
+ if p and next(p) then
+ local s = table.sortedkeys(p)
+ local tested, matched, finalized = 0, 0, 0
+ logs.pushtarget("logfile")
+ logs.writer("\nbegin of lxml profile\n")
+ logs.writer("\n tested matched finalized pattern\n\n")
+ for i=1,#s do
+ local pattern = s[i]
+ local pp = p[pattern]
+ local t, m, f = pp.tested, pp.matched, pp.finalized
+ tested, matched, finalized = tested + t, matched + m, finalized + f
+ logs.writer(format("%9i %9i %9i %s",t,m,f,pattern))
+ end
+ logs.writer("\nend of lxml profile\n")
+ logs.poptarget()
+ return format("%s patterns, %s tested, %s matched, %s finalized (see log for details)",#s,tested,matched,finalized)
+ else
+ return nil
+ end
+end)
+
+-- misc
+
+function lxml.nonspace(id,pattern) -- slow, todo loop
+ xmltprint(xmlcollect(getid(id),pattern,true))
+end
+
+function lxml.strip(id,pattern,nolines,anywhere)
+ xml.strip(getid(id),pattern,nolines,anywhere)
+end
+
+function lxml.stripped(id,pattern,nolines)
+ local str = xmltext(getid(id),pattern) or ""
+ str = gsub(str,"^%s*(.-)%s*$","%1")
+ if nolines then
+ str = gsub(str,"%s+"," ")
+ end
+ xmlsprint(str)
+end
+
+function lxml.delete(id,pattern)
+ xml.delete(getid(id),pattern)
+end
+
+lxml.obsolete = { }
+
+lxml.get_id = getid lxml.obsolete.get_id = getid
+
+-- goodies:
+
+function texfinalizers.lettered(collected)
+ if collected then
+ local nc = #collected
+ if nc > 0 then
+ for c=1,nc do
+ contextsprint(ctxcatcodes,lettered(collected[c].dt[1]))
+ end
+ end
+ end
+end
+
+--~ function texfinalizers.apply(collected,what) -- to be tested
+--~ if collected then
+--~ for c=1,#collected do
+--~ contextsprint(ctxcatcodes,what(collected[c].dt[1]))
+--~ end
+--~ end
+--~ end
+
+function lxml.toparameters(id)
+ local e = getid(id)
+ if e then
+ local a = e.at
+ if a and next(a) then
+ local setups, s = { }, 0
+ for k, v in next, a do
+ s = s + 1
+ setups[s] = k .. "=" .. v
+ end
+ setups = concat(setups,",")
+ -- tracing
+ context(setups)
+ end
+ end
+end
+
+local template = '<?xml version="1.0" ?>\n\n<!-- %s -->\n\n%s'
+
+function lxml.tofile(id,pattern,filename,comment)
+ local collected = xmlapplylpath(getid(id),pattern)
+ if collected then
+ io.savedata(filename,format(template,comment or "exported fragment",tostring(collected[1])))
+ else
+ os.remove(filename) -- get rid of old content
+ end
+end
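+
+-- Hedged usage sketch, not part of the original file (filename, id and path are
+-- invented):
+--
+-- lxml.tofile("somexml","/chapter","fragment.xml","first chapter")
+--
+-- saves the first match wrapped in the template above, or removes the file when
+-- the pattern matches nothing.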
+
+texfinalizers.upperall = xmlfinalizers.upperall
+texfinalizers.lowerall = xmlfinalizers.lowerall
diff --git a/tex/context/base/lxml-xml.lua b/tex/context/base/lxml-xml.lua
index d0e256078..d4e103206 100644
--- a/tex/context/base/lxml-xml.lua
+++ b/tex/context/base/lxml-xml.lua
@@ -1,445 +1,445 @@
-if not modules then modules = { } end modules ['lxml-xml'] = {
- version = 1.001,
- comment = "this module is the basis for the lxml-* ones",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local concat = table.concat
-local find, lower, upper = string.find, string.lower, string.upper
-
-local xml = xml
-
-local finalizers = xml.finalizers.xml
-local xmlfilter = xml.filter -- we could inline this one for speed
-local xmltostring = xml.tostring
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
-local xmlnewhandlers = xml.newhandlers
-
-local function first(collected) -- wrong ?
- return collected and collected[1]
-end
-
-local function last(collected)
- return collected and collected[#collected]
-end
-
-local function all(collected)
- return collected
-end
-
--- local function reverse(collected)
--- if collected then
--- local nc = #collected
--- if nc > 0 then
--- local reversed, r = { }, 0
--- for c=nc,1,-1 do
--- r = r + 1
--- reversed[r] = collected[c]
--- end
--- return reversed
--- else
--- return collected
--- end
--- end
--- end
-
-local reverse = table.reversed
-
-local function attribute(collected,name)
- if collected and #collected > 0 then
- local at = collected[1].at
- return at and at[name]
- end
-end
-
-local function att(id,name)
- local at = id.at
- return at and at[name]
-end
-
-local function count(collected)
- return collected and #collected or 0
-end
-
-local function position(collected,n)
- if not collected then
- return 0
- end
- local nc = #collected
- if nc == 0 then
- return 0
- end
- n = tonumber(n) or 0
- if n < 0 then
- return collected[nc + n + 1]
- elseif n > 0 then
- return collected[n]
- else
- return collected[1].mi or 0
- end
-end
-
-local function match(collected)
- return collected and #collected > 0 and collected[1].mi or 0 -- match
-end
-
-local function index(collected)
- return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new
-end
-
-local function attributes(collected,arguments)
- if collected and #collected > 0 then
- local at = collected[1].at
- if arguments then
- return at[arguments]
- elseif next(at) then
- return at -- all of them
- end
- end
-end
-
-local function chainattribute(collected,arguments) -- todo: optional levels
- if collected and #collected > 0 then
- local e = collected[1]
- while e do
- local at = e.at
- if at then
- local a = at[arguments]
- if a then
- return a
- end
- else
- break -- error
- end
- e = e.__p__
- end
- end
- return ""
-end
-
-local function raw(collected) -- hybrid (not much different from text so it might go)
- if collected and #collected > 0 then
- local e = collected[1] or collected
- return e and xmltostring(e) or "" -- only first as we cannot concat function
- else
- return ""
- end
-end
-
---
-
-local xmltexthandler = xmlnewhandlers {
- name = "string",
- initialize = function()
- result = { }
- return result
- end,
- finalize = function()
- return concat(result)
- end,
- handle = function(...)
- result[#result+1] = concat { ... }
- end,
- escape = false,
-}
-
-local function xmltotext(root)
- local dt = root.dt
- if not dt then
- return ""
- end
- local nt = #dt -- string or table
- if nt == 0 then
- return ""
- elseif nt == 1 and type(dt[1]) == "string" then
- return dt[1] -- no escaping of " ' < > &
- else
- return xmlserialize(root,xmltexthandler) or ""
- end
-end
-
---
-
-local function text(collected) -- hybrid
- if collected then -- no # test here !
- local e = collected[1] or collected -- why fallback to element, how about cdata
- return e and xmltotext(e) or ""
- else
- return ""
- end
-end
-
-local function texts(collected)
- if not collected then
- return { } -- why no nil
- end
- local nc = #collected
- if nc == 0 then
- return { } -- why no nil
- end
- local t, n = { }, 0
- for c=1,nc do
- local e = collected[c]
- if e and e.dt then
- n = n + 1
- t[n] = e.dt
- end
- end
- return t
-end
-
-local function tag(collected,n)
- if not collected then
- return
- end
- local nc = #collected
- if nc == 0 then
- return
- end
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[nc-n+1]
- end
- return c and c.tg
-end
-
-local function name(collected,n)
- if not collected then
- return
- end
- local nc = #collected
- if nc == 0 then
- return
- end
- local c
- if n == 0 or not n then
- c = collected[1]
- elseif n > 1 then
- c = collected[n]
- else
- c = collected[nc-n+1]
- end
- if not c then
- -- sorry
- elseif c.ns == "" then
- return c.tg
- else
- return c.ns .. ":" .. c.tg
- end
-end
-
-local function tags(collected,nonamespace)
- if not collected then
- return
- end
- local nc = #collected
- if nc == 0 then
- return
- end
- local t, n = { }, 0
- for c=1,nc do
- local e = collected[c]
- local ns, tg = e.ns, e.tg
- n = n + 1
- if nonamespace or ns == "" then
- t[n] = tg
- else
- t[n] = ns .. ":" .. tg
- end
- end
- return t
-end
-
-local function empty(collected,spacesonly)
- if not collected then
- return true
- end
- local nc = #collected
- if nc == 0 then
- return true
- end
- for c=1,nc do
- local e = collected[c]
- if e then
- local edt = e.dt
- if edt then
- local n = #edt
- if n == 1 then
- local edk = edt[1]
- local typ = type(edk)
- if typ == "table" then
- return false
- elseif edk ~= "" then
- return false
- elseif spacesonly and not find(edk,"%S") then
- return false
- end
- elseif n > 1 then
- return false
- end
- end
- end
- end
- return true
-end
-
-finalizers.first = first
-finalizers.last = last
-finalizers.all = all
-finalizers.reverse = reverse
-finalizers.elements = all
-finalizers.default = all
-finalizers.attribute = attribute
-finalizers.att = att
-finalizers.count = count
-finalizers.position = position
-finalizers.match = match
-finalizers.index = index
-finalizers.attributes = attributes
-finalizers.chainattribute = chainattribute
-finalizers.text = text
-finalizers.texts = texts
-finalizers.tag = tag
-finalizers.name = name
-finalizers.tags = tags
-finalizers.empty = empty
-
--- shortcuts -- we could support xmlfilter(id,pattern,first)
-
-function xml.first(id,pattern)
- return first(xmlfilter(id,pattern))
-end
-
-function xml.last(id,pattern)
- return last(xmlfilter(id,pattern))
-end
-
-function xml.count(id,pattern)
- return count(xmlfilter(id,pattern))
-end
-
-function xml.attribute(id,pattern,a,default)
- return attribute(xmlfilter(id,pattern),a,default)
-end
-
-function xml.raw(id,pattern)
- if pattern then
- return raw(xmlfilter(id,pattern))
- else
- return raw(id)
- end
-end
-
-function xml.text(id,pattern) -- brrr either content or element (when cdata)
- if pattern then
- -- return text(xmlfilter(id,pattern))
- local collected = xmlfilter(id,pattern)
- return collected and #collected > 0 and xmltotext(collected[1]) or ""
- elseif id then
- -- return text(id)
- return xmltotext(id) or ""
- else
- return ""
- end
-end
-
-xml.content = text
-
---
-
-function xml.position(id,pattern,n) -- element
- return position(xmlfilter(id,pattern),n)
-end
-
-function xml.match(id,pattern) -- number
- return match(xmlfilter(id,pattern))
-end
-
-function xml.empty(id,pattern,spacesonly)
- return empty(xmlfilter(id,pattern),spacesonly)
-end
-
-xml.all = xml.filter
-xml.index = xml.position
-xml.found = xml.filter
-
--- a nice one:
-
-local function totable(x)
- local t = { }
- for e in xmlcollected(x[1] or x,"/*") do
- t[e.tg] = xmltostring(e.dt) or ""
- end
- return next(t) and t or nil
-end
-
-xml.table = totable
-finalizers.table = totable
-
-local function textonly(e,t)
- if e then
- local edt = e.dt
- if edt then
- for i=1,#edt do
- local e = edt[i]
- if type(e) == "table" then
- textonly(e,t)
- else
- t[#t+1] = e
- end
- end
- end
- end
- return t
-end
-
-function xml.textonly(e) -- no pattern
- return concat(textonly(e,{}))
-end
-
---
-
--- local x = xml.convert("123")
--- xml.filter(x,"**/lowerall()") print(x)
--- xml.filter(x,"**/upperall()") print(x)
-
-function finalizers.lowerall(collected)
- for c=1,#collected do
- local e = collected[c]
- if not e.special then
- e.tg = lower(e.tg)
- local eat = e.at
- if eat then
- local t = { }
- for k,v in next, eat do
- t[lower(k)] = v
- end
- e.at = t
- end
- end
- end
-end
-
-function finalizers.upperall(collected)
- for c=1,#collected do
- local e = collected[c]
- if not e.special then
- e.tg = upper(e.tg)
- local eat = e.at
- if eat then
- local t = { }
- for k,v in next, eat do
- t[upper(k)] = v
- end
- e.at = t
- end
- end
- end
-end
+if not modules then modules = { } end modules ['lxml-xml'] = {
+ version = 1.001,
+ comment = "this module is the basis for the lxml-* ones",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local concat = table.concat
+local find, lower, upper = string.find, string.lower, string.upper
+
+local xml = xml
+
+local finalizers = xml.finalizers.xml
+local xmlfilter = xml.filter -- we could inline this one for speed
+local xmltostring = xml.tostring
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmlnewhandlers = xml.newhandlers
+
+local function first(collected) -- wrong ?
+ return collected and collected[1]
+end
+
+local function last(collected)
+ return collected and collected[#collected]
+end
+
+local function all(collected)
+ return collected
+end
+
+-- local function reverse(collected)
+-- if collected then
+-- local nc = #collected
+-- if nc > 0 then
+-- local reversed, r = { }, 0
+-- for c=nc,1,-1 do
+-- r = r + 1
+-- reversed[r] = collected[c]
+-- end
+-- return reversed
+-- else
+-- return collected
+-- end
+-- end
+-- end
+
+local reverse = table.reversed
+
+local function attribute(collected,name)
+ if collected and #collected > 0 then
+ local at = collected[1].at
+ return at and at[name]
+ end
+end
+
+local function att(id,name)
+ local at = id.at
+ return at and at[name]
+end
+
+local function count(collected)
+ return collected and #collected or 0
+end
+
+local function position(collected,n)
+ if not collected then
+ return 0
+ end
+ local nc = #collected
+ if nc == 0 then
+ return 0
+ end
+ n = tonumber(n) or 0
+ if n < 0 then
+ return collected[nc + n + 1]
+ elseif n > 0 then
+ return collected[n]
+ else
+ return collected[1].mi or 0
+ end
+end
+
+local function match(collected)
+ return collected and #collected > 0 and collected[1].mi or 0 -- match
+end
+
+local function index(collected)
+ return collected and #collected > 0 and collected[1].ni or 0 -- 0 is new
+end
+
+local function attributes(collected,arguments)
+ if collected and #collected > 0 then
+ local at = collected[1].at
+ if arguments then
+ return at[arguments]
+ elseif next(at) then
+ return at -- all of them
+ end
+ end
+end
+
+local function chainattribute(collected,arguments) -- todo: optional levels
+ if collected and #collected > 0 then
+ local e = collected[1]
+ while e do
+ local at = e.at
+ if at then
+ local a = at[arguments]
+ if a then
+ return a
+ end
+ else
+ break -- error
+ end
+ e = e.__p__
+ end
+ end
+ return ""
+end
+
+local function raw(collected) -- hybrid (not much different from text so it might go)
+ if collected and #collected > 0 then
+ local e = collected[1] or collected
+ return e and xmltostring(e) or "" -- only first as we cannot concat function
+ else
+ return ""
+ end
+end
+
+--
+
+local result -- buffer shared with the handler below, kept out of the global namespace
+
+local xmltexthandler = xmlnewhandlers {
+ name = "string",
+ initialize = function()
+ result = { }
+ return result
+ end,
+ finalize = function()
+ return concat(result)
+ end,
+ handle = function(...)
+ result[#result+1] = concat { ... }
+ end,
+ escape = false,
+}
+
+local function xmltotext(root)
+ local dt = root.dt
+ if not dt then
+ return ""
+ end
+ local nt = #dt -- string or table
+ if nt == 0 then
+ return ""
+ elseif nt == 1 and type(dt[1]) == "string" then
+ return dt[1] -- no escaping of " ' < > &
+ else
+ return xmlserialize(root,xmltexthandler) or ""
+ end
+end
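+
+-- Hedged sketch, not part of the original file (input is made up): xmltotext
+-- returns the single string directly when an element only contains text and
+-- falls back to the string handler when there is nested markup.
+--
+-- local root = xml.convert("<a>foo<b>bar</b></a>")
+-- xmltotext(xml.first(root,"/a/b")) -- "bar"    (plain string in dt)
+-- xmltotext(xml.first(root,"/a"))   -- "foobar" (serialized, unescaped)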
+
+--
+
+local function text(collected) -- hybrid
+ if collected then -- no # test here !
+ local e = collected[1] or collected -- why fallback to element, how about cdata
+ return e and xmltotext(e) or ""
+ else
+ return ""
+ end
+end
+
+local function texts(collected)
+ if not collected then
+ return { } -- why no nil
+ end
+ local nc = #collected
+ if nc == 0 then
+ return { } -- why no nil
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ if e and e.dt then
+ n = n + 1
+ t[n] = e.dt
+ end
+ end
+ return t
+end
+
+local function tag(collected,n)
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ return c and c.tg
+end
+
+local function name(collected,n)
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local c
+ if n == 0 or not n then
+ c = collected[1]
+ elseif n > 1 then
+ c = collected[n]
+ else
+ c = collected[nc-n+1]
+ end
+ if not c then
+ -- sorry
+ elseif c.ns == "" then
+ return c.tg
+ else
+ return c.ns .. ":" .. c.tg
+ end
+end
+
+local function tags(collected,nonamespace)
+ if not collected then
+ return
+ end
+ local nc = #collected
+ if nc == 0 then
+ return
+ end
+ local t, n = { }, 0
+ for c=1,nc do
+ local e = collected[c]
+ local ns, tg = e.ns, e.tg
+ n = n + 1
+ if nonamespace or ns == "" then
+ t[n] = tg
+ else
+ t[n] = ns .. ":" .. tg
+ end
+ end
+ return t
+end
+
+local function empty(collected,spacesonly)
+ if not collected then
+ return true
+ end
+ local nc = #collected
+ if nc == 0 then
+ return true
+ end
+ for c=1,nc do
+ local e = collected[c]
+ if e then
+ local edt = e.dt
+ if edt then
+ local n = #edt
+ if n == 1 then
+ local edk = edt[1]
+ local typ = type(edk)
+ if typ == "table" then
+ return false
+ elseif edk ~= "" then
+ return false
+ elseif spacesonly and not find(edk,"%S") then
+ return false
+ end
+ elseif n > 1 then
+ return false
+ end
+ end
+ end
+ end
+ return true
+end
+
+finalizers.first = first
+finalizers.last = last
+finalizers.all = all
+finalizers.reverse = reverse
+finalizers.elements = all
+finalizers.default = all
+finalizers.attribute = attribute
+finalizers.att = att
+finalizers.count = count
+finalizers.position = position
+finalizers.match = match
+finalizers.index = index
+finalizers.attributes = attributes
+finalizers.chainattribute = chainattribute
+finalizers.text = text
+finalizers.texts = texts
+finalizers.tag = tag
+finalizers.name = name
+finalizers.tags = tags
+finalizers.empty = empty
+
+-- shortcuts -- we could support xmlfilter(id,pattern,first)
+
+function xml.first(id,pattern)
+ return first(xmlfilter(id,pattern))
+end
+
+function xml.last(id,pattern)
+ return last(xmlfilter(id,pattern))
+end
+
+function xml.count(id,pattern)
+ return count(xmlfilter(id,pattern))
+end
+
+function xml.attribute(id,pattern,a,default)
+ return attribute(xmlfilter(id,pattern),a,default)
+end
+
+function xml.raw(id,pattern)
+ if pattern then
+ return raw(xmlfilter(id,pattern))
+ else
+ return raw(id)
+ end
+end
+
+function xml.text(id,pattern) -- brrr either content or element (when cdata)
+ if pattern then
+ -- return text(xmlfilter(id,pattern))
+ local collected = xmlfilter(id,pattern)
+ return collected and #collected > 0 and xmltotext(collected[1]) or ""
+ elseif id then
+ -- return text(id)
+ return xmltotext(id) or ""
+ else
+ return ""
+ end
+end
+
+xml.content = text
+
+--
+
+function xml.position(id,pattern,n) -- element
+ return position(xmlfilter(id,pattern),n)
+end
+
+function xml.match(id,pattern) -- number
+ return match(xmlfilter(id,pattern))
+end
+
+function xml.empty(id,pattern,spacesonly)
+ return empty(xmlfilter(id,pattern),spacesonly)
+end
+
+xml.all = xml.filter
+xml.index = xml.position
+xml.found = xml.filter
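+
+-- Hedged sketch, not part of the original file (input is made up): the shortcuts
+-- combine an lpath filter and a finalizer in one call.
+--
+-- local x = xml.convert("<doc><title>Hello</title></doc>")
+-- xml.text (x,"/doc/title")   -- "Hello"
+-- xml.count(x,"/doc/title")   -- 1
+-- xml.empty(x,"/doc/missing") -- true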
+
+-- a nice one:
+
+local function totable(x)
+ local t = { }
+ for e in xmlcollected(x[1] or x,"/*") do
+ t[e.tg] = xmltostring(e.dt) or ""
+ end
+ return next(t) and t or nil
+end
+
+xml.table = totable
+finalizers.table = totable
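+
+-- Hedged sketch, not part of the original file (input is made up): as a
+-- finalizer, table() maps the tags of the matched element's children onto
+-- their (serialized) content.
+--
+-- local x = xml.convert("<r><a>1</a><b>2</b></r>")
+-- inspect(xml.filter(x,"/r/table()")) -- roughly { a = "1", b = "2" }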
+
+local function textonly(e,t)
+ if e then
+ local edt = e.dt
+ if edt then
+ for i=1,#edt do
+ local e = edt[i]
+ if type(e) == "table" then
+ textonly(e,t)
+ else
+ t[#t+1] = e
+ end
+ end
+ end
+ end
+ return t
+end
+
+function xml.textonly(e) -- no pattern
+ return concat(textonly(e,{}))
+end
+
+--
+
+-- local x = xml.convert("123")
+-- xml.filter(x,"**/lowerall()") print(x)
+-- xml.filter(x,"**/upperall()") print(x)
+
+function finalizers.lowerall(collected)
+ for c=1,#collected do
+ local e = collected[c]
+ if not e.special then
+ e.tg = lower(e.tg)
+ local eat = e.at
+ if eat then
+ local t = { }
+ for k,v in next, eat do
+ t[lower(k)] = v
+ end
+ e.at = t
+ end
+ end
+ end
+end
+
+function finalizers.upperall(collected)
+ for c=1,#collected do
+ local e = collected[c]
+ if not e.special then
+ e.tg = upper(e.tg)
+ local eat = e.at
+ if eat then
+ local t = { }
+ for k,v in next, eat do
+ t[upper(k)] = v
+ end
+ e.at = t
+ end
+ end
+ end
+end
diff --git a/tex/context/base/m-chart.lua b/tex/context/base/m-chart.lua
index c4da2eb63..34f77c074 100644
--- a/tex/context/base/m-chart.lua
+++ b/tex/context/base/m-chart.lua
@@ -1,916 +1,916 @@
-if not modules then modules = { } end modules ['x-flow'] = {
- version = 1.001,
- comment = "companion to m-flow.mkvi",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- when we can resolve mpcolor at the lua end we will
--- use metapost.graphic(....) directly
-
--- todo: labels
-
-moduledata.charts = moduledata.charts or { }
-
-local gsub, match, find, format, lower = string.gsub, string.match, string.find, string.format, string.lower
-local setmetatableindex = table.setmetatableindex
-local P, S, C, Cc, lpegmatch = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc, lpeg.match
-
-local report_chart = logs.reporter("chart")
-
-local points = number.points
-
-local variables = interfaces.variables
-
-local v_yes = variables.yes
-local v_no = variables.no
-local v_none = variables.none
-local v_standard = variables.standard
-local v_overlay = variables.overlay
-local v_round = variables.round
-local v_test = variables.test
-
-local defaults = {
- chart = {
- name = "",
- option = "",
- backgroundcolor = "",
- width = 100*65536,
- height = 50*65536,
- dx = 30*65536,
- dy = 30*65536,
- offset = 0,
- bodyfont = "",
- dot = "",
- hcompact = variables_no,
- vcompact = variables_no,
- autofocus = "",
- focus = "",
- labeloffset = 5*65536,
- commentoffset = 5*65536,
- exitoffset = 0,
-
- },
- shape = { -- FLOS
- rulethickness = 65536,
- default = "",
- framecolor = "darkblue",
- backgroundcolor = "lightgray",
- },
- focus = { -- FLOF
- rulethickness = 65536,
- framecolor = "darkred",
- backgroundcolor = "gray",
- },
- line = { -- FLOL
- rulethickness = 65536,
- radius = 10*65536,
- color = "darkgreen",
- corner = "",
- dash = "",
- arrow = "",
- offset = "",
- },
- set = { -- FLOX
- },
- split = {
- nx = 3,
- ny = 3,
- command = "",
- marking = "",
- before = "",
- after = "",
- }
-}
-
-local validshapes = {
- ["node"] = { kind = "shape", number = 0 },
- ["action"] = { kind = "shape", number = 24 },
- ["procedure"] = { kind = "shape", number = 5 },
- ["product"] = { kind = "shape", number = 12 },
- ["decision"] = { kind = "shape", number = 14 },
- ["archive"] = { kind = "shape", number = 19 },
- ["loop"] = { kind = "shape", number = 35 },
- ["wait"] = { kind = "shape", number = 6 },
- ["subprocedure"] = { kind = "shape", number = 20 },
- ["singledocument"] = { kind = "shape", number = 32 },
- ["multidocument"] = { kind = "shape", number = 33 },
-
- ["right"] = { kind = "line", number = 66 },
- ["left"] = { kind = "line", number = 67 },
- ["up"] = { kind = "line", number = 68 },
- ["down"] = { kind = "line", number = 69 },
-}
-
-local validlabellocations = {
- l = "l", left = "l",
- r = "r", right = "r",
- t = "t", top = "t",
- b = "b", bottom = "b",
- lt = "lt",
- rt = "rt",
- lb = "lb",
- rb = "rb",
- tl = "tl",
- tr = "tr",
- bl = "bl",
- br = "br",
-}
-
-local validcommentlocations = {
- l = "l", left = "l",
- r = "r", right = "r",
- t = "t", top = "t",
- b = "b", bottom = "b",
- lt = "lt",
- rt = "rt",
- lb = "lb",
- rb = "rb",
- tl = "tl",
- tr = "tr",
- bl = "bl",
- br = "br",
-}
-
-local validtextlocations = {
- l = "l", left = "l",
- r = "r", right = "r",
- t = "t", top = "t",
- b = "b", bottom = "b",
- c = "c", center = "c",
- m = "c", middle = "m",
- lt = "lt",
- rt = "rt",
- lb = "lb",
- rb = "rb",
- tl = "lt",
- tr = "rt",
- bl = "lb",
- br = "rb",
-}
-
-setmetatableindex(validshapes,function(t,k)
- local l = gsub(lower(k)," ","")
- local v = rawget(t,l)
- if not v then
- local n = tonumber(k)
- if n then
- v = { kind = "shape", number = n }
- else
- v = rawget(t,"action")
- end
- end
- t[k] = v
- return v
-end)
-
-local charts = { }
-
-local data, hash, temp, last_x, last_y, name
-
-function commands.flow_start_chart(chartname)
- data = { }
- hash = { }
- last_x, last_y = 0, 0
- name = chartname
-end
-
-function commands.flow_stop_chart()
- charts[name] = {
- data = data,
- hash = hash,
- last_x = last_x,
- last_y = last_y,
- }
- data, hash, temp = nil, nil, nil
-end
-
--- function commands.flow_set(chartname,chartdata)
--- local hash = { }
--- local data = { }
--- charts[name] = {
--- data = data,
--- hash = hash,
--- }
--- for i=1,#chartdata do
--- local di = data[i]
--- local name = di.name or ""
--- if name then
--- data[#data+1] = {
--- name = name,
--- labels = di.labels or { },
--- comments = di.comments or { },
--- exits = di.exits or { },
--- connections = di.connections or { },
--- settings = di.settings or { },
--- x = di.x or 1,
--- y = di.y or 1,
--- }
--- hash[name] = i
--- end
--- end
--- end
-
-function commands.flow_reset(chartname)
- charts[name] = nil
-end
-
-function commands.flow_set_current_cell(n)
- temp = data[tonumber(n)] or { }
-end
-
-function commands.flow_start_cell(settings)
- temp = {
- texts = { },
- labels = { },
- exits = { },
- connections = { },
- settings = settings,
- x = 1,
- y = 1,
- name = "",
- }
-end
-
-function commands.flow_stop_cell()
- data[#data+1] = temp
- hash[temp.name or #data] = temp
-end
-
-function commands.flow_set_name(str)
- temp.name = str
-end
-
-function commands.flow_set_shape(str)
- temp.shape = str
-end
-
-function commands.flow_set_destination(str)
- temp.destination = str
-end
-
-function commands.flow_set_text(align,str)
- temp.texts[#temp.texts+1] = {
- location = align,
- text = str,
- }
-end
-
-function commands.flow_set_overlay(str)
- temp.overlay = str
-end
-
-function commands.flow_set_focus(str)
- temp.focus = str
-end
-
-function commands.flow_set_figure(str)
- temp.figure = str
-end
-
-function commands.flow_set_label(location,text)
- temp.labels[#temp.labels+1] = {
- location = location,
- text = text,
- }
-end
-
-function commands.flow_set_comment(location,text)
- local connections = temp.connections
- if connections then
- local connection = connections[#connections]
- if connection then
- local comments = connection.comments
- if comments then
- comments[#comments+1] = {
- location = location,
- text = text,
- }
- end
- end
- end
-end
-
-function commands.flow_set_exit(location,text)
- temp.exits[#temp.exits+1] = {
- location = location,
- text = text,
- }
-end
-
-function commands.flow_set_include(name,x,y,settings)
- data[#data+1] = {
- include = name,
- x = x,
- y = y,
- -- settings = settings,
- }
-end
-
-local function inject(includedata,data,hash)
- local subchart = charts[includedata.include]
- if not subchart then
- return
- end
- local subdata = subchart.data
- if not subdata then
- return
- end
- local xoffset = (includedata.x or 1) - 1
- local yoffset = (includedata.y or 1) - 1
- local settings = includedata.settings
- for i=1,#subdata do
- local si = subdata[i]
- if si.include then
- inject(si,data,hash)
- else
- local t = {
- x = si.x + xoffset,
- y = si.y + yoffset,
- settings = settings,
- }
- setmetatableindex(t,si)
- data[#data+1] = t
- hash[si.name or #data] = t
- end
- end
-end
-
-local function pack(data,field)
- local list, max = { }, 0
- for e=1,#data do
- local d = data[e]
- local f = d[field]
- list[f] = true
- if f > max then
- max = f
- end
- end
- for i=1,max do
- if not list[i] then
- for e=1,#data do
- local d = data[e]
- local f = d[field]
- if f > i then
- d[field] = f - 1
- end
- end
- end
- end
-end
-
-local function expanded(chart,chartsettings)
- local expandeddata = { }
- local expandedhash = { }
- local expandedchart = {
- data = expandeddata,
- hash = expandedhash,
- }
- setmetatableindex(expandedchart,chart)
- local data = chart.data
- local hash = chart.hash
- for i=1,#data do
- local di = data[i]
- if di.include then
- inject(di,expandeddata,expandedhash)
- else
- expandeddata[#expandeddata+1] = di
- expandedhash[di.name or #expandeddata] = di
- end
- end
- --
- expandedchart.settings = chartsettings or { }
- -- make locals
- chartsettings.shape = chartsettings.shape or { }
- chartsettings.focus = chartsettings.focus or { }
- chartsettings.line = chartsettings.line or { }
- chartsettings.set = chartsettings.set or { }
- chartsettings.split = chartsettings.split or { }
- chartsettings.chart = chartsettings.chart or { }
- setmetatableindex(chartsettings.shape,defaults.shape)
- setmetatableindex(chartsettings.focus,defaults.focus)
- setmetatableindex(chartsettings.line ,defaults.line )
- setmetatableindex(chartsettings.set ,defaults.set )
- setmetatableindex(chartsettings.split,defaults.split)
- setmetatableindex(chartsettings.chart,defaults.chart)
- --
- if chartsettings.chart.vcompact == v_yes then
- pack(expandeddata,"y")
- end
- if chartsettings.chart.hcompact == v_yes then
- pack(expandeddata,"x")
- end
- --
- for i=1,#expandeddata do
- local cell = expandeddata[i]
- local settings = cell.settings
- if not settings then
- cell.settings = chartsettings
- else
- settings.shape = settings.shape or { }
- settings.focus = settings.focus or { }
- settings.line = settings.line or { }
- setmetatableindex(settings.shape,chartsettings.shape)
- setmetatableindex(settings.focus,chartsettings.focus)
- setmetatableindex(settings.line ,chartsettings.line)
- end
- end
- return expandedchart
-end
-
-local splitter = lpeg.splitat(",")
-
-function commands.flow_set_location(x,y)
- if type(x) == "string" and not y then
- x, y = lpegmatch(splitter,x)
- end
- if not x or x == "" then
- x = last_x
- elseif type(x) == "number" then
- -- ok
- elseif x == "+" then
- x = last_x + 1
- elseif x == "-" then
- x = last_x - 1
- elseif find(x,"^[%+%-]") then
- x = last_x + (tonumber(x) or 0)
- else
- x = tonumber(x)
- end
- if not y or y == "" then
- y = last_y
- elseif type(y) == "number" then
- -- ok
- elseif y == "+" then
- y = last_y + 1
- elseif x == "-" then
- y = last_y - 1
- elseif find(y,"^[%+%-]") then
- y = last_y + (tonumber(y) or 0)
- else
- y = tonumber(y)
- end
- temp.x = x or 1
- temp.y = y or 1
- last_x = x or last_x
- last_y = y or last_y
-end
-
-function commands.flow_set_connection(location,displacement,name)
- local dx, dy = lpegmatch(splitter,displacement)
- dx = tonumber(dx)
- dy = tonumber(dy)
- temp.connections[#temp.connections+1] = {
- location = location,
- dx = dx or 0,
- dy = dy or 0,
- name = name,
- comments = { },
- }
-end
-
-local function visible(chart,cell)
- local x, y = cell.x, cell.y
- return
- x >= chart.from_x and x <= chart.to_x and
- y >= chart.from_y and y <= chart.to_y and cell
-end
-
-local function process_cells(chart,xoffset,yoffset)
- local data = chart.data
- if not data then
- return
- end
- local focus = utilities.parsers.settings_to_hash(chart.settings.chart.focus or "")
- for i=1,#data do
- local cell = visible(chart,data[i])
- if cell then
- local settings = cell.settings
- local shapesettings = settings.shape
- local shape = cell.shape
- if not shape or shape == "" then
- shape = shapesettings.default or "none"
- end
- if shape ~= v_none then
- local shapedata = validshapes[shape]
- context("flow_begin_sub_chart ;") -- when is this needed
- if shapedata.kind == "line" then
- local linesettings = settings.line
- context("flow_shape_line_color := \\MPcolor{%s} ;", linesettings.color)
- context("flow_shape_fill_color := \\MPcolor{%s} ;", linesettings.backgroundcolor)
- context("flow_shape_line_width := %s ; ", points(linesettingsrulethickness))
- elseif focus[cell.focus] or focus[cell.name] then
- local focussettings = settings.focus
- context("flow_shape_line_color := \\MPcolor{%s} ;", focussettings.framecolor)
- context("flow_shape_fill_color := \\MPcolor{%s} ;", focussettings.backgroundcolor)
- context("flow_shape_line_width := %s ; ", points(focussettings.rulethickness))
- else
- local shapesettings = settings.shape
- context("flow_shape_line_color := \\MPcolor{%s} ;", shapesettings.framecolor)
- context("flow_shape_fill_color := \\MPcolor{%s} ;", shapesettings.backgroundcolor)
- context("flow_shape_line_width := %s ; " , points(shapesettings.rulethickness))
- end
- context("flow_peepshape := false ;") -- todo
- context("flow_new_shape(%s,%s,%s) ;",cell.x+xoffset,cell.y+yoffset,shapedata.number)
- context("flow_end_sub_chart ;")
- end
- end
- end
-end
-
--- todo : make lpeg for splitter
-
-local sign = S("+p") / "1"
- + S("-m") / "-1"
-
-local full = C(P("left"))
- + C(P("right"))
- + C(P("top"))
- + C(P("bottom"))
-
-local char = P("l") / "left"
- + P("r") / "right"
- + P("t") / "top"
- + P("b") / "bottom"
-
-local space = P(" ")^0
-
-local what = space
- * (sign + Cc("0"))
- * space
- * (full + char)
- * space
- * (sign + Cc("0"))
- * space
- * (full + char)
- * space
- * P(-1)
-
--- print(lpegmatch(what,"lr"))
--- print(lpegmatch(what,"+l+r"))
--- print(lpegmatch(what,"+l"))
--- print(lpegmatch(what,"+ left+r "))
-
-local function process_connections(chart,xoffset,yoffset)
- local data = chart.data
- local hash = chart.hash
- if not data then
- return
- end
- local settings = chart.settings
- for i=1,#data do
- local cell = visible(chart,data[i])
- if cell then
- local connections = cell.connections
- for j=1,#connections do
- local connection = connections[j]
- local othername = connection.name
- local othercell = hash[othername]
- if othercell then -- and visible(chart,data[i]) then
- local cellx, celly = cell.x, cell.y
- local otherx, othery, location = othercell.x, othercell.y, connection.location
- if otherx > 0 and othery > 0 and cellx > 0 and celly > 0 and connection.location then
- local what_cell, where_cell, what_other, where_other = lpegmatch(what,location)
- if what_cell and where_cell and what_other and where_other then
- local linesettings = settings.line
- context("flow_smooth := %s ;", linesettings.corner == v_round and "true" or "false")
- context("flow_dashline := %s ;", linesettings.dash == v_yes and "true" or "false")
- context("flow_arrowtip := %s ;", linesettings.arrow == v_yes and "true" or "false")
- context("flow_touchshape := %s ;", linesettings.offset == v_none and "true" or "false")
- context("flow_dsp_x := %s ; flow_dsp_y := %s ;",connection.dx or 0, connection.dy or 0)
- context("flow_connection_line_color := \\MPcolor{%s} ;",linesettings.color)
- context("flow_connection_line_width := 2pt ;",points(linesettings.rulethickness))
- context("flow_connect_%s_%s (%s) (%s,%s,%s) (%s,%s,%s) ;",where_cell,where_other,j,cellx,celly,what_cell,otherx,othery,what_other)
- context("flow_dsp_x := 0 ; flow_dsp_y := 0 ;")
- end
- end
- end
- end
- end
- end
-end
-
-local texttemplate = "\\setvariables[flowcell:text][x=%s,y=%s,text={%s},align={%s},figure={%s},destination={%s}]"
-
-local splitter = lpeg.splitat(":")
-
-local function process_texts(chart,xoffset,yoffset)
- local data = chart.data
- local hash = chart.hash
- if not data then
- return
- end
- for i=1,#data do
- local cell = visible(chart,data[i])
- if cell then
- local x = cell.x or 1
- local y = cell.y or 1
- local texts = cell.texts
- for i=1,#texts do
- local text = texts[i]
- local data = text.text
- local align = validlabellocations[text.align or ""] or text.align or ""
- local figure = i == 1 and cell.figure or ""
- local destination = i == 1 and cell.destination or ""
- context('flow_chart_draw_text(%s,%s,textext("%s")) ;',x,y,format(texttemplate,x,y,data,align,figure,destination))
- end
- local labels = cell.labels
- for i=1,#labels do
- local label = labels[i]
- local text = label.text
- local location = validlabellocations[label.location or ""] or label.location or ""
- if text and location then
- context('flow_chart_draw_label(%s,%s,"%s",textext("\\strut %s")) ;',x,y,location,text)
- end
- end
- local exits = cell.exits
- for i=1,#exits do
- local exit = exits[i]
- local text = exit.text
- local location = validlabellocations[exit.location or ""]
- if text and location then
- -- maybe make autoexit an option
- if location == "l" and x == chart.from_x + 1 or
- location == "r" and x == chart.to_x - 1 or
- location == "t" and y == chart.to_y - 1 or
- location == "b" and y == chart.from_y + 1 then
- context('flow_chart_draw_exit(%s,%s,"%s",textext("\\strut %s")) ;',x,y,location,text)
- end
- end
- end
- local connections = cell.connections
- for i=1,#connections do
- local comments = connections[i].comments
- for j=1,#comments do
- local comment = comments[j]
- local text = comment.text
- local location = comment.location or ""
- local length = 0
- -- "tl" "tl:*" "tl:0.5"
- local loc, len = lpegmatch(splitter,location) -- do the following in lpeg
- if len == "*" then
- location = validcommentlocations[loc] or ""
- if location == "" then
- location = "*"
- else
- location = location .. ":*"
- end
- elseif loc then
- location = validcommentlocations[loc] or "*"
- length = tonumber(len) or 0
- else
- location = validcommentlocations[location] or ""
- end
- if text and location then
- context('flow_chart_draw_comment(%s,%s,%s,"%s",%s,textext("\\strut %s")) ;',x,y,i,location,length,text)
- end
- end
- end
- end
- end
-end
-
-local function getchart(settings,forced_x,forced_y,forced_nx,forced_ny)
- if not settings then
- print("no settings given")
- return
- end
- local chartname = settings.chart.name
- if not chartname then
- print("no name given")
- return
- end
- local chart = charts[chartname]
- if not chart then
- print("no such chart",chartname)
- return
- end
- chart = expanded(chart,settings)
- local chartsettings = chart.settings.chart
- local autofocus = chart.settings.chart.autofocus
- if autofocus then
- autofocus = utilities.parsers.settings_to_hash(autofocus)
- if not next(autofocus) then
- autofocus = false
- end
- end
- -- check natural window
- local x = forced_x or tonumber(chartsettings.x)
- local y = forced_y or tonumber(chartsettings.y)
- local nx = forced_nx or tonumber(chartsettings.nx)
- local ny = forced_ny or tonumber(chartsettings.ny)
- --
- local minx, miny, maxx, maxy = 0, 0, 0, 0
- local data = chart.data
- for i=1,#data do
- local cell = data[i]
- if not autofocus or autofocus[cell.name] then -- offsets probably interfere with autofocus
- local x = cell.x
- local y = cell.y
- if minx == 0 or x < minx then minx = x end
- if miny == 0 or y < miny then miny = y end
- if minx == 0 or x > maxx then maxx = x end
- if miny == 0 or y > maxy then maxy = y end
- end
- end
- -- print("1>",x,y,nx,ny)
- -- print("2>",minx, miny, maxx, maxy)
- -- check of window should be larger (maybe autofocus + nx/ny?)
- if autofocus then
- -- x and y are ignored
- if nx and nx > 0 then
- maxx = minx + nx - 1
- end
- if ny and ny > 0 then
- maxy = miny + ny - 1
- end
- else
- if x and x > 0 then
- minx = x
- end
- if y and y > 0 then
- miny = y
- end
- if nx and nx > 0 then
- maxx = minx + nx - 1
- end
- if ny and ny > 0 then
- maxy = miny + ny - 1
- end
- end
--- print("3>",minx, miny, maxx, maxy)
- --
- local nx = maxx - minx + 1
- local ny = maxy - miny + 1
- -- relocate cells
- for i=1,#data do
- local cell = data[i]
- cell.x = cell.x - minx + 1
- cell.y = cell.y - miny + 1
- end
- chart.from_x = 1
- chart.from_y = 1
- chart.to_x = nx
- chart.to_y = ny
- chart.nx = nx
- chart.ny = ny
- --
- -- inspect(chart)
- return chart
-end
-
-local function makechart(chart)
- local settings = chart.settings
- local chartsettings = settings.chart
- --
- context.begingroup()
- context.forgetall()
- --
- context.startMPcode()
- context("if unknown context_flow : input mp-char.mpiv ; fi ;")
- context("flow_begin_chart(0,%s,%s);",chart.nx,chart.ny)
- --
- if chartsettings.option == v_test or chartsettings.dot == v_yes then
- context("flow_show_con_points := true ;")
- context("flow_show_mid_points := true ;")
- context("flow_show_all_points := true ;")
- elseif chartsettings.dot ~= "" then -- no checking done, private option
- context("flow_show_%s_points := true ;",chartsettings.dot)
- end
- --
- local backgroundcolor = chartsettings.backgroundcolor
- if backgroundcolor and backgroundcolor ~= "" then
- context("flow_chart_background_color := \\MPcolor{%s} ;",backgroundcolor)
- end
- --
- local shapewidth = chartsettings.width
- local gridwidth = shapewidth + 2*chartsettings.dx
- local shapeheight = chartsettings.height
- local gridheight = shapeheight + 2*chartsettings.dy
- local chartoffset = chartsettings.offset
- local labeloffset = chartsettings.labeloffset
- local exitoffset = chartsettings.exitoffset
- local commentoffset = chartsettings.commentoffset
- context("flow_grid_width := %s ;", points(gridwidth))
- context("flow_grid_height := %s ;", points(gridheight))
- context("flow_shape_width := %s ;", points(shapewidth))
- context("flow_shape_height := %s ;", points(shapeheight))
- context("flow_chart_offset := %s ;", points(chartoffset))
- context("flow_label_offset := %s ;", points(labeloffset))
- context("flow_exit_offset := %s ;", points(exitoffset))
- context("flow_comment_offset := %s ;", points(commentoffset))
- --
- local radius = settings.line.radius
- local rulethickness = settings.line.rulethickness
- local dx = chartsettings.dx
- local dy = chartsettings.dy
- if radius < rulethickness then
- radius = 2.5*rulethickness
- if radius > dx then
- radius = dx
- end
- if radius > dy then
- radius = dy
- end
- end
- context("flow_connection_line_width := %s ;", points(rulethickness))
- context("flow_connection_smooth_size := %s ;", points(radius))
- context("flow_connection_arrow_size := %s ;", points(radius))
- context("flow_connection_dash_size := %s ;", points(radius))
- --
- local offset = chartsettings.offset -- todo: pass string
- if offset == v_none or offset == v_overlay or offset == "" then
- offset = -2.5 * radius -- or rulethickness?
- elseif offset == v_standard then
- offset = radius -- or rulethickness?
- end
- context("flow_chart_offset := %s ;",points(offset))
- --
- context("flow_reverse_y := true ;")
- process_cells(chart,0,0)
- process_connections(chart,0,0)
- process_texts(chart,0,0)
- -- context("clip_chart(%s,%s,%s,%s) ;",x,y,nx,ny) -- todo: draw lines but not shapes
- context("flow_end_chart ;")
- context.stopMPcode()
- context.endgroup()
-end
-
-local function splitchart(chart)
- local settings = chart.settings
- local splitsettings = settings.split
- local chartsettings = settings.chart
- --
- local name = chartsettings.name
- --
- local from_x = chart.from_x
- local from_y = chart.from_y
- local to_x = chart.to_x
- local to_y = chart.to_y
- --
- local step_x = splitsettings.nx or to_x
- local step_y = splitsettings.ny or to_y
- local delta_x = splitsettings.dx or 0
- local delta_y = splitsettings.dy or 0
- --
- report_chart("spliting %a from (%s,%s) upto (%s,%s) into (%s,%s) with overlap (%s,%s)",
- name,from_x,from_y,to_x,to_y,step_x,step_y,delta_x,delta_y)
- --
- local part_x = 0
- local first_x = from_x
- while true do
- part_x = part_x + 1
- local last_x = first_x + step_x - 1
- local done = last_x >= to_x
- if done then
- last_x = to_x
- end
- local part_y = 0
- local first_y = from_y
- while true do
- part_y = part_y + 1
- local last_y = first_y + step_y - 1
- local done = last_y >= to_y
- if done then
- last_y = to_y
- end
- --
- report_chart("part (%s,%s) of %a is split from (%s,%s) -> (%s,%s)",part_x,part_y,name,first_x,first_y,last_x,last_y)
- local x, y, nx, ny = first_x, first_y, last_x - first_x + 1,last_y - first_y + 1
- context.beforeFLOWsplit()
- context.handleFLOWsplit(function()
- makechart(getchart(settings,x,y,nx,ny)) -- we need to pass frozen settings !
- end)
- context.afterFLOWsplit()
- --
- if done then
- break
- else
- first_y = last_y + 1 - delta_y
- end
- end
- if done then
- break
- else
- first_x = last_x + 1 - delta_x
- end
- end
-end
-
-function commands.flow_make_chart(settings)
- local chart = getchart(settings)
- if chart then
- local settings = chart.settings
- if settings then
- local chartsettings = settings.chart
- if chartsettings and chartsettings.split == v_yes then
- splitchart(chart)
- else
- makechart(chart)
- end
- else
- makechart(chart)
- end
- end
-end
+if not modules then modules = { } end modules ['x-flow'] = {
+ version = 1.001,
+ comment = "companion to m-flow.mkvi",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- when we can resolve mpcolor at the lua end we will
+-- use metapost.graphic(....) directly
+
+-- todo: labels
+
+moduledata.charts = moduledata.charts or { }
+
+local gsub, match, find, format, lower = string.gsub, string.match, string.find, string.format, string.lower
+local setmetatableindex = table.setmetatableindex
+local P, S, C, Cc, lpegmatch = lpeg.P, lpeg.S, lpeg.C, lpeg.Cc, lpeg.match
+
+local report_chart = logs.reporter("chart")
+
+local points = number.points
+
+local variables = interfaces.variables
+
+local v_yes = variables.yes
+local v_no = variables.no
+local v_none = variables.none
+local v_standard = variables.standard
+local v_overlay = variables.overlay
+local v_round = variables.round
+local v_test = variables.test
+
+local defaults = {
+ chart = {
+ name = "",
+ option = "",
+ backgroundcolor = "",
+ width = 100*65536,
+ height = 50*65536,
+ dx = 30*65536,
+ dy = 30*65536,
+ offset = 0,
+ bodyfont = "",
+ dot = "",
+ hcompact = v_no,
+ vcompact = v_no,
+ autofocus = "",
+ focus = "",
+ labeloffset = 5*65536,
+ commentoffset = 5*65536,
+ exitoffset = 0,
+
+ },
+ shape = { -- FLOS
+ rulethickness = 65536,
+ default = "",
+ framecolor = "darkblue",
+ backgroundcolor = "lightgray",
+ },
+ focus = { -- FLOF
+ rulethickness = 65536,
+ framecolor = "darkred",
+ backgroundcolor = "gray",
+ },
+ line = { -- FLOL
+ rulethickness = 65536,
+ radius = 10*65536,
+ color = "darkgreen",
+ corner = "",
+ dash = "",
+ arrow = "",
+ offset = "",
+ },
+ set = { -- FLOX
+ },
+ split = {
+ nx = 3,
+ ny = 3,
+ command = "",
+ marking = "",
+ before = "",
+ after = "",
+ }
+}
+
+local validshapes = {
+ ["node"] = { kind = "shape", number = 0 },
+ ["action"] = { kind = "shape", number = 24 },
+ ["procedure"] = { kind = "shape", number = 5 },
+ ["product"] = { kind = "shape", number = 12 },
+ ["decision"] = { kind = "shape", number = 14 },
+ ["archive"] = { kind = "shape", number = 19 },
+ ["loop"] = { kind = "shape", number = 35 },
+ ["wait"] = { kind = "shape", number = 6 },
+ ["subprocedure"] = { kind = "shape", number = 20 },
+ ["singledocument"] = { kind = "shape", number = 32 },
+ ["multidocument"] = { kind = "shape", number = 33 },
+
+ ["right"] = { kind = "line", number = 66 },
+ ["left"] = { kind = "line", number = 67 },
+ ["up"] = { kind = "line", number = 68 },
+ ["down"] = { kind = "line", number = 69 },
+}
+
+local validlabellocations = {
+ l = "l", left = "l",
+ r = "r", right = "r",
+ t = "t", top = "t",
+ b = "b", bottom = "b",
+ lt = "lt",
+ rt = "rt",
+ lb = "lb",
+ rb = "rb",
+ tl = "tl",
+ tr = "tr",
+ bl = "bl",
+ br = "br",
+}
+
+local validcommentlocations = {
+ l = "l", left = "l",
+ r = "r", right = "r",
+ t = "t", top = "t",
+ b = "b", bottom = "b",
+ lt = "lt",
+ rt = "rt",
+ lb = "lb",
+ rb = "rb",
+ tl = "tl",
+ tr = "tr",
+ bl = "bl",
+ br = "br",
+}
+
+local validtextlocations = {
+ l = "l", left = "l",
+ r = "r", right = "r",
+ t = "t", top = "t",
+ b = "b", bottom = "b",
+ c = "c", center = "c",
+ m = "c", middle = "m",
+ lt = "lt",
+ rt = "rt",
+ lb = "lb",
+ rb = "rb",
+ tl = "lt",
+ tr = "rt",
+ bl = "lb",
+ br = "rb",
+}
+
+setmetatableindex(validshapes,function(t,k)
+ local l = gsub(lower(k)," ","")
+ local v = rawget(t,l)
+ if not v then
+ local n = tonumber(k)
+ if n then
+ v = { kind = "shape", number = n }
+ else
+ v = rawget(t,"action")
+ end
+ end
+ t[k] = v
+ return v
+end)
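+
+-- Hedged sketch, not part of the original file: unknown keys are resolved
+-- lazily by the index metamethod, ignoring case and spaces, mapping numeric
+-- strings onto raw shape numbers and falling back to "action" otherwise.
+--
+-- validshapes["Single Document"] -- { kind = "shape", number = 32 }
+-- validshapes["38"]              -- { kind = "shape", number = 38 }
+-- validshapes["unknown"]         -- the "action" entry, number 24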
+
+local charts = { }
+
+local data, hash, temp, last_x, last_y, name
+
+function commands.flow_start_chart(chartname)
+ data = { }
+ hash = { }
+ last_x, last_y = 0, 0
+ name = chartname
+end
+
+function commands.flow_stop_chart()
+ charts[name] = {
+ data = data,
+ hash = hash,
+ last_x = last_x,
+ last_y = last_y,
+ }
+ data, hash, temp = nil, nil, nil
+end
+
+-- function commands.flow_set(chartname,chartdata)
+-- local hash = { }
+-- local data = { }
+-- charts[name] = {
+-- data = data,
+-- hash = hash,
+-- }
+-- for i=1,#chartdata do
+-- local di = data[i]
+-- local name = di.name or ""
+-- if name then
+-- data[#data+1] = {
+-- name = name,
+-- labels = di.labels or { },
+-- comments = di.comments or { },
+-- exits = di.exits or { },
+-- connections = di.connections or { },
+-- settings = di.settings or { },
+-- x = di.x or 1,
+-- y = di.y or 1,
+-- }
+-- hash[name] = i
+-- end
+-- end
+-- end
+
+function commands.flow_reset(chartname)
+ charts[name] = nil
+end
+
+function commands.flow_set_current_cell(n)
+ temp = data[tonumber(n)] or { }
+end
+
+function commands.flow_start_cell(settings)
+ temp = {
+ texts = { },
+ labels = { },
+ exits = { },
+ connections = { },
+ settings = settings,
+ x = 1,
+ y = 1,
+ name = "",
+ }
+end
+
+function commands.flow_stop_cell()
+ data[#data+1] = temp
+ hash[temp.name or #data] = temp
+end
+
+function commands.flow_set_name(str)
+ temp.name = str
+end
+
+function commands.flow_set_shape(str)
+ temp.shape = str
+end
+
+function commands.flow_set_destination(str)
+ temp.destination = str
+end
+
+function commands.flow_set_text(align,str)
+ temp.texts[#temp.texts+1] = {
+ location = align,
+ text = str,
+ }
+end
+
+function commands.flow_set_overlay(str)
+ temp.overlay = str
+end
+
+function commands.flow_set_focus(str)
+ temp.focus = str
+end
+
+function commands.flow_set_figure(str)
+ temp.figure = str
+end
+
+function commands.flow_set_label(location,text)
+ temp.labels[#temp.labels+1] = {
+ location = location,
+ text = text,
+ }
+end
+
+function commands.flow_set_comment(location,text)
+ local connections = temp.connections
+ if connections then
+ local connection = connections[#connections]
+ if connection then
+ local comments = connection.comments
+ if comments then
+ comments[#comments+1] = {
+ location = location,
+ text = text,
+ }
+ end
+ end
+ end
+end
+
+function commands.flow_set_exit(location,text)
+ temp.exits[#temp.exits+1] = {
+ location = location,
+ text = text,
+ }
+end
+
+function commands.flow_set_include(name,x,y,settings)
+ data[#data+1] = {
+ include = name,
+ x = x,
+ y = y,
+ -- settings = settings,
+ }
+end
+
+local function inject(includedata,data,hash)
+ local subchart = charts[includedata.include]
+ if not subchart then
+ return
+ end
+ local subdata = subchart.data
+ if not subdata then
+ return
+ end
+ local xoffset = (includedata.x or 1) - 1
+ local yoffset = (includedata.y or 1) - 1
+ local settings = includedata.settings
+ for i=1,#subdata do
+ local si = subdata[i]
+ if si.include then
+ inject(si,data,hash)
+ else
+ local t = {
+ x = si.x + xoffset,
+ y = si.y + yoffset,
+ settings = settings,
+ }
+ setmetatableindex(t,si)
+ data[#data+1] = t
+ hash[si.name or #data] = t
+ end
+ end
+end
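+
+-- A note on how inclusion composes coordinates (the numbers below are just an
+-- example, not from the original source): a cell at (2,1) in an included chart,
+-- placed with x=3 and y=4, ends up at (2+3-1,1+4-1) = (4,4); only x and y are
+-- recomputed, all other fields are inherited from the subchart cell through the
+-- index metatable set above.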
+
+local function pack(data,field)
+ local list, max = { }, 0
+ for e=1,#data do
+ local d = data[e]
+ local f = d[field]
+ list[f] = true
+ if f > max then
+ max = f
+ end
+ end
+ for i=1,max do
+ if not list[i] then
+ for e=1,#data do
+ local d = data[e]
+ local f = d[field]
+ if f > i then
+ d[field] = f - 1
+ end
+ end
+ end
+ end
+end
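+
+-- A small sketch of what pack does (made-up cells, not taken from the original
+-- source): coordinate values that are not used by any cell are squeezed out so
+-- that the grid stays compact.
+--
+-- local cells = { { y = 1 }, { y = 3 }, { y = 4 } }
+-- pack(cells,"y")
+-- -- the cells now carry y = 1, 2 and 3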
+
+local function expanded(chart,chartsettings)
+ local expandeddata = { }
+ local expandedhash = { }
+ local expandedchart = {
+ data = expandeddata,
+ hash = expandedhash,
+ }
+ setmetatableindex(expandedchart,chart)
+ local data = chart.data
+ local hash = chart.hash
+ for i=1,#data do
+ local di = data[i]
+ if di.include then
+ inject(di,expandeddata,expandedhash)
+ else
+ expandeddata[#expandeddata+1] = di
+ expandedhash[di.name or #expandeddata] = di
+ end
+ end
+ --
+ expandedchart.settings = chartsettings or { }
+ -- make locals
+ chartsettings.shape = chartsettings.shape or { }
+ chartsettings.focus = chartsettings.focus or { }
+ chartsettings.line = chartsettings.line or { }
+ chartsettings.set = chartsettings.set or { }
+ chartsettings.split = chartsettings.split or { }
+ chartsettings.chart = chartsettings.chart or { }
+ setmetatableindex(chartsettings.shape,defaults.shape)
+ setmetatableindex(chartsettings.focus,defaults.focus)
+ setmetatableindex(chartsettings.line ,defaults.line )
+ setmetatableindex(chartsettings.set ,defaults.set )
+ setmetatableindex(chartsettings.split,defaults.split)
+ setmetatableindex(chartsettings.chart,defaults.chart)
+ --
+ if chartsettings.chart.vcompact == v_yes then
+ pack(expandeddata,"y")
+ end
+ if chartsettings.chart.hcompact == v_yes then
+ pack(expandeddata,"x")
+ end
+ --
+ for i=1,#expandeddata do
+ local cell = expandeddata[i]
+ local settings = cell.settings
+ if not settings then
+ cell.settings = chartsettings
+ else
+ settings.shape = settings.shape or { }
+ settings.focus = settings.focus or { }
+ settings.line = settings.line or { }
+ setmetatableindex(settings.shape,chartsettings.shape)
+ setmetatableindex(settings.focus,chartsettings.focus)
+ setmetatableindex(settings.line ,chartsettings.line)
+ end
+ end
+ return expandedchart
+end
+
+local splitter = lpeg.splitat(",")
+
+function commands.flow_set_location(x,y)
+ if type(x) == "string" and not y then
+ x, y = lpegmatch(splitter,x)
+ end
+ if not x or x == "" then
+ x = last_x
+ elseif type(x) == "number" then
+ -- ok
+ elseif x == "+" then
+ x = last_x + 1
+ elseif x == "-" then
+ x = last_x - 1
+ elseif find(x,"^[%+%-]") then
+ x = last_x + (tonumber(x) or 0)
+ else
+ x = tonumber(x)
+ end
+ if not y or y == "" then
+ y = last_y
+ elseif type(y) == "number" then
+ -- ok
+ elseif y == "+" then
+ y = last_y + 1
+ elseif y == "-" then
+ y = last_y - 1
+ elseif find(y,"^[%+%-]") then
+ y = last_y + (tonumber(y) or 0)
+ else
+ y = tonumber(y)
+ end
+ temp.x = x or 1
+ temp.y = y or 1
+ last_x = x or last_x
+ last_y = y or last_y
+end
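+
+-- Some hypothetical calls showing the accepted location syntax (these examples
+-- are mine, not from the original source): plain numbers are absolute, "+" and
+-- "-" step by one, a signed number is an offset relative to the last position,
+-- and a single "x,y" string is split first.
+--
+-- commands.flow_set_location(2,3)       -- absolute cell (2,3)
+-- commands.flow_set_location("+","-")   -- one step further in x, one step back in y
+-- commands.flow_set_location("+2,-1")   -- relative offsets given as one string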
+
+function commands.flow_set_connection(location,displacement,name)
+ local dx, dy = lpegmatch(splitter,displacement)
+ dx = tonumber(dx)
+ dy = tonumber(dy)
+ temp.connections[#temp.connections+1] = {
+ location = location,
+ dx = dx or 0,
+ dy = dy or 0,
+ name = name,
+ comments = { },
+ }
+end
+
+local function visible(chart,cell)
+ local x, y = cell.x, cell.y
+ return
+ x >= chart.from_x and x <= chart.to_x and
+ y >= chart.from_y and y <= chart.to_y and cell
+end
+
+local function process_cells(chart,xoffset,yoffset)
+ local data = chart.data
+ if not data then
+ return
+ end
+ local focus = utilities.parsers.settings_to_hash(chart.settings.chart.focus or "")
+ for i=1,#data do
+ local cell = visible(chart,data[i])
+ if cell then
+ local settings = cell.settings
+ local shapesettings = settings.shape
+ local shape = cell.shape
+ if not shape or shape == "" then
+ shape = shapesettings.default or "none"
+ end
+ if shape ~= v_none then
+ local shapedata = validshapes[shape]
+ context("flow_begin_sub_chart ;") -- when is this needed
+ if shapedata.kind == "line" then
+ local linesettings = settings.line
+ context("flow_shape_line_color := \\MPcolor{%s} ;", linesettings.color)
+ context("flow_shape_fill_color := \\MPcolor{%s} ;", linesettings.backgroundcolor)
+ context("flow_shape_line_width := %s ; ", points(linesettingsrulethickness))
+ elseif focus[cell.focus] or focus[cell.name] then
+ local focussettings = settings.focus
+ context("flow_shape_line_color := \\MPcolor{%s} ;", focussettings.framecolor)
+ context("flow_shape_fill_color := \\MPcolor{%s} ;", focussettings.backgroundcolor)
+ context("flow_shape_line_width := %s ; ", points(focussettings.rulethickness))
+ else
+ local shapesettings = settings.shape
+ context("flow_shape_line_color := \\MPcolor{%s} ;", shapesettings.framecolor)
+ context("flow_shape_fill_color := \\MPcolor{%s} ;", shapesettings.backgroundcolor)
+ context("flow_shape_line_width := %s ; " , points(shapesettings.rulethickness))
+ end
+ context("flow_peepshape := false ;") -- todo
+ context("flow_new_shape(%s,%s,%s) ;",cell.x+xoffset,cell.y+yoffset,shapedata.number)
+ context("flow_end_sub_chart ;")
+ end
+ end
+ end
+end
+
+-- todo : make lpeg for splitter
+
+local sign = S("+p") / "1"
+ + S("-m") / "-1"
+
+local full = C(P("left"))
+ + C(P("right"))
+ + C(P("top"))
+ + C(P("bottom"))
+
+local char = P("l") / "left"
+ + P("r") / "right"
+ + P("t") / "top"
+ + P("b") / "bottom"
+
+local space = P(" ")^0
+
+local what = space
+ * (sign + Cc("0"))
+ * space
+ * (full + char)
+ * space
+ * (sign + Cc("0"))
+ * space
+ * (full + char)
+ * space
+ * P(-1)
+
+-- print(lpegmatch(what,"lr"))
+-- print(lpegmatch(what,"+l+r"))
+-- print(lpegmatch(what,"+l"))
+-- print(lpegmatch(what,"+ left+r "))
+
+local function process_connections(chart,xoffset,yoffset)
+ local data = chart.data
+ local hash = chart.hash
+ if not data then
+ return
+ end
+ local settings = chart.settings
+ for i=1,#data do
+ local cell = visible(chart,data[i])
+ if cell then
+ local connections = cell.connections
+ for j=1,#connections do
+ local connection = connections[j]
+ local othername = connection.name
+ local othercell = hash[othername]
+ if othercell then -- and visible(chart,data[i]) then
+ local cellx, celly = cell.x, cell.y
+ local otherx, othery, location = othercell.x, othercell.y, connection.location
+ if otherx > 0 and othery > 0 and cellx > 0 and celly > 0 and connection.location then
+ local what_cell, where_cell, what_other, where_other = lpegmatch(what,location)
+ if what_cell and where_cell and what_other and where_other then
+ local linesettings = settings.line
+ context("flow_smooth := %s ;", linesettings.corner == v_round and "true" or "false")
+ context("flow_dashline := %s ;", linesettings.dash == v_yes and "true" or "false")
+ context("flow_arrowtip := %s ;", linesettings.arrow == v_yes and "true" or "false")
+ context("flow_touchshape := %s ;", linesettings.offset == v_none and "true" or "false")
+ context("flow_dsp_x := %s ; flow_dsp_y := %s ;",connection.dx or 0, connection.dy or 0)
+ context("flow_connection_line_color := \\MPcolor{%s} ;",linesettings.color)
+ context("flow_connection_line_width := 2pt ;",points(linesettings.rulethickness))
+ context("flow_connect_%s_%s (%s) (%s,%s,%s) (%s,%s,%s) ;",where_cell,where_other,j,cellx,celly,what_cell,otherx,othery,what_other)
+ context("flow_dsp_x := 0 ; flow_dsp_y := 0 ;")
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
+local texttemplate = "\\setvariables[flowcell:text][x=%s,y=%s,text={%s},align={%s},figure={%s},destination={%s}]"
+
+local splitter = lpeg.splitat(":")
+
+local function process_texts(chart,xoffset,yoffset)
+ local data = chart.data
+ local hash = chart.hash
+ if not data then
+ return
+ end
+ for i=1,#data do
+ local cell = visible(chart,data[i])
+ if cell then
+ local x = cell.x or 1
+ local y = cell.y or 1
+ local texts = cell.texts
+ for i=1,#texts do
+ local text = texts[i]
+ local data = text.text
+ local align = validtextlocations[text.location or ""] or text.location or ""
+ local figure = i == 1 and cell.figure or ""
+ local destination = i == 1 and cell.destination or ""
+ context('flow_chart_draw_text(%s,%s,textext("%s")) ;',x,y,format(texttemplate,x,y,data,align,figure,destination))
+ end
+ local labels = cell.labels
+ for i=1,#labels do
+ local label = labels[i]
+ local text = label.text
+ local location = validlabellocations[label.location or ""] or label.location or ""
+ if text and location then
+ context('flow_chart_draw_label(%s,%s,"%s",textext("\\strut %s")) ;',x,y,location,text)
+ end
+ end
+ local exits = cell.exits
+ for i=1,#exits do
+ local exit = exits[i]
+ local text = exit.text
+ local location = validlabellocations[exit.location or ""]
+ if text and location then
+ -- maybe make autoexit an option
+ if location == "l" and x == chart.from_x + 1 or
+ location == "r" and x == chart.to_x - 1 or
+ location == "t" and y == chart.to_y - 1 or
+ location == "b" and y == chart.from_y + 1 then
+ context('flow_chart_draw_exit(%s,%s,"%s",textext("\\strut %s")) ;',x,y,location,text)
+ end
+ end
+ end
+ local connections = cell.connections
+ for i=1,#connections do
+ local comments = connections[i].comments
+ for j=1,#comments do
+ local comment = comments[j]
+ local text = comment.text
+ local location = comment.location or ""
+ local length = 0
+ -- "tl" "tl:*" "tl:0.5"
+ local loc, len = lpegmatch(splitter,location) -- do the following in lpeg
+ if len == "*" then
+ location = validcommentlocations[loc] or ""
+ if location == "" then
+ location = "*"
+ else
+ location = location .. ":*"
+ end
+ elseif loc then
+ location = validcommentlocations[loc] or "*"
+ length = tonumber(len) or 0
+ else
+ location = validcommentlocations[location] or ""
+ end
+ if text and location then
+ context('flow_chart_draw_comment(%s,%s,%s,"%s",%s,textext("\\strut %s")) ;',x,y,i,location,length,text)
+ end
+ end
+ end
+ end
+ end
+end
+
+local function getchart(settings,forced_x,forced_y,forced_nx,forced_ny)
+ if not settings then
+ print("no settings given")
+ return
+ end
+ local chartname = settings.chart.name
+ if not chartname then
+ print("no name given")
+ return
+ end
+ local chart = charts[chartname]
+ if not chart then
+ print("no such chart",chartname)
+ return
+ end
+ chart = expanded(chart,settings)
+ local chartsettings = chart.settings.chart
+ local autofocus = chart.settings.chart.autofocus
+ if autofocus then
+ autofocus = utilities.parsers.settings_to_hash(autofocus)
+ if not next(autofocus) then
+ autofocus = false
+ end
+ end
+ -- check natural window
+ local x = forced_x or tonumber(chartsettings.x)
+ local y = forced_y or tonumber(chartsettings.y)
+ local nx = forced_nx or tonumber(chartsettings.nx)
+ local ny = forced_ny or tonumber(chartsettings.ny)
+ --
+ local minx, miny, maxx, maxy = 0, 0, 0, 0
+ local data = chart.data
+ for i=1,#data do
+ local cell = data[i]
+ if not autofocus or autofocus[cell.name] then -- offsets probably interfere with autofocus
+ local x = cell.x
+ local y = cell.y
+ if minx == 0 or x < minx then minx = x end
+ if miny == 0 or y < miny then miny = y end
+ if maxx == 0 or x > maxx then maxx = x end
+ if maxy == 0 or y > maxy then maxy = y end
+ end
+ end
+ -- print("1>",x,y,nx,ny)
+ -- print("2>",minx, miny, maxx, maxy)
+ -- check if the window should be larger (maybe autofocus + nx/ny?)
+ if autofocus then
+ -- x and y are ignored
+ if nx and nx > 0 then
+ maxx = minx + nx - 1
+ end
+ if ny and ny > 0 then
+ maxy = miny + ny - 1
+ end
+ else
+ if x and x > 0 then
+ minx = x
+ end
+ if y and y > 0 then
+ miny = y
+ end
+ if nx and nx > 0 then
+ maxx = minx + nx - 1
+ end
+ if ny and ny > 0 then
+ maxy = miny + ny - 1
+ end
+ end
+-- print("3>",minx, miny, maxx, maxy)
+ --
+ local nx = maxx - minx + 1
+ local ny = maxy - miny + 1
+ -- relocate cells
+ for i=1,#data do
+ local cell = data[i]
+ cell.x = cell.x - minx + 1
+ cell.y = cell.y - miny + 1
+ end
+ chart.from_x = 1
+ chart.from_y = 1
+ chart.to_x = nx
+ chart.to_y = ny
+ chart.nx = nx
+ chart.ny = ny
+ --
+ -- inspect(chart)
+ return chart
+end
+
+local function makechart(chart)
+ local settings = chart.settings
+ local chartsettings = settings.chart
+ --
+ context.begingroup()
+ context.forgetall()
+ --
+ context.startMPcode()
+ context("if unknown context_flow : input mp-char.mpiv ; fi ;")
+ context("flow_begin_chart(0,%s,%s);",chart.nx,chart.ny)
+ --
+ if chartsettings.option == v_test or chartsettings.dot == v_yes then
+ context("flow_show_con_points := true ;")
+ context("flow_show_mid_points := true ;")
+ context("flow_show_all_points := true ;")
+ elseif chartsettings.dot ~= "" then -- no checking done, private option
+ context("flow_show_%s_points := true ;",chartsettings.dot)
+ end
+ --
+ local backgroundcolor = chartsettings.backgroundcolor
+ if backgroundcolor and backgroundcolor ~= "" then
+ context("flow_chart_background_color := \\MPcolor{%s} ;",backgroundcolor)
+ end
+ --
+ local shapewidth = chartsettings.width
+ local gridwidth = shapewidth + 2*chartsettings.dx
+ local shapeheight = chartsettings.height
+ local gridheight = shapeheight + 2*chartsettings.dy
+ local chartoffset = chartsettings.offset
+ local labeloffset = chartsettings.labeloffset
+ local exitoffset = chartsettings.exitoffset
+ local commentoffset = chartsettings.commentoffset
+ context("flow_grid_width := %s ;", points(gridwidth))
+ context("flow_grid_height := %s ;", points(gridheight))
+ context("flow_shape_width := %s ;", points(shapewidth))
+ context("flow_shape_height := %s ;", points(shapeheight))
+ context("flow_chart_offset := %s ;", points(chartoffset))
+ context("flow_label_offset := %s ;", points(labeloffset))
+ context("flow_exit_offset := %s ;", points(exitoffset))
+ context("flow_comment_offset := %s ;", points(commentoffset))
+ --
+ local radius = settings.line.radius
+ local rulethickness = settings.line.rulethickness
+ local dx = chartsettings.dx
+ local dy = chartsettings.dy
+ if radius < rulethickness then
+ radius = 2.5*rulethickness
+ if radius > dx then
+ radius = dx
+ end
+ if radius > dy then
+ radius = dy
+ end
+ end
+ context("flow_connection_line_width := %s ;", points(rulethickness))
+ context("flow_connection_smooth_size := %s ;", points(radius))
+ context("flow_connection_arrow_size := %s ;", points(radius))
+ context("flow_connection_dash_size := %s ;", points(radius))
+ --
+ local offset = chartsettings.offset -- todo: pass string
+ if offset == v_none or offset == v_overlay or offset == "" then
+ offset = -2.5 * radius -- or rulethickness?
+ elseif offset == v_standard then
+ offset = radius -- or rulethickness?
+ end
+ context("flow_chart_offset := %s ;",points(offset))
+ --
+ context("flow_reverse_y := true ;")
+ process_cells(chart,0,0)
+ process_connections(chart,0,0)
+ process_texts(chart,0,0)
+ -- context("clip_chart(%s,%s,%s,%s) ;",x,y,nx,ny) -- todo: draw lines but not shapes
+ context("flow_end_chart ;")
+ context.stopMPcode()
+ context.endgroup()
+end
+
+local function splitchart(chart)
+ local settings = chart.settings
+ local splitsettings = settings.split
+ local chartsettings = settings.chart
+ --
+ local name = chartsettings.name
+ --
+ local from_x = chart.from_x
+ local from_y = chart.from_y
+ local to_x = chart.to_x
+ local to_y = chart.to_y
+ --
+ local step_x = splitsettings.nx or to_x
+ local step_y = splitsettings.ny or to_y
+ local delta_x = splitsettings.dx or 0
+ local delta_y = splitsettings.dy or 0
+ --
+ report_chart("spliting %a from (%s,%s) upto (%s,%s) into (%s,%s) with overlap (%s,%s)",
+ name,from_x,from_y,to_x,to_y,step_x,step_y,delta_x,delta_y)
+ --
+ local part_x = 0
+ local first_x = from_x
+ while true do
+ part_x = part_x + 1
+ local last_x = first_x + step_x - 1
+ local done = last_x >= to_x
+ if done then
+ last_x = to_x
+ end
+ local part_y = 0
+ local first_y = from_y
+ while true do
+ part_y = part_y + 1
+ local last_y = first_y + step_y - 1
+ local done = last_y >= to_y
+ if done then
+ last_y = to_y
+ end
+ --
+ report_chart("part (%s,%s) of %a is split from (%s,%s) -> (%s,%s)",part_x,part_y,name,first_x,first_y,last_x,last_y)
+ local x, y, nx, ny = first_x, first_y, last_x - first_x + 1,last_y - first_y + 1
+ context.beforeFLOWsplit()
+ context.handleFLOWsplit(function()
+ makechart(getchart(settings,x,y,nx,ny)) -- we need to pass frozen settings !
+ end)
+ context.afterFLOWsplit()
+ --
+ if done then
+ break
+ else
+ first_y = last_y + 1 - delta_y
+ end
+ end
+ if done then
+ break
+ else
+ first_x = last_x + 1 - delta_x
+ end
+ end
+end
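+
+-- For instance (made-up numbers): with to_x = 28, split nx = 10 and dx = 1 the
+-- parts cover columns 1-10, 10-19 and 19-28, so consecutive parts share one
+-- column of cells, presumably to keep connections near the cut visible.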
+
+function commands.flow_make_chart(settings)
+ local chart = getchart(settings)
+ if chart then
+ local settings = chart.settings
+ if settings then
+ local chartsettings = settings.chart
+ if chartsettings and chartsettings.split == v_yes then
+ splitchart(chart)
+ else
+ makechart(chart)
+ end
+ else
+ makechart(chart)
+ end
+ end
+end
diff --git a/tex/context/base/m-database.lua b/tex/context/base/m-database.lua
index 47854daa0..c287f4926 100644
--- a/tex/context/base/m-database.lua
+++ b/tex/context/base/m-database.lua
@@ -1,137 +1,137 @@
-if not modules then modules = { } end modules ['m-database'] = {
- version = 1.001,
- comment = "companion to m-database.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local sub, gmatch, format = string.sub, string.gmatch, string.format
-local concat = table.concat
-local lpegpatterns, lpegmatch, lpegsplitat = lpeg.patterns, lpeg.match, lpeg.splitat
-local lpegP, lpegC, lpegS, lpegCt = lpeg.P, lpeg.C, lpeg.S, lpeg.Ct
-local stripstring = string.strip
-
--- One also needs to enable context.trace, here we only plug in some code (maybe
--- some day this tracker will also toggle the main context tracer.
-
-local trace_flush = false trackers.register("module.database.flush", function(v) trace_flush = v end)
-
-local report_database = logs.reporter("database")
-
-buffers.database = buffers.database or { }
-
-local l_tab = lpegpatterns.tab
-local l_space = lpegpatterns.space
-local l_comma = lpegpatterns.comma
-local l_empty = lpegS("\t\n\r ")^0 * lpegP(-1)
-
-local v_yes = interfaces.variables.yes
-
-local separators = { -- not interfaced
- tab = l_tab,
- tabs = l_tab^1,
- comma = l_comma,
- space = l_space,
- spaces = l_space^1,
-}
-
-function buffers.database.process(settings)
- local data
- if settings.type == "file" then
- local filename = resolvers.finders.byscheme("any",settings.database)
- data = filename ~= "" and io.loaddata(filename)
- data = data and string.splitlines(data)
- else
- data = buffers.getlines(settings.database)
- end
- if data and #data > 0 then
- if trace_flush then
- context.pushlogger(report_database)
- end
- local separatorchar, quotechar, commentchar = settings.separator, settings.quotechar, settings.commentchar
- local before, after = settings.before or "", settings.after or ""
- local first, last = settings.first or "", settings.last or ""
- local left, right = settings.left or "", settings.right or ""
- local setups = settings.setups or ""
- local strip = settings.strip == v_yes or false
- local command = settings.command
- separatorchar = (not separatorchar and ",") or separators[separatorchar] or separatorchar
- local separator = type(separatorchar) == "string" and lpegS(separatorchar) or separatorchar
- local whatever = lpegC((1 - separator)^0)
- if quotechar and quotechar ~= "" then
- local quotedata = nil
- for chr in gmatch(quotechar,".") do
- local quotechar = lpegP(chr)
- local quoteword = l_space^0 * quotechar * lpegC((1 - quotechar)^0) * quotechar * l_space^0
- if quotedata then
- quotedata = quotedata + quoteword
- else
- quotedata = quoteword
- end
- end
- whatever = quotedata + whatever
- end
- local checker = commentchar ~= "" and lpegS(commentchar)
- local splitter = lpegCt(whatever * (separator * whatever)^0)
- local found = false
- for i=1,#data do
- local line = data[i]
- if not lpegmatch(l_empty,line) and (not checker or not lpegmatch(checker,line)) then
- local list = lpegmatch(splitter,line)
- if not found then
- if setups ~= "" then
- context.begingroup()
- context.setups { setups }
- end
- context(before)
- found = true
- end
- if trace_flush then
- local result, r = { }, 0
- r = r + 1 ; result[r] = first
- for j=1,#list do
- local str = strip and stripstring(list[j]) or list[j]
- r = r + 1 ; result[r] = left
- if command == "" then
- r = r + 1 ; result[r] = str
- else
- r = r + 1 ; result[r] = command
- r = r + 1 ; result[r] = "{"
- r = r + 1 ; result[r] = str
- r = r + 1 ; result[r] = "}"
- end
- r = r + 1 ; result[r] = right
- end
- r = r + 1 ; result[r] = last
- context(concat(result))
- else
- context(first)
- for j=1,#list do
- local str = strip and stripstring(list[j]) or list[j]
- context(left)
- if command == "" then
- context(str)
- else
- context(command)
- context(false,str)
- end
- context(right)
- end
- context(last)
- end
- end
- end
- if found then
- context(after)
- if setups ~= "" then
- context.endgroup()
- end
- end
- if trace_flush then
- context.poplogger()
- end
- else
- -- message
- end
-end
+if not modules then modules = { } end modules ['m-database'] = {
+ version = 1.001,
+ comment = "companion to m-database.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local sub, gmatch, format = string.sub, string.gmatch, string.format
+local concat = table.concat
+local lpegpatterns, lpegmatch, lpegsplitat = lpeg.patterns, lpeg.match, lpeg.splitat
+local lpegP, lpegC, lpegS, lpegCt = lpeg.P, lpeg.C, lpeg.S, lpeg.Ct
+local stripstring = string.strip
+
+-- One also needs to enable context.trace; here we only plug in some code (maybe
+-- some day this tracker will also toggle the main context tracer).
+
+local trace_flush = false trackers.register("module.database.flush", function(v) trace_flush = v end)
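+
+-- Enabling it from the TeX end presumably goes through the usual tracker
+-- interface (not part of this module):
+--
+-- \enabletrackers[module.database.flush]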
+
+local report_database = logs.reporter("database")
+
+buffers.database = buffers.database or { }
+
+local l_tab = lpegpatterns.tab
+local l_space = lpegpatterns.space
+local l_comma = lpegpatterns.comma
+local l_empty = lpegS("\t\n\r ")^0 * lpegP(-1)
+
+local v_yes = interfaces.variables.yes
+
+local separators = { -- not interfaced
+ tab = l_tab,
+ tabs = l_tab^1,
+ comma = l_comma,
+ space = l_space,
+ spaces = l_space^1,
+}
+
+function buffers.database.process(settings)
+ local data
+ if settings.type == "file" then
+ local filename = resolvers.finders.byscheme("any",settings.database)
+ data = filename ~= "" and io.loaddata(filename)
+ data = data and string.splitlines(data)
+ else
+ data = buffers.getlines(settings.database)
+ end
+ if data and #data > 0 then
+ if trace_flush then
+ context.pushlogger(report_database)
+ end
+ local separatorchar, quotechar, commentchar = settings.separator, settings.quotechar, settings.commentchar
+ local before, after = settings.before or "", settings.after or ""
+ local first, last = settings.first or "", settings.last or ""
+ local left, right = settings.left or "", settings.right or ""
+ local setups = settings.setups or ""
+ local strip = settings.strip == v_yes or false
+ local command = settings.command
+ separatorchar = (not separatorchar and ",") or separators[separatorchar] or separatorchar
+ local separator = type(separatorchar) == "string" and lpegS(separatorchar) or separatorchar
+ local whatever = lpegC((1 - separator)^0)
+ if quotechar and quotechar ~= "" then
+ local quotedata = nil
+ for chr in gmatch(quotechar,".") do
+ local quotechar = lpegP(chr)
+ local quoteword = l_space^0 * quotechar * lpegC((1 - quotechar)^0) * quotechar * l_space^0
+ if quotedata then
+ quotedata = quotedata + quoteword
+ else
+ quotedata = quoteword
+ end
+ end
+ whatever = quotedata + whatever
+ end
+ local checker = commentchar ~= "" and lpegS(commentchar)
+ local splitter = lpegCt(whatever * (separator * whatever)^0)
+ local found = false
+ for i=1,#data do
+ local line = data[i]
+ if not lpegmatch(l_empty,line) and (not checker or not lpegmatch(checker,line)) then
+ local list = lpegmatch(splitter,line)
+ if not found then
+ if setups ~= "" then
+ context.begingroup()
+ context.setups { setups }
+ end
+ context(before)
+ found = true
+ end
+ if trace_flush then
+ local result, r = { }, 0
+ r = r + 1 ; result[r] = first
+ for j=1,#list do
+ local str = strip and stripstring(list[j]) or list[j]
+ r = r + 1 ; result[r] = left
+ if command == "" then
+ r = r + 1 ; result[r] = str
+ else
+ r = r + 1 ; result[r] = command
+ r = r + 1 ; result[r] = "{"
+ r = r + 1 ; result[r] = str
+ r = r + 1 ; result[r] = "}"
+ end
+ r = r + 1 ; result[r] = right
+ end
+ r = r + 1 ; result[r] = last
+ context(concat(result))
+ else
+ context(first)
+ for j=1,#list do
+ local str = strip and stripstring(list[j]) or list[j]
+ context(left)
+ if command == "" then
+ context(str)
+ else
+ context(command)
+ context(false,str)
+ end
+ context(right)
+ end
+ context(last)
+ end
+ end
+ end
+ if found then
+ context(after)
+ if setups ~= "" then
+ context.endgroup()
+ end
+ end
+ if trace_flush then
+ context.poplogger()
+ end
+ else
+ -- message
+ end
+end
diff --git a/tex/context/base/m-markdown.lua b/tex/context/base/m-markdown.lua
index 1f9402f60..6c9c44d78 100644
--- a/tex/context/base/m-markdown.lua
+++ b/tex/context/base/m-markdown.lua
@@ -1,824 +1,824 @@
-if not modules then modules = { } end modules ['m-markdown'] = {
- version = 1.002,
- comment = "companion to m-markdown.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "see below",
- license = "see context related readme files"
-}
-
---[[
-Copyright (C) 2009 John MacFarlane / Khaled Hosny / Hans Hagen
-
-The main parser is derived from the lunamark parser written by John MacFarlane. You
-can download lunamark from:
-
- http://github.com/jgm/lunamark.git
-
-Khaled Hosny provided the context writer for lunamark and that was used as starting
-point for the mapping. The original code can be fetched from the above location.
-
-While playing with the original code I got the feeling that lpeg could perform better.
-The slowdown was due to the fact that the parser's lpeg was reconstructed each time a
-nested parse was needed. After changing that code a bit I could bring down parsing of
-some test code from 2 seconds to less than 0.1 second so I decided to stick to this
-parser instead of writing my own. After all, the peg code looks pretty impressive and
-visiting John's pandoc pages is worth the effort:
-
- http://johnmacfarlane.net/pandoc/
-
-The code here is mostly meant for processing snippets embedded in context
-documents and is no replacement for pandoc at all. Therefore an alternative is to use
-pandoc in combination with Aditya's filter module.
-
-As I changed (and optimized) the original code, it will be clear that all errors
-are mine. Eventually I might also adapt the parser code a bit more. When I ran into
-closure stack limitations I decided to flatten the code. The following implementation
-seems to be a couple of hundred times faster than what I started with which is not that
-bad.
-
-This is a second rewrite. The mentioned speed gain largely depended on the kind of
-content: blocks, references and items can be rather demanding. Also, there were
-some limitations with respect to the captures. So, table storage has been removed in
-favor of strings, and nesting has been simplified. The first example at the end of this
-file now takes .33 seconds for 567KB code (resulting in over 1MB) so we're getting there.
-
-There will be a third rewrite eventually.
-]]--
-
--- todo: we have better quote and tag scanners in ctx
--- todo: provide an xhtml mapping
--- todo: add a couple of extensions
--- todo: check patches to the real peg
-
-local type, next, tonumber = type, next, tonumber
-local lower, upper, gsub, rep, gmatch, format, length = string.lower, string.upper, string.gsub, string.rep, string.gmatch, string.format, string.len
-local concat = table.concat
-local P, R, S, V, C, Ct, Cg, Cb, Cmt, Cc, Cf, Cs = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.C, lpeg.Ct, lpeg.Cg, lpeg.Cb, lpeg.Cmt, lpeg.Cc, lpeg.Cf, lpeg.Cs
-local lpegmatch = lpeg.match
-local utfbyte, utfchar = utf.byte, utf.char
-
-moduledata = moduledata or { }
-moduledata.markdown = moduledata.markdown or { }
-local markdown = moduledata.markdown
-
-local nofruns, nofbytes, nofhtmlblobs = 0, 0, 0
-
----------------------------------------------------------------------------------------------
-
-local nestedparser
-local syntax
-
-nestedparser = function(str) return lpegmatch(syntax,str) end
-
----------------------------------------------------------------------------------------------
-
-local asterisk = P("*")
-local dash = P("-")
-local plus = P("+")
-local underscore = P("_")
-local period = P(".")
-local hash = P("#")
-local ampersand = P("&")
-local backtick = P("`")
-local less = P("<")
-local more = P(">")
-local space = P(" ")
-local squote = P("'")
-local dquote = P('"')
-local lparent = P("(")
-local rparent = P(")")
-local lbracket = P("[")
-local rbracket = P("]")
-local slash = P("/")
-local equal = P("=")
-local colon = P(":")
-local semicolon = P(";")
-local exclamation = P("!")
-
-local digit = R("09")
-local hexdigit = R("09","af","AF")
-local alphanumeric = R("AZ","az","09")
-
-local doubleasterisks = P("**")
-local doubleunderscores = P("__")
-local fourspaces = P("    ")
-
-local any = P(1)
-local always = P("")
-
-local tab = P("\t")
-local spacechar = S("\t ")
-local spacing = S(" \n\r\t")
-local newline = P("\r")^-1 * P("\n")
-local spaceornewline = spacechar + newline
-local nonspacechar = any - spaceornewline
-local optionalspace = spacechar^0
-local spaces = spacechar^1
-local eof = - any
-local nonindentspace = space^-3
-local blankline = optionalspace * C(newline)
-local blanklines = blankline^0
-local skipblanklines = (optionalspace * newline)^0
-local linechar = P(1 - newline)
-local indent = fourspaces + (nonindentspace * tab) / ""
-local indentedline = indent /"" * C(linechar^1 * (newline + eof))
-local optionallyindentedline = indent^-1 /"" * C(linechar^1 * (newline + eof))
-local spnl = optionalspace * (newline * optionalspace)^-1
-local specialchar = S("*_`*&[]
--- [3]:http://example.com/ (Optional Title Here)
--- [2]: http://example.com/ 'Optional Title Here'
--- [a]: http://example.com/ "Optional *oeps* Title Here"
--- ]]
---
--- local linktest = [[
--- [This link] (http://example.net/)
--- [an example] (http://example.com/ "Title")
--- [an example][1]
--- [an example] [2]
--- ]]
---
--- lpeg.match((define_reference_parser+1)^0,reftest)
---
--- inspect(references)
---
--- lpeg.match((direct_link_parser/print + indirect_link_parser/print + 1)^0,linktest)
-
----------------------------------------------------------------------------------------------
-
-local blocktags = table.tohash {
- "address", "blockquote" , "center", "dir", "div", "p", "pre",
- "li", "ol", "ul", "dl", "dd",
- "form", "fieldset", "isindex", "menu", "noframes", "frameset",
- "h1", "h2", "h3", "h4", "h5", "h6",
- "hr", "ht", "script", "noscript",
- "table", "tbody", "tfoot", "thead", "th", "td", "tr",
-}
-
------ htmlattributevalue = squote * C((any - (blankline + squote))^0) * squote
------ + dquote * C((any - (blankline + dquote))^0) * dquote
------ + (any - S("\t >"))^1 -- any - tab - space - more
------ htmlattribute = (alphanumeric + S("_-"))^1 * spnl * (equal * spnl * htmlattributevalue)^-1 * spnl
----- htmlcomment = P("<!--") * (any - P("-->"))^0 * P("-->")
-
------ htmltag = less * spnl * slash^-1 * alphanumeric^1 * spnl * htmlattribute^0 * slash^-1 * spnl * more
------
------ blocktag = Cmt(C(alphanumeric^1), function(s,i,a) return blocktags[lower(a)] and i, a end)
------
------ openblocktag = less * Cg(blocktag, "opentag") * spnl * htmlattribute^0 * more
------ closeblocktag = less * slash * Cmt(C(alphanumeric^1) * Cb("opentag"), function(s,i,a,b) return lower(a) == lower(b) and i end) * spnl * more
------ selfclosingblocktag = less * blocktag * spnl * htmlattribute^0 * slash * more
------
------ displayhtml = Cs { "HtmlBlock",
------ InBlockTags = openblocktag * (V("HtmlBlock") + (any - closeblocktag))^0 * closeblocktag,
------ HtmlBlock = C(V("InBlockTags") + selfclosingblocktag + htmlcomment),
------ }
------
------ inlinehtml = Cs(htmlcomment + htmltag)
-
--- There is no reason to support crappy html, so we expect proper attributes.
-
-local htmlattributevalue = squote * C((any - (blankline + squote))^0) * squote
- + dquote * C((any - (blankline + dquote))^0) * dquote
-local htmlattribute = (alphanumeric + S("_-"))^1 * spnl * equal * spnl * htmlattributevalue * spnl
-
-local htmlcomment = P("<!--") * (any - P("-->"))^0 * P("-->")
-local htmlinstruction = P("<?") * (any - P("?>" ))^0 * P("?>" )
-
--- We don't care too much about matching elements and there is no reason why display elements could not
--- have inline elements so the above should be patched then. Well, markdown mixed with html is not meant
--- for anything else than webpages anyway.
-
-local blocktag = Cmt(C(alphanumeric^1), function(s,i,a) return blocktags[lower(a)] and i, a end)
-
-local openelement = less * alphanumeric^1 * spnl * htmlattribute^0 * more
-local closeelement = less * slash * alphanumeric^1 * spnl * more
-local emptyelement = less * alphanumeric^1 * spnl * htmlattribute^0 * slash * more
-
-local displaytext = (any - less)^1
-local inlinetext = displaytext / nestedparser
-
-local displayhtml = #(less * blocktag * spnl * htmlattribute^0 * more)
- * Cs { "HtmlBlock",
- InBlockTags = openelement * (V("HtmlBlock") + displaytext)^0 * closeelement,
- HtmlBlock = (V("InBlockTags") + emptyelement + htmlcomment + htmlinstruction),
- }
-
-local inlinehtml = Cs { "HtmlBlock",
- InBlockTags = openelement * (V("HtmlBlock") + inlinetext)^0 * closeelement,
- HtmlBlock = (V("InBlockTags") + emptyelement + htmlcomment + htmlinstruction),
- }
-
----------------------------------------------------------------------------------------------
-
-local hexentity = ampersand * hash * S("Xx") * C(hexdigit ^1) * semicolon
-local decentity = ampersand * hash * C(digit ^1) * semicolon
-local tagentity = ampersand * C(alphanumeric^1) * semicolon
-
----------------------------------------------------------------------------------------------
-
--- --[[
-
-local escaped = {
- ["{" ] = "",
- ["}" ] = "",
- ["$" ] = "",
- ["&" ] = "",
- ["#" ] = "",
- ["~" ] = "",
- ["|" ] = "",
- ["%%"] = "",
- ["\\"] = "",
-}
-
-for k, v in next, escaped do
- escaped[k] = "\\char" .. utfbyte(k) .. "{}"
-end
-
-local function c_string(s) -- has to be done more often
- return (gsub(s,".",escaped))
-end
-
-local c_linebreak = "\\crlf\n" -- is this ok?
-local c_space = " "
-
-local function c_paragraph(c)
- return c .. "\n\n" -- { "\\startparagraph ", c, " \\stopparagraph\n" }
-end
-
-local function listitem(c)
- return format("\n\\startitem\n%s\n\\stopitem\n",nestedparser(c))
-end
-
-local function c_tightbulletlist(c)
- return format("\n\\startmarkdownitemize[packed]\n%s\\stopmarkdownitemize\n",c)
-end
-
-local function c_loosebulletlist(c)
- return format("\n\\startmarkdownitemize\n\\stopmarkdownitemize\n",c)
-end
-
-local function c_tightorderedlist(c)
- return format("\n\\startmarkdownitemize[n,packed]\n%s\\stopmarkdownitemize\n",c)
-end
-
-local function c_looseorderedlist(c)
- return format("\n\\startmarkdownitemize[n]\n%s\\stopmarkdownitemize\n",c)
-end
-
-local function c_inline_html(content)
- nofhtmlblobs = nofhtmlblobs + 1
- return format("\\markdowninlinehtml{%s}",content)
-end
-
-local function c_display_html(content)
- nofhtmlblobs = nofhtmlblobs + 1
- return format("\\startmarkdowndisplayhtml\n%s\n\\stopmarkdowndisplayhtml",content)
-end
-
-local function c_emphasis(c)
- return format("\\markdownemphasis{%s}",c)
-end
-
-local function c_strong(c)
- return format("\\markdownstrong{%s}",c)
-end
-
-local function c_blockquote(c)
- return format("\\startmarkdownblockquote\n%s\\stopmarkdownblockquote\n",nestedparser(c))
-end
-
-local function c_verbatim(c)
- return format("\\startmarkdowntyping\n%s\\stopmarkdowntyping\n",c)
-end
-
-local function c_code(c)
- return format("\\markdowntype{%s}",c)
-end
-
-local levels = { "", "", "", "", "", "" }
-
-local function c_start_document()
- levels = { "", "", "", "", "", "" }
- return ""
-end
-
-local function c_stop_document()
- return concat(levels,"\n") or ""
-end
-
-local function c_heading(level,c)
- if level > #levels then
- level = #levels
- end
- local finish = concat(levels,"\n",level) or ""
- for i=level+1,#levels do
- levels[i] = ""
- end
- levels[level] = "\\stopstructurelevel"
- return format("%s\\startstructurelevel[markdown][title={%s}]\n",finish,c)
-end
-
-local function c_hrule()
- return "\\markdownrule\n"
-end
-
-local function c_link(lab,src,tit)
- return format("\\goto{%s}[url(%s)]",nestedparser(lab),src)
-end
-
-local function c_image(lab,src,tit)
- return format("\\externalfigure[%s]",src)
-end
-
-local function c_email_link(address)
- return format("\\goto{%s}[url(mailto:%s)]",c_string(address),address)
-end
-
-local function c_url_link(url)
- return format("\\goto{%s}[url(%s)]",c_string(url),url)
-end
-
-local function f_heading(c,n)
- return c_heading(n,c)
-end
-
-local function c_hex_entity(s)
- return utfchar(tonumber(s,16))
-end
-
-local function c_dec_entity(s)
- return utfchar(tonumber(s))
-end
-
-local function c_tag_entity(s)
- return s -- we can use the default resolver
-end
-
---]]
-
----------------------------------------------------------------------------------------------
-
---[[
-
-local escaped = {
- ["<"] = "<",
- [">"] = ">",
- ["&"] = "&",
- ['"'] = """,
-}
-
-local function c_string(s) -- has to be done more often
- return (gsub(s,".",escaped))
-end
-
-local c_linebreak = "<br/>\n"
-local c_space = " "
-
-local function c_paragraph(c)
- return format("
%s
\n", c)
-end
-
-local function listitem(c)
- return format("
%s
",nestedparser(c))
-end
-
-local function c_tightbulletlist(c)
- return format("
\n%s\n
\n",c)
-end
-
-local function c_loosebulletlist(c)
- return format("
\n%s\n
\n",c)
-end
-
-local function c_tightorderedlist(c)
- return format("\n%s\n\n",c)
-end
-
-local function c_looseorderedlist(c)
- return format("\n%s\n\n",c)
-end
-
-local function c_inline_html(content)
- nofhtmlblobs = nofhtmlblobs + 1
- return content
-end
-
-local function c_display_html(content)
- nofhtmlblobs = nofhtmlblobs + 1
- return format("\n%s\n",content)
-end
-
-local function c_emphasis(c)
- return format("%s",c)
-end
-
-local function c_strong(c)
- return format("%s",c)
-end
-
-local function c_blockquote(c)
- return format("
\n%s\n
",nestedparser(c))
-end
-
-local function c_verbatim(c)
- return format("
+
+
+* Test
+** Test
+* Test1
+ * Test2
+* Test
+
+Test
+====
+
+> test
+> test **123** *123*
+> test `code`
+
+test
+
+Test
+====
+
+> test
+> test
+> test
+
+test
+oeps
+
+more
+
+ code
+ code
+
+oeps
+
+[an example][a]
+
+[an example] [2]
+
+[a]: http://example.com/ "Optional *oeps* Title Here"
+[2]: http://example.com/ 'Optional Title Here'
+[3]: http://example.com/ (Optional Title Here)
+
+[an example][a]
+
+[an example] [2]
+
+[an [tricky] example](http://example.com/ "Title")
+
+[This **xx** link](http://example.net/)
+ ]]
+
+-- This snippet takes some 4 seconds in the original parser (the one that is
+-- a bit clearer from the perspective of grammars but somewhat messy with
+-- respect to the captures). In the above parser it takes .1 second. Also,
+-- in the latter case only memory is the limit.
+
+ local two = [[
+Test
+====
+* Test
+** Test
+* Test
+** Test
+* Test
+
+Test
+====
+
+> test
+> test
+> test
+
+test
+
+Test
+====
+
+> test
+> test
+> test
+
+test
+ ]]
+
+ local function test(str)
+ local n = 1 -- 000
+ local t = os.clock()
+ local one = convert(str)
+ -- print("runtime",1,#str,#one,os.clock()-t)
+ str = string.rep(str,n)
+ local t = os.clock()
+ local two = convert(str)
+ print(two)
+ -- print("runtime",n,#str,#two,os.clock()-t)
+ -- print(format("==============\n%s\n==============",one))
+ end
+
+ -- test(one)
+ -- test(two)
+ -- test(io.read("*all"))
+
+
+end
diff --git a/tex/context/base/m-pstricks.lua b/tex/context/base/m-pstricks.lua
index b151e313a..2c01ed898 100644
--- a/tex/context/base/m-pstricks.lua
+++ b/tex/context/base/m-pstricks.lua
@@ -1,74 +1,74 @@
-if not modules then modules = { } end modules ['m-pstricks'] = {
- version = 1.001,
- comment = "companion to m-pstricks.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- The following will be done when I need ps tricks figures
--- in large quantities:
---
--- + hash graphics and only process them once
--- + save md5 checksums in tuc file
---
--- It's no big deal but has a low priority.
-
-local format, lower, concat, gmatch = string.format, string.lower, table.concat, string.gmatch
-local variables = interfaces.variables
-
-moduledata.pstricks = moduledata.pstricks or { }
-
-local report_pstricks = logs.reporter("pstricks")
-
-local template = [[
-\starttext
- \pushcatcodetable
- \setcatcodetable\texcatcodes
- \usemodule[pstric]
- %s
- \popcatcodetable
- \startTEXpage
- \hbox\bgroup
- \ignorespaces
- %s
- \removeunwantedspaces
- \egroup
- \obeydepth %% temp hack as we need to figure this out
- \stopTEXpage
-\stoptext
-]]
-
-local loaded = { }
-local graphics = 0
-
-function moduledata.pstricks.usemodule(names)
- for name in gmatch(names,"([^%s,]+)") do
- loaded[#loaded+1] = format([[\readfile{%s}{}{}]],name)
- end
-end
-
-function moduledata.pstricks.process(n)
- graphics = graphics + 1
- local name = format("%s-pstricks-%04i",tex.jobname,graphics)
- local data = buffers.collectcontent("def-"..n)
- local tmpfile = name .. ".tmp"
- local epsfile = name .. ".ps"
- local pdffile = name .. ".pdf"
- local loaded = concat(loaded,"\n")
- os.remove(epsfile)
- os.remove(pdffile)
- io.savedata(tmpfile,format(template,loaded,data))
- os.execute(format("mtxrun --script texexec %s --once --dvips",tmpfile))
- if lfs.isfile(epsfile) then
- os.execute(format("ps2pdf %s %s",epsfile,pdffile))
- -- todo: direct call but not now
- if lfs.isfile(pdffile) then
- context.externalfigure( { pdffile }, { object = variables.no } )
- else
- report_pstricks("run failed, no pdf file")
- end
- else
- report_pstricks("run failed, no ps file")
- end
-end
+if not modules then modules = { } end modules ['m-pstricks'] = {
+ version = 1.001,
+ comment = "companion to m-pstricks.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- The following will be done when I need ps tricks figures
+-- in large quantities:
+--
+-- + hash graphics and only process them once
+-- + save md5 checksums in tuc file
+--
+-- It's no big deal but has a low priority.
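+--
+-- A rough sketch of that caching idea (hypothetical, nothing below is
+-- implemented here): hash the collected buffer content and only run the
+-- external converters when the checksum has not been seen before.
+--
+-- local processed = { }
+--
+-- local function needsprocessing(data)
+--     local checksum = md5.hex(data) -- assuming the usual md5 helpers are loaded
+--     if processed[checksum] then
+--         return false -- reuse the previously generated pdf
+--     else
+--         processed[checksum] = true
+--         return true -- run texexec and ps2pdf
+--     end
+-- end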
+
+local format, lower, concat, gmatch = string.format, string.lower, table.concat, string.gmatch
+local variables = interfaces.variables
+
+moduledata.pstricks = moduledata.pstricks or { }
+
+local report_pstricks = logs.reporter("pstricks")
+
+local template = [[
+\starttext
+ \pushcatcodetable
+ \setcatcodetable\texcatcodes
+ \usemodule[pstric]
+ %s
+ \popcatcodetable
+ \startTEXpage
+ \hbox\bgroup
+ \ignorespaces
+ %s
+ \removeunwantedspaces
+ \egroup
+ \obeydepth %% temp hack as we need to figure this out
+ \stopTEXpage
+\stoptext
+]]
+
+local loaded = { }
+local graphics = 0
+
+function moduledata.pstricks.usemodule(names)
+ for name in gmatch(names,"([^%s,]+)") do
+ loaded[#loaded+1] = format([[\readfile{%s}{}{}]],name)
+ end
+end
+
+function moduledata.pstricks.process(n)
+ graphics = graphics + 1
+ local name = format("%s-pstricks-%04i",tex.jobname,graphics)
+ local data = buffers.collectcontent("def-"..n)
+ local tmpfile = name .. ".tmp"
+ local epsfile = name .. ".ps"
+ local pdffile = name .. ".pdf"
+ local loaded = concat(loaded,"\n")
+ os.remove(epsfile)
+ os.remove(pdffile)
+ io.savedata(tmpfile,format(template,loaded,data))
+ os.execute(format("mtxrun --script texexec %s --once --dvips",tmpfile))
+ if lfs.isfile(epsfile) then
+ os.execute(format("ps2pdf %s %s",epsfile,pdffile))
+ -- todo: direct call but not now
+ if lfs.isfile(pdffile) then
+ context.externalfigure( { pdffile }, { object = variables.no } )
+ else
+ report_pstricks("run failed, no pdf file")
+ end
+ else
+ report_pstricks("run failed, no ps file")
+ end
+end
diff --git a/tex/context/base/m-spreadsheet.lua b/tex/context/base/m-spreadsheet.lua
index 9d5106e35..dcd4ea1c4 100644
--- a/tex/context/base/m-spreadsheet.lua
+++ b/tex/context/base/m-spreadsheet.lua
@@ -1,332 +1,332 @@
-if not modules then modules = { } end modules ['m-spreadsheet'] = {
- version = 1.001,
- comment = "companion to m-spreadsheet.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local byte, format, gsub, find = string.byte, string.format, string.gsub, string.find
-local R, P, S, C, V, Cs, Cc, Ct, Cg, Cf, Carg = lpeg.R, lpeg.P, lpeg.S, lpeg.C, lpeg.V, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.Cg, lpeg.Cf, lpeg.Carg
-local lpegmatch, patterns = lpeg.match, lpeg.patterns
-local setmetatable, loadstring, next, tostring, tonumber,rawget = setmetatable, loadstring, next, tostring, tonumber, rawget
-local formatters = string.formatters
-
-local context = context
-
-local splitthousands = utilities.parsers.splitthousands
-local variables = interfaces.variables
-
-local v_yes = variables.yes
-
-moduledata = moduledata or { }
-
-local spreadsheets = { }
-moduledata.spreadsheets = spreadsheets
-
-local data = {
- -- nothing yet
-}
-
-local settings = {
- period = ".",
- comma = ",",
-}
-
-spreadsheets.data = data
-spreadsheets.settings = settings
-
-local defaultname = "default"
-local stack = { }
-local current = defaultname
-
-local d_mt ; d_mt = {
- __index = function(t,k)
- local v = { }
- setmetatable(v,d_mt)
- t[k] = v
- return v
- end,
-}
-
-local s_mt ; s_mt = {
- __index = function(t,k)
- local v = settings[k]
- t[k] = v
- return v
- end,
-}
-
-function spreadsheets.setup(t)
- for k, v in next, t do
- settings[k] = v
- end
-end
-
-local function emptydata(name,settings)
- local data = { }
- local specifications = { }
- local settings = settings or { }
- setmetatable(data,d_mt)
- setmetatable(specifications,d_mt)
- setmetatable(settings,s_mt)
- return {
- name = name,
- data = data,
- maxcol = 0,
- maxrow = 0,
- settings = settings,
- temp = { }, -- for local usage
- specifications = specifications,
- }
-end
-
-function spreadsheets.reset(name)
- if not name or name == "" then name = defaultname end
- data[name] = emptydata(name,data[name] and data[name].settings)
-end
-
-function spreadsheets.start(name,s)
- if not name or name == "" then
- name = defaultname
- end
- if not s then
- s = { }
- end
- table.insert(stack,current)
- current = name
- if data[current] then
- setmetatable(s,s_mt)
- data[current].settings = s
- else
- data[current] = emptydata(name,s)
- end
-end
-
-function spreadsheets.stop()
- current = table.remove(stack)
-end
-
-spreadsheets.reset()
-
-local offset = byte("A") - 1
-
-local function assign(s,n)
- return formatters["moduledata.spreadsheets.data['%s'].data[%s]"](n,byte(s)-offset)
-end
-
-function datacell(a,b,...)
- local n = 0
- if b then
- local t = { a, b, ... }
- for i=1,#t do
- n = n * (i-1) * 26 + byte(t[i]) - offset
- end
- else
- n = byte(a) - offset
- end
- return formatters["dat[%s]"](n)
-end
-
-local function checktemplate(s)
- if find(s,"%%") then
- -- normal template
- return s
- elseif find(s,"@") then
- -- tex specific template
- return gsub(s,"@","%%")
- else
- -- tex specific quick template
- return "%" .. s
- end
-end
-
-local quoted = Cs(patterns.unquoted)
-local spaces = patterns.whitespace^0
-local cell = C(R("AZ"))^1 / datacell * (Cc("[") * (R("09")^1) * Cc("]") + #P(1))
-
--- A nasty aspect of lpeg: Cf ( spaces * Cc("") * { "start" ... this will create a table that will
--- be reused, so we accumulate!
-
-local pattern = Cf ( spaces * Ct("") * { "start",
- start = V("value") + V("set") + V("format") + V("string") + V("code"),
- value = Cg(P([[=]]) * spaces * Cc("kind") * Cc("value")) * V("code"),
- set = Cg(P([[!]]) * spaces * Cc("kind") * Cc("set")) * V("code"),
- format = Cg(P([[@]]) * spaces * Cc("kind") * Cc("format")) * spaces * Cg(Cc("template") * Cs(quoted/checktemplate)) * V("code"),
- string = Cg(#S([["']]) * Cc("kind") * Cc("string")) * Cg(Cc("content") * quoted),
- code = spaces * Cg(Cc("code") * Cs((cell + P(1))^0)),
-}, rawset)
-
-local functions = { }
-spreadsheets.functions = functions
-
-function functions._s_(row,col,c,f,t)
- local r = 0
- if f and t then -- f..t
- -- ok
- elseif f then -- 1..f
- f, t = 1, f
- else
- f, t = 1, row - 1
- end
- for i=f,t do
- local ci = c[i]
- if type(ci) == "number" then
- r = r + c[i]
- end
- end
- return r
-end
-
-functions.fmt = string.tformat
-
-local f_code = formatters [ [[
- local _m_ = moduledata.spreadsheets
- local dat = _m_.data['%s'].data
- local tmp = _m_.temp
- local fnc = _m_.functions
- local row = %s
- local col = %s
- function fnc.sum(...) return fnc._s_(row,col,...) end
- local sum = fnc.sum
- local fmt = fnc.fmt
- return %s
-]] ]
-
--- to be considered: a weak cache
-
-local function propername(name)
- if name ~= "" then
- return name
- elseif current ~= "" then
- return current
- else
- return defaultname
- end
-end
-
--- if name == "" then name = current if name == "" then name = defaultname end end
-
-local function execute(name,r,c,str)
- if str ~= "" then
- local d = data[name]
- if c > d.maxcol then
- d.maxcol = c
- end
- if r > d.maxrow then
- d.maxrow = r
- end
- local specification = lpegmatch(pattern,str,1,name)
- d.specifications[c][r] = specification
- local kind = specification.kind
- if kind == "string" then
- return specification.content or ""
- else
- local code = specification.code
- if code and code ~= "" then
- code = f_code(name,r,c,code or "")
- local result = loadstring(code) -- utilities.lua.strippedloadstring(code,true) -- when tracing
- result = result and result()
- if type(result) == "function" then
- result = result()
- end
- if type(result) == "number" then
- d.data[c][r] = result
- end
- if not result then
- -- nothing
- elseif kind == "set" then
- -- no return
- elseif kind == "format" then
- return formatters[specification.template](result)
- else
- return result
- end
- end
- end
- end
-end
-
-function spreadsheets.set(name,r,c,str)
- name = propername(name)
- execute(name,r,c,str)
-end
-
-function spreadsheets.get(name,r,c,str)
- name = propername(name)
- local dname = data[name]
- if not dname then
- -- nothing
- elseif not str or str == "" then
- context(dname.data[c][r] or 0)
- else
- local result = execute(name,r,c,str)
- if result then
--- if type(result) == "number" then
--- dname.data[c][r] = result
--- result = tostring(result)
--- end
- local settings = dname.settings
- local split = settings.split
- local period = settings.period
- local comma = settings.comma
- if split == v_yes then
- result = splitthousands(result)
- end
- if period == "" then period = nil end
- if comma == "" then comma = nil end
- result = gsub(result,".",{ ["."] = period, [","] = comma })
- context(result)
- end
- end
-end
-
-function spreadsheets.doifelsecell(name,r,c)
- name = propername(name)
- local d = data[name]
- local d = d and d.data
- local r = d and rawget(d,r)
- local c = r and rawget(r,c)
- commands.doifelse(c)
-end
-
-local function simplify(name)
- name = propername(name)
- local data = data[name]
- if data then
- data = data.data
- local temp = { }
- for k, v in next, data do
- local t = { }
- temp[k] = t
- for kk, vv in next, v do
- if type(vv) == "function" then
- t[kk] = ""
- else
- t[kk] = vv
- end
- end
- end
- return temp
- end
-end
-
-local function serialize(name)
- local s = simplify(name)
- if s then
- return table.serialize(s,name)
- else
- return formatters[""](name)
- end
-end
-
-spreadsheets.simplify = simplify
-spreadsheets.serialize = serialize
-
-function spreadsheets.inspect(name)
- inspect(serialize(name))
-end
-
-function spreadsheets.tocontext(name)
- context.tocontext(simplify(name))
-end
+if not modules then modules = { } end modules ['m-spreadsheet'] = {
+ version = 1.001,
+ comment = "companion to m-spreadsheet.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local byte, format, gsub, find = string.byte, string.format, string.gsub, string.find
+local R, P, S, C, V, Cs, Cc, Ct, Cg, Cf, Carg = lpeg.R, lpeg.P, lpeg.S, lpeg.C, lpeg.V, lpeg.Cs, lpeg.Cc, lpeg.Ct, lpeg.Cg, lpeg.Cf, lpeg.Carg
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
+local setmetatable, loadstring, next, tostring, tonumber,rawget = setmetatable, loadstring, next, tostring, tonumber, rawget
+local formatters = string.formatters
+
+local context = context
+
+local splitthousands = utilities.parsers.splitthousands
+local variables = interfaces.variables
+
+local v_yes = variables.yes
+
+moduledata = moduledata or { }
+
+local spreadsheets = { }
+moduledata.spreadsheets = spreadsheets
+
+local data = {
+ -- nothing yet
+}
+
+local settings = {
+ period = ".",
+ comma = ",",
+}
+
+spreadsheets.data = data
+spreadsheets.settings = settings
+
+local defaultname = "default"
+local stack = { }
+local current = defaultname
+
+local d_mt ; d_mt = {
+ __index = function(t,k)
+ local v = { }
+ setmetatable(v,d_mt)
+ t[k] = v
+ return v
+ end,
+}
+
+local s_mt ; s_mt = {
+ __index = function(t,k)
+ local v = settings[k]
+ t[k] = v
+ return v
+ end,
+}
+
+function spreadsheets.setup(t)
+ for k, v in next, t do
+ settings[k] = v
+ end
+end
+
+local function emptydata(name,settings)
+ local data = { }
+ local specifications = { }
+ local settings = settings or { }
+ setmetatable(data,d_mt)
+ setmetatable(specifications,d_mt)
+ setmetatable(settings,s_mt)
+ return {
+ name = name,
+ data = data,
+ maxcol = 0,
+ maxrow = 0,
+ settings = settings,
+ temp = { }, -- for local usage
+ specifications = specifications,
+ }
+end
+
+function spreadsheets.reset(name)
+ if not name or name == "" then name = defaultname end
+ data[name] = emptydata(name,data[name] and data[name].settings)
+end
+
+function spreadsheets.start(name,s)
+ if not name or name == "" then
+ name = defaultname
+ end
+ if not s then
+ s = { }
+ end
+ table.insert(stack,current)
+ current = name
+ if data[current] then
+ setmetatable(s,s_mt)
+ data[current].settings = s
+ else
+ data[current] = emptydata(name,s)
+ end
+end
+
+function spreadsheets.stop()
+ current = table.remove(stack)
+end
+
+spreadsheets.reset()
+
+local offset = byte("A") - 1
+
+local function assign(s,n)
+ return formatters["moduledata.spreadsheets.data['%s'].data[%s]"](n,byte(s)-offset)
+end
+
+function datacell(a,b,...)
+ local n = 0
+ if b then
+ local t = { a, b, ... }
+ for i=1,#t do
+ n = n * (i-1) * 26 + byte(t[i]) - offset
+ end
+ else
+ n = byte(a) - offset
+ end
+ return formatters["dat[%s]"](n)
+end
+
+local function checktemplate(s)
+ if find(s,"%%") then
+ -- normal template
+ return s
+ elseif find(s,"@") then
+ -- tex specific template
+ return gsub(s,"@","%%")
+ else
+ -- tex specific quick template
+ return "%" .. s
+ end
+end
+
+local quoted = Cs(patterns.unquoted)
+local spaces = patterns.whitespace^0
+local cell = C(R("AZ"))^1 / datacell * (Cc("[") * (R("09")^1) * Cc("]") + #P(1))
+
+-- A nasty aspect of lpeg: Cf ( spaces * Cc("") * { "start" ... this will create a table that will
+-- be reused, so we accumulate!
+
+local pattern = Cf ( spaces * Ct("") * { "start",
+ start = V("value") + V("set") + V("format") + V("string") + V("code"),
+ value = Cg(P([[=]]) * spaces * Cc("kind") * Cc("value")) * V("code"),
+ set = Cg(P([[!]]) * spaces * Cc("kind") * Cc("set")) * V("code"),
+ format = Cg(P([[@]]) * spaces * Cc("kind") * Cc("format")) * spaces * Cg(Cc("template") * Cs(quoted/checktemplate)) * V("code"),
+ string = Cg(#S([["']]) * Cc("kind") * Cc("string")) * Cg(Cc("content") * quoted),
+ code = spaces * Cg(Cc("code") * Cs((cell + P(1))^0)),
+}, rawset)
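+
+-- A minimal illustration of the pitfall mentioned above (a sketch, not used by
+-- this module): seeding Cf with a constant table makes every match fold into
+-- the very same table, while Ct("") starts a fresh one per match.
+--
+-- local shared = Cf(Cc({ }) * Cg(C(1) * Cc(true))^0, rawset)
+-- local fresh  = Cf(Ct("")  * Cg(C(1) * Cc(true))^0, rawset)
+-- -- lpegmatch(shared,"ab") and lpegmatch(shared,"cd") return the same table
+-- -- (now holding a, b, c and d); lpegmatch(fresh,...) does not accumulate.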
+
+local functions = { }
+spreadsheets.functions = functions
+
+function functions._s_(row,col,c,f,t)
+ local r = 0
+ if f and t then -- f..t
+ -- ok
+ elseif f then -- 1..f
+ f, t = 1, f
+ else
+ f, t = 1, row - 1
+ end
+ for i=f,t do
+ local ci = c[i]
+ if type(ci) == "number" then
+ r = r + c[i]
+ end
+ end
+ return r
+end
+
+functions.fmt = string.tformat
+
+local f_code = formatters [ [[
+ local _m_ = moduledata.spreadsheets
+ local dat = _m_.data['%s'].data
+ local tmp = _m_.temp
+ local fnc = _m_.functions
+ local row = %s
+ local col = %s
+ function fnc.sum(...) return fnc._s_(row,col,...) end
+ local sum = fnc.sum
+ local fmt = fnc.fmt
+ return %s
+]] ]
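+
+-- A sketch of the generated chunk (illustrative): for the cell at row 4,
+-- column 1 of sheet 'default' set to "=sum(A,1,3)" the code part becomes
+-- "sum(dat[1],1,3)", so the chunk ends in
+--
+--   return sum(dat[1],1,3)  -- adds the numeric entries in rows 1..3 of column A
+--
+-- while a plain "=sum(A)" sums column A from row 1 up to the row above the
+-- current cell (see functions._s_ above).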
+
+-- to be considered: a weak cache
+
+local function propername(name)
+ if name ~= "" then
+ return name
+ elseif current ~= "" then
+ return current
+ else
+ return defaultname
+ end
+end
+
+-- if name == "" then name = current if name == "" then name = defaultname end end
+
+local function execute(name,r,c,str)
+ if str ~= "" then
+ local d = data[name]
+ if c > d.maxcol then
+ d.maxcol = c
+ end
+ if r > d.maxrow then
+ d.maxrow = r
+ end
+ local specification = lpegmatch(pattern,str,1,name)
+ d.specifications[c][r] = specification
+ local kind = specification.kind
+ if kind == "string" then
+ return specification.content or ""
+ else
+ local code = specification.code
+ if code and code ~= "" then
+ code = f_code(name,r,c,code or "")
+ local result = loadstring(code) -- utilities.lua.strippedloadstring(code,true) -- when tracing
+ result = result and result()
+ if type(result) == "function" then
+ result = result()
+ end
+ if type(result) == "number" then
+ d.data[c][r] = result
+ end
+ if not result then
+ -- nothing
+ elseif kind == "set" then
+ -- no return
+ elseif kind == "format" then
+ return formatters[specification.template](result)
+ else
+ return result
+ end
+ end
+ end
+ end
+end
+
+function spreadsheets.set(name,r,c,str)
+ name = propername(name)
+ execute(name,r,c,str)
+end
+
+function spreadsheets.get(name,r,c,str)
+ name = propername(name)
+ local dname = data[name]
+ if not dname then
+ -- nothing
+ elseif not str or str == "" then
+ context(dname.data[c][r] or 0)
+ else
+ local result = execute(name,r,c,str)
+ if result then
+-- if type(result) == "number" then
+-- dname.data[c][r] = result
+-- result = tostring(result)
+-- end
+ local settings = dname.settings
+ local split = settings.split
+ local period = settings.period
+ local comma = settings.comma
+ if split == v_yes then
+ result = splitthousands(result)
+ end
+ if period == "" then period = nil end
+ if comma == "" then comma = nil end
+ result = gsub(result,".",{ ["."] = period, [","] = comma })
+ context(result)
+ end
+ end
+end
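+
+-- A usage sketch at the Lua end (illustrative only): with
+--
+--   moduledata.spreadsheets.setup { period = ",", comma = "." }
+--
+-- a computed value like 1234.56 comes out as 1234,56, and when the split
+-- setting equals variables.yes the thousands are split off first by
+-- utilities.parsers.splitthousands, so those separators get swapped too.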
+
+function spreadsheets.doifelsecell(name,r,c)
+ name = propername(name)
+ local d = data[name]
+ local d = d and d.data
+ local r = d and rawget(d,r)
+ local c = r and rawget(r,c)
+ commands.doifelse(c)
+end
+
+local function simplify(name)
+ name = propername(name)
+ local data = data[name]
+ if data then
+ data = data.data
+ local temp = { }
+ for k, v in next, data do
+ local t = { }
+ temp[k] = t
+ for kk, vv in next, v do
+ if type(vv) == "function" then
+ t[kk] = ""
+ else
+ t[kk] = vv
+ end
+ end
+ end
+ return temp
+ end
+end
+
+local function serialize(name)
+ local s = simplify(name)
+ if s then
+ return table.serialize(s,name)
+ else
+ return formatters[""](name)
+ end
+end
+
+spreadsheets.simplify = simplify
+spreadsheets.serialize = serialize
+
+function spreadsheets.inspect(name)
+ inspect(serialize(name))
+end
+
+function spreadsheets.tocontext(name)
+ context.tocontext(simplify(name))
+end
diff --git a/tex/context/base/m-steps.lua b/tex/context/base/m-steps.lua
index 97759b799..caf765a56 100644
--- a/tex/context/base/m-steps.lua
+++ b/tex/context/base/m-steps.lua
@@ -1,227 +1,227 @@
-if not modules then modules = { } end modules ['x-flow'] = {
- version = 1.001,
- comment = "companion to m-flow.mkvi",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- when we can resolve mpcolor at the lua end we will use metapost.graphic(....) directly
-
-moduledata.steps = moduledata.steps or { }
-
-local points = number.points -- number.pt
-local variables = interfaces.variables
-
-local trace_charts = false
-
-local defaults = {
- chart = {
- dx = 10*65436,
- dy = 10*65436,
- },
- cell = {
- alternative = 1,
- offset = 2*65436,
- rulethickness = 65436,
- framecolor = "blue",
- backgroundcolor = "gray",
- },
- text = {
- alternative = 1,
- offset = 2*65436,
- distance = 4*65436,
- rulethickness = 65436,
- framecolor = "red",
- backgroundcolor = "gray",
- },
- line = {
- alternative = 1,
- rulethickness = 65436,
- height = 30*65436,
- distance = 10*65436,
- offset = 5*65436,
- color = "green",
- },
-}
-
--- todo : name (no name then direct)
--- maybe: includes
--- maybe: flush ranges
-
-local charts = { }
-local steps = { }
-
-function commands.step_start_chart(name)
- name = name or ""
- steps = { }
- charts[name] = {
- steps = steps,
- }
-end
-
-function commands.step_stop_chart()
-end
-
-function commands.step_make_chart(settings)
- local chartsettings = settings.chart
- if not chartsettings then
- print("no chart")
- return
- end
- local chartname = chartsettings.name
- if not chartname then
- print("no name given")
- return
- end
- local chart = charts[chartname]
- if not chart then
- print("no such chart",chartname)
- return
- end
- local steps = chart.steps or { }
- --
- table.setmetatableindex(settings,defaults)
- --
- if trace_charts then
- inspect(steps)
- end
- --
- local textsettings = settings.text
- local cellsettings = settings.cell
- local linesettings = settings.line
- --
- context.startMPcode()
- context("if unknown context_cell : input mp-step.mpiv ; fi ;")
- context("step_begin_chart ;")
- --
- if chartsettings.alternative == variables.vertical then
- context("chart_vertical := true ;")
- end
- --
- context("text_line_color := \\MPcolor{%s} ;", textsettings.framecolor)
- context("text_line_width := %s ;", points(textsettings.rulethickness))
- context("text_fill_color := \\MPcolor{%s} ;", textsettings.backgroundcolor)
- context("text_offset := %s ;", points(textsettings.offset))
- context("text_distance_set := %s ;", points(textsettings.distance))
- --
- context("cell_line_color := \\MPcolor{%s} ;", cellsettings.framecolor)
- context("cell_line_width := %s ;", points(cellsettings.rulethickness))
- context("cell_fill_color := \\MPcolor{%s} ;", cellsettings.backgroundcolor)
- context("cell_offset := %s ;", points(cellsettings.offset))
- context("cell_distance_x := %s ;", points(cellsettings.dx))
- context("cell_distance_y := %s ;", points(cellsettings.dy))
- --
- context("line_line_color := \\MPcolor{%s} ;", linesettings.color)
- context("line_line_width := %s ;", points(linesettings.rulethickness))
- context("line_distance := %s ;", points(linesettings.distance))
- context("line_offset := %s ;", points(linesettings.offset))
- --
- for i=1,#steps do
- local step = steps[i]
- context("step_begin_cell ;")
- if step.cell_top ~= "" then
- context('step_cell_top("%s") ;',string.strip(step.cell_top))
- end
- if step.cell_bot ~= "" then
- context('step_cell_bot("%s") ;',string.strip(step.cell_bot))
- end
- if step.text_top ~= "" then
- context('step_text_top("%s") ;',string.strip(step.text_top))
- end
- if step.text_mid ~= "" then
- context('step_text_mid("%s") ;',string.strip(step.text_mid))
- end
- if step.text_bot ~= "" then
- context('step_text_bot("%s") ;',string.strip(step.text_bot))
- end
- context("step_end_cell ;")
- end
- --
- context("step_end_chart ;")
- context.stopMPcode()
-end
-
-function commands.step_cells(top,bot)
- steps[#steps+1] = {
- cell_top = top or "",
- cell_bot = bot or "",
- text_top = "",
- text_mid = "",
- text_bot = "",
- }
-end
-
-function commands.step_texts(top,bot)
- if #steps > 0 then
- steps[#steps].text_top = top or ""
- steps[#steps].text_bot = bot or ""
- end
-end
-
-function commands.step_cell(top)
- steps[#steps+1] = {
- cell_top = top or "",
- cell_bot = "",
- text_top = "",
- text_mid = "",
- text_bot = "",
- }
-end
-
-function commands.step_text(top)
- if #steps > 0 then
- steps[#steps].text_top = top or ""
- end
-end
-
-function commands.step_textset(left,middle,right)
- if #steps > 0 then
- steps[#steps].text_top = left or ""
- steps[#steps].text_mid = middle or ""
- steps[#steps].text_bot = right or ""
- end
-end
-
-function commands.step_start_cell()
- steps[#steps+1] = {
- cell_top = "",
- cell_bot = "",
- text_top = "",
- text_mid = "",
- text_bot = "",
- }
-end
-
-function commands.step_stop_cell()
-end
-
-function commands.step_text_top(str)
- if #steps > 0 then
- steps[#steps].text_top = str or ""
- end
-end
-
-function commands.step_text_mid(str)
- if #steps > 0 then
- steps[#steps].text_mid = str or ""
- end
-end
-
-function commands.step_text_bot(str)
- if #steps > 0 then
- steps[#steps].text_bot = str or ""
- end
-end
-
-function commands.step_cell_top(str)
- if #steps > 0 then
- steps[#steps].cell_top = str or ""
- end
-end
-
-function commands.step_cell_bot(str)
- if #steps > 0 then
- steps[#steps].cell_bot = str or ""
- end
-end
+if not modules then modules = { } end modules ['x-flow'] = {
+ version = 1.001,
+ comment = "companion to m-flow.mkvi",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- when we can resolve mpcolor at the lua end we will use metapost.graphic(....) directly
+
+moduledata.steps = moduledata.steps or { }
+
+local points = number.points -- number.pt
+local variables = interfaces.variables
+
+local trace_charts = false
+
+local defaults = {
+ chart = {
+ dx = 10*65436,
+ dy = 10*65436,
+ },
+ cell = {
+ alternative = 1,
+ offset = 2*65436,
+ rulethickness = 65436,
+ framecolor = "blue",
+ backgroundcolor = "gray",
+ },
+ text = {
+ alternative = 1,
+ offset = 2*65436,
+ distance = 4*65436,
+ rulethickness = 65436,
+ framecolor = "red",
+ backgroundcolor = "gray",
+ },
+ line = {
+ alternative = 1,
+ rulethickness = 65436,
+ height = 30*65436,
+ distance = 10*65436,
+ offset = 5*65436,
+ color = "green",
+ },
+}
+
+-- todo : name (no name then direct)
+-- maybe: includes
+-- maybe: flush ranges
+
+local charts = { }
+local steps = { }
+
+function commands.step_start_chart(name)
+ name = name or ""
+ steps = { }
+ charts[name] = {
+ steps = steps,
+ }
+end
+
+function commands.step_stop_chart()
+end
+
+function commands.step_make_chart(settings)
+ local chartsettings = settings.chart
+ if not chartsettings then
+ print("no chart")
+ return
+ end
+ local chartname = chartsettings.name
+ if not chartname then
+ print("no name given")
+ return
+ end
+ local chart = charts[chartname]
+ if not chart then
+ print("no such chart",chartname)
+ return
+ end
+ local steps = chart.steps or { }
+ --
+ table.setmetatableindex(settings,defaults)
+ --
+ if trace_charts then
+ inspect(steps)
+ end
+ --
+ local textsettings = settings.text
+ local cellsettings = settings.cell
+ local linesettings = settings.line
+ --
+ context.startMPcode()
+ context("if unknown context_cell : input mp-step.mpiv ; fi ;")
+ context("step_begin_chart ;")
+ --
+ if chartsettings.alternative == variables.vertical then
+ context("chart_vertical := true ;")
+ end
+ --
+ context("text_line_color := \\MPcolor{%s} ;", textsettings.framecolor)
+ context("text_line_width := %s ;", points(textsettings.rulethickness))
+ context("text_fill_color := \\MPcolor{%s} ;", textsettings.backgroundcolor)
+ context("text_offset := %s ;", points(textsettings.offset))
+ context("text_distance_set := %s ;", points(textsettings.distance))
+ --
+ context("cell_line_color := \\MPcolor{%s} ;", cellsettings.framecolor)
+ context("cell_line_width := %s ;", points(cellsettings.rulethickness))
+ context("cell_fill_color := \\MPcolor{%s} ;", cellsettings.backgroundcolor)
+ context("cell_offset := %s ;", points(cellsettings.offset))
+ context("cell_distance_x := %s ;", points(cellsettings.dx))
+ context("cell_distance_y := %s ;", points(cellsettings.dy))
+ --
+ context("line_line_color := \\MPcolor{%s} ;", linesettings.color)
+ context("line_line_width := %s ;", points(linesettings.rulethickness))
+ context("line_distance := %s ;", points(linesettings.distance))
+ context("line_offset := %s ;", points(linesettings.offset))
+ --
+ for i=1,#steps do
+ local step = steps[i]
+ context("step_begin_cell ;")
+ if step.cell_top ~= "" then
+ context('step_cell_top("%s") ;',string.strip(step.cell_top))
+ end
+ if step.cell_bot ~= "" then
+ context('step_cell_bot("%s") ;',string.strip(step.cell_bot))
+ end
+ if step.text_top ~= "" then
+ context('step_text_top("%s") ;',string.strip(step.text_top))
+ end
+ if step.text_mid ~= "" then
+ context('step_text_mid("%s") ;',string.strip(step.text_mid))
+ end
+ if step.text_bot ~= "" then
+ context('step_text_bot("%s") ;',string.strip(step.text_bot))
+ end
+ context("step_end_cell ;")
+ end
+ --
+ context("step_end_chart ;")
+ context.stopMPcode()
+end
+
+function commands.step_cells(top,bot)
+ steps[#steps+1] = {
+ cell_top = top or "",
+ cell_bot = bot or "",
+ text_top = "",
+ text_mid = "",
+ text_bot = "",
+ }
+end
+
+function commands.step_texts(top,bot)
+ if #steps > 0 then
+ steps[#steps].text_top = top or ""
+ steps[#steps].text_bot = bot or ""
+ end
+end
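+
+-- A sketch of collecting chart data from the Lua end (normally the TeX
+-- interface does this; the texts are made up):
+--
+--   commands.step_start_chart("demo")
+--   commands.step_cells("input","output")
+--   commands.step_texts("process","")
+--   commands.step_stop_chart()
+--
+-- after which step_make_chart is called with the (TeX driven) settings and
+-- flushes the MetaPost code for chart "demo".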
+
+function commands.step_cell(top)
+ steps[#steps+1] = {
+ cell_top = top or "",
+ cell_bot = "",
+ text_top = "",
+ text_mid = "",
+ text_bot = "",
+ }
+end
+
+function commands.step_text(top)
+ if #steps > 0 then
+ steps[#steps].text_top = top or ""
+ end
+end
+
+function commands.step_textset(left,middle,right)
+ if #steps > 0 then
+ steps[#steps].text_top = left or ""
+ steps[#steps].text_mid = middle or ""
+ steps[#steps].text_bot = right or ""
+ end
+end
+
+function commands.step_start_cell()
+ steps[#steps+1] = {
+ cell_top = "",
+ cell_bot = "",
+ text_top = "",
+ text_mid = "",
+ text_bot = "",
+ }
+end
+
+function commands.step_stop_cell()
+end
+
+function commands.step_text_top(str)
+ if #steps > 0 then
+ steps[#steps].text_top = str or ""
+ end
+end
+
+function commands.step_text_mid(str)
+ if #steps > 0 then
+ steps[#steps].text_mid = str or ""
+ end
+end
+
+function commands.step_text_bot(str)
+ if #steps > 0 then
+ steps[#steps].text_bot = str or ""
+ end
+end
+
+function commands.step_cell_top(str)
+ if #steps > 0 then
+ steps[#steps].cell_top = str or ""
+ end
+end
+
+function commands.step_cell_bot(str)
+ if #steps > 0 then
+ steps[#steps].cell_bot = str or ""
+ end
+end
diff --git a/tex/context/base/math-act.lua b/tex/context/base/math-act.lua
index 4f9b3b7e8..875e200c1 100644
--- a/tex/context/base/math-act.lua
+++ b/tex/context/base/math-act.lua
@@ -1,404 +1,404 @@
-if not modules then modules = { } end modules ['math-act'] = {
- version = 1.001,
- comment = "companion to math-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- Here we tweak some font properties (if needed).
-
-local type, next = type, next
-local fastcopy = table.fastcopy
-
-local trace_defining = false trackers.register("math.defining", function(v) trace_defining = v end)
-local report_math = logs.reporter("mathematics","initializing")
-
-local context = context
-local commands = commands
-local mathematics = mathematics
-local texdimen = tex.dimen
-local abs = math.abs
-
-local sequencers = utilities.sequencers
-local appendgroup = sequencers.appendgroup
-local appendaction = sequencers.appendaction
-
-local mathfontparameteractions = sequencers.new {
- name = "mathparameters",
- arguments = "target,original",
-}
-
-appendgroup("mathparameters","before") -- user
-appendgroup("mathparameters","system") -- private
-appendgroup("mathparameters","after" ) -- user
-
-function fonts.constructors.assignmathparameters(original,target)
- local runner = mathfontparameteractions.runner
- if runner then
- runner(original,target)
- end
-end
-
-function mathematics.initializeparameters(target,original)
- local mathparameters = original.mathparameters
- if mathparameters and next(mathparameters) then
- target.mathparameters = mathematics.dimensions(mathparameters)
- end
-end
-
-sequencers.appendaction("mathparameters","system","mathematics.initializeparameters")
-
-local how = {
- -- RadicalKernBeforeDegree = "horizontal",
- -- RadicalKernAfterDegree = "horizontal",
- ScriptPercentScaleDown = "unscaled",
- ScriptScriptPercentScaleDown = "unscaled",
- RadicalDegreeBottomRaisePercent = "unscaled"
-}
-
-function mathematics.scaleparameters(target,original)
- if not target.properties.math_is_scaled then
- local mathparameters = target.mathparameters
- if mathparameters and next(mathparameters) then
- local parameters = target.parameters
- local factor = parameters.factor
- local hfactor = parameters.hfactor
- local vfactor = parameters.vfactor
- for name, value in next, mathparameters do
- local h = how[name]
- if h == "unscaled" then
- -- kept
- elseif h == "horizontal" then
- value = value * hfactor
- elseif h == "vertical"then
- value = value * vfactor
- else
- value = value * factor
- end
- mathparameters[name] = value
- end
- end
- target.properties.math_is_scaled = true
- end
-end
-
-sequencers.appendaction("mathparameters","system","mathematics.scaleparameters")
-
-function mathematics.checkaccentbaseheight(target,original)
- local mathparameters = target.mathparameters
- if mathparameters and mathparameters.AccentBaseHeight == 0 then
- mathparameters.AccentBaseHeight = target.parameters.x_height -- needs checking
- end
-end
-
-sequencers.appendaction("mathparameters","system","mathematics.checkaccentbaseheight") -- should go in lfg instead
-
-function mathematics.checkprivateparameters(target,original)
- local mathparameters = target.mathparameters
- if mathparameters then
- local parameters = target.parameters
- if parameters then
- if not mathparameters.FractionDelimiterSize then
- mathparameters.FractionDelimiterSize = 1.01 * parameters.size
- end
- if not mathparameters.FractionDelimiterDisplayStyleSize then
- mathparameters.FractionDelimiterDisplayStyleSize = 2.40 * parameters.size
- end
- elseif target.properties then
- report_math("no parameters in font %a",target.properties.fullname or "?")
- else
- report_math("no parameters and properties in font")
- end
- end
-end
-
-sequencers.appendaction("mathparameters","system","mathematics.checkprivateparameters")
-
-function mathematics.overloadparameters(target,original)
- local mathparameters = target.mathparameters
- if mathparameters and next(mathparameters) then
- local goodies = target.goodies
- if goodies then
- for i=1,#goodies do
- local goodie = goodies[i]
- local mathematics = goodie.mathematics
- local parameters = mathematics and mathematics.parameters
- if parameters then
- if trace_defining then
- report_math("overloading math parameters in %a @ %p",target.properties.fullname,target.parameters.size)
- end
- for name, value in next, parameters do
- local tvalue = type(value)
- if tvalue == "string" then
- report_math("comment for math parameter %a: %s",name,value)
- else
- local oldvalue = mathparameters[name]
- local newvalue = oldvalue
- if oldvalue then
- if tvalue == "number" then
- newvalue = value
- elseif tvalue == "function" then
- newvalue = value(oldvalue,target,original)
- elseif not tvalue then
- newvalue = nil
- end
- if trace_defining and oldvalue ~= newvalue then
- report_math("overloading math parameter %a: %S => %S",name,oldvalue,newvalue)
- end
- else
- report_math("invalid math parameter %a",name)
- end
- mathparameters[name] = newvalue
- end
- end
- end
- end
- end
- end
-end
-
-sequencers.appendaction("mathparameters","system","mathematics.overloadparameters")
-
-local function applytweaks(when,target,original)
- local goodies = original.goodies
- if goodies then
- for i=1,#goodies do
- local goodie = goodies[i]
- local mathematics = goodie.mathematics
- local tweaks = mathematics and mathematics.tweaks
- if tweaks then
- tweaks = tweaks[when]
- if tweaks then
- if trace_defining then
- report_math("tweaking math of %a @ %p (%s)",target.properties.fullname,target.parameters.size,when)
- end
- for i=1,#tweaks do
- local tweak= tweaks[i]
- local tvalue = type(tweak)
- if tvalue == "function" then
- tweak(target,original)
- end
- end
- end
- end
- end
- end
-end
-
-function mathematics.tweakbeforecopyingfont(target,original)
- local mathparameters = target.mathparameters -- why not hasmath
- if mathparameters then
- applytweaks("beforecopying",target,original)
- end
-end
-
-function mathematics.tweakaftercopyingfont(target,original)
- local mathparameters = target.mathparameters -- why not hasmath
- if mathparameters then
- applytweaks("aftercopying",target,original)
- end
-end
-
-sequencers.appendaction("beforecopyingcharacters","system","mathematics.tweakbeforecopyingfont")
-sequencers.appendaction("aftercopyingcharacters", "system","mathematics.tweakaftercopyingfont")
-
-function mathematics.overloaddimensions(target,original,set)
- local goodies = target.goodies
- if goodies then
- for i=1,#goodies do
- local goodie = goodies[i]
- local mathematics = goodie.mathematics
- local dimensions = mathematics and mathematics.dimensions
- if dimensions then
- if trace_defining then
- report_math("overloading dimensions in %a @ %p",target.properties.fullname,target.parameters.size)
- end
- local characters = target.characters
- local parameters = target.parameters
- local factor = parameters.factor
- local hfactor = parameters.hfactor
- local vfactor = parameters.vfactor
- local addprivate = fonts.helpers.addprivate
- local function overload(dimensions)
- for unicode, data in next, dimensions do
- local character = characters[unicode]
- if character then
- --
- local width = data.width
- local height = data.height
- local depth = data.depth
- if trace_defining and (width or height or depth) then
- report_math("overloading dimensions of %C, width %a, height %a, depth %a",unicode,width,height,depth)
- end
- if width then character.width = width * hfactor end
- if height then character.height = height * vfactor end
- if depth then character.depth = depth * vfactor end
- --
- local xoffset = data.xoffset
- local yoffset = data.yoffset
- if xoffset then
- xoffset = { "right", xoffset * hfactor }
- end
- if yoffset then
- yoffset = { "down", -yoffset * vfactor }
- end
- if xoffset or yoffset then
- local slot = { "slot", 1, addprivate(target,nil,fastcopy(character)) }
- if xoffset and yoffset then
- character.commands = { xoffset, yoffset, slot }
- elseif xoffset then
- character.commands = { xoffset, slot }
- else
- character.commands = { yoffset, slot }
- end
- character.index = nil
- end
- elseif trace_defining then
- report_math("no overloading dimensions of %C, not in font",unicode)
- end
- end
- end
- if set == nil then
- set = { "default" }
- end
- if set == "all" or set == true then
- for name, set in next, dimensions do
- overload(set)
- end
- else
- if type(set) == "string" then
- set = utilities.parsers.settings_to_array(set)
- end
- if type(set) == "table" then
- for i=1,#set do
- local d = dimensions[set[i]]
- if d then
- overload(d)
- end
- end
- end
- end
- end
- end
- end
-end
-
-sequencers.appendaction("aftercopyingcharacters", "system","mathematics.overloaddimensions")
-
--- a couple of predefined tewaks:
-
-local tweaks = { }
-mathematics.tweaks = tweaks
-
-function tweaks.fixbadprime(target,original)
- target.characters[0xFE325] = target.characters[0x2032]
-end
-
--- helpers
-
-local setmetatableindex = table.setmetatableindex
-local family_font = node.family_font
-
-local fontcharacters = fonts.hashes.characters
-local extensibles = utilities.storage.allocate()
-fonts.hashes.extensibles = extensibles
-
-local chardata = characters.data
-local extensibles = mathematics.extensibles
-
--- we use numbers at the tex end (otherwise we could stick to chars)
-
-local e_left = extensibles.left
-local e_right = extensibles.right
-local e_horizontal = extensibles.horizontal
-local e_vertical = extensibles.vertical
-local e_mixed = extensibles.mixed
-local e_unknown = extensibles.unknown
-
-local unknown = { e_unknown, false, false }
-
-local function extensiblecode(font,unicode)
- local characters = fontcharacters[font]
- local character = characters[unicode]
- if not character then
- return unknown
- end
- local code = unicode
- local next = character.next
- while next do
- code = next
- character = characters[next]
- next = character.next
- end
- local char = chardata[unicode]
- local mathextensible = char and char.mathextensible
- if character.horiz_variants then
- if character.vert_variants then
- return { e_mixed, code, character }
- else
- local e = mathextensible and extensibles[mathextensible]
- return e and { e, code, character } or unknown
- end
- elseif character.vert_variants then
- local e = mathextensible and extensibles[mathextensible]
- return e and { e, code, character } or unknown
- else
- return unknown
- end
-end
-
-setmetatableindex(extensibles,function(extensibles,font)
- local codes = { }
- setmetatableindex(codes, function(codes,unicode)
- local status = extensiblecode(font,unicode)
- codes[unicode] = status
- return status
- end)
- extensibles[font] = codes
- return codes
-end)
-
-function mathematics.extensiblecode(family,unicode)
- return extensibles[family_font(family or 0)][unicode][1]
-end
-
-function commands.extensiblecode(family,unicode)
- context(extensibles[family_font(family or 0)][unicode][1])
-end
-
--- left : [head] ...
--- right : ... [head]
--- horizontal : [head] ... [head]
---
--- abs(right["start"] - right["end"]) | right.advance | characters[right.glyph].width
-
-function commands.horizontalcode(family,unicode)
- local font = family_font(family or 0)
- local data = extensibles[font][unicode]
- local kind = data[1]
- if kind == e_left then
- local charlist = data[3].horiz_variants
- local characters = fontcharacters[font]
- local left = charlist[1]
- texdimen.scratchleftoffset = abs((left["start"] or 0) - (left["end"] or 0))
- texdimen.scratchrightoffset = 0
- elseif kind == e_right then
- local charlist = data[3].horiz_variants
- local characters = fontcharacters[font]
- local right = charlist[#charlist]
- texdimen.scratchleftoffset = 0
- texdimen.scratchrightoffset = abs((right["start"] or 0) - (right["end"] or 0))
- elseif kind == e_horizontal then
- local charlist = data[3].horiz_variants
- local characters = fontcharacters[font]
- local left = charlist[1]
- local right = charlist[#charlist]
- texdimen.scratchleftoffset = abs((left["start"] or 0) - (left["end"] or 0))
- texdimen.scratchrightoffset = abs((right["start"] or 0) - (right["end"] or 0))
- else
- texdimen.scratchleftoffset = 0
- texdimen.scratchrightoffset = 0
- end
- context(kind)
-end
+if not modules then modules = { } end modules ['math-act'] = {
+ version = 1.001,
+ comment = "companion to math-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- Here we tweak some font properties (if needed).
+
+local type, next = type, next
+local fastcopy = table.fastcopy
+
+local trace_defining = false trackers.register("math.defining", function(v) trace_defining = v end)
+local report_math = logs.reporter("mathematics","initializing")
+
+local context = context
+local commands = commands
+local mathematics = mathematics
+local texdimen = tex.dimen
+local abs = math.abs
+
+local sequencers = utilities.sequencers
+local appendgroup = sequencers.appendgroup
+local appendaction = sequencers.appendaction
+
+local mathfontparameteractions = sequencers.new {
+ name = "mathparameters",
+ arguments = "target,original",
+}
+
+appendgroup("mathparameters","before") -- user
+appendgroup("mathparameters","system") -- private
+appendgroup("mathparameters","after" ) -- user
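+
+-- The "before" and "after" groups are meant for user code; a minimal sketch
+-- (the function name is made up):
+--
+--   function mathematics.mytweak(target,original)
+--       -- inspect or adapt target.mathparameters here
+--   end
+--
+--   sequencers.appendaction("mathparameters","before","mathematics.mytweak")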
+
+function fonts.constructors.assignmathparameters(original,target)
+ local runner = mathfontparameteractions.runner
+ if runner then
+ runner(original,target)
+ end
+end
+
+function mathematics.initializeparameters(target,original)
+ local mathparameters = original.mathparameters
+ if mathparameters and next(mathparameters) then
+ target.mathparameters = mathematics.dimensions(mathparameters)
+ end
+end
+
+sequencers.appendaction("mathparameters","system","mathematics.initializeparameters")
+
+local how = {
+ -- RadicalKernBeforeDegree = "horizontal",
+ -- RadicalKernAfterDegree = "horizontal",
+ ScriptPercentScaleDown = "unscaled",
+ ScriptScriptPercentScaleDown = "unscaled",
+ RadicalDegreeBottomRaisePercent = "unscaled"
+}
+
+function mathematics.scaleparameters(target,original)
+ if not target.properties.math_is_scaled then
+ local mathparameters = target.mathparameters
+ if mathparameters and next(mathparameters) then
+ local parameters = target.parameters
+ local factor = parameters.factor
+ local hfactor = parameters.hfactor
+ local vfactor = parameters.vfactor
+ for name, value in next, mathparameters do
+ local h = how[name]
+ if h == "unscaled" then
+ -- kept
+ elseif h == "horizontal" then
+ value = value * hfactor
+ elseif h == "vertical" then
+ value = value * vfactor
+ else
+ value = value * factor
+ end
+ mathparameters[name] = value
+ end
+ end
+ target.properties.math_is_scaled = true
+ end
+end
+
+sequencers.appendaction("mathparameters","system","mathematics.scaleparameters")
+
+function mathematics.checkaccentbaseheight(target,original)
+ local mathparameters = target.mathparameters
+ if mathparameters and mathparameters.AccentBaseHeight == 0 then
+ mathparameters.AccentBaseHeight = target.parameters.x_height -- needs checking
+ end
+end
+
+sequencers.appendaction("mathparameters","system","mathematics.checkaccentbaseheight") -- should go in lfg instead
+
+function mathematics.checkprivateparameters(target,original)
+ local mathparameters = target.mathparameters
+ if mathparameters then
+ local parameters = target.parameters
+ if parameters then
+ if not mathparameters.FractionDelimiterSize then
+ mathparameters.FractionDelimiterSize = 1.01 * parameters.size
+ end
+ if not mathparameters.FractionDelimiterDisplayStyleSize then
+ mathparameters.FractionDelimiterDisplayStyleSize = 2.40 * parameters.size
+ end
+ elseif target.properties then
+ report_math("no parameters in font %a",target.properties.fullname or "?")
+ else
+ report_math("no parameters and properties in font")
+ end
+ end
+end
+
+sequencers.appendaction("mathparameters","system","mathematics.checkprivateparameters")
+
+function mathematics.overloadparameters(target,original)
+ local mathparameters = target.mathparameters
+ if mathparameters and next(mathparameters) then
+ local goodies = target.goodies
+ if goodies then
+ for i=1,#goodies do
+ local goodie = goodies[i]
+ local mathematics = goodie.mathematics
+ local parameters = mathematics and mathematics.parameters
+ if parameters then
+ if trace_defining then
+ report_math("overloading math parameters in %a @ %p",target.properties.fullname,target.parameters.size)
+ end
+ for name, value in next, parameters do
+ local tvalue = type(value)
+ if tvalue == "string" then
+ report_math("comment for math parameter %a: %s",name,value)
+ else
+ local oldvalue = mathparameters[name]
+ local newvalue = oldvalue
+ if oldvalue then
+ if tvalue == "number" then
+ newvalue = value
+ elseif tvalue == "function" then
+ newvalue = value(oldvalue,target,original)
+ elseif not tvalue then
+ newvalue = nil
+ end
+ if trace_defining and oldvalue ~= newvalue then
+ report_math("overloading math parameter %a: %S => %S",name,oldvalue,newvalue)
+ end
+ else
+ report_math("invalid math parameter %a",name)
+ end
+ mathparameters[name] = newvalue
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
+sequencers.appendaction("mathparameters","system","mathematics.overloadparameters")
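+
+-- Such parameters normally come from a goodies (lfg) file; a minimal sketch
+-- (names and values are made up):
+--
+--   return {
+--       name        = "my-math",
+--       mathematics = {
+--           parameters = {
+--               AccentBaseHeight      = function(value,target,original) return 0.95 * value end,
+--               FractionRuleThickness = 30000, -- a plain number just replaces the old value
+--               SomeOtherParameter    = "a string is only reported as a comment",
+--           },
+--       },
+--   }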
+
+local function applytweaks(when,target,original)
+ local goodies = original.goodies
+ if goodies then
+ for i=1,#goodies do
+ local goodie = goodies[i]
+ local mathematics = goodie.mathematics
+ local tweaks = mathematics and mathematics.tweaks
+ if tweaks then
+ tweaks = tweaks[when]
+ if tweaks then
+ if trace_defining then
+ report_math("tweaking math of %a @ %p (%s)",target.properties.fullname,target.parameters.size,when)
+ end
+ for i=1,#tweaks do
+ local tweak = tweaks[i]
+ local tvalue = type(tweak)
+ if tvalue == "function" then
+ tweak(target,original)
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
+function mathematics.tweakbeforecopyingfont(target,original)
+ local mathparameters = target.mathparameters -- why not hasmath
+ if mathparameters then
+ applytweaks("beforecopying",target,original)
+ end
+end
+
+function mathematics.tweakaftercopyingfont(target,original)
+ local mathparameters = target.mathparameters -- why not hasmath
+ if mathparameters then
+ applytweaks("aftercopying",target,original)
+ end
+end
+
+sequencers.appendaction("beforecopyingcharacters","system","mathematics.tweakbeforecopyingfont")
+sequencers.appendaction("aftercopyingcharacters", "system","mathematics.tweakaftercopyingfont")
+
+function mathematics.overloaddimensions(target,original,set)
+ local goodies = target.goodies
+ if goodies then
+ for i=1,#goodies do
+ local goodie = goodies[i]
+ local mathematics = goodie.mathematics
+ local dimensions = mathematics and mathematics.dimensions
+ if dimensions then
+ if trace_defining then
+ report_math("overloading dimensions in %a @ %p",target.properties.fullname,target.parameters.size)
+ end
+ local characters = target.characters
+ local parameters = target.parameters
+ local factor = parameters.factor
+ local hfactor = parameters.hfactor
+ local vfactor = parameters.vfactor
+ local addprivate = fonts.helpers.addprivate
+ local function overload(dimensions)
+ for unicode, data in next, dimensions do
+ local character = characters[unicode]
+ if character then
+ --
+ local width = data.width
+ local height = data.height
+ local depth = data.depth
+ if trace_defining and (width or height or depth) then
+ report_math("overloading dimensions of %C, width %a, height %a, depth %a",unicode,width,height,depth)
+ end
+ if width then character.width = width * hfactor end
+ if height then character.height = height * vfactor end
+ if depth then character.depth = depth * vfactor end
+ --
+ local xoffset = data.xoffset
+ local yoffset = data.yoffset
+ if xoffset then
+ xoffset = { "right", xoffset * hfactor }
+ end
+ if yoffset then
+ yoffset = { "down", -yoffset * vfactor }
+ end
+ if xoffset or yoffset then
+ local slot = { "slot", 1, addprivate(target,nil,fastcopy(character)) }
+ if xoffset and yoffset then
+ character.commands = { xoffset, yoffset, slot }
+ elseif xoffset then
+ character.commands = { xoffset, slot }
+ else
+ character.commands = { yoffset, slot }
+ end
+ character.index = nil
+ end
+ elseif trace_defining then
+ report_math("no overloading dimensions of %C, not in font",unicode)
+ end
+ end
+ end
+ if set == nil then
+ set = { "default" }
+ end
+ if set == "all" or set == true then
+ for name, set in next, dimensions do
+ overload(set)
+ end
+ else
+ if type(set) == "string" then
+ set = utilities.parsers.settings_to_array(set)
+ end
+ if type(set) == "table" then
+ for i=1,#set do
+ local d = dimensions[set[i]]
+ if d then
+ overload(d)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
+sequencers.appendaction("aftercopyingcharacters", "system","mathematics.overloaddimensions")
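+
+-- The dimensions come from a goodies file as well; a rough sketch (the values
+-- are made up):
+--
+--   mathematics = {
+--       dimensions = {
+--           default = {
+--               [0x221A] = { height = 800000, yoffset = 50000 },
+--           },
+--       },
+--   }
+--
+-- where width/height/depth replace the character dimensions (scaled by the
+-- font factors) and xoffset/yoffset turn the character into a shifted copy of
+-- a private slot; passing set="all" applies every named set instead of just
+-- "default".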
+
+-- a couple of predefined tweaks:
+
+local tweaks = { }
+mathematics.tweaks = tweaks
+
+function tweaks.fixbadprime(target,original)
+ target.characters[0xFE325] = target.characters[0x2032]
+end
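+
+-- Such a tweak gets hooked in via the goodies too; a sketch:
+--
+--   mathematics = {
+--       tweaks = {
+--           aftercopying = {
+--               mathematics.tweaks.fixbadprime,
+--           },
+--       },
+--   }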
+
+-- helpers
+
+local setmetatableindex = table.setmetatableindex
+local family_font = node.family_font
+
+local fontcharacters = fonts.hashes.characters
+local extensibles = utilities.storage.allocate()
+fonts.hashes.extensibles = extensibles
+
+local chardata = characters.data
+local extensibles = mathematics.extensibles
+
+-- we use numbers at the tex end (otherwise we could stick to chars)
+
+local e_left = extensibles.left
+local e_right = extensibles.right
+local e_horizontal = extensibles.horizontal
+local e_vertical = extensibles.vertical
+local e_mixed = extensibles.mixed
+local e_unknown = extensibles.unknown
+
+local unknown = { e_unknown, false, false }
+
+local function extensiblecode(font,unicode)
+ local characters = fontcharacters[font]
+ local character = characters[unicode]
+ if not character then
+ return unknown
+ end
+ local code = unicode
+ local next = character.next
+ while next do
+ code = next
+ character = characters[next]
+ next = character.next
+ end
+ local char = chardata[unicode]
+ local mathextensible = char and char.mathextensible
+ if character.horiz_variants then
+ if character.vert_variants then
+ return { e_mixed, code, character }
+ else
+ local e = mathextensible and extensibles[mathextensible]
+ return e and { e, code, character } or unknown
+ end
+ elseif character.vert_variants then
+ local e = mathextensible and extensibles[mathextensible]
+ return e and { e, code, character } or unknown
+ else
+ return unknown
+ end
+end
+
+setmetatableindex(extensibles,function(extensibles,font)
+ local codes = { }
+ setmetatableindex(codes, function(codes,unicode)
+ local status = extensiblecode(font,unicode)
+ codes[unicode] = status
+ return status
+ end)
+ extensibles[font] = codes
+ return codes
+end)
+
+function mathematics.extensiblecode(family,unicode)
+ return extensibles[family_font(family or 0)][unicode][1]
+end
+
+function commands.extensiblecode(family,unicode)
+ context(extensibles[family_font(family or 0)][unicode][1])
+end
+
+-- left : [head] ...
+-- right : ... [head]
+-- horizontal : [head] ... [head]
+--
+-- abs(right["start"] - right["end"]) | right.advance | characters[right.glyph].width
+
+function commands.horizontalcode(family,unicode)
+ local font = family_font(family or 0)
+ local data = extensibles[font][unicode]
+ local kind = data[1]
+ if kind == e_left then
+ local charlist = data[3].horiz_variants
+ local characters = fontcharacters[font]
+ local left = charlist[1]
+ texdimen.scratchleftoffset = abs((left["start"] or 0) - (left["end"] or 0))
+ texdimen.scratchrightoffset = 0
+ elseif kind == e_right then
+ local charlist = data[3].horiz_variants
+ local characters = fontcharacters[font]
+ local right = charlist[#charlist]
+ texdimen.scratchleftoffset = 0
+ texdimen.scratchrightoffset = abs((right["start"] or 0) - (right["end"] or 0))
+ elseif kind == e_horizontal then
+ local charlist = data[3].horiz_variants
+ local characters = fontcharacters[font]
+ local left = charlist[1]
+ local right = charlist[#charlist]
+ texdimen.scratchleftoffset = abs((left["start"] or 0) - (left["end"] or 0))
+ texdimen.scratchrightoffset = abs((right["start"] or 0) - (right["end"] or 0))
+ else
+ texdimen.scratchleftoffset = 0
+ texdimen.scratchrightoffset = 0
+ end
+ context(kind)
+end
diff --git a/tex/context/base/math-dim.lua b/tex/context/base/math-dim.lua
index f4fc7905e..babed0afd 100644
--- a/tex/context/base/math-dim.lua
+++ b/tex/context/base/math-dim.lua
@@ -1,240 +1,240 @@
-if not modules then modules = { } end modules ['math-dim'] = {
- version = 1.001,
- comment = "companion to math-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- Beware: only Taco and Ulrik really understands in depth what these dimensions
--- do so if you run into problems ask on the context list.
-
--- The radical_rule value is also used as a trigger. In luatex the accent
--- placement happens either the opentype way (using top_accent cum suis) or the
--- traditional way. In order to determine what method to use the \Umathradicalrule
--- setting is consulted to determine what method to use. This is more efficient
--- than analyzing the (potentially spread over multiple families) situation. For
--- this reason we need to set the radical_rule here. It used to be "" in
--- which case the engine takes the rulethickness. In c-speak:
---
--- int compat_mode = (radical_rule(cur_style) == undefined_math_parameter) ;
-
-local abs, next = math.abs, next
-
-local defaults = {
- axis = { default = { "AxisHeight", "axis_height" }, },
- accent_base_height = { default = { "AccentBaseHeight", "x_height" }, },
- fraction_del_size = { default = { "FractionDelimiterSize", "delim2" },
- cramped_display_style = { "FractionDelimiterDisplayStyleSize", "delim1" },
- display_style = { "FractionDelimiterDisplayStyleSize", "delim1" }, },
- fraction_denom_down = { default = { "FractionDenominatorShiftDown", "denom2" },
- cramped_display_style = { "FractionDenominatorDisplayStyleShiftDown", "denom1" },
- display_style = { "FractionDenominatorDisplayStyleShiftDown", "denom1" }, },
- fraction_denom_vgap = { default = { "FractionDenominatorGapMin", "default_rule_thickness" },
- cramped_display_style = { "FractionDenominatorDisplayStyleGapMin", "3*default_rule_thickness" },
- display_style = { "FractionDenominatorDisplayStyleGapMin", "3*default_rule_thickness" }, },
- fraction_num_up = { default = { "FractionNumeratorShiftUp", "num2" },
- cramped_display_style = { "FractionNumeratorDisplayStyleShiftUp", "num1" },
- display_style = { "FractionNumeratorDisplayStyleShiftUp", "num1" }, },
- fraction_num_vgap = { default = { "FractionNumeratorGapMin", "default_rule_thickness" },
- cramped_display_style = { "FractionNumeratorDisplayStyleGapMin", "3*default_rule_thickness" },
- display_style = { "FractionNumeratorDisplayStyleGapMin", "3*default_rule_thickness" }, },
- fraction_rule = { default = { "FractionRuleThickness", "default_rule_thickness" }, },
- limit_above_bgap = { default = { "UpperLimitBaselineRiseMin", "big_op_spacing3" }, },
- limit_above_vgap = { default = { "UpperLimitGapMin", "big_op_spacing1" }, },
- limit_above_kern = { default = { "0", "big_op_spacing5" }, },
- limit_below_bgap = { default = { "LowerLimitBaselineDropMin", "big_op_spacing4" }, },
- limit_below_vgap = { default = { "LowerLimitGapMin", "big_op_spacing2" }, },
- limit_below_kern = { default = { "0", "big_op_spacing5" }, },
- math_operator_size = { default = { "DisplayOperatorMinHeight", "math_x_height*3" }, }, -- 2
- overbar_kern = { default = { "OverbarExtraAscender", "default_rule_thickness" }, },
- overbar_rule = { default = { "OverbarRuleThickness", "default_rule_thickness" }, },
- overbar_vgap = { default = { "OverbarVerticalGap", "3*default_rule_thickness" }, },
- quad = { default = { "font_size(f)", "math_quad" }, },
- radical_kern = { default = { "RadicalExtraAscender", "default_rule_thickness" }, },
- radical_rule = { default = { "RadicalRuleThickness", "default_rule_thickness" }, },
- -- default = { "surd_height(f)", "default_rule_thickness" },
- radical_vgap = { default = { "RadicalVerticalGap", "default_rule_thickness+(abs(default_rule_thickness)/4)" },
- display_style = { "RadicalDisplayStyleVerticalGap", "default_rule_thickness+(abs(math_x_height)/4)" }, },
- space_after_script = { default = { "SpaceAfterScript", "script_space" }, },
- stack_denom_down = { default = { "StackBottomShiftDown", "denom2" },
- cramped_display_style = { "StackBottomDisplayStyleShiftDown", "denom1" },
- display_style = { "StackBottomDisplayStyleShiftDown", "denom1" }, },
- stack_num_up = { default = { "StackTopShiftUp", "num3" },
- cramped_display_style = { "StackTopDisplayStyleShiftUp", "num1" },
- display_style = { "StackTopDisplayStyleShiftUp", "num1" }, },
- stack_vgap = { default = { "StackGapMin", "3*default_rule_thickness" },
- cramped_display_style = { "StackDisplayStyleGapMin", "7*default_rule_thickness" },
- display_style = { "StackDisplayStyleGapMin", "7*default_rule_thickness" }, },
- sub_shift_down = { default = { "SubscriptShiftDown", "sub1" }, },
- sub_shift_drop = { default = { "SubscriptBaselineDropMin", "sub_drop" }, },
- sub_sup_shift_down = { default = { "SubscriptShiftDown", "sub2" }, },
- sub_top_max = { default = { "SubscriptTopMax", "abs(math_x_height*4)/5" }, },
- subsup_vgap = { default = { "SubSuperscriptGapMin", "4*default_rule_thickness" }, },
- sup_bottom_min = { default = { "SuperscriptBottomMin", "abs(math_x_height)/4" }, },
- sup_shift_drop = { default = { "SuperscriptBaselineDropMax", "sup_drop" }, },
- sup_shift_up = { cramped_display_style = { "SuperscriptShiftUpCramped", "sup3" },
- cramped_script_script_style = { "SuperscriptShiftUpCramped", "sup3" },
- cramped_script_style = { "SuperscriptShiftUpCramped", "sup3" },
- cramped_text_style = { "SuperscriptShiftUpCramped", "sup3" },
- display_style = { "SuperscriptShiftUp", "sup1" },
- script_script_style = { "SuperscriptShiftUp", "sup2" },
- script_style = { "SuperscriptShiftUp", "sup2" },
- text_style = { "SuperscriptShiftUp", "sup2" }, },
- sup_sub_bottom_max = { default = { "SuperscriptBottomMaxWithSubscript", "abs(math_x_height*4)/5" }, },
- underbar_kern = { default = { "UnderbarExtraDescender", "0" }, },
- underbar_rule = { default = { "UnderbarRuleThickness", "default_rule_thickness" }, },
- underbar_vgap = { default = { "UnderbarVerticalGap", "3*default_rule_thickness" }, },
- connector_overlap_min = { default = { "MinConnectorOverlap", "0.25*default_rule_thickness" }, },
- over_delimiter_vgap = { default = { "StretchStackGapBelowMin", "big_op_spacing1" }, },
- over_delimiter_bgap = { default = { "StretchStackTopShiftUp", "big_op_spacing3" }, },
- under_delimiter_vgap = { default = { "StretchStackGapAboveMin", "big_op_spacing2" }, },
- under_delimiter_bgap = { default = { "StretchStackBottomShiftDown", "big_op_spacing4" }, },
- radical_degree_before = { default = { "RadicalKernBeforeDegree", "(5/18)*quad" }, },
- radical_degree_after = { default = { "RadicalKernAfterDegree", "(-10/18)*quad" }, },
- radical_degree_raise = { default = { "RadicalDegreeBottomRaisePercent", "60" }, },
-}
-
-local styles = {
- 'cramped_display_style',
- 'cramped_script_script_style',
- 'cramped_script_style',
- 'cramped_text_style',
- 'display_style',
- 'script_script_style',
- 'script_style',
- 'text_style',
-}
-
-for k, v in next, defaults do
- for _, s in next, styles do
- if not v[s] then
- v[s] = v.default
- end
- end
-end
-
--- we cannot use a metatable because we do a copy (takes a bit more work)
---
--- local mt = { } setmetatable(defaults,mt)
---
--- mt.__index = function(t,s)
--- return t.default or t.text_style or 0
--- end
-
-function mathematics.dimensions(dimens) -- beware, dimens get spoiled
- if dimens.SpaceAfterScript then
- dimens.SubscriptShiftDownWithSuperscript = dimens.SubscriptShiftDown * 1.5 -- move this one
- return table.fastcopy(dimens), { }
- elseif dimens.AxisHeight or dimens.axis_height then
- local t = { }
- local math_x_height = dimens.x_height or 10*65536
- local math_quad = dimens.quad or 10*65536
- local default_rule_thickness = dimens.FractionDenominatorGapMin or dimens.default_rule_thickness or 0.4*65536
- dimens["0"] = 0
- dimens["60"] = 60
- dimens["0.25*default_rule_thickness"] = default_rule_thickness / 4
- dimens["3*default_rule_thickness"] = 3 * default_rule_thickness
- dimens["4*default_rule_thickness"] = 4 * default_rule_thickness
- dimens["7*default_rule_thickness"] = 7 * default_rule_thickness
- dimens["(5/18)*quad"] = (math_quad * 5) / 18
- dimens["(-10/18)*quad"] = - (math_quad * 10) / 18
- dimens["math_x_height*3"] = math_x_height * 3 -- needs checking
- dimens["abs(math_x_height*4)/5"] = abs(math_x_height * 4) / 5
- dimens["default_rule_thickness+(abs(default_rule_thickness)/4)"] = default_rule_thickness+(abs(default_rule_thickness) / 4)
- dimens["default_rule_thickness+(abs(math_x_height)/4)"] = default_rule_thickness+(abs(math_x_height) / 4)
- dimens["abs(math_x_height)/4"] = abs(math_x_height) / 4
- dimens["abs(math_x_height*4)/5"] = abs(math_x_height * 4) / 5
- dimens[""] = false
- dimens["script_space"] = false -- at macro level
- for variable, styles in next, defaults do
- local tt = { }
- for style, default in next, styles do
- local one, two = default[1], default[2]
- local value = dimens[one]
- if value then
- tt[style] = value
- else
- value = dimens[two]
- if value == false then
- tt[style] = nil
- else
- tt[style] = value or 0
- end
- end
- end
- t[variable] = tt
- end
- local d = {
- AccentBaseHeight = t . accent_base_height . text_style,
- AxisHeight = t . axis . text_style,
- -- DelimitedSubFormulaMinHeight
- DisplayOperatorMinHeight = t . math_operator_size . text_style, -- no longer let tex decide (weird values)
- -- FlattenedAccentBaseHeight
- FractionDenominatorDisplayStyleGapMin = t . fraction_denom_vgap . display_style,
- FractionDenominatorDisplayStyleShiftDown = t . fraction_denom_down . display_style,
- FractionDenominatorGapMin = t . fraction_denom_vgap . text_style,
- FractionDenominatorShiftDown = t . fraction_denom_down . text_style,
- FractionNumeratorDisplayStyleGapMin = t . fraction_num_vgap . display_style,
- FractionNumeratorDisplayStyleShiftUp = t . fraction_num_up . display_style,
- FractionNumeratorGapMin = t . fraction_num_vgap . text_style,
- FractionNumeratorShiftUp = t . fraction_num_up . text_style,
- FractionRuleThickness = t . fraction_rule . text_style,
- FractionDelimiterSize = t . fraction_del_size . text_style,
- FractionDelimiterDisplayStyleSize = t . fraction_del_size . display_style,
- LowerLimitBaselineDropMin = t . limit_below_bgap . text_style,
- LowerLimitGapMin = t . limit_below_vgap . text_style,
- -- MathLeading
- MinConnectorOverlap = t . connector_overlap_min . text_style,
- OverbarExtraAscender = t . overbar_kern . text_style,
- OverbarRuleThickness = t . overbar_rule . text_style,
- OverbarVerticalGap = t . overbar_vgap . text_style,
- RadicalDisplayStyleVerticalGap = t . radical_vgap . display_style,
- RadicalExtraAscender = t . radical_kern . text_style,
- RadicalRuleThickness = t . radical_rule . text_style,
- RadicalVerticalGap = t . radical_vgap . text_style,
- RadicalKernBeforeDegree = t . radical_degree_before . display_style,
- RadicalKernAfterDegree = t . radical_degree_after . display_style,
- RadicalDegreeBottomRaisePercent = t . radical_degree_raise . display_style,
- -- ScriptPercentScaleDown
- -- ScriptScriptPercentScaleDown
- -- SkewedFractionHorizontalGap
- -- SkewedFractionVerticalGap
- SpaceAfterScript = t . space_after_script . text_style,
- StackBottomDisplayStyleShiftDown = t . stack_denom_down . display_style,
- StackBottomShiftDown = t . stack_denom_down . text_style,
- StackDisplayStyleGapMin = t . stack_vgap . display_style,
- StackGapMin = t . stack_vgap . text_style,
- StackTopDisplayStyleShiftUp = t . stack_num_up . display_style,
- StackTopShiftUp = t . stack_num_up . text_style,
- StretchStackGapBelowMin = t . over_delimiter_vgap . text_style,
- StretchStackTopShiftUp = t . over_delimiter_bgap . text_style,
- StretchStackGapAboveMin = t . under_delimiter_vgap . text_style,
- StretchStackBottomShiftDown = t . under_delimiter_bgap . text_style,
- SubSuperscriptGapMin = t . subsup_vgap . text_style,
- SubscriptBaselineDropMin = t . sub_shift_drop . text_style,
- SubscriptShiftDown = t . sub_shift_down . text_style,
- SubscriptShiftDownWithSuperscript = t . sub_sup_shift_down . text_style,
- SubscriptTopMax = t . sub_top_max . text_style,
- SuperscriptBaselineDropMax = t . sup_shift_drop . text_style,
- SuperscriptBottomMaxWithSubscript = t . sup_sub_bottom_max . text_style,
- SuperscriptBottomMin = t . sup_bottom_min . text_style,
- SuperscriptShiftUp = t . sup_shift_up . text_style,
- SuperscriptShiftUpCramped = t . sup_shift_up . cramped_text_style,
- UnderbarExtraDescender = t . underbar_kern . text_style,
- UnderbarRuleThickness = t . underbar_rule . text_style,
- UnderbarVerticalGap = t . underbar_vgap . text_style,
- UpperLimitBaselineRiseMin = t . limit_above_bgap . text_style,
- UpperLimitGapMin = t . limit_above_vgap . text_style,
- }
-
- -- too fragile for tx/px ... even the same values give different results
- d.DisplayOperatorMinHeight = nil
- --
- d.AccentBaseHeight = 0 -- here? still?
- return d, t -- t only for diagnostics
- else
- return { }, { }
- end
-end
-
+if not modules then modules = { } end modules ['math-dim'] = {
+ version = 1.001,
+ comment = "companion to math-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- Beware: only Taco and Ulrik really understand in depth what these dimensions
+-- do, so if you run into problems, ask on the context list.
+
+-- The radical_rule value is also used as a trigger. In luatex the accent
+-- placement happens either the opentype way (using top_accent cum suis) or the
+-- traditional way. The \Umathradicalrule setting is consulted to determine which
+-- method to use. This is more efficient
+-- than analyzing the (potentially spread over multiple families) situation. For
+-- this reason we need to set the radical_rule here. It used to be "" in
+-- which case the engine takes the rulethickness. In c-speak:
+--
+-- int compat_mode = (radical_rule(cur_style) == undefined_math_parameter) ;
+
+local abs, next = math.abs, next
+
+local defaults = {
+ axis = { default = { "AxisHeight", "axis_height" }, },
+ accent_base_height = { default = { "AccentBaseHeight", "x_height" }, },
+ fraction_del_size = { default = { "FractionDelimiterSize", "delim2" },
+ cramped_display_style = { "FractionDelimiterDisplayStyleSize", "delim1" },
+ display_style = { "FractionDelimiterDisplayStyleSize", "delim1" }, },
+ fraction_denom_down = { default = { "FractionDenominatorShiftDown", "denom2" },
+ cramped_display_style = { "FractionDenominatorDisplayStyleShiftDown", "denom1" },
+ display_style = { "FractionDenominatorDisplayStyleShiftDown", "denom1" }, },
+ fraction_denom_vgap = { default = { "FractionDenominatorGapMin", "default_rule_thickness" },
+ cramped_display_style = { "FractionDenominatorDisplayStyleGapMin", "3*default_rule_thickness" },
+ display_style = { "FractionDenominatorDisplayStyleGapMin", "3*default_rule_thickness" }, },
+ fraction_num_up = { default = { "FractionNumeratorShiftUp", "num2" },
+ cramped_display_style = { "FractionNumeratorDisplayStyleShiftUp", "num1" },
+ display_style = { "FractionNumeratorDisplayStyleShiftUp", "num1" }, },
+ fraction_num_vgap = { default = { "FractionNumeratorGapMin", "default_rule_thickness" },
+ cramped_display_style = { "FractionNumeratorDisplayStyleGapMin", "3*default_rule_thickness" },
+ display_style = { "FractionNumeratorDisplayStyleGapMin", "3*default_rule_thickness" }, },
+ fraction_rule = { default = { "FractionRuleThickness", "default_rule_thickness" }, },
+ limit_above_bgap = { default = { "UpperLimitBaselineRiseMin", "big_op_spacing3" }, },
+ limit_above_vgap = { default = { "UpperLimitGapMin", "big_op_spacing1" }, },
+ limit_above_kern = { default = { "0", "big_op_spacing5" }, },
+ limit_below_bgap = { default = { "LowerLimitBaselineDropMin", "big_op_spacing4" }, },
+ limit_below_vgap = { default = { "LowerLimitGapMin", "big_op_spacing2" }, },
+ limit_below_kern = { default = { "0", "big_op_spacing5" }, },
+ math_operator_size = { default = { "DisplayOperatorMinHeight", "math_x_height*3" }, }, -- 2
+ overbar_kern = { default = { "OverbarExtraAscender", "default_rule_thickness" }, },
+ overbar_rule = { default = { "OverbarRuleThickness", "default_rule_thickness" }, },
+ overbar_vgap = { default = { "OverbarVerticalGap", "3*default_rule_thickness" }, },
+ quad = { default = { "font_size(f)", "math_quad" }, },
+ radical_kern = { default = { "RadicalExtraAscender", "default_rule_thickness" }, },
+ radical_rule = { default = { "RadicalRuleThickness", "default_rule_thickness" }, },
+ -- default = { "surd_height(f)", "default_rule_thickness" },
+ radical_vgap = { default = { "RadicalVerticalGap", "default_rule_thickness+(abs(default_rule_thickness)/4)" },
+ display_style = { "RadicalDisplayStyleVerticalGap", "default_rule_thickness+(abs(math_x_height)/4)" }, },
+ space_after_script = { default = { "SpaceAfterScript", "script_space" }, },
+ stack_denom_down = { default = { "StackBottomShiftDown", "denom2" },
+ cramped_display_style = { "StackBottomDisplayStyleShiftDown", "denom1" },
+ display_style = { "StackBottomDisplayStyleShiftDown", "denom1" }, },
+ stack_num_up = { default = { "StackTopShiftUp", "num3" },
+ cramped_display_style = { "StackTopDisplayStyleShiftUp", "num1" },
+ display_style = { "StackTopDisplayStyleShiftUp", "num1" }, },
+ stack_vgap = { default = { "StackGapMin", "3*default_rule_thickness" },
+ cramped_display_style = { "StackDisplayStyleGapMin", "7*default_rule_thickness" },
+ display_style = { "StackDisplayStyleGapMin", "7*default_rule_thickness" }, },
+ sub_shift_down = { default = { "SubscriptShiftDown", "sub1" }, },
+ sub_shift_drop = { default = { "SubscriptBaselineDropMin", "sub_drop" }, },
+ sub_sup_shift_down = { default = { "SubscriptShiftDown", "sub2" }, },
+ sub_top_max = { default = { "SubscriptTopMax", "abs(math_x_height*4)/5" }, },
+ subsup_vgap = { default = { "SubSuperscriptGapMin", "4*default_rule_thickness" }, },
+ sup_bottom_min = { default = { "SuperscriptBottomMin", "abs(math_x_height)/4" }, },
+ sup_shift_drop = { default = { "SuperscriptBaselineDropMax", "sup_drop" }, },
+ sup_shift_up = { cramped_display_style = { "SuperscriptShiftUpCramped", "sup3" },
+ cramped_script_script_style = { "SuperscriptShiftUpCramped", "sup3" },
+ cramped_script_style = { "SuperscriptShiftUpCramped", "sup3" },
+ cramped_text_style = { "SuperscriptShiftUpCramped", "sup3" },
+ display_style = { "SuperscriptShiftUp", "sup1" },
+ script_script_style = { "SuperscriptShiftUp", "sup2" },
+ script_style = { "SuperscriptShiftUp", "sup2" },
+ text_style = { "SuperscriptShiftUp", "sup2" }, },
+ sup_sub_bottom_max = { default = { "SuperscriptBottomMaxWithSubscript", "abs(math_x_height*4)/5" }, },
+ underbar_kern = { default = { "UnderbarExtraDescender", "0" }, },
+ underbar_rule = { default = { "UnderbarRuleThickness", "default_rule_thickness" }, },
+ underbar_vgap = { default = { "UnderbarVerticalGap", "3*default_rule_thickness" }, },
+ connector_overlap_min = { default = { "MinConnectorOverlap", "0.25*default_rule_thickness" }, },
+ over_delimiter_vgap = { default = { "StretchStackGapBelowMin", "big_op_spacing1" }, },
+ over_delimiter_bgap = { default = { "StretchStackTopShiftUp", "big_op_spacing3" }, },
+ under_delimiter_vgap = { default = { "StretchStackGapAboveMin", "big_op_spacing2" }, },
+ under_delimiter_bgap = { default = { "StretchStackBottomShiftDown", "big_op_spacing4" }, },
+ radical_degree_before = { default = { "RadicalKernBeforeDegree", "(5/18)*quad" }, },
+ radical_degree_after = { default = { "RadicalKernAfterDegree", "(-10/18)*quad" }, },
+ radical_degree_raise = { default = { "RadicalDegreeBottomRaisePercent", "60" }, },
+}
+
+local styles = {
+ 'cramped_display_style',
+ 'cramped_script_script_style',
+ 'cramped_script_style',
+ 'cramped_text_style',
+ 'display_style',
+ 'script_script_style',
+ 'script_style',
+ 'text_style',
+}
+
+for k, v in next, defaults do
+ for _, s in next, styles do
+ if not v[s] then
+ v[s] = v.default
+ end
+ end
+end
+
+-- we cannot use a metatable because we do a copy (takes a bit more work)
+--
+-- local mt = { } setmetatable(defaults,mt)
+--
+-- mt.__index = function(t,s)
+-- return t.default or t.text_style or 0
+-- end
+
+function mathematics.dimensions(dimens) -- beware, dimens get spoiled
+ if dimens.SpaceAfterScript then
+ dimens.SubscriptShiftDownWithSuperscript = dimens.SubscriptShiftDown * 1.5 -- move this one
+ return table.fastcopy(dimens), { }
+ elseif dimens.AxisHeight or dimens.axis_height then
+ local t = { }
+ local math_x_height = dimens.x_height or 10*65536
+ local math_quad = dimens.quad or 10*65536
+ local default_rule_thickness = dimens.FractionDenominatorGapMin or dimens.default_rule_thickness or 0.4*65536
+ dimens["0"] = 0
+ dimens["60"] = 60
+ dimens["0.25*default_rule_thickness"] = default_rule_thickness / 4
+ dimens["3*default_rule_thickness"] = 3 * default_rule_thickness
+ dimens["4*default_rule_thickness"] = 4 * default_rule_thickness
+ dimens["7*default_rule_thickness"] = 7 * default_rule_thickness
+ dimens["(5/18)*quad"] = (math_quad * 5) / 18
+ dimens["(-10/18)*quad"] = - (math_quad * 10) / 18
+ dimens["math_x_height*3"] = math_x_height * 3 -- needs checking
+ dimens["abs(math_x_height*4)/5"] = abs(math_x_height * 4) / 5
+ dimens["default_rule_thickness+(abs(default_rule_thickness)/4)"] = default_rule_thickness+(abs(default_rule_thickness) / 4)
+ dimens["default_rule_thickness+(abs(math_x_height)/4)"] = default_rule_thickness+(abs(math_x_height) / 4)
+ dimens["abs(math_x_height)/4"] = abs(math_x_height) / 4
+ dimens["abs(math_x_height*4)/5"] = abs(math_x_height * 4) / 5
+ dimens[""] = false
+ dimens["script_space"] = false -- at macro level
+ for variable, styles in next, defaults do
+ local tt = { }
+ for style, default in next, styles do
+ local one, two = default[1], default[2]
+ local value = dimens[one]
+ if value then
+ tt[style] = value
+ else
+ value = dimens[two]
+ if value == false then
+ tt[style] = nil
+ else
+ tt[style] = value or 0
+ end
+ end
+ end
+ t[variable] = tt
+ end
+ local d = {
+ AccentBaseHeight = t . accent_base_height . text_style,
+ AxisHeight = t . axis . text_style,
+ -- DelimitedSubFormulaMinHeight
+ DisplayOperatorMinHeight = t . math_operator_size . text_style, -- no longer let tex decide (weird values)
+ -- FlattenedAccentBaseHeight
+ FractionDenominatorDisplayStyleGapMin = t . fraction_denom_vgap . display_style,
+ FractionDenominatorDisplayStyleShiftDown = t . fraction_denom_down . display_style,
+ FractionDenominatorGapMin = t . fraction_denom_vgap . text_style,
+ FractionDenominatorShiftDown = t . fraction_denom_down . text_style,
+ FractionNumeratorDisplayStyleGapMin = t . fraction_num_vgap . display_style,
+ FractionNumeratorDisplayStyleShiftUp = t . fraction_num_up . display_style,
+ FractionNumeratorGapMin = t . fraction_num_vgap . text_style,
+ FractionNumeratorShiftUp = t . fraction_num_up . text_style,
+ FractionRuleThickness = t . fraction_rule . text_style,
+ FractionDelimiterSize = t . fraction_del_size . text_style,
+ FractionDelimiterDisplayStyleSize = t . fraction_del_size . display_style,
+ LowerLimitBaselineDropMin = t . limit_below_bgap . text_style,
+ LowerLimitGapMin = t . limit_below_vgap . text_style,
+ -- MathLeading
+ MinConnectorOverlap = t . connector_overlap_min . text_style,
+ OverbarExtraAscender = t . overbar_kern . text_style,
+ OverbarRuleThickness = t . overbar_rule . text_style,
+ OverbarVerticalGap = t . overbar_vgap . text_style,
+ RadicalDisplayStyleVerticalGap = t . radical_vgap . display_style,
+ RadicalExtraAscender = t . radical_kern . text_style,
+ RadicalRuleThickness = t . radical_rule . text_style,
+ RadicalVerticalGap = t . radical_vgap . text_style,
+ RadicalKernBeforeDegree = t . radical_degree_before . display_style,
+ RadicalKernAfterDegree = t . radical_degree_after . display_style,
+ RadicalDegreeBottomRaisePercent = t . radical_degree_raise . display_style,
+ -- ScriptPercentScaleDown
+ -- ScriptScriptPercentScaleDown
+ -- SkewedFractionHorizontalGap
+ -- SkewedFractionVerticalGap
+ SpaceAfterScript = t . space_after_script . text_style,
+ StackBottomDisplayStyleShiftDown = t . stack_denom_down . display_style,
+ StackBottomShiftDown = t . stack_denom_down . text_style,
+ StackDisplayStyleGapMin = t . stack_vgap . display_style,
+ StackGapMin = t . stack_vgap . text_style,
+ StackTopDisplayStyleShiftUp = t . stack_num_up . display_style,
+ StackTopShiftUp = t . stack_num_up . text_style,
+ StretchStackGapBelowMin = t . over_delimiter_vgap . text_style,
+ StretchStackTopShiftUp = t . over_delimiter_bgap . text_style,
+ StretchStackGapAboveMin = t . under_delimiter_vgap . text_style,
+ StretchStackBottomShiftDown = t . under_delimiter_bgap . text_style,
+ SubSuperscriptGapMin = t . subsup_vgap . text_style,
+ SubscriptBaselineDropMin = t . sub_shift_drop . text_style,
+ SubscriptShiftDown = t . sub_shift_down . text_style,
+ SubscriptShiftDownWithSuperscript = t . sub_sup_shift_down . text_style,
+ SubscriptTopMax = t . sub_top_max . text_style,
+ SuperscriptBaselineDropMax = t . sup_shift_drop . text_style,
+ SuperscriptBottomMaxWithSubscript = t . sup_sub_bottom_max . text_style,
+ SuperscriptBottomMin = t . sup_bottom_min . text_style,
+ SuperscriptShiftUp = t . sup_shift_up . text_style,
+ SuperscriptShiftUpCramped = t . sup_shift_up . cramped_text_style,
+ UnderbarExtraDescender = t . underbar_kern . text_style,
+ UnderbarRuleThickness = t . underbar_rule . text_style,
+ UnderbarVerticalGap = t . underbar_vgap . text_style,
+ UpperLimitBaselineRiseMin = t . limit_above_bgap . text_style,
+ UpperLimitGapMin = t . limit_above_vgap . text_style,
+ }
+
+ -- too fragile for tx/px ... even the same values give different results
+ d.DisplayOperatorMinHeight = nil
+ --
+ d.AccentBaseHeight = 0 -- here? still?
+ return d, t -- t only for diagnostics
+ else
+ return { }, { }
+ end
+end
+
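Editorial note (not part of the patch): the defaults table at the top of this hunk pairs every internal math parameter with, per style, a two-element list — an OpenType MathConstants name and a key naming a value precomputed from the traditional TeX font dimensions. mathematics.dimensions consults the OpenType name first and only then falls back to the TeX-derived entry (0 if neither exists, dropped when the fallback is explicitly false). A minimal standalone sketch of that lookup order, using made-up sample numbers rather than values from any real font:

-- editorial sketch: emulate the two-step lookup mathematics.dimensions does
-- for one variable/style pair (sample values, not from any real font)
local default_rule_thickness = 26214                    -- roughly 0.4pt in scaled points

local dimens = {
    -- FractionRuleThickness is absent: pretend the font lacks the OpenType constant
    ["default_rule_thickness"]   = default_rule_thickness,
    ["3*default_rule_thickness"] = 3 * default_rule_thickness,
}

local fraction_rule = { default = { "FractionRuleThickness", "default_rule_thickness" } }

local function resolve(spec)
    local one, two = spec[1], spec[2]
    local value = dimens[one]                           -- OpenType constant first
    if value then
        return value
    end
    value = dimens[two]                                 -- TeX-derived fallback second
    if value == false then
        return nil                                      -- explicitly disabled (e.g. script_space)
    end
    return value or 0
end

print(resolve(fraction_rule.default))                   -- 26214, i.e. the fallback value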
diff --git a/tex/context/base/math-ext.lua b/tex/context/base/math-ext.lua
index b00d6cde2..2b6860d75 100644
--- a/tex/context/base/math-ext.lua
+++ b/tex/context/base/math-ext.lua
@@ -1,197 +1,197 @@
-if not modules then modules = { } end modules ['math-ext'] = {
- version = 1.001,
- comment = "companion to math-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local trace_virtual = false trackers.register("math.virtual", function(v) trace_virtual = v end)
-
-local basename = file.basename
-
-local mathematics = mathematics
-local characters = characters
-
-local report_math = logs.reporter("mathematics")
-
-mathematics.extras = mathematics.extras or { }
-local extras = mathematics.extras
-
-characters.math = characters.math or { }
-local mathdata = characters.math
-local chardata = characters.data
-
-function extras.add(unicode,t) -- todo: if already stored ...
- local min, max = mathematics.extrabase, mathematics.privatebase - 1
- -- if mathdata[unicode] or chardata[unicode] then
- -- report_math("extra %U overloads existing character",unicode)
- -- end
- if unicode >= min and unicode <= max then
- mathdata[unicode], chardata[unicode] = t, t
- else
- report_math("extra %U should be in range %U - %U",unicode,min,max)
- end
-end
-
-function extras.copy(target,original)
- local characters = target.characters
- local properties = target.properties
- local parameters = target.parameters
- for unicode, extradesc in next, mathdata do
- -- always, because in an intermediate step we can have a non math font
- local extrachar = characters[unicode]
- local nextinsize = extradesc.nextinsize
- if nextinsize then
- local first = 1
- local charused = unicode
- if not extrachar then
- for i=1,#nextinsize do
- local slot = nextinsize[i]
- extrachar = characters[slot]
- if extrachar then
- characters[unicode] = extrachar
- first = i + 1
- charused = slot
- break
- end
- end
- end
- if not extrachar then
- if trace_virtual then
- report_math("extra %U in %a at %p with class %a and name %a is not mapped",
- unicode,basename(properties.fullname),parameters.size,
- extradesc.mathclass,extradesc.mathname)
- end
- elseif not extrachar.next then
- local nextused = false
- for i=first,#nextinsize do
- local nextslot = nextinsize[i]
- local nextbase = characters[nextslot]
- if nextbase then
- local nextnext = nextbase and nextbase.next
- if nextnext then
- local nextchar = characters[nextnext]
- if nextchar then
- extrachar.next = nextchar
- nextused = nextslot
- break
- end
- end
- end
- end
- if trace_virtual then
- if nextused then
- report_math("extra %U in %a at %p with class %a and name %a maps onto %U with next %U",
- unicode,basename(properties.fullname),parameters.size,charused,
- extradesc.mathclass,extradesc.mathname,nextused)
- else
- report_math("extra %U in %a at %p with class %a and name %a maps onto %U with no next",
- unicode,basename(properties.fullname),parameters.size,charused,
- extradesc.mathclass,extradesc.mathname)
- end
- end
- else
- if trace_virtual then
- report_math("extra %U in %a at %p with class %a and name %a maps onto %U with no next", -- own next
- unicode,basename(properties.fullname),parameters.size,charused,
- extradesc.mathclass,extradesc.mathname)
- end
- end
- end
- end
-end
-
-utilities.sequencers.appendaction(mathactions,"system","mathematics.extras.copy")
-
--- 0xFE302 -- 0xFE320 for accents (gone with new lm/gyre)
---
--- extras.add(0xFE302, {
--- category="mn",
--- description="WIDE MATHEMATICAL HAT",
--- direction="nsm",
--- linebreak="cm",
--- mathclass="topaccent",
--- mathname="widehat",
--- mathstretch="h",
--- unicodeslot=0xFE302,
--- nextinsize={ 0x00302, 0x0005E },
--- } )
---
--- extras.add(0xFE303, {
--- category="mn",
--- cjkwd="a",
--- description="WIDE MATHEMATICAL TILDE",
--- direction="nsm",
--- linebreak="cm",
--- mathclass="topaccent",
--- mathname="widetilde",
--- mathstretch="h",
--- unicodeslot=0xFE303,
--- nextinsize={ 0x00303, 0x0007E },
--- } )
-
--- 0xFE321 -- 0xFE340 for missing characters
-
-extras.add(0xFE321, {
- category="sm",
- description="MATHEMATICAL SHORT BAR",
- -- direction="on",
- -- linebreak="nu",
- mathclass="relation",
- mathname="mapstochar",
- unicodeslot=0xFE321,
-} )
-
-extras.add(0xFE322, {
- category="sm",
- description="MATHEMATICAL LEFT HOOK",
- mathclass="relation",
- mathname="lhook",
- unicodeslot=0xFE322,
-} )
-
-extras.add(0xFE323, {
- category="sm",
- description="MATHEMATICAL RIGHT HOOK",
- mathclass="relation",
- mathname="rhook",
- unicodeslot=0xFE323,
-} )
-
-extras.add(0xFE324, {
- category="sm",
- description="MATHEMATICAL SHORT BAR MIRRORED",
--- direction="on",
--- linebreak="nu",
- mathclass="relation",
- mathname="mapsfromchar",
- unicodeslot=0xFE324,
-} )
-
---~ extras.add(0xFE304, {
---~ category="sm",
---~ description="TOP AND BOTTOM PARENTHESES",
---~ direction="on",
---~ linebreak="al",
---~ mathclass="doubleaccent",
---~ mathname="doubleparent",
---~ unicodeslot=0xFE304,
---~ accents={ 0x023DC, 0x023DD },
---~ } )
-
---~ extras.add(0xFE305, {
---~ category="sm",
---~ description="TOP AND BOTTOM BRACES",
---~ direction="on",
---~ linebreak="al",
---~ mathclass="doubleaccent",
---~ mathname="doublebrace",
---~ unicodeslot=0xFE305,
---~ accents={ 0x023DE, 0x023DF },
---~ } )
-
---~ \Umathchardef\braceld="0 "1 "FF07A
---~ \Umathchardef\bracerd="0 "1 "FF07B
---~ \Umathchardef\bracelu="0 "1 "FF07C
---~ \Umathchardef\braceru="0 "1 "FF07D
+if not modules then modules = { } end modules ['math-ext'] = {
+ version = 1.001,
+ comment = "companion to math-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local trace_virtual = false trackers.register("math.virtual", function(v) trace_virtual = v end)
+
+local basename = file.basename
+
+local mathematics = mathematics
+local characters = characters
+
+local report_math = logs.reporter("mathematics")
+
+mathematics.extras = mathematics.extras or { }
+local extras = mathematics.extras
+
+characters.math = characters.math or { }
+local mathdata = characters.math
+local chardata = characters.data
+
+function extras.add(unicode,t) -- todo: if already stored ...
+ local min, max = mathematics.extrabase, mathematics.privatebase - 1
+ -- if mathdata[unicode] or chardata[unicode] then
+ -- report_math("extra %U overloads existing character",unicode)
+ -- end
+ if unicode >= min and unicode <= max then
+ mathdata[unicode], chardata[unicode] = t, t
+ else
+ report_math("extra %U should be in range %U - %U",unicode,min,max)
+ end
+end
+
+function extras.copy(target,original)
+ local characters = target.characters
+ local properties = target.properties
+ local parameters = target.parameters
+ for unicode, extradesc in next, mathdata do
+ -- always, because in an intermediate step we can have a non math font
+ local extrachar = characters[unicode]
+ local nextinsize = extradesc.nextinsize
+ if nextinsize then
+ local first = 1
+ local charused = unicode
+ if not extrachar then
+ for i=1,#nextinsize do
+ local slot = nextinsize[i]
+ extrachar = characters[slot]
+ if extrachar then
+ characters[unicode] = extrachar
+ first = i + 1
+ charused = slot
+ break
+ end
+ end
+ end
+ if not extrachar then
+ if trace_virtual then
+ report_math("extra %U in %a at %p with class %a and name %a is not mapped",
+ unicode,basename(properties.fullname),parameters.size,
+ extradesc.mathclass,extradesc.mathname)
+ end
+ elseif not extrachar.next then
+ local nextused = false
+ for i=first,#nextinsize do
+ local nextslot = nextinsize[i]
+ local nextbase = characters[nextslot]
+ if nextbase then
+ local nextnext = nextbase and nextbase.next
+ if nextnext then
+ local nextchar = characters[nextnext]
+ if nextchar then
+ extrachar.next = nextchar
+ nextused = nextslot
+ break
+ end
+ end
+ end
+ end
+ if trace_virtual then
+ if nextused then
+ report_math("extra %U in %a at %p with class %a and name %a maps onto %U with next %U",
+ unicode,basename(properties.fullname),parameters.size,charused,
+ extradesc.mathclass,extradesc.mathname,nextused)
+ else
+ report_math("extra %U in %a at %p with class %a and name %a maps onto %U with no next",
+ unicode,basename(properties.fullname),parameters.size,charused,
+ extradesc.mathclass,extradesc.mathname)
+ end
+ end
+ else
+ if trace_virtual then
+ report_math("extra %U in %a at %p with class %a and name %a maps onto %U with no next", -- own next
+ unicode,basename(properties.fullname),parameters.size,charused,
+ extradesc.mathclass,extradesc.mathname)
+ end
+ end
+ end
+ end
+end
+
+utilities.sequencers.appendaction(mathactions,"system","mathematics.extras.copy")
+
+-- 0xFE302 -- 0xFE320 for accents (gone with new lm/gyre)
+--
+-- extras.add(0xFE302, {
+-- category="mn",
+-- description="WIDE MATHEMATICAL HAT",
+-- direction="nsm",
+-- linebreak="cm",
+-- mathclass="topaccent",
+-- mathname="widehat",
+-- mathstretch="h",
+-- unicodeslot=0xFE302,
+-- nextinsize={ 0x00302, 0x0005E },
+-- } )
+--
+-- extras.add(0xFE303, {
+-- category="mn",
+-- cjkwd="a",
+-- description="WIDE MATHEMATICAL TILDE",
+-- direction="nsm",
+-- linebreak="cm",
+-- mathclass="topaccent",
+-- mathname="widetilde",
+-- mathstretch="h",
+-- unicodeslot=0xFE303,
+-- nextinsize={ 0x00303, 0x0007E },
+-- } )
+
+-- 0xFE321 -- 0xFE340 for missing characters
+
+extras.add(0xFE321, {
+ category="sm",
+ description="MATHEMATICAL SHORT BAR",
+ -- direction="on",
+ -- linebreak="nu",
+ mathclass="relation",
+ mathname="mapstochar",
+ unicodeslot=0xFE321,
+} )
+
+extras.add(0xFE322, {
+ category="sm",
+ description="MATHEMATICAL LEFT HOOK",
+ mathclass="relation",
+ mathname="lhook",
+ unicodeslot=0xFE322,
+} )
+
+extras.add(0xFE323, {
+ category="sm",
+ description="MATHEMATICAL RIGHT HOOK",
+ mathclass="relation",
+ mathname="rhook",
+ unicodeslot=0xFE323,
+} )
+
+extras.add(0xFE324, {
+ category="sm",
+ description="MATHEMATICAL SHORT BAR MIRRORED",
+-- direction="on",
+-- linebreak="nu",
+ mathclass="relation",
+ mathname="mapsfromchar",
+ unicodeslot=0xFE324,
+} )
+
+--~ extras.add(0xFE304, {
+--~ category="sm",
+--~ description="TOP AND BOTTOM PARENTHESES",
+--~ direction="on",
+--~ linebreak="al",
+--~ mathclass="doubleaccent",
+--~ mathname="doubleparent",
+--~ unicodeslot=0xFE304,
+--~ accents={ 0x023DC, 0x023DD },
+--~ } )
+
+--~ extras.add(0xFE305, {
+--~ category="sm",
+--~ description="TOP AND BOTTOM BRACES",
+--~ direction="on",
+--~ linebreak="al",
+--~ mathclass="doubleaccent",
+--~ mathname="doublebrace",
+--~ unicodeslot=0xFE305,
+--~ accents={ 0x023DE, 0x023DF },
+--~ } )
+
+--~ \Umathchardef\braceld="0 "1 "FF07A
+--~ \Umathchardef\bracerd="0 "1 "FF07B
+--~ \Umathchardef\bracelu="0 "1 "FF07C
+--~ \Umathchardef\braceru="0 "1 "FF07D
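Editorial note (not part of the patch): math-ext.lua has two halves — extras.add stores a character description in characters.math under a private slot (checking only that the slot lies in the extrabase..privatebase-1 window), and extras.copy later binds such a slot to a concrete font by walking nextinsize until one of the listed glyphs exists in that font. A sketch of that resolution step with an invented slot and a toy character table; nothing below is taken from the patch:

-- editorial sketch: resolve an invented private slot against a toy font the
-- way the nextinsize loop in extras.copy does (slot and glyphs are made up)
local slot       = 0xFE330                              -- hypothetical private slot
local nextinsize = { 0x2190, 0x002D }                   -- candidate glyphs, tried in order

local characters = {                                    -- toy font: only HYPHEN-MINUS present
    [0x002D] = { width = 333333, height = 400000, depth = 0 },
}

for i = 1, #nextinsize do
    local donor = characters[nextinsize[i]]
    if donor then
        characters[slot] = donor                        -- private slot now renders as the donor
        break
    end
end

print(characters[slot] ~= nil)                          -- true: resolved via 0x002D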
diff --git a/tex/context/base/math-fbk.lua b/tex/context/base/math-fbk.lua
index eebc4e4e7..f34019b6e 100644
--- a/tex/context/base/math-fbk.lua
+++ b/tex/context/base/math-fbk.lua
@@ -1,312 +1,312 @@
-if not modules then modules = { } end modules ['math-fbk'] = {
- version = 1.001,
- comment = "companion to math-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local trace_fallbacks = false trackers.register("math.fallbacks", function(v) trace_fallbacks = v end)
-
-local report_fallbacks = logs.reporter("math","fallbacks")
-
-local fallbacks = { }
-mathematics.fallbacks = fallbacks
-
-local virtualcharacters = { }
-
-local identifiers = fonts.hashes.identifiers
-local lastmathids = fonts.hashes.lastmathids
-
--- we need a trick (todo): if we define scriptscript, script and text in
--- that order we could use their id's .. i.e. we could always add a font
--- table with those id's .. in fact, we could also add a whole lot more
--- as it doesn't hurt
---
--- todo: use index 'true when luatex provides that feature (on the agenda)
-
-function fallbacks.apply(target,original)
- local mathparameters = target.mathparameters -- why not hasmath
- if mathparameters then
- local characters = target.characters
- local parameters = target.parameters
- local mathsize = parameters.mathsize
- local size = parameters.size
- local usedfonts = target.fonts
- if not usedfonts then
- usedfonts = { }
- target.fonts = usedfonts
- end
- -- This is not okay yet ... we have no proper way to refer to 'self'
- -- otherwise I will make my own id allocator).
-local self = #usedfonts == 0 and font.nextid() or nil -- will be true
- local textid, scriptid, scriptscriptid
- local textindex, scriptindex, scriptscriptindex
- local textdata, scriptdata, scriptscriptdata
- if mathsize == 3 then
- -- scriptscriptsize
- -- textid = nil -- self
- -- scriptid = nil -- no smaller
- -- scriptscriptid = nil -- no smaller
-textid = self
-scriptid = self
-scriptscriptid = self
- elseif mathsize == 2 then
- -- scriptsize
- -- textid = nil -- self
-textid = self
- scriptid = lastmathids[3]
- scriptscriptid = lastmathids[3]
- else
- -- textsize
- -- textid = nil -- self
-textid = self
- scriptid = lastmathids[2]
- scriptscriptid = lastmathids[3]
- end
- if textid then
- textindex = #usedfonts + 1
- usedfonts[textindex] = { id = textid }
- textdata = identifiers[textid]
- else
- textdata = target
- end
- if scriptid then
- scriptindex = #usedfonts + 1
- usedfonts[scriptindex] = { id = scriptid }
- scriptdata = identifiers[scriptid]
- else
- scriptindex = textindex
- scriptdata = textdata
- end
- if scriptscriptid then
- scriptscriptindex = #usedfonts + 1
- usedfonts[scriptscriptindex] = { id = scriptscriptid }
- scriptscriptdata = identifiers[scriptscriptid]
- else
- scriptscriptindex = scriptindex
- scriptscriptdata = scriptdata
- end
--- report_fallbacks("used textid: %s, used script id: %s, used scriptscript id: %s",
--- tostring(textid),tostring(scriptid),tostring(scriptscriptid))
- local data = {
- textdata = textdata,
- scriptdata = scriptdata,
- scriptscriptdata = scriptscriptdata,
- textindex = textindex,
- scriptindex = scriptindex,
- scriptscriptindex = scriptscriptindex,
- characters = characters,
- unicode = k,
- target = target,
- original = original,
- size = size,
- mathsize = mathsize,
- }
--- inspect(usedfonts)
- for k, v in next, virtualcharacters do
- if not characters[k] then
- local tv = type(v)
- if tv == "table" then
- characters[k] = v
- elseif tv == "number" then
- characters[k] = characters[v]
- elseif tv == "function" then
- characters[k] = v(data)
- end
- if trace_fallbacks then
- if characters[k] then
- report_fallbacks("extending font %a with %U",target.properties.fullname,k)
- end
- end
- end
- end
- end
-end
-
-utilities.sequencers.appendaction("aftercopyingcharacters","system","mathematics.fallbacks.apply")
-
-function fallbacks.install(unicode,value)
- virtualcharacters[unicode] = value
-end
-
--- a few examples:
-
-local function reference(index,char)
- if index then
- return { "slot", index, char }
- else
- return { "char", char }
- end
-end
-
-local function raised(data,down)
- local replacement = data.replacement
- local character = data.scriptdata.characters[replacement]
- if character then
- return {
- width = character.width,
- height = character.height,
- depth = character.depth,
- commands = {
- { "down", down and data.size/4 or -data.size/2 }, -- maybe exheight
- reference(data.scriptindex,replacement)
- }
- }
- end
-end
-
--- virtualcharacters[0x207A] = 0x2212
--- virtualcharacters[0x207B] = 0x002B
--- virtualcharacters[0x208A] = 0x2212
--- virtualcharacters[0x208B] = 0x002B
-
-virtualcharacters[0x207A] = function(data)
- data.replacement = 0x2212
- return raised(data)
-end
-
-virtualcharacters[0x207B] = function(data)
- data.replacement = 0x002B
- return raised(data)
-end
-
-virtualcharacters[0x208A] = function(data)
- data.replacement = 0x2212
- return raised(data,true)
-end
-
-virtualcharacters[0x208B] = function(data)
- data.replacement = 0x002B
- return raised(data,true)
-end
-
--- local function repeated(data,char,n,fraction)
--- local character = data.characters[char]
--- if character then
--- local width = character.width
--- local delta = width - character.italic -- width * fraction
--- local c = { "char", char }
--- local r = { "right", right }
--- local commands = { }
--- for i=1,n-1 do
--- width = width + delta
--- commands[#commands+1] = c
--- commands[#commands+1] = -delta
--- end
--- commands[#commands+1] = c
--- return {
--- width = width,
--- height = character.height,
--- depth = character.depth,
--- commands = commands,
--- }
--- end
--- end
-
--- virtualcharacters[0x222C] = function(data)
--- return repeated(data,0x222B,2,1/8)
--- end
-
--- virtualcharacters[0x222D] = function(data)
--- return repeated(data,0x222B,3,1/8)
--- end
-
-local addextra = mathematics.extras.add
-
-addextra(0xFE350, {
- category="sm",
- description="MATHEMATICAL DOUBLE ARROW LEFT END",
- mathclass="relation",
- mathname="ctxdoublearrowfillleftend",
- unicodeslot=0xFE350,
-} )
-
-addextra(0xFE351, {
- category="sm",
- description="MATHEMATICAL DOUBLE ARROW MIDDLE PART",
- mathclass="relation",
- mathname="ctxdoublearrowfillmiddlepart",
- unicodeslot=0xFE351,
-} )
-
-addextra(0xFE352, {
- category="sm",
- description="MATHEMATICAL DOUBLE ARROW RIGHT END",
- mathclass="relation",
- mathname="ctxdoublearrowfillrightend",
- unicodeslot=0xFE352,
-} )
-
-local push = { "push" }
-local pop = { "pop" }
-local leftarrow = { "char", 0x2190 }
-local relbar = { "char", 0x2212 }
-local rightarrow = { "char", 0x2192 }
-
-virtualcharacters[0xFE350] = function(data)
- -- return combined(data,0x2190,0x2212) -- leftarrow relbar
- local charone = data.characters[0x2190]
- local chartwo = data.characters[0x2212]
- if charone and chartwo then
- local size = data.size/2
- return {
- width = chartwo.width,
- height = size,
- depth = size,
- commands = {
- push,
- { "down", size/2 },
- leftarrow,
- pop,
- { "down", -size/2 },
- relbar,
- }
- }
- end
-end
-
-virtualcharacters[0xFE351] = function(data)
- -- return combined(data,0x2212,0x2212) -- relbar, relbar (isn't that just equal)
- local char = data.characters[0x2212]
- if char then
- local size = data.size/2
- return {
- width = char.width,
- height = size,
- depth = size,
- commands = {
- push,
- { "down", size/2 },
- relbar,
- pop,
- { "down", -size/2 },
- relbar,
- }
- }
- end
-end
-
-virtualcharacters[0xFE352] = function(data)
- -- return combined(data,0x2192,0x2212) -- rightarrow relbar
- local charone = data.characters[0x2192]
- local chartwo = data.characters[0x2212]
- if charone and chartwo then
- local size = data.size/2
- return {
- width = chartwo.width,
- height = size,
- depth = size,
- commands = {
- push,
- { "down", size/2 },
- relbar,
- pop,
- { "right", chartwo.width - charone.width },
- { "down", -size/2 },
- rightarrow,
- }
- }
- end
-end
-
+if not modules then modules = { } end modules ['math-fbk'] = {
+ version = 1.001,
+ comment = "companion to math-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local trace_fallbacks = false trackers.register("math.fallbacks", function(v) trace_fallbacks = v end)
+
+local report_fallbacks = logs.reporter("math","fallbacks")
+
+local fallbacks = { }
+mathematics.fallbacks = fallbacks
+
+local virtualcharacters = { }
+
+local identifiers = fonts.hashes.identifiers
+local lastmathids = fonts.hashes.lastmathids
+
+-- we need a trick (todo): if we define scriptscript, script and text in
+-- that order we could use their id's .. i.e. we could always add a font
+-- table with those id's .. in fact, we could also add a whole lot more
+-- as it doesn't hurt
+--
+-- todo: use index 'true when luatex provides that feature (on the agenda)
+
+function fallbacks.apply(target,original)
+ local mathparameters = target.mathparameters -- why not hasmath
+ if mathparameters then
+ local characters = target.characters
+ local parameters = target.parameters
+ local mathsize = parameters.mathsize
+ local size = parameters.size
+ local usedfonts = target.fonts
+ if not usedfonts then
+ usedfonts = { }
+ target.fonts = usedfonts
+ end
+ -- This is not okay yet ... we have no proper way to refer to 'self'
+ -- otherwise I will make my own id allocator).
+local self = #usedfonts == 0 and font.nextid() or nil -- will be true
+ local textid, scriptid, scriptscriptid
+ local textindex, scriptindex, scriptscriptindex
+ local textdata, scriptdata, scriptscriptdata
+ if mathsize == 3 then
+ -- scriptscriptsize
+ -- textid = nil -- self
+ -- scriptid = nil -- no smaller
+ -- scriptscriptid = nil -- no smaller
+textid = self
+scriptid = self
+scriptscriptid = self
+ elseif mathsize == 2 then
+ -- scriptsize
+ -- textid = nil -- self
+textid = self
+ scriptid = lastmathids[3]
+ scriptscriptid = lastmathids[3]
+ else
+ -- textsize
+ -- textid = nil -- self
+textid = self
+ scriptid = lastmathids[2]
+ scriptscriptid = lastmathids[3]
+ end
+ if textid then
+ textindex = #usedfonts + 1
+ usedfonts[textindex] = { id = textid }
+ textdata = identifiers[textid]
+ else
+ textdata = target
+ end
+ if scriptid then
+ scriptindex = #usedfonts + 1
+ usedfonts[scriptindex] = { id = scriptid }
+ scriptdata = identifiers[scriptid]
+ else
+ scriptindex = textindex
+ scriptdata = textdata
+ end
+ if scriptscriptid then
+ scriptscriptindex = #usedfonts + 1
+ usedfonts[scriptscriptindex] = { id = scriptscriptid }
+ scriptscriptdata = identifiers[scriptscriptid]
+ else
+ scriptscriptindex = scriptindex
+ scriptscriptdata = scriptdata
+ end
+-- report_fallbacks("used textid: %s, used script id: %s, used scriptscript id: %s",
+-- tostring(textid),tostring(scriptid),tostring(scriptscriptid))
+ local data = {
+ textdata = textdata,
+ scriptdata = scriptdata,
+ scriptscriptdata = scriptscriptdata,
+ textindex = textindex,
+ scriptindex = scriptindex,
+ scriptscriptindex = scriptscriptindex,
+ characters = characters,
+ unicode = k,
+ target = target,
+ original = original,
+ size = size,
+ mathsize = mathsize,
+ }
+-- inspect(usedfonts)
+ for k, v in next, virtualcharacters do
+ if not characters[k] then
+ local tv = type(v)
+ if tv == "table" then
+ characters[k] = v
+ elseif tv == "number" then
+ characters[k] = characters[v]
+ elseif tv == "function" then
+ characters[k] = v(data)
+ end
+ if trace_fallbacks then
+ if characters[k] then
+ report_fallbacks("extending font %a with %U",target.properties.fullname,k)
+ end
+ end
+ end
+ end
+ end
+end
+
+utilities.sequencers.appendaction("aftercopyingcharacters","system","mathematics.fallbacks.apply")
+
+function fallbacks.install(unicode,value)
+ virtualcharacters[unicode] = value
+end
+
+-- a few examples:
+
+local function reference(index,char)
+ if index then
+ return { "slot", index, char }
+ else
+ return { "char", char }
+ end
+end
+
+local function raised(data,down)
+ local replacement = data.replacement
+ local character = data.scriptdata.characters[replacement]
+ if character then
+ return {
+ width = character.width,
+ height = character.height,
+ depth = character.depth,
+ commands = {
+ { "down", down and data.size/4 or -data.size/2 }, -- maybe exheight
+ reference(data.scriptindex,replacement)
+ }
+ }
+ end
+end
+
+-- virtualcharacters[0x207A] = 0x2212
+-- virtualcharacters[0x207B] = 0x002B
+-- virtualcharacters[0x208A] = 0x2212
+-- virtualcharacters[0x208B] = 0x002B
+
+virtualcharacters[0x207A] = function(data)
+ data.replacement = 0x2212
+ return raised(data)
+end
+
+virtualcharacters[0x207B] = function(data)
+ data.replacement = 0x002B
+ return raised(data)
+end
+
+virtualcharacters[0x208A] = function(data)
+ data.replacement = 0x2212
+ return raised(data,true)
+end
+
+virtualcharacters[0x208B] = function(data)
+ data.replacement = 0x002B
+ return raised(data,true)
+end
+
+-- local function repeated(data,char,n,fraction)
+-- local character = data.characters[char]
+-- if character then
+-- local width = character.width
+-- local delta = width - character.italic -- width * fraction
+-- local c = { "char", char }
+-- local r = { "right", right }
+-- local commands = { }
+-- for i=1,n-1 do
+-- width = width + delta
+-- commands[#commands+1] = c
+-- commands[#commands+1] = -delta
+-- end
+-- commands[#commands+1] = c
+-- return {
+-- width = width,
+-- height = character.height,
+-- depth = character.depth,
+-- commands = commands,
+-- }
+-- end
+-- end
+
+-- virtualcharacters[0x222C] = function(data)
+-- return repeated(data,0x222B,2,1/8)
+-- end
+
+-- virtualcharacters[0x222D] = function(data)
+-- return repeated(data,0x222B,3,1/8)
+-- end
+
+local addextra = mathematics.extras.add
+
+addextra(0xFE350, {
+ category="sm",
+ description="MATHEMATICAL DOUBLE ARROW LEFT END",
+ mathclass="relation",
+ mathname="ctxdoublearrowfillleftend",
+ unicodeslot=0xFE350,
+} )
+
+addextra(0xFE351, {
+ category="sm",
+ description="MATHEMATICAL DOUBLE ARROW MIDDLE PART",
+ mathclass="relation",
+ mathname="ctxdoublearrowfillmiddlepart",
+ unicodeslot=0xFE351,
+} )
+
+addextra(0xFE352, {
+ category="sm",
+ description="MATHEMATICAL DOUBLE ARROW RIGHT END",
+ mathclass="relation",
+ mathname="ctxdoublearrowfillrightend",
+ unicodeslot=0xFE352,
+} )
+
+local push = { "push" }
+local pop = { "pop" }
+local leftarrow = { "char", 0x2190 }
+local relbar = { "char", 0x2212 }
+local rightarrow = { "char", 0x2192 }
+
+virtualcharacters[0xFE350] = function(data)
+ -- return combined(data,0x2190,0x2212) -- leftarrow relbar
+ local charone = data.characters[0x2190]
+ local chartwo = data.characters[0x2212]
+ if charone and chartwo then
+ local size = data.size/2
+ return {
+ width = chartwo.width,
+ height = size,
+ depth = size,
+ commands = {
+ push,
+ { "down", size/2 },
+ leftarrow,
+ pop,
+ { "down", -size/2 },
+ relbar,
+ }
+ }
+ end
+end
+
+virtualcharacters[0xFE351] = function(data)
+ -- return combined(data,0x2212,0x2212) -- relbar, relbar (isn't that just equal)
+ local char = data.characters[0x2212]
+ if char then
+ local size = data.size/2
+ return {
+ width = char.width,
+ height = size,
+ depth = size,
+ commands = {
+ push,
+ { "down", size/2 },
+ relbar,
+ pop,
+ { "down", -size/2 },
+ relbar,
+ }
+ }
+ end
+end
+
+virtualcharacters[0xFE352] = function(data)
+ -- return combined(data,0x2192,0x2212) -- rightarrow relbar
+ local charone = data.characters[0x2192]
+ local chartwo = data.characters[0x2212]
+ if charone and chartwo then
+ local size = data.size/2
+ return {
+ width = chartwo.width,
+ height = size,
+ depth = size,
+ commands = {
+ push,
+ { "down", size/2 },
+ relbar,
+ pop,
+ { "right", chartwo.width - charone.width },
+ { "down", -size/2 },
+ rightarrow,
+ }
+ }
+ end
+end
+
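Editorial note (not part of the patch): the installers in math-fbk.lua return plain virtual-character tables — a commands list in which push/pop save and restore the reference point, down shifts it vertically, right moves it horizontally, and char/slot place a glyph. A sketch of a builder in the shape fallbacks.install expects, exercised with toy data; the sizes and glyph metrics below are invented:

-- editorial sketch: a builder of the kind fallbacks.install(slot, builder)
-- registers, here raising MINUS SIGN by a quarter of the font size
local function builder(data)
    local minus = data.characters[0x2212]
    if minus then
        return {
            width    = minus.width,
            height   = minus.height,
            depth    = minus.depth,
            commands = {
                { "down", -data.size/4 },               -- negative "down" moves the glyph up
                { "char", 0x2212 },                     -- then place MINUS SIGN itself
            },
        }
    end
end

-- exercise it the way fallbacks.apply would for a missing character
local fake = builder {
    size       = 655360,                                -- 10pt in scaled points
    characters = { [0x2212] = { width = 500000, height = 300000, depth = 0 } },
}
print(fake.commands[1][2])                              -- -163840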
diff --git a/tex/context/base/math-frc.lua b/tex/context/base/math-frc.lua
index 4f531a530..077da643b 100644
--- a/tex/context/base/math-frc.lua
+++ b/tex/context/base/math-frc.lua
@@ -1,51 +1,51 @@
-if not modules then modules = { } end modules ['math-frc'] = {
- version = 1.001,
- comment = "companion to math-frc.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local utfchar = utf.char
-
-local context = context
-local variables = interfaces.variables
-
-local v_no = variables.no
-local v_yes = variables.yes
-
-local resolved = {
- [0x007B] = "\\{",
- [0x007D] = "\\}",
-}
-
-table.setmetatableindex(resolved, function(t,k)
- local v = utfchar(k)
- t[k] = v
- return v
-end)
-
-local normalatop = context.normalatop
-local normalover = context.normalover
-
-function commands.math_frac(how,left,right,width)
- if how == v_no then
- if left == 0x002E and right == 0x002E then
- normalatop()
- else
- context("\\atopwithdelims%s%s",resolved[left],resolved[right])
- end
- elseif how == v_yes then
- if left == 0x002E and right == 0x002E then
- context("\\normalabove%ssp",width)
- else
- context("\\abovewithdelims%s%s%ssp",resolved[left],resolved[right],width)
- end
- else -- v_auto
- if left == 0x002E and right == 0x002E then
- normalover()
- else
- context("\\overwithdelims%s%s",resolved[left],resolved[right])
- end
- end
-end
+if not modules then modules = { } end modules ['math-frc'] = {
+ version = 1.001,
+ comment = "companion to math-frc.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local utfchar = utf.char
+
+local context = context
+local variables = interfaces.variables
+
+local v_no = variables.no
+local v_yes = variables.yes
+
+local resolved = {
+ [0x007B] = "\\{",
+ [0x007D] = "\\}",
+}
+
+table.setmetatableindex(resolved, function(t,k)
+ local v = utfchar(k)
+ t[k] = v
+ return v
+end)
+
+local normalatop = context.normalatop
+local normalover = context.normalover
+
+function commands.math_frac(how,left,right,width)
+ if how == v_no then
+ if left == 0x002E and right == 0x002E then
+ normalatop()
+ else
+ context("\\atopwithdelims%s%s",resolved[left],resolved[right])
+ end
+ elseif how == v_yes then
+ if left == 0x002E and right == 0x002E then
+ context("\\normalabove%ssp",width)
+ else
+ context("\\abovewithdelims%s%s%ssp",resolved[left],resolved[right],width)
+ end
+ else -- v_auto
+ if left == 0x002E and right == 0x002E then
+ normalover()
+ else
+ context("\\overwithdelims%s%s",resolved[left],resolved[right])
+ end
+ end
+end
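Editorial note (not part of the patch): commands.math_frac in math-frc.lua only selects the fraction primitive — \atopwithdelims for rule=no, \abovewithdelims with an explicit rule width in scaled points for rule=yes, and \overwithdelims otherwise, with 0x002E (a period) on both sides meaning "no visible delimiters". The resolved table lazily turns any other delimiter code point into the string those primitives expect; a sketch of that lazy mapping in plain Lua, where a bare metatable stands in for the ConTeXt helper table.setmetatableindex:

-- editorial sketch: the lazy code point -> delimiter string mapping, with
-- string.char standing in for utf.char when run outside LuaTeX
local utfchar  = (utf and utf.char) or string.char

local resolved = { [0x007B] = "\\{", [0x007D] = "\\}" }   -- braces need escaping

setmetatable(resolved, { __index = function(t, k)
    local v = utfchar(k)                                  -- anything else is just the character
    t[k] = v                                              -- cache it for the next lookup
    return v
end })

print(resolved[0x007B])                                   -- \{
print(resolved[0x0028])                                   -- (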
diff --git a/tex/context/base/math-map.lua b/tex/context/base/math-map.lua
index a0d7457d1..51e0f6831 100644
--- a/tex/context/base/math-map.lua
+++ b/tex/context/base/math-map.lua
@@ -1,684 +1,684 @@
-if not modules then modules = { } end modules ['math-map'] = {
- version = 1.001,
- comment = "companion to math-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- todo: make sparse .. if self
-
---[[ldx--
-<p>Remapping mathematics alphabets.</p>
---ldx]]--
-
--- oldstyle: not really mathematics but happened to be part of
--- the mathematics fonts in cmr
---
--- persian: we will also provide mappers for other
--- scripts
-
--- todo: alphabets namespace
--- maybe: script/scriptscript dynamic,
-
--- to be looked into once the fonts are ready (will become font
--- goodie):
---
--- (U+2202,U+1D715) : upright
--- (U+2202,U+1D715) : italic
--- (U+2202,U+1D715) : upright
---
--- plus add them to the regular vectors below so that they honor \it etc
-
-local type, next = type, next
-local floor, div = math.floor, math.div
-local merged = table.merged
-local extract = bit32.extract
-
-local allocate = utilities.storage.allocate
-local texattribute = tex.attribute
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
-local setmetatableindex = table.setmetatableindex
-
-local trace_greek = false trackers.register("math.greek", function(v) trace_greek = v end)
-local report_remapping = logs.reporter("mathematics","remapping")
-
-mathematics = mathematics or { }
-local mathematics = mathematics
-
--- Unfortunately some alphabets have gaps (thereby troubling all applications that
--- need to deal with math). Somewhat strange considering all those weird symbols that
--- were added afterwards. The following trickery (and data) is only to be used for
--- diagnostics and quick and dirty alphabet tracing (s-mat-10.mkiv) as we deal with
--- it otherwise.
-
-mathematics.gaps = {
- [0x1D455] = 0x0210E, -- H
- [0x1D49D] = 0x0212C, -- script B
- [0x1D4A0] = 0x02130, -- script E
- [0x1D4A1] = 0x02131, -- script F
- [0x1D4A3] = 0x0210B, -- script H
- [0x1D4A4] = 0x02110, -- script I
- [0x1D4A7] = 0x02112, -- script L
- [0x1D4A8] = 0x02133, -- script M
- [0x1D4AD] = 0x0211B, -- script R
- [0x1D4BA] = 0x0212F, -- script e
- [0x1D4BC] = 0x0210A, -- script g
- [0x1D4C4] = 0x02134, -- script o
- [0x1D506] = 0x0212D, -- fraktur C
- [0x1D50B] = 0x0210C, -- fraktur H
- [0x1D50C] = 0x02111, -- fraktur I
- [0x1D515] = 0x0211C, -- fraktur R
- [0x1D51D] = 0x02128, -- fraktur Z
- [0x1D53A] = 0x02102, -- bb C
- [0x1D53F] = 0x0210D, -- bb H
- [0x1D545] = 0x02115, -- bb N
- [0x1D547] = 0x02119, -- bb P
- [0x1D548] = 0x0211A, -- bb Q
- [0x1D549] = 0x0211D, -- bb R
- [0x1D551] = 0x02124, -- bb Z
-}
-
-local function fillinmathgaps(tfmdata,key,value)
- local mathgaps = mathematics.gaps
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- for gap, original in next, mathgaps do
- if characters[original] and not characters[gap] then
- characters [gap] = characters [original]
- descriptions[gap] = descriptions[original]
- end
- end
-end
-
-registerotffeature {
- name = "mathgaps",
- description = "plug gaps in math alphabets",
- comment = "regular document sources should not depend on this",
- manipulators = {
- base = fillinmathgaps,
- node = fillinmathgaps,
- }
-}
-
--- we could use one level less and have tf etc be tables directly but the
--- following approach permits easier remapping of a-a, A-Z and 0-9 to
--- fallbacks; symbols is currently mostly greek
-
-local function todigit(n) local t = { } for i=0, 9 do t[0x00030+i] = n+i end return t end
-local function toupper(n) local t = { } for i=0,25 do t[0x00041+i] = n+i end return t end
-local function tolower(n) local t = { } for i=0,25 do t[0x00061+i] = n+i end return t end
-
-local regular_tf = {
- digits = todigit(0x00030),
- ucletters = toupper(0x00041),
- lcletters = tolower(0x00061),
- ucgreek = {
- [0x0391]=0x0391, [0x0392]=0x0392, [0x0393]=0x0393, [0x0394]=0x0394, [0x0395]=0x0395,
- [0x0396]=0x0396, [0x0397]=0x0397, [0x0398]=0x0398, [0x0399]=0x0399, [0x039A]=0x039A,
- [0x039B]=0x039B, [0x039C]=0x039C, [0x039D]=0x039D, [0x039E]=0x039E, [0x039F]=0x039F,
- [0x03A0]=0x03A0, [0x03A1]=0x03A1, [0x03A3]=0x03A3, [0x03A4]=0x03A4, [0x03A5]=0x03A5,
- [0x03A6]=0x03A6, [0x03A7]=0x03A7, [0x03A8]=0x03A8, [0x03A9]=0x03A9,
- },
- lcgreek = {
- [0x03B1]=0x03B1, [0x03B2]=0x03B2, [0x03B3]=0x03B3, [0x03B4]=0x03B4, [0x03B5]=0x03B5,
- [0x03B6]=0x03B6, [0x03B7]=0x03B7, [0x03B8]=0x03B8, [0x03B9]=0x03B9, [0x03BA]=0x03BA,
- [0x03BB]=0x03BB, [0x03BC]=0x03BC, [0x03BD]=0x03BD, [0x03BE]=0x03BE, [0x03BF]=0x03BF,
- [0x03C0]=0x03C0, [0x03C1]=0x03C1, [0x03C2]=0x03C2, [0x03C3]=0x03C3, [0x03C4]=0x03C4,
- [0x03C5]=0x03C5, [0x03C6]=0x03C6, [0x03C7]=0x03C7, [0x03C8]=0x03C8, [0x03C9]=0x03C9,
- [0x03D1]=0x03D1, [0x03D5]=0x03D5, [0x03D6]=0x03D6, [0x03F0]=0x03F0, [0x03F1]=0x03F1,
- [0x03F4]=0x03F4, [0x03F5]=0x03F5,
- },
- symbols = {
- [0x2202]=0x2202, [0x2207]=0x2207,
- },
-}
-
-local regular_it = {
- digits = regular_tf.digits,
- ucletters = toupper(0x1D434),
- lcletters = { -- H
- [0x00061]=0x1D44E, [0x00062]=0x1D44F, [0x00063]=0x1D450, [0x00064]=0x1D451, [0x00065]=0x1D452,
- [0x00066]=0x1D453, [0x00067]=0x1D454, [0x00068]=0x0210E, [0x00069]=0x1D456, [0x0006A]=0x1D457,
- [0x0006B]=0x1D458, [0x0006C]=0x1D459, [0x0006D]=0x1D45A, [0x0006E]=0x1D45B, [0x0006F]=0x1D45C,
- [0x00070]=0x1D45D, [0x00071]=0x1D45E, [0x00072]=0x1D45F, [0x00073]=0x1D460, [0x00074]=0x1D461,
- [0x00075]=0x1D462, [0x00076]=0x1D463, [0x00077]=0x1D464, [0x00078]=0x1D465, [0x00079]=0x1D466,
- [0x0007A]=0x1D467,
- },
- ucgreek = {
- [0x0391]=0x1D6E2, [0x0392]=0x1D6E3, [0x0393]=0x1D6E4, [0x0394]=0x1D6E5, [0x0395]=0x1D6E6,
- [0x0396]=0x1D6E7, [0x0397]=0x1D6E8, [0x0398]=0x1D6E9, [0x0399]=0x1D6EA, [0x039A]=0x1D6EB,
- [0x039B]=0x1D6EC, [0x039C]=0x1D6ED, [0x039D]=0x1D6EE, [0x039E]=0x1D6EF, [0x039F]=0x1D6F0,
- [0x03A0]=0x1D6F1, [0x03A1]=0x1D6F2, [0x03A3]=0x1D6F4, [0x03A4]=0x1D6F5, [0x03A5]=0x1D6F6,
- [0x03A6]=0x1D6F7, [0x03A7]=0x1D6F8, [0x03A8]=0x1D6F9, [0x03A9]=0x1D6FA,
- },
- lcgreek = {
- [0x03B1]=0x1D6FC, [0x03B2]=0x1D6FD, [0x03B3]=0x1D6FE, [0x03B4]=0x1D6FF, [0x03B5]=0x1D700,
- [0x03B6]=0x1D701, [0x03B7]=0x1D702, [0x03B8]=0x1D703, [0x03B9]=0x1D704, [0x03BA]=0x1D705,
- [0x03BB]=0x1D706, [0x03BC]=0x1D707, [0x03BD]=0x1D708, [0x03BE]=0x1D709, [0x03BF]=0x1D70A,
- [0x03C0]=0x1D70B, [0x03C1]=0x1D70C, [0x03C2]=0x1D70D, [0x03C3]=0x1D70E, [0x03C4]=0x1D70F,
- [0x03C5]=0x1D710, [0x03C6]=0x1D711, [0x03C7]=0x1D712, [0x03C8]=0x1D713, [0x03C9]=0x1D714,
- [0x03D1]=0x1D717, [0x03D5]=0x1D719, [0x03D6]=0x1D71B, [0x03F0]=0x1D718, [0x03F1]=0x1D71A,
- [0x03F4]=0x1D6F3, [0x03F5]=0x1D716,
- },
- symbols = {
- [0x2202]=0x1D715, [0x2207]=0x1D6FB,
- },
-}
-
-local regular_bf= {
- digits = todigit(0x1D7CE),
- ucletters = toupper(0x1D400),
- lcletters = tolower(0x1D41A),
- ucgreek = {
- [0x0391]=0x1D6A8, [0x0392]=0x1D6A9, [0x0393]=0x1D6AA, [0x0394]=0x1D6AB, [0x0395]=0x1D6AC,
- [0x0396]=0x1D6AD, [0x0397]=0x1D6AE, [0x0398]=0x1D6AF, [0x0399]=0x1D6B0, [0x039A]=0x1D6B1,
- [0x039B]=0x1D6B2, [0x039C]=0x1D6B3, [0x039D]=0x1D6B4, [0x039E]=0x1D6B5, [0x039F]=0x1D6B6,
- [0x03A0]=0x1D6B7, [0x03A1]=0x1D6B8, [0x03A3]=0x1D6BA, [0x03A4]=0x1D6BB, [0x03A5]=0x1D6BC,
- [0x03A6]=0x1D6BD, [0x03A7]=0x1D6BE, [0x03A8]=0x1D6BF, [0x03A9]=0x1D6C0,
- },
- lcgreek = {
- [0x03B1]=0x1D6C2, [0x03B2]=0x1D6C3, [0x03B3]=0x1D6C4, [0x03B4]=0x1D6C5, [0x03B5]=0x1D6C6,
- [0x03B6]=0x1D6C7, [0x03B7]=0x1D6C8, [0x03B8]=0x1D6C9, [0x03B9]=0x1D6CA, [0x03BA]=0x1D6CB,
- [0x03BB]=0x1D6CC, [0x03BC]=0x1D6CD, [0x03BD]=0x1D6CE, [0x03BE]=0x1D6CF, [0x03BF]=0x1D6D0,
- [0x03C0]=0x1D6D1, [0x03C1]=0x1D6D2, [0x03C2]=0x1D6D3, [0x03C3]=0x1D6D4, [0x03C4]=0x1D6D5,
- [0x03C5]=0x1D6D6, [0x03C6]=0x1D6D7, [0x03C7]=0x1D6D8, [0x03C8]=0x1D6D9, [0x03C9]=0x1D6DA,
- [0x03D1]=0x1D6DD, [0x03D5]=0x1D6DF, [0x03D6]=0x1D6E1, [0x03F0]=0x1D6DE, [0x03F1]=0x1D6E0,
- [0x03F4]=0x1D6B9, [0x03F5]=0x1D6DC,
- },
- symbols = {
- [0x2202]=0x1D6DB, [0x2207]=0x1D6C1,
- },
-}
-
-local regular_bi = {
- digits = regular_bf.digits,
- ucletters = toupper(0x1D468),
- lcletters = tolower(0x1D482),
- ucgreek = {
- [0x0391]=0x1D71C, [0x0392]=0x1D71D, [0x0393]=0x1D71E, [0x0394]=0x1D71F, [0x0395]=0x1D720,
- [0x0396]=0x1D721, [0x0397]=0x1D722, [0x0398]=0x1D723, [0x0399]=0x1D724, [0x039A]=0x1D725,
- [0x039B]=0x1D726, [0x039C]=0x1D727, [0x039D]=0x1D728, [0x039E]=0x1D729, [0x039F]=0x1D72A,
- [0x03A0]=0x1D72B, [0x03A1]=0x1D72C, [0x03A3]=0x1D72E, [0x03A4]=0x1D72F, [0x03A5]=0x1D730,
- [0x03A6]=0x1D731, [0x03A7]=0x1D732, [0x03A8]=0x1D733, [0x03A9]=0x1D734,
- },
- lcgreek = {
- [0x03B1]=0x1D736, [0x03B2]=0x1D737, [0x03B3]=0x1D738, [0x03B4]=0x1D739, [0x03B5]=0x1D73A,
- [0x03B6]=0x1D73B, [0x03B7]=0x1D73C, [0x03B8]=0x1D73D, [0x03B9]=0x1D73E, [0x03BA]=0x1D73F,
- [0x03BB]=0x1D740, [0x03BC]=0x1D741, [0x03BD]=0x1D742, [0x03BE]=0x1D743, [0x03BF]=0x1D744,
- [0x03C0]=0x1D745, [0x03C1]=0x1D746, [0x03C2]=0x1D747, [0x03C3]=0x1D748, [0x03C4]=0x1D749,
- [0x03C5]=0x1D74A, [0x03C6]=0x1D74B, [0x03C7]=0x1D74C, [0x03C8]=0x1D74D, [0x03C9]=0x1D74E,
- [0x03D1]=0x1D751, [0x03D5]=0x1D753, [0x03D6]=0x1D755, [0x03F0]=0x1D752, [0x03F1]=0x1D754,
- [0x03F4]=0x1D72D, [0x03F5]=0x1D750,
- },
- symbols = {
- [0x2202]=0x1D74F, [0x2207]=0x1D735,
- },
-}
-
-local regular = {
- tf = regular_tf,
- it = regular_it,
- bf = regular_bf,
- bi = regular_bi,
-}
-
-local sansserif_tf = {
- digits = todigit(0x1D7E2),
- ucletters = toupper(0x1D5A0),
- lcletters = tolower(0x1D5BA),
- lcgreek = regular_tf.lcgreek,
- ucgreek = regular_tf.ucgreek,
- symbols = regular_tf.symbols,
-}
-
-local sansserif_it = {
- digits = regular_tf.digits,
- ucletters = toupper(0x1D608),
- lcletters = tolower(0x1D622),
- lcgreek = regular_tf.lcgreek,
- ucgreek = regular_tf.ucgreek,
- symbols = regular_tf.symbols,
-}
-
-local sansserif_bf = {
- digits = todigit(0x1D7EC),
- ucletters = toupper(0x1D5D4),
- lcletters = tolower(0x1D5EE),
- ucgreek = {
- [0x0391]=0x1D756, [0x0392]=0x1D757, [0x0393]=0x1D758, [0x0394]=0x1D759, [0x0395]=0x1D75A,
- [0x0396]=0x1D75B, [0x0397]=0x1D75C, [0x0398]=0x1D75D, [0x0399]=0x1D75E, [0x039A]=0x1D75F,
- [0x039B]=0x1D760, [0x039C]=0x1D761, [0x039D]=0x1D762, [0x039E]=0x1D763, [0x039F]=0x1D764,
- [0x03A0]=0x1D765, [0x03A1]=0x1D766, [0x03A3]=0x1D768, [0x03A4]=0x1D769, [0x03A5]=0x1D76A,
- [0x03A6]=0x1D76B, [0x03A7]=0x1D76C, [0x03A8]=0x1D76D, [0x03A9]=0x1D76E,
- },
- lcgreek = {
- [0x03B1]=0x1D770, [0x03B2]=0x1D771, [0x03B3]=0x1D772, [0x03B4]=0x1D773, [0x03B5]=0x1D774,
- [0x03B6]=0x1D775, [0x03B7]=0x1D776, [0x03B8]=0x1D777, [0x03B9]=0x1D778, [0x03BA]=0x1D779,
- [0x03BB]=0x1D77A, [0x03BC]=0x1D77B, [0x03BD]=0x1D77C, [0x03BE]=0x1D77D, [0x03BF]=0x1D77E,
- [0x03C0]=0x1D77F, [0x03C1]=0x1D780, [0x03C2]=0x1D781, [0x03C3]=0x1D782, [0x03C4]=0x1D783,
- [0x03C5]=0x1D784, [0x03C6]=0x1D785, [0x03C7]=0x1D786, [0x03C8]=0x1D787, [0x03C9]=0x1D788,
- [0x03D1]=0x1D78B, [0x03D5]=0x1D78D, [0x03D6]=0x1D78F, [0x03F0]=0x1D78C, [0x03F1]=0x1D78E,
- [0x03F4]=0x1D767, [0x03F5]=0x1D78A,
- },
- symbols = {
- [0x2202]=0x1D789, [0x2207]=0x1D76F,
- },
-}
-
-local sansserif_bi = {
- digits = sansserif_bf.digits,
- ucletters = toupper(0x1D63C),
- lcletters = tolower(0x1D656),
- ucgreek = {
- [0x0391]=0x1D790, [0x0392]=0x1D791, [0x0393]=0x1D792, [0x0394]=0x1D793, [0x0395]=0x1D794,
- [0x0396]=0x1D795, [0x0397]=0x1D796, [0x0398]=0x1D797, [0x0399]=0x1D798, [0x039A]=0x1D799,
- [0x039B]=0x1D79A, [0x039C]=0x1D79B, [0x039D]=0x1D79C, [0x039E]=0x1D79D, [0x039F]=0x1D79E,
- [0x03A0]=0x1D79F, [0x03A1]=0x1D7A0, [0x03A3]=0x1D7A2, [0x03A4]=0x1D7A3, [0x03A5]=0x1D7A4,
- [0x03A6]=0x1D7A5, [0x03A7]=0x1D7A6, [0x03A8]=0x1D7A7, [0x03A9]=0x1D7A8,
- },
- lcgreek = {
- [0x03B1]=0x1D7AA, [0x03B2]=0x1D7AB, [0x03B3]=0x1D7AC, [0x03B4]=0x1D7AD, [0x03B5]=0x1D7AE,
- [0x03B6]=0x1D7AF, [0x03B7]=0x1D7B0, [0x03B8]=0x1D7B1, [0x03B9]=0x1D7B2, [0x03BA]=0x1D7B3,
- [0x03BB]=0x1D7B4, [0x03BC]=0x1D7B5, [0x03BD]=0x1D7B6, [0x03BE]=0x1D7B7, [0x03BF]=0x1D7B8,
- [0x03C0]=0x1D7B9, [0x03C1]=0x1D7BA, [0x03C2]=0x1D7BB, [0x03C3]=0x1D7BC, [0x03C4]=0x1D7BD,
- [0x03C5]=0x1D7BE, [0x03C6]=0x1D7BF, [0x03C7]=0x1D7C0, [0x03C8]=0x1D7C1, [0x03C9]=0x1D7C2,
- [0x03D1]=0x1D7C5, [0x03D5]=0x1D7C7, [0x03D6]=0x1D7C9, [0x03F0]=0x1D7C6, [0x03F1]=0x1D7C8,
- [0x03F4]=0x1D7A1, [0x03F5]=0x1D7C4,
- },
- symbols = {
- [0x2202]=0x1D7C3, [0x2207]=0x1D7A9,
- },
-}
-
-local sansserif = {
- tf = sansserif_tf,
- it = sansserif_it,
- bf = sansserif_bf,
- bi = sansserif_bi,
-}
-
-local monospaced_tf = {
- digits = todigit(0x1D7F6),
- ucletters = toupper(0x1D670),
- lcletters = tolower(0x1D68A),
- lcgreek = sansserif_tf.lcgreek,
- ucgreek = sansserif_tf.ucgreek,
- symbols = sansserif_tf.symbols,
-}
-
-local monospaced = {
- tf = monospaced_tf,
- it = sansserif_tf,
- bf = sansserif_tf,
- bi = sansserif_bf,
-}
-
-local blackboard_tf = {
- digits = todigit(0x1D7D8),
- ucletters = { -- C H N P Q R Z
- [0x00041]=0x1D538, [0x00042]=0x1D539, [0x00043]=0x02102, [0x00044]=0x1D53B, [0x00045]=0x1D53C,
- [0x00046]=0x1D53D, [0x00047]=0x1D53E, [0x00048]=0x0210D, [0x00049]=0x1D540, [0x0004A]=0x1D541,
- [0x0004B]=0x1D542, [0x0004C]=0x1D543, [0x0004D]=0x1D544, [0x0004E]=0x02115, [0x0004F]=0x1D546,
- [0x00050]=0x02119, [0x00051]=0x0211A, [0x00052]=0x0211D, [0x00053]=0x1D54A, [0x00054]=0x1D54B,
- [0x00055]=0x1D54C, [0x00056]=0x1D54D, [0x00057]=0x1D54E, [0x00058]=0x1D54F, [0x00059]=0x1D550,
- [0x0005A]=0x02124,
- },
- lcletters = tolower(0x1D552),
- lcgreek = { -- gamma pi
- [0x03B3]=0x0213C, [0x03C0]=0x0213D,
- },
- ucgreek = { -- Gamma pi
- [0x0393]=0x0213E, [0x03A0]=0x0213F,
- },
- symbols = { -- sum
- [0x2211]=0x02140,
- },
-}
-
-blackboard_tf.lcgreek = merged(regular_tf.lcgreek, blackboard_tf.lcgreek)
-blackboard_tf.ucgreek = merged(regular_tf.ucgreek, blackboard_tf.ucgreek)
-blackboard_tf.symbols = merged(regular_tf.symbols, blackboard_tf.symbols)
-
-local blackboard = {
- tf = blackboard_tf,
- it = blackboard_tf,
- bf = blackboard_tf,
- bi = blackboard_tf,
-}
-
-local fraktur_tf= {
- digits = regular_tf.digits,
- ucletters = { -- C H I R Z
- [0x00041]=0x1D504, [0x00042]=0x1D505, [0x00043]=0x0212D, [0x00044]=0x1D507, [0x00045]=0x1D508,
- [0x00046]=0x1D509, [0x00047]=0x1D50A, [0x00048]=0x0210C, [0x00049]=0x02111, [0x0004A]=0x1D50D,
- [0x0004B]=0x1D50E, [0x0004C]=0x1D50F, [0x0004D]=0x1D510, [0x0004E]=0x1D511, [0x0004F]=0x1D512,
- [0x00050]=0x1D513, [0x00051]=0x1D514, [0x00052]=0x0211C, [0x00053]=0x1D516, [0x00054]=0x1D517,
- [0x00055]=0x1D518, [0x00056]=0x1D519, [0x00057]=0x1D51A, [0x00058]=0x1D51B, [0x00059]=0x1D51C,
- [0x0005A]=0x02128,
- },
- lcletters = tolower(0x1D51E),
- lcgreek = regular_tf.lcgreek,
- ucgreek = regular_tf.ucgreek,
- symbols = regular_tf.symbols,
-}
-
-local fraktur_bf = {
- digits = regular_bf.digits,
- ucletters = toupper(0x1D56C),
- lcletters = tolower(0x1D586),
- lcgreek = regular_bf.lcgreek,
- ucgreek = regular_bf.ucgreek,
- symbols = regular_bf.symbols,
-}
-
-local fraktur = { -- ok
- tf = fraktur_tf,
- bf = fraktur_bf,
- it = fraktur_tf,
- bi = fraktur_bf,
-}
-
-local script_tf = {
- digits = regular_tf.digits,
- ucletters = { -- B E F H I L M R -- P 2118
- [0x00041]=0x1D49C, [0x00042]=0x0212C, [0x00043]=0x1D49E, [0x00044]=0x1D49F, [0x00045]=0x02130,
- [0x00046]=0x02131, [0x00047]=0x1D4A2, [0x00048]=0x0210B, [0x00049]=0x02110, [0x0004A]=0x1D4A5,
- [0x0004B]=0x1D4A6, [0x0004C]=0x02112, [0x0004D]=0x02133, [0x0004E]=0x1D4A9, [0x0004F]=0x1D4AA,
- [0x00050]=0x1D4AB, [0x00051]=0x1D4AC, [0x00052]=0x0211B, [0x00053]=0x1D4AE, [0x00054]=0x1D4AF,
- [0x00055]=0x1D4B0, [0x00056]=0x1D4B1, [0x00057]=0x1D4B2, [0x00058]=0x1D4B3, [0x00059]=0x1D4B4,
- [0x0005A]=0x1D4B5,
- },
- lcletters = { -- E G O -- L 2113
- [0x00061]=0x1D4B6, [0x00062]=0x1D4B7, [0x00063]=0x1D4B8, [0x00064]=0x1D4B9, [0x00065]=0x0212F,
- [0x00066]=0x1D4BB, [0x00067]=0x0210A, [0x00068]=0x1D4BD, [0x00069]=0x1D4BE, [0x0006A]=0x1D4BF,
- [0x0006B]=0x1D4C0, [0x0006C]=0x1D4C1, [0x0006D]=0x1D4C2, [0x0006E]=0x1D4C3, [0x0006F]=0x02134,
- [0x00070]=0x1D4C5, [0x00071]=0x1D4C6, [0x00072]=0x1D4C7, [0x00073]=0x1D4C8, [0x00074]=0x1D4C9,
- [0x00075]=0x1D4CA, [0x00076]=0x1D4CB, [0x00077]=0x1D4CC, [0x00078]=0x1D4CD, [0x00079]=0x1D4CE,
- [0x0007A]=0x1D4CF,
- },
- lcgreek = regular_tf.lcgreek,
- ucgreek = regular_tf.ucgreek,
- symbols = regular_tf.symbols,
-}
-
-local script_bf = {
- digits = regular_bf.digits,
- ucletters = toupper(0x1D4D0),
- lcletters = tolower(0x1D4EA),
- lcgreek = regular_bf.lcgreek,
- ucgreek = regular_bf.ucgreek,
- symbols = regular_bf.symbols,
-}
-
-local script = {
- tf = script_tf,
- bf = script_bf,
- it = script_tf,
- bi = script_bf,
-}
-
-local alphabets = allocate {
- regular = regular,
- sansserif = sansserif,
- monospaced = monospaced,
- blackboard = blackboard,
- fraktur = fraktur,
- script = script,
-}
-
-mathematics.alphabets = alphabets
-
-local boldmap = { }
-
-local function remap(tf,bf)
- for _, alphabet in next, alphabets do
- local tfdata = alphabet[tf]
- local bfdata = alphabet[bf]
- if tfdata then
- for k, tfd in next, tfdata do
- if type(tfd) == "table" then
- local bfd = bfdata[k]
- if bfd then
- for n, u in next, tfd do
- local bn = bfd[n]
- if bn then
- boldmap[u] = bn
- end
- end
- end
- end
- end
- end
- end
-end
-
-remap("tf","bf")
-remap("it","bi")
-
-mathematics.boldmap = boldmap
-
-local mathremap = allocate { }
-
-for alphabet, styles in next, alphabets do -- per 9/6/2011 we also have attr for missing
- for style, data in next, styles do
- -- let's keep the long names (for tracing)
- local n = #mathremap + 1
- data.attribute = n
- data.alphabet = alphabet
- data.style = style
- mathremap[n] = data
- end
-end
-
-mathematics.mapremap = mathremap
-
--- beware, these are shared tables (no problem since they're not
--- in unicode)
-
-alphabets.tt = monospaced
-alphabets.ss = sansserif
-alphabets.rm = regular
-alphabets.bb = blackboard
-alphabets.fr = fraktur
-alphabets.sr = script
-
-alphabets.serif = regular
-alphabets.type = monospaced
-alphabets.teletype = monospaced
-
-regular.normal = regular_tf
-regular.italic = regular_it
-regular.bold = regular_bf
-regular.bolditalic = regular_bi
-
-sansserif.normal = sansserif_tf
-sansserif.italic = sansserif_it
-sansserif.bold = sansserif_bf
-sansserif.bolditalic = sansserif_bi
-
-monospaced.normal = monospaced_tf
-monospaced.italic = monospaced_it
-monospaced.bold = monospaced_bf
-monospaced.bolditalic = monospaced_bi
-
-function mathematics.tostyle(attribute)
- local r = mathremap[attribute]
- return r and r.style or "tf"
-end
-
-function mathematics.toname(attribute)
- local r = mathremap[attribute]
- return r and r.alphabet or "regular"
-end
-
--- of course we could do some div/mod trickery instead
-
-local mathalphabet = attributes.private("mathalphabet")
-
-function mathematics.getboth(alphabet,style)
- local data = alphabet and alphabets[alphabet] or regular
- data = data[style or "tf"] or data.tf
- return data and data.attribute
-end
-
-function mathematics.getstyle(style)
- local r = mathremap[texattribute[mathalphabet]]
- local alphabet = r and r.alphabet or "regular"
- local data = alphabets[alphabet][style]
- return data and data.attribute
-end
-
-function mathematics.syncboth(alphabet,style)
- local data = alphabet and alphabets[alphabet] or regular
- data = style and data[style] or data.tf
- texattribute[mathalphabet] = data and data.attribute or texattribute[mathalphabet]
-end
-
-function mathematics.syncstyle(style)
- local r = mathremap[texattribute[mathalphabet]]
- local alphabet = r and r.alphabet or "regular"
- local data = alphabets[alphabet][style]
- texattribute[mathalphabet] = data and data.attribute or texattribute[mathalphabet]
-end
-
-function mathematics.syncname(alphabet)
- -- local r = mathremap[mathalphabet]
- local r = mathremap[texattribute[mathalphabet]]
- local style = r and r.style or "tf"
- local data = alphabets[alphabet][style]
- texattribute[mathalphabet] = data and data.attribute or texattribute[mathalphabet]
-end
-
-local islcgreek = regular_tf.lcgreek
-local isucgreek = regular_tf.ucgreek
-local issygreek = regular_tf.symbols
-local isgreek = merged(islcgreek,isucgreek,issygreek)
-
-local greekremapping = {
- [1] = { what = "unchanged" }, -- upright
- [2] = { what = "upright", it = "tf", bi = "bf" }, -- upright
- [3] = { what = "italic", tf = "it", bf = "bi" }, -- italic
-}
-
-local usedremap = { }
-
-local function resolver(map)
- return function (t,k)
- local v =
- map.digits [k] or
- map.lcletters[k] or map.ucletters[k] or
- map.lcgreek [k] or map.ucgreek [k] or
- map.symbols [k] or k
- t[k] = v
- return v
- end
-end
-
-for k, v in next, mathremap do
- local t = { }
- setmetatableindex(t,resolver(v))
- usedremap[k] = t
-end
-
-local function remapgreek(mathalphabet,how,detail,char)
- local r = mathremap[mathalphabet] -- what if 0
- local alphabet = r and r.alphabet or "regular"
- local style = r and r.style or "tf"
- local remapping = greekremapping[how]
- if trace_greek then
- report_remapping("greek %s, %s char %C, alphabet %a %a, method %a","before",detail,char,alphabet,style,remapping.what)
- end
- local newstyle = remapping[style]
- if newstyle then
- local data = alphabets[alphabet][newstyle] -- always something
- mathalphabet = data and data.attribute or mathalphabet
- style = newstyle
- end
- if trace_greek then
- report_remapping("greek %s, %s char %C, alphabet %a %a, method %a","after",detail,char,alphabet,style,remapping.what)
- end
- return mathalphabet, style
-end
-
-function mathematics.remapalphabets(char,mathalphabet,mathgreek)
- if not mathalphabet then
- return
- end
- if mathgreek and mathgreek > 0 then
- if not isgreek[char] then
- -- nothing needed
- elseif islcgreek[char] then
- local lc = extract(mathgreek,4,4)
- if lc > 1 then
- mathalphabet = remapgreek(mathalphabet,lc,"lowercase",char)
- end
- elseif isucgreek[char] then
- local uc = extract(mathgreek,0,4)
- if uc > 1 then
- mathalphabet = remapgreek(mathalphabet,uc,"uppercase",char)
- end
- elseif issygreek[char] then
- local sy = extract(mathgreek,8,4)
- if sy > 1 then
- mathalphabet = remapgreek(mathalphabet,sy,"symbol",char)
- end
- end
- end
- if mathalphabet > 0 then
- local remap = usedremap[mathalphabet] -- redundant check
- if remap then
- local newchar = remap[char]
- return newchar ~= char and newchar
- end
- end
- -- return nil
-end
-
--- begin of experiment
-
-local fallback = {
- tf = "bf",
- it = "bi",
- bf = "tf",
- bi = "it",
-}
-
-function mathematics.fallbackstyleattr(attribute)
- local r = mathremap[attribute]
- local alphabet = r.alphabet or "regular"
- local style = r.style or "tf"
- local fback = fallback[style]
- if fback then
- local data = alphabets[alphabet][fback]
- if data then
- local attr = data.attribute
- return attribute ~= attr and attr
- end
- end
-end
-
--- end of experiment
-
-local function checkedcopy(characters,child,parent)
- for k, v in next, child do
- if not characters[v] then
- characters[v] = characters[parent[k]]
- end
- end
-end
-
-function mathematics.addfallbacks(main)
- local characters = main.characters
- checkedcopy(characters,regular.bf.ucgreek,regular.tf.ucgreek)
- checkedcopy(characters,regular.bf.lcgreek,regular.tf.lcgreek)
- checkedcopy(characters,regular.bi.ucgreek,regular.it.ucgreek)
- checkedcopy(characters,regular.bi.lcgreek,regular.it.lcgreek)
-end
-
--- interface
-
-commands.setmathattribute = mathematics.syncboth
-commands.setmathalphabet = mathematics.syncname
-commands.setmathstyle = mathematics.syncstyle
+if not modules then modules = { } end modules ['math-map'] = {
+ version = 1.001,
+ comment = "companion to math-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- todo: make sparse .. if self
+
+--[[ldx--
+
+Remapping mathematics alphabets.
+--ldx]]--
+
+-- oldstyle: not really mathematics but happened to be part of
+-- the mathematics fonts in cmr
+--
+-- persian: we will also provide mappers for other
+-- scripts
+
+-- todo: alphabets namespace
+-- maybe: script/scriptscript dynamic,
+
+-- to be looked into once the fonts are ready (will become font
+-- goodie):
+--
+-- (U+2202,U+1D715) : upright
+-- (U+2202,U+1D715) : italic
+-- (U+2202,U+1D715) : upright
+--
+-- plus add them to the regular vectors below so that they honor \it etc
+
+local type, next = type, next
+local floor, div = math.floor, math.div
+local merged = table.merged
+local extract = bit32.extract
+
+local allocate = utilities.storage.allocate
+local texattribute = tex.attribute
+local otffeatures = fonts.constructors.newfeatures("otf")
+local registerotffeature = otffeatures.register
+local setmetatableindex = table.setmetatableindex
+
+local trace_greek = false trackers.register("math.greek", function(v) trace_greek = v end)
+local report_remapping = logs.reporter("mathematics","remapping")
+
+mathematics = mathematics or { }
+local mathematics = mathematics
+
+-- Unfortunately some alphabets have gaps (thereby troubling all applications that
+-- need to deal with math). Somewhat strange considering all those weird symbols that
+-- were added afterwards. The following trickery (and data) is only to be used for
+-- diagnostics and quick and dirty alphabet tracing (s-mat-10.mkiv) as we deal with
+-- it otherwise.
+
+mathematics.gaps = {
+ [0x1D455] = 0x0210E, -- H
+ [0x1D49D] = 0x0212C, -- script B
+ [0x1D4A0] = 0x02130, -- script E
+ [0x1D4A1] = 0x02131, -- script F
+ [0x1D4A3] = 0x0210B, -- script H
+ [0x1D4A4] = 0x02110, -- script I
+ [0x1D4A7] = 0x02112, -- script L
+ [0x1D4A8] = 0x02133, -- script M
+ [0x1D4AD] = 0x0211B, -- script R
+ [0x1D4BA] = 0x0212F, -- script e
+ [0x1D4BC] = 0x0210A, -- script g
+ [0x1D4C4] = 0x02134, -- script o
+ [0x1D506] = 0x0212D, -- fraktur C
+ [0x1D50B] = 0x0210C, -- fraktur H
+ [0x1D50C] = 0x02111, -- fraktur I
+ [0x1D515] = 0x0211C, -- fraktur R
+ [0x1D51D] = 0x02128, -- fraktur Z
+ [0x1D53A] = 0x02102, -- bb C
+ [0x1D53F] = 0x0210D, -- bb H
+ [0x1D545] = 0x02115, -- bb N
+ [0x1D547] = 0x02119, -- bb P
+ [0x1D548] = 0x0211A, -- bb Q
+ [0x1D549] = 0x0211D, -- bb R
+ [0x1D551] = 0x02124, -- bb Z
+}
+
+local function fillinmathgaps(tfmdata,key,value)
+ local mathgaps = mathematics.gaps
+ local characters = tfmdata.characters
+ local descriptions = tfmdata.descriptions
+ for gap, original in next, mathgaps do
+ if characters[original] and not characters[gap] then
+ characters [gap] = characters [original]
+ descriptions[gap] = descriptions[original]
+ end
+ end
+end
+
+registerotffeature {
+ name = "mathgaps",
+ description = "plug gaps in math alphabets",
+ comment = "regular document sources should not depend on this",
+ manipulators = {
+ base = fillinmathgaps,
+ node = fillinmathgaps,
+ }
+}
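+
+-- usage sketch (assumption, not taken from this file): once registered, the feature
+-- can be enabled like any other font feature, e.g.
+--
+-- \definefontfeature[mathextra][mathgaps=yes]
+--
+-- after which a font that provides U+210E but lacks U+1D455 still renders the latter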
+
+-- we could use one level less and have tf etc be tables directly but the
+-- following approach permits easier remapping of a-z, A-Z and 0-9 to
+-- fallbacks; symbols is currently mostly greek
+
+local function todigit(n) local t = { } for i=0, 9 do t[0x00030+i] = n+i end return t end
+local function toupper(n) local t = { } for i=0,25 do t[0x00041+i] = n+i end return t end
+local function tolower(n) local t = { } for i=0,25 do t[0x00061+i] = n+i end return t end
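+
+-- for illustration (not in the original): each helper builds a plain offset table
+-- from ascii onto a unicode math alphabet, for instance
+--
+-- toupper(0x1D400)[0x00041] == 0x1D400 -- A -> MATHEMATICAL BOLD CAPITAL A
+-- tolower(0x1D41A)[0x0007A] == 0x1D433 -- z -> MATHEMATICAL BOLD SMALL Z
+-- todigit(0x1D7CE)[0x00039] == 0x1D7D7 -- 9 -> MATHEMATICAL BOLD DIGIT NINE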
+
+local regular_tf = {
+ digits = todigit(0x00030),
+ ucletters = toupper(0x00041),
+ lcletters = tolower(0x00061),
+ ucgreek = {
+ [0x0391]=0x0391, [0x0392]=0x0392, [0x0393]=0x0393, [0x0394]=0x0394, [0x0395]=0x0395,
+ [0x0396]=0x0396, [0x0397]=0x0397, [0x0398]=0x0398, [0x0399]=0x0399, [0x039A]=0x039A,
+ [0x039B]=0x039B, [0x039C]=0x039C, [0x039D]=0x039D, [0x039E]=0x039E, [0x039F]=0x039F,
+ [0x03A0]=0x03A0, [0x03A1]=0x03A1, [0x03A3]=0x03A3, [0x03A4]=0x03A4, [0x03A5]=0x03A5,
+ [0x03A6]=0x03A6, [0x03A7]=0x03A7, [0x03A8]=0x03A8, [0x03A9]=0x03A9,
+ },
+ lcgreek = {
+ [0x03B1]=0x03B1, [0x03B2]=0x03B2, [0x03B3]=0x03B3, [0x03B4]=0x03B4, [0x03B5]=0x03B5,
+ [0x03B6]=0x03B6, [0x03B7]=0x03B7, [0x03B8]=0x03B8, [0x03B9]=0x03B9, [0x03BA]=0x03BA,
+ [0x03BB]=0x03BB, [0x03BC]=0x03BC, [0x03BD]=0x03BD, [0x03BE]=0x03BE, [0x03BF]=0x03BF,
+ [0x03C0]=0x03C0, [0x03C1]=0x03C1, [0x03C2]=0x03C2, [0x03C3]=0x03C3, [0x03C4]=0x03C4,
+ [0x03C5]=0x03C5, [0x03C6]=0x03C6, [0x03C7]=0x03C7, [0x03C8]=0x03C8, [0x03C9]=0x03C9,
+ [0x03D1]=0x03D1, [0x03D5]=0x03D5, [0x03D6]=0x03D6, [0x03F0]=0x03F0, [0x03F1]=0x03F1,
+ [0x03F4]=0x03F4, [0x03F5]=0x03F5,
+ },
+ symbols = {
+ [0x2202]=0x2202, [0x2207]=0x2207,
+ },
+}
+
+local regular_it = {
+ digits = regular_tf.digits,
+ ucletters = toupper(0x1D434),
+ lcletters = { -- H
+ [0x00061]=0x1D44E, [0x00062]=0x1D44F, [0x00063]=0x1D450, [0x00064]=0x1D451, [0x00065]=0x1D452,
+ [0x00066]=0x1D453, [0x00067]=0x1D454, [0x00068]=0x0210E, [0x00069]=0x1D456, [0x0006A]=0x1D457,
+ [0x0006B]=0x1D458, [0x0006C]=0x1D459, [0x0006D]=0x1D45A, [0x0006E]=0x1D45B, [0x0006F]=0x1D45C,
+ [0x00070]=0x1D45D, [0x00071]=0x1D45E, [0x00072]=0x1D45F, [0x00073]=0x1D460, [0x00074]=0x1D461,
+ [0x00075]=0x1D462, [0x00076]=0x1D463, [0x00077]=0x1D464, [0x00078]=0x1D465, [0x00079]=0x1D466,
+ [0x0007A]=0x1D467,
+ },
+ ucgreek = {
+ [0x0391]=0x1D6E2, [0x0392]=0x1D6E3, [0x0393]=0x1D6E4, [0x0394]=0x1D6E5, [0x0395]=0x1D6E6,
+ [0x0396]=0x1D6E7, [0x0397]=0x1D6E8, [0x0398]=0x1D6E9, [0x0399]=0x1D6EA, [0x039A]=0x1D6EB,
+ [0x039B]=0x1D6EC, [0x039C]=0x1D6ED, [0x039D]=0x1D6EE, [0x039E]=0x1D6EF, [0x039F]=0x1D6F0,
+ [0x03A0]=0x1D6F1, [0x03A1]=0x1D6F2, [0x03A3]=0x1D6F4, [0x03A4]=0x1D6F5, [0x03A5]=0x1D6F6,
+ [0x03A6]=0x1D6F7, [0x03A7]=0x1D6F8, [0x03A8]=0x1D6F9, [0x03A9]=0x1D6FA,
+ },
+ lcgreek = {
+ [0x03B1]=0x1D6FC, [0x03B2]=0x1D6FD, [0x03B3]=0x1D6FE, [0x03B4]=0x1D6FF, [0x03B5]=0x1D700,
+ [0x03B6]=0x1D701, [0x03B7]=0x1D702, [0x03B8]=0x1D703, [0x03B9]=0x1D704, [0x03BA]=0x1D705,
+ [0x03BB]=0x1D706, [0x03BC]=0x1D707, [0x03BD]=0x1D708, [0x03BE]=0x1D709, [0x03BF]=0x1D70A,
+ [0x03C0]=0x1D70B, [0x03C1]=0x1D70C, [0x03C2]=0x1D70D, [0x03C3]=0x1D70E, [0x03C4]=0x1D70F,
+ [0x03C5]=0x1D710, [0x03C6]=0x1D711, [0x03C7]=0x1D712, [0x03C8]=0x1D713, [0x03C9]=0x1D714,
+ [0x03D1]=0x1D717, [0x03D5]=0x1D719, [0x03D6]=0x1D71B, [0x03F0]=0x1D718, [0x03F1]=0x1D71A,
+ [0x03F4]=0x1D6F3, [0x03F5]=0x1D716,
+ },
+ symbols = {
+ [0x2202]=0x1D715, [0x2207]=0x1D6FB,
+ },
+}
+
+local regular_bf = {
+ digits = todigit(0x1D7CE),
+ ucletters = toupper(0x1D400),
+ lcletters = tolower(0x1D41A),
+ ucgreek = {
+ [0x0391]=0x1D6A8, [0x0392]=0x1D6A9, [0x0393]=0x1D6AA, [0x0394]=0x1D6AB, [0x0395]=0x1D6AC,
+ [0x0396]=0x1D6AD, [0x0397]=0x1D6AE, [0x0398]=0x1D6AF, [0x0399]=0x1D6B0, [0x039A]=0x1D6B1,
+ [0x039B]=0x1D6B2, [0x039C]=0x1D6B3, [0x039D]=0x1D6B4, [0x039E]=0x1D6B5, [0x039F]=0x1D6B6,
+ [0x03A0]=0x1D6B7, [0x03A1]=0x1D6B8, [0x03A3]=0x1D6BA, [0x03A4]=0x1D6BB, [0x03A5]=0x1D6BC,
+ [0x03A6]=0x1D6BD, [0x03A7]=0x1D6BE, [0x03A8]=0x1D6BF, [0x03A9]=0x1D6C0,
+ },
+ lcgreek = {
+ [0x03B1]=0x1D6C2, [0x03B2]=0x1D6C3, [0x03B3]=0x1D6C4, [0x03B4]=0x1D6C5, [0x03B5]=0x1D6C6,
+ [0x03B6]=0x1D6C7, [0x03B7]=0x1D6C8, [0x03B8]=0x1D6C9, [0x03B9]=0x1D6CA, [0x03BA]=0x1D6CB,
+ [0x03BB]=0x1D6CC, [0x03BC]=0x1D6CD, [0x03BD]=0x1D6CE, [0x03BE]=0x1D6CF, [0x03BF]=0x1D6D0,
+ [0x03C0]=0x1D6D1, [0x03C1]=0x1D6D2, [0x03C2]=0x1D6D3, [0x03C3]=0x1D6D4, [0x03C4]=0x1D6D5,
+ [0x03C5]=0x1D6D6, [0x03C6]=0x1D6D7, [0x03C7]=0x1D6D8, [0x03C8]=0x1D6D9, [0x03C9]=0x1D6DA,
+ [0x03D1]=0x1D6DD, [0x03D5]=0x1D6DF, [0x03D6]=0x1D6E1, [0x03F0]=0x1D6DE, [0x03F1]=0x1D6E0,
+ [0x03F4]=0x1D6B9, [0x03F5]=0x1D6DC,
+ },
+ symbols = {
+ [0x2202]=0x1D6DB, [0x2207]=0x1D6C1,
+ },
+}
+
+local regular_bi = {
+ digits = regular_bf.digits,
+ ucletters = toupper(0x1D468),
+ lcletters = tolower(0x1D482),
+ ucgreek = {
+ [0x0391]=0x1D71C, [0x0392]=0x1D71D, [0x0393]=0x1D71E, [0x0394]=0x1D71F, [0x0395]=0x1D720,
+ [0x0396]=0x1D721, [0x0397]=0x1D722, [0x0398]=0x1D723, [0x0399]=0x1D724, [0x039A]=0x1D725,
+ [0x039B]=0x1D726, [0x039C]=0x1D727, [0x039D]=0x1D728, [0x039E]=0x1D729, [0x039F]=0x1D72A,
+ [0x03A0]=0x1D72B, [0x03A1]=0x1D72C, [0x03A3]=0x1D72E, [0x03A4]=0x1D72F, [0x03A5]=0x1D730,
+ [0x03A6]=0x1D731, [0x03A7]=0x1D732, [0x03A8]=0x1D733, [0x03A9]=0x1D734,
+ },
+ lcgreek = {
+ [0x03B1]=0x1D736, [0x03B2]=0x1D737, [0x03B3]=0x1D738, [0x03B4]=0x1D739, [0x03B5]=0x1D73A,
+ [0x03B6]=0x1D73B, [0x03B7]=0x1D73C, [0x03B8]=0x1D73D, [0x03B9]=0x1D73E, [0x03BA]=0x1D73F,
+ [0x03BB]=0x1D740, [0x03BC]=0x1D741, [0x03BD]=0x1D742, [0x03BE]=0x1D743, [0x03BF]=0x1D744,
+ [0x03C0]=0x1D745, [0x03C1]=0x1D746, [0x03C2]=0x1D747, [0x03C3]=0x1D748, [0x03C4]=0x1D749,
+ [0x03C5]=0x1D74A, [0x03C6]=0x1D74B, [0x03C7]=0x1D74C, [0x03C8]=0x1D74D, [0x03C9]=0x1D74E,
+ [0x03D1]=0x1D751, [0x03D5]=0x1D753, [0x03D6]=0x1D755, [0x03F0]=0x1D752, [0x03F1]=0x1D754,
+ [0x03F4]=0x1D72D, [0x03F5]=0x1D750,
+ },
+ symbols = {
+ [0x2202]=0x1D74F, [0x2207]=0x1D735,
+ },
+}
+
+local regular = {
+ tf = regular_tf,
+ it = regular_it,
+ bf = regular_bf,
+ bi = regular_bi,
+}
+
+local sansserif_tf = {
+ digits = todigit(0x1D7E2),
+ ucletters = toupper(0x1D5A0),
+ lcletters = tolower(0x1D5BA),
+ lcgreek = regular_tf.lcgreek,
+ ucgreek = regular_tf.ucgreek,
+ symbols = regular_tf.symbols,
+}
+
+local sansserif_it = {
+ digits = regular_tf.digits,
+ ucletters = toupper(0x1D608),
+ lcletters = tolower(0x1D622),
+ lcgreek = regular_tf.lcgreek,
+ ucgreek = regular_tf.ucgreek,
+ symbols = regular_tf.symbols,
+}
+
+local sansserif_bf = {
+ digits = todigit(0x1D7EC),
+ ucletters = toupper(0x1D5D4),
+ lcletters = tolower(0x1D5EE),
+ ucgreek = {
+ [0x0391]=0x1D756, [0x0392]=0x1D757, [0x0393]=0x1D758, [0x0394]=0x1D759, [0x0395]=0x1D75A,
+ [0x0396]=0x1D75B, [0x0397]=0x1D75C, [0x0398]=0x1D75D, [0x0399]=0x1D75E, [0x039A]=0x1D75F,
+ [0x039B]=0x1D760, [0x039C]=0x1D761, [0x039D]=0x1D762, [0x039E]=0x1D763, [0x039F]=0x1D764,
+ [0x03A0]=0x1D765, [0x03A1]=0x1D766, [0x03A3]=0x1D768, [0x03A4]=0x1D769, [0x03A5]=0x1D76A,
+ [0x03A6]=0x1D76B, [0x03A7]=0x1D76C, [0x03A8]=0x1D76D, [0x03A9]=0x1D76E,
+ },
+ lcgreek = {
+ [0x03B1]=0x1D770, [0x03B2]=0x1D771, [0x03B3]=0x1D772, [0x03B4]=0x1D773, [0x03B5]=0x1D774,
+ [0x03B6]=0x1D775, [0x03B7]=0x1D776, [0x03B8]=0x1D777, [0x03B9]=0x1D778, [0x03BA]=0x1D779,
+ [0x03BB]=0x1D77A, [0x03BC]=0x1D77B, [0x03BD]=0x1D77C, [0x03BE]=0x1D77D, [0x03BF]=0x1D77E,
+ [0x03C0]=0x1D77F, [0x03C1]=0x1D780, [0x03C2]=0x1D781, [0x03C3]=0x1D782, [0x03C4]=0x1D783,
+ [0x03C5]=0x1D784, [0x03C6]=0x1D785, [0x03C7]=0x1D786, [0x03C8]=0x1D787, [0x03C9]=0x1D788,
+ [0x03D1]=0x1D78B, [0x03D5]=0x1D78D, [0x03D6]=0x1D78F, [0x03F0]=0x1D78C, [0x03F1]=0x1D78E,
+ [0x03F4]=0x1D767, [0x03F5]=0x1D78A,
+ },
+ symbols = {
+ [0x2202]=0x1D789, [0x2207]=0x1D76F,
+ },
+}
+
+local sansserif_bi = {
+ digits = sansserif_bf.digits,
+ ucletters = toupper(0x1D63C),
+ lcletters = tolower(0x1D656),
+ ucgreek = {
+ [0x0391]=0x1D790, [0x0392]=0x1D791, [0x0393]=0x1D792, [0x0394]=0x1D793, [0x0395]=0x1D794,
+ [0x0396]=0x1D795, [0x0397]=0x1D796, [0x0398]=0x1D797, [0x0399]=0x1D798, [0x039A]=0x1D799,
+ [0x039B]=0x1D79A, [0x039C]=0x1D79B, [0x039D]=0x1D79C, [0x039E]=0x1D79D, [0x039F]=0x1D79E,
+ [0x03A0]=0x1D79F, [0x03A1]=0x1D7A0, [0x03A3]=0x1D7A2, [0x03A4]=0x1D7A3, [0x03A5]=0x1D7A4,
+ [0x03A6]=0x1D7A5, [0x03A7]=0x1D7A6, [0x03A8]=0x1D7A7, [0x03A9]=0x1D7A8,
+ },
+ lcgreek = {
+ [0x03B1]=0x1D7AA, [0x03B2]=0x1D7AB, [0x03B3]=0x1D7AC, [0x03B4]=0x1D7AD, [0x03B5]=0x1D7AE,
+ [0x03B6]=0x1D7AF, [0x03B7]=0x1D7B0, [0x03B8]=0x1D7B1, [0x03B9]=0x1D7B2, [0x03BA]=0x1D7B3,
+ [0x03BB]=0x1D7B4, [0x03BC]=0x1D7B5, [0x03BD]=0x1D7B6, [0x03BE]=0x1D7B7, [0x03BF]=0x1D7B8,
+ [0x03C0]=0x1D7B9, [0x03C1]=0x1D7BA, [0x03C2]=0x1D7BB, [0x03C3]=0x1D7BC, [0x03C4]=0x1D7BD,
+ [0x03C5]=0x1D7BE, [0x03C6]=0x1D7BF, [0x03C7]=0x1D7C0, [0x03C8]=0x1D7C1, [0x03C9]=0x1D7C2,
+ [0x03D1]=0x1D7C5, [0x03D5]=0x1D7C7, [0x03D6]=0x1D7C9, [0x03F0]=0x1D7C6, [0x03F1]=0x1D7C8,
+ [0x03F4]=0x1D7A1, [0x03F5]=0x1D7C4,
+ },
+ symbols = {
+ [0x2202]=0x1D7C3, [0x2207]=0x1D7A9,
+ },
+}
+
+local sansserif = {
+ tf = sansserif_tf,
+ it = sansserif_it,
+ bf = sansserif_bf,
+ bi = sansserif_bi,
+}
+
+local monospaced_tf = {
+ digits = todigit(0x1D7F6),
+ ucletters = toupper(0x1D670),
+ lcletters = tolower(0x1D68A),
+ lcgreek = sansserif_tf.lcgreek,
+ ucgreek = sansserif_tf.ucgreek,
+ symbols = sansserif_tf.symbols,
+}
+
+local monospaced = {
+ tf = monospaced_tf,
+ it = sansserif_tf,
+ bf = sansserif_tf,
+ bi = sansserif_bf,
+}
+
+local blackboard_tf = {
+ digits = todigit(0x1D7D8),
+ ucletters = { -- C H N P Q R Z
+ [0x00041]=0x1D538, [0x00042]=0x1D539, [0x00043]=0x02102, [0x00044]=0x1D53B, [0x00045]=0x1D53C,
+ [0x00046]=0x1D53D, [0x00047]=0x1D53E, [0x00048]=0x0210D, [0x00049]=0x1D540, [0x0004A]=0x1D541,
+ [0x0004B]=0x1D542, [0x0004C]=0x1D543, [0x0004D]=0x1D544, [0x0004E]=0x02115, [0x0004F]=0x1D546,
+ [0x00050]=0x02119, [0x00051]=0x0211A, [0x00052]=0x0211D, [0x00053]=0x1D54A, [0x00054]=0x1D54B,
+ [0x00055]=0x1D54C, [0x00056]=0x1D54D, [0x00057]=0x1D54E, [0x00058]=0x1D54F, [0x00059]=0x1D550,
+ [0x0005A]=0x02124,
+ },
+ lcletters = tolower(0x1D552),
+ lcgreek = { -- gamma pi
+ [0x03B3]=0x0213C, [0x03C0]=0x0213D,
+ },
+ ucgreek = { -- Gamma pi
+ [0x0393]=0x0213E, [0x03A0]=0x0213F,
+ },
+ symbols = { -- sum
+ [0x2211]=0x02140,
+ },
+}
+
+blackboard_tf.lcgreek = merged(regular_tf.lcgreek, blackboard_tf.lcgreek)
+blackboard_tf.ucgreek = merged(regular_tf.ucgreek, blackboard_tf.ucgreek)
+blackboard_tf.symbols = merged(regular_tf.symbols, blackboard_tf.symbols)
+
+local blackboard = {
+ tf = blackboard_tf,
+ it = blackboard_tf,
+ bf = blackboard_tf,
+ bi = blackboard_tf,
+}
+
+local fraktur_tf = {
+ digits = regular_tf.digits,
+ ucletters = { -- C H I R Z
+ [0x00041]=0x1D504, [0x00042]=0x1D505, [0x00043]=0x0212D, [0x00044]=0x1D507, [0x00045]=0x1D508,
+ [0x00046]=0x1D509, [0x00047]=0x1D50A, [0x00048]=0x0210C, [0x00049]=0x02111, [0x0004A]=0x1D50D,
+ [0x0004B]=0x1D50E, [0x0004C]=0x1D50F, [0x0004D]=0x1D510, [0x0004E]=0x1D511, [0x0004F]=0x1D512,
+ [0x00050]=0x1D513, [0x00051]=0x1D514, [0x00052]=0x0211C, [0x00053]=0x1D516, [0x00054]=0x1D517,
+ [0x00055]=0x1D518, [0x00056]=0x1D519, [0x00057]=0x1D51A, [0x00058]=0x1D51B, [0x00059]=0x1D51C,
+ [0x0005A]=0x02128,
+ },
+ lcletters = tolower(0x1D51E),
+ lcgreek = regular_tf.lcgreek,
+ ucgreek = regular_tf.ucgreek,
+ symbols = regular_tf.symbols,
+}
+
+local fraktur_bf = {
+ digits = regular_bf.digits,
+ ucletters = toupper(0x1D56C),
+ lcletters = tolower(0x1D586),
+ lcgreek = regular_bf.lcgreek,
+ ucgreek = regular_bf.ucgreek,
+ symbols = regular_bf.symbols,
+}
+
+local fraktur = { -- ok
+ tf = fraktur_tf,
+ bf = fraktur_bf,
+ it = fraktur_tf,
+ bi = fraktur_bf,
+}
+
+local script_tf = {
+ digits = regular_tf.digits,
+ ucletters = { -- B E F H I L M R -- P 2118
+ [0x00041]=0x1D49C, [0x00042]=0x0212C, [0x00043]=0x1D49E, [0x00044]=0x1D49F, [0x00045]=0x02130,
+ [0x00046]=0x02131, [0x00047]=0x1D4A2, [0x00048]=0x0210B, [0x00049]=0x02110, [0x0004A]=0x1D4A5,
+ [0x0004B]=0x1D4A6, [0x0004C]=0x02112, [0x0004D]=0x02133, [0x0004E]=0x1D4A9, [0x0004F]=0x1D4AA,
+ [0x00050]=0x1D4AB, [0x00051]=0x1D4AC, [0x00052]=0x0211B, [0x00053]=0x1D4AE, [0x00054]=0x1D4AF,
+ [0x00055]=0x1D4B0, [0x00056]=0x1D4B1, [0x00057]=0x1D4B2, [0x00058]=0x1D4B3, [0x00059]=0x1D4B4,
+ [0x0005A]=0x1D4B5,
+ },
+ lcletters = { -- E G O -- L 2113
+ [0x00061]=0x1D4B6, [0x00062]=0x1D4B7, [0x00063]=0x1D4B8, [0x00064]=0x1D4B9, [0x00065]=0x0212F,
+ [0x00066]=0x1D4BB, [0x00067]=0x0210A, [0x00068]=0x1D4BD, [0x00069]=0x1D4BE, [0x0006A]=0x1D4BF,
+ [0x0006B]=0x1D4C0, [0x0006C]=0x1D4C1, [0x0006D]=0x1D4C2, [0x0006E]=0x1D4C3, [0x0006F]=0x02134,
+ [0x00070]=0x1D4C5, [0x00071]=0x1D4C6, [0x00072]=0x1D4C7, [0x00073]=0x1D4C8, [0x00074]=0x1D4C9,
+ [0x00075]=0x1D4CA, [0x00076]=0x1D4CB, [0x00077]=0x1D4CC, [0x00078]=0x1D4CD, [0x00079]=0x1D4CE,
+ [0x0007A]=0x1D4CF,
+ },
+ lcgreek = regular_tf.lcgreek,
+ ucgreek = regular_tf.ucgreek,
+ symbols = regular_tf.symbols,
+}
+
+local script_bf = {
+ digits = regular_bf.digits,
+ ucletters = toupper(0x1D4D0),
+ lcletters = tolower(0x1D4EA),
+ lcgreek = regular_bf.lcgreek,
+ ucgreek = regular_bf.ucgreek,
+ symbols = regular_bf.symbols,
+}
+
+local script = {
+ tf = script_tf,
+ bf = script_bf,
+ it = script_tf,
+ bi = script_bf,
+}
+
+local alphabets = allocate {
+ regular = regular,
+ sansserif = sansserif,
+ monospaced = monospaced,
+ blackboard = blackboard,
+ fraktur = fraktur,
+ script = script,
+}
+
+mathematics.alphabets = alphabets
+
+local boldmap = { }
+
+local function remap(tf,bf)
+ for _, alphabet in next, alphabets do
+ local tfdata = alphabet[tf]
+ local bfdata = alphabet[bf]
+ if tfdata then
+ for k, tfd in next, tfdata do
+ if type(tfd) == "table" then
+ local bfd = bfdata[k]
+ if bfd then
+ for n, u in next, tfd do
+ local bn = bfd[n]
+ if bn then
+ boldmap[u] = bn
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
+remap("tf","bf")
+remap("it","bi")
+
+mathematics.boldmap = boldmap
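+
+-- sketch (not in the original): boldmap now maps plain and italic shapes onto their
+-- bold counterparts where the alphabets provide them, e.g.
+--
+-- boldmap[0x00041] == 0x1D400 -- A        -> bold A
+-- boldmap[0x1D434] == 0x1D468 -- italic A -> bold italic A
+--
+-- it is used by the family handler in math-noa.lua for the pseudobold families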
+
+local mathremap = allocate { }
+
+for alphabet, styles in next, alphabets do -- per 9/6/2011 we also have attr for missing
+ for style, data in next, styles do
+ -- let's keep the long names (for tracing)
+ local n = #mathremap + 1
+ data.attribute = n
+ data.alphabet = alphabet
+ data.style = style
+ mathremap[n] = data
+ end
+end
+
+mathematics.mapremap = mathremap
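+
+-- sketch (not in the original): every mathremap entry is one of the style vectors
+-- above, extended with .alphabet, .style and .attribute; tostyle and toname below
+-- rely on exactly these fields, and since the numbers depend on table traversal
+-- order they are only meaningful within one run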
+
+-- beware, these are shared tables (no problem since they're not
+-- in unicode)
+
+alphabets.tt = monospaced
+alphabets.ss = sansserif
+alphabets.rm = regular
+alphabets.bb = blackboard
+alphabets.fr = fraktur
+alphabets.sr = script
+
+alphabets.serif = regular
+alphabets.type = monospaced
+alphabets.teletype = monospaced
+
+regular.normal = regular_tf
+regular.italic = regular_it
+regular.bold = regular_bf
+regular.bolditalic = regular_bi
+
+sansserif.normal = sansserif_tf
+sansserif.italic = sansserif_it
+sansserif.bold = sansserif_bf
+sansserif.bolditalic = sansserif_bi
+
+monospaced.normal = monospaced_tf
+monospaced.italic = monospaced_it
+monospaced.bold = monospaced_bf
+monospaced.bolditalic = monospaced_bi
+
+function mathematics.tostyle(attribute)
+ local r = mathremap[attribute]
+ return r and r.style or "tf"
+end
+
+function mathematics.toname(attribute)
+ local r = mathremap[attribute]
+ return r and r.alphabet or "regular"
+end
+
+-- of course we could do some div/mod trickery instead
+
+local mathalphabet = attributes.private("mathalphabet")
+
+function mathematics.getboth(alphabet,style)
+ local data = alphabet and alphabets[alphabet] or regular
+ data = data[style or "tf"] or data.tf
+ return data and data.attribute
+end
+
+function mathematics.getstyle(style)
+ local r = mathremap[texattribute[mathalphabet]]
+ local alphabet = r and r.alphabet or "regular"
+ local data = alphabets[alphabet][style]
+ return data and data.attribute
+end
+
+function mathematics.syncboth(alphabet,style)
+ local data = alphabet and alphabets[alphabet] or regular
+ data = style and data[style] or data.tf
+ texattribute[mathalphabet] = data and data.attribute or texattribute[mathalphabet]
+end
+
+function mathematics.syncstyle(style)
+ local r = mathremap[texattribute[mathalphabet]]
+ local alphabet = r and r.alphabet or "regular"
+ local data = alphabets[alphabet][style]
+ texattribute[mathalphabet] = data and data.attribute or texattribute[mathalphabet]
+end
+
+function mathematics.syncname(alphabet)
+ -- local r = mathremap[mathalphabet]
+ local r = mathremap[texattribute[mathalphabet]]
+ local style = r and r.style or "tf"
+ local data = alphabets[alphabet][style]
+ texattribute[mathalphabet] = data and data.attribute or texattribute[mathalphabet]
+end
+
+local islcgreek = regular_tf.lcgreek
+local isucgreek = regular_tf.ucgreek
+local issygreek = regular_tf.symbols
+local isgreek = merged(islcgreek,isucgreek,issygreek)
+
+local greekremapping = {
+ [1] = { what = "unchanged" }, -- upright
+ [2] = { what = "upright", it = "tf", bi = "bf" }, -- upright
+ [3] = { what = "italic", tf = "it", bf = "bi" }, -- italic
+}
+
+local usedremap = { }
+
+local function resolver(map)
+ return function (t,k)
+ local v =
+ map.digits [k] or
+ map.lcletters[k] or map.ucletters[k] or
+ map.lcgreek [k] or map.ucgreek [k] or
+ map.symbols [k] or k
+ t[k] = v
+ return v
+ end
+end
+
+for k, v in next, mathremap do
+ local t = { }
+ setmetatableindex(t,resolver(v))
+ usedremap[k] = t
+end
+
+local function remapgreek(mathalphabet,how,detail,char)
+ local r = mathremap[mathalphabet] -- what if 0
+ local alphabet = r and r.alphabet or "regular"
+ local style = r and r.style or "tf"
+ local remapping = greekremapping[how]
+ if trace_greek then
+ report_remapping("greek %s, %s char %C, alphabet %a %a, method %a","before",detail,char,alphabet,style,remapping.what)
+ end
+ local newstyle = remapping[style]
+ if newstyle then
+ local data = alphabets[alphabet][newstyle] -- always something
+ mathalphabet = data and data.attribute or mathalphabet
+ style = newstyle
+ end
+ if trace_greek then
+ report_remapping("greek %s, %s char %C, alphabet %a %a, method %a","after",detail,char,alphabet,style,remapping.what)
+ end
+ return mathalphabet, style
+end
+
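+-- a note on the packed mathgreek value (sketch, the packing itself happens at the
+-- tex end): three 4 bit fields select a greekremapping mode per class,
+--
+-- bits 0-3 : uppercase   bits 4-7 : lowercase   bits 8-11 : symbols
+--
+-- so for instance 2 + 3*16 means upright uppercase and italic lowercase
+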
+function mathematics.remapalphabets(char,mathalphabet,mathgreek)
+ if not mathalphabet then
+ return
+ end
+ if mathgreek and mathgreek > 0 then
+ if not isgreek[char] then
+ -- nothing needed
+ elseif islcgreek[char] then
+ local lc = extract(mathgreek,4,4)
+ if lc > 1 then
+ mathalphabet = remapgreek(mathalphabet,lc,"lowercase",char)
+ end
+ elseif isucgreek[char] then
+ local uc = extract(mathgreek,0,4)
+ if uc > 1 then
+ mathalphabet = remapgreek(mathalphabet,uc,"uppercase",char)
+ end
+ elseif issygreek[char] then
+ local sy = extract(mathgreek,8,4)
+ if sy > 1 then
+ mathalphabet = remapgreek(mathalphabet,sy,"symbol",char)
+ end
+ end
+ end
+ if mathalphabet > 0 then
+ local remap = usedremap[mathalphabet] -- redundant check
+ if remap then
+ local newchar = remap[char]
+ return newchar ~= char and newchar
+ end
+ end
+ -- return nil
+end
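+
+-- usage sketch (assumption, the real caller is the noad processor in math-noa.lua):
+--
+-- local attr = mathematics.getboth("regular","it")
+-- local new = mathematics.remapalphabets(0x00041,attr,0) -- A -> U+1D434 (italic A)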
+
+-- begin of experiment
+
+local fallback = {
+ tf = "bf",
+ it = "bi",
+ bf = "tf",
+ bi = "it",
+}
+
+function mathematics.fallbackstyleattr(attribute)
+ local r = mathremap[attribute]
+ local alphabet = r.alphabet or "regular"
+ local style = r.style or "tf"
+ local fback = fallback[style]
+ if fback then
+ local data = alphabets[alphabet][fback]
+ if data then
+ local attr = data.attribute
+ return attribute ~= attr and attr
+ end
+ end
+end
+
+-- end of experiment
+
+local function checkedcopy(characters,child,parent)
+ for k, v in next, child do
+ if not characters[v] then
+ characters[v] = characters[parent[k]]
+ end
+ end
+end
+
+function mathematics.addfallbacks(main)
+ local characters = main.characters
+ checkedcopy(characters,regular.bf.ucgreek,regular.tf.ucgreek)
+ checkedcopy(characters,regular.bf.lcgreek,regular.tf.lcgreek)
+ checkedcopy(characters,regular.bi.ucgreek,regular.it.ucgreek)
+ checkedcopy(characters,regular.bi.lcgreek,regular.it.lcgreek)
+end
+
+-- interface
+
+commands.setmathattribute = mathematics.syncboth
+commands.setmathalphabet = mathematics.syncname
+commands.setmathstyle = mathematics.syncstyle
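+
+-- usage sketch (assumption, not part of this file): at the tex end these are meant
+-- to be reached via \ctxcommand, e.g.
+--
+-- \ctxcommand{setmathattribute("blackboard","bf")}
+-- \ctxcommand{setmathalphabet("script")}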
diff --git a/tex/context/base/math-noa.lua b/tex/context/base/math-noa.lua
index b309ba077..51c89ea77 100644
--- a/tex/context/base/math-noa.lua
+++ b/tex/context/base/math-noa.lua
@@ -1,1192 +1,1192 @@
-if not modules then modules = { } end modules ['math-noa'] = {
- version = 1.001,
- comment = "companion to math-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- beware: this is experimental code and there will be a more
--- generic (attribute value driven) interface too but for the
--- moment this is ok
---
--- we will also make dedicated processors (faster)
---
--- beware: names will change as we will make noads.xxx.handler i.e. xxx
--- subnamespaces
-
--- 20D6 -> 2190
--- 20D7 -> 2192
-
-local utfchar, utfbyte = utf.char, utf.byte
-local formatters = string.formatters
-
-local fonts, nodes, node, mathematics = fonts, nodes, node, mathematics
-
-local otf = fonts.handlers.otf
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
-
-local trace_remapping = false trackers.register("math.remapping", function(v) trace_remapping = v end)
-local trace_processing = false trackers.register("math.processing", function(v) trace_processing = v end)
-local trace_analyzing = false trackers.register("math.analyzing", function(v) trace_analyzing = v end)
-local trace_normalizing = false trackers.register("math.normalizing", function(v) trace_normalizing = v end)
-local trace_collapsing = false trackers.register("math.collapsing", function(v) trace_collapsing = v end)
-local trace_goodies = false trackers.register("math.goodies", function(v) trace_goodies = v end)
-local trace_variants = false trackers.register("math.variants", function(v) trace_variants = v end)
-local trace_alternates = false trackers.register("math.alternates", function(v) trace_alternates = v end)
-local trace_italics = false trackers.register("math.italics", function(v) trace_italics = v end)
-local trace_families = false trackers.register("math.families", function(v) trace_families = v end)
-
-local check_coverage = true directives.register("math.checkcoverage", function(v) check_coverage = v end)
-
-local report_processing = logs.reporter("mathematics","processing")
-local report_remapping = logs.reporter("mathematics","remapping")
-local report_normalizing = logs.reporter("mathematics","normalizing")
-local report_collapsing = logs.reporter("mathematics","collapsing")
-local report_goodies = logs.reporter("mathematics","goodies")
-local report_variants = logs.reporter("mathematics","variants")
-local report_alternates = logs.reporter("mathematics","alternates")
-local report_italics = logs.reporter("mathematics","italics")
-local report_families = logs.reporter("mathematics","families")
-
-local a_mathrendering = attributes.private("mathrendering")
-local a_exportstatus = attributes.private("exportstatus")
-
-local mlist_to_hlist = node.mlist_to_hlist
-local font_of_family = node.family_font
-local insert_node_after = node.insert_after
-local insert_node_before = node.insert_before
-local free_node = node.free
-local new_node = node.new -- todo: pool: math_noad math_sub
-
-local new_kern = nodes.pool.kern
-local new_rule = nodes.pool.rule
-local concat_nodes = nodes.concat
-
-local topoints = number.points
-
-local fonthashes = fonts.hashes
-local fontdata = fonthashes.identifiers
-local fontcharacters = fonthashes.characters
-local fontproperties = fonthashes.properties
-local fontitalics = fonthashes.italics
-local fontemwidths = fonthashes.emwidths
-local fontexheights = fonthashes.exheights
-
-local variables = interfaces.variables
-local texattribute = tex.attribute
-local unsetvalue = attributes.unsetvalue
-
-local chardata = characters.data
-
-noads = noads or { } -- todo: only here
-local noads = noads
-
-noads.processors = noads.processors or { }
-local processors = noads.processors
-
-noads.handlers = noads.handlers or { }
-local handlers = noads.handlers
-
-local tasks = nodes.tasks
-
-local nodecodes = nodes.nodecodes
-local noadcodes = nodes.noadcodes
-
-local noad_ord = noadcodes.ord
-local noad_rel = noadcodes.rel
-local noad_punct = noadcodes.punct
-local noad_opdisplaylimits= noadcodes.opdisplaylimits
-local noad_oplimits = noadcodes.oplimits
-local noad_opnolimits = noadcodes.opnolimits
-
-local math_noad = nodecodes.noad -- attr nucleus sub sup
-local math_accent = nodecodes.accent -- attr nucleus sub sup accent
-local math_radical = nodecodes.radical -- attr nucleus sub sup left degree
-local math_fraction = nodecodes.fraction -- attr nucleus sub sup left right
-local math_box = nodecodes.subbox -- attr list
-local math_sub = nodecodes.submlist -- attr list
-local math_char = nodecodes.mathchar -- attr fam char
-local math_textchar = nodecodes.mathtextchar -- attr fam char
-local math_delim = nodecodes.delim -- attr small_fam small_char large_fam large_char
-local math_style = nodecodes.style -- attr style
-local math_choice = nodecodes.choice -- attr display text script scriptscript
-local math_fence = nodecodes.fence -- attr subtype
-
-local hlist_code = nodecodes.hlist
-local glyph_code = nodecodes.glyph
-
-local left_fence_code = 1
-
-local function process(start,what,n,parent)
- if n then n = n + 1 else n = 0 end
- while start do
- local id = start.id
- if trace_processing then
- if id == math_noad then
- report_processing("%w%S, class %a",n*2,start,noadcodes[start.subtype])
- elseif id == math_char then
- local char = start.char
- local fam = start.fam
- local font = font_of_family(fam)
- report_processing("%w%S, family %a, font %a, char %a, shape %c",n*2,start,fam,font,char,char)
- else
- report_processing("%w%S",n*2,start)
- end
- end
- local proc = what[id]
- if proc then
- -- report_processing("start processing")
- local done, newstart = proc(start,what,n,parent) -- prev is bugged: or start.prev
- if newstart then
- start = newstart
- -- report_processing("stop processing (new start)")
- else
- -- report_processing("stop processing")
- end
- elseif id == math_char or id == math_textchar or id == math_delim then
- break
- elseif id == math_noad then
- local noad = start.nucleus if noad then process(noad,what,n,start) end -- list
- noad = start.sup if noad then process(noad,what,n,start) end -- list
- noad = start.sub if noad then process(noad,what,n,start) end -- list
- elseif id == math_box or id == math_sub then
- -- local noad = start.list if noad then process(noad,what,n,start) end -- list
- local noad = start.head if noad then process(noad,what,n,start) end -- list
- elseif id == math_fraction then
- local noad = start.num if noad then process(noad,what,n,start) end -- list
- noad = start.denom if noad then process(noad,what,n,start) end -- list
- noad = start.left if noad then process(noad,what,n,start) end -- delimiter
- noad = start.right if noad then process(noad,what,n,start) end -- delimiter
- elseif id == math_choice then
- local noad = start.display if noad then process(noad,what,n,start) end -- list
- noad = start.text if noad then process(noad,what,n,start) end -- list
- noad = start.script if noad then process(noad,what,n,start) end -- list
- noad = start.scriptscript if noad then process(noad,what,n,start) end -- list
- elseif id == math_fence then
- local noad = start.delim if noad then process(noad,what,n,start) end -- delimiter
- elseif id == math_radical then
- local noad = start.nucleus if noad then process(noad,what,n,start) end -- list
- noad = start.sup if noad then process(noad,what,n,start) end -- list
- noad = start.sub if noad then process(noad,what,n,start) end -- list
- noad = start.left if noad then process(noad,what,n,start) end -- delimiter
- noad = start.degree if noad then process(noad,what,n,start) end -- list
- elseif id == math_accent then
- local noad = start.nucleus if noad then process(noad,what,n,start) end -- list
- noad = start.sup if noad then process(noad,what,n,start) end -- list
- noad = start.sub if noad then process(noad,what,n,start) end -- list
- noad = start.accent if noad then process(noad,what,n,start) end -- list
- noad = start.bot_accent if noad then process(noad,what,n,start) end -- list
- elseif id == math_style then
- -- has a next
- else
- -- glue, penalty, etc
- end
- start = start.next
- end
-end
-
-local function processnoads(head,actions,banner)
- if trace_processing then
- report_processing("start %a",banner)
- process(head,actions)
- report_processing("stop %a",banner)
- else
- process(head,actions)
- end
-end
-
-noads.process = processnoads
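-
--- sketch (not in the original): an action table passed to processnoads is simply
--- indexed by node id; the processors below all follow this pattern
---
--- local myactions = { }
--- myactions[math_char] = function(pointer,what,n,parent) pointer.char = 0x2022 end
--- processnoads(head,myactions,"demo pass")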
-
--- experiment (when not present fall back to fam 0) -- needs documentation
-
--- 0-2 regular
--- 3-5 bold
--- 6-8 pseudobold
-
--- this could best be integrated in the remapper, and if we run into problems, we
--- might as well do this
-
-local families = { }
-local a_mathfamily = attributes.private("mathfamily")
-local boldmap = mathematics.boldmap
-
-local familymap = { [0] =
- "regular",
- "regular",
- "regular",
- "bold",
- "bold",
- "bold",
- "pseudobold",
- "pseudobold",
- "pseudobold",
-}
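-
--- sketch (not in the original): with attribute 7 ("pseudobold") a math char A in
--- family 0 gets its shape replaced through mathematics.boldmap (U+0041 -> U+1D400)
--- and is then typeset from family 4, the bold one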
-
-families[math_char] = function(pointer)
- if pointer.fam == 0 then
- local a = pointer[a_mathfamily]
- if a and a > 0 then
- pointer[a_mathfamily] = 0
- if a > 5 then
- local char = pointer.char
- local bold = boldmap[char]
- local newa = a - 3
- if bold then
- pointer[a_exportstatus] = char
- pointer.char = bold
- if trace_families then
- report_families("replacing %C by bold %C, family %s with remap %s becomes %s with remap %s",char,bold,a,familymap[a],newa,familymap[newa])
- end
- else
- if trace_families then
- report_families("no bold replacement for %C, family %s with remap %s becomes %s with remap %s",char,a,familymap[a],newa,familymap[newa])
- end
- end
- pointer.fam = newa
- else
- if trace_families then
- local char = pointer.char
- report_families("family of %C becomes %s with remap %s",char,a,familymap[a])
- end
- pointer.fam = a
- end
- else
- -- pointer.fam = 0
- end
- end
-end
-
-families[math_delim] = function(pointer)
- if pointer.small_fam == 0 then
- local a = pointer[a_mathfamily]
- if a and a > 0 then
- pointer[a_mathfamily] = 0
- if a > 5 then
- -- no bold delimiters in unicode
- a = a - 3
- end
- pointer.small_fam = a
- pointer.large_fam = a
- else
- pointer.small_fam = 0
- pointer.large_fam = 0
- end
- end
-end
-
-families[math_textchar] = families[math_char]
-
-function handlers.families(head,style,penalties)
- processnoads(head,families,"families")
- return true
-end
-
--- character remapping
-
-local a_mathalphabet = attributes.private("mathalphabet")
-local a_mathgreek = attributes.private("mathgreek")
-
-processors.relocate = { }
-
-local function report_remap(tag,id,old,new,extra)
- report_remapping("remapping %s in font %s from %C to %C%s",tag,id,old,new,extra)
-end
-
-local remapalphabets = mathematics.remapalphabets
-local fallbackstyleattr = mathematics.fallbackstyleattr
-local setnodecolor = nodes.tracers.colors.set
-
-local function checked(pointer)
- local char = pointer.char
- local fam = pointer.fam
- local id = font_of_family(fam)
- local tc = fontcharacters[id]
- if not tc[char] then
- local specials = characters.data[char].specials
- if specials and (specials[1] == "char" or specials[1] == "font") then
- local newchar = specials[#specials]
- if trace_remapping then
- report_remap("fallback",id,char,newchar)
- end
- if trace_analyzing then
- setnodecolor(pointer,"font:isol")
- end
- pointer[a_exportstatus] = char -- testcase: exponentiale
- pointer.char = newchar
- return true
- end
- end
-end
-
-processors.relocate[math_char] = function(pointer)
- local g = pointer[a_mathgreek] or 0
- local a = pointer[a_mathalphabet] or 0
- if a > 0 or g > 0 then
- if a > 0 then
- pointer[a_mathgreek] = 0
- end
- if g > 0 then
- pointer[a_mathalphabet] = 0
- end
- local char = pointer.char
- local newchar = remapalphabets(char,a,g)
- if newchar then
- local fam = pointer.fam
- local id = font_of_family(fam)
- local characters = fontcharacters[id]
- if characters[newchar] then
- if trace_remapping then
- report_remap("char",id,char,newchar)
- end
- if trace_analyzing then
- setnodecolor(pointer,"font:isol")
- end
- pointer.char = newchar
- return true
- else
- local fallback = fallbackstyleattr(a)
- if fallback then
- local newchar = remapalphabets(char,fallback,g)
- if newchar then
- if characters[newchar] then
- if trace_remapping then
- report_remap("char",id,char,newchar," (fallback remapping used)")
- end
- if trace_analyzing then
- setnodecolor(pointer,"font:isol")
- end
- pointer.char = newchar
- return true
- elseif trace_remapping then
- report_remap("char",id,char,newchar," fails (no fallback character)")
- end
- elseif trace_remapping then
- report_remap("char",id,char,newchar," fails (no fallback remap character)")
- end
- elseif trace_remapping then
- report_remap("char",id,char,newchar," fails (no fallback style)")
- end
- end
- end
- end
- if trace_analyzing then
- setnodecolor(pointer,"font:medi")
- end
- if check_coverage then
- return checked(pointer)
- end
-end
-
-processors.relocate[math_textchar] = function(pointer)
- if trace_analyzing then
- setnodecolor(pointer,"font:init")
- end
-end
-
-processors.relocate[math_delim] = function(pointer)
- if trace_analyzing then
- setnodecolor(pointer,"font:fina")
- end
-end
-
-function handlers.relocate(head,style,penalties)
- processnoads(head,processors.relocate,"relocate")
- return true
-end
-
--- rendering (beware, not exported)
-
-processors.render = { }
-
-local rendersets = mathematics.renderings.numbers or { } -- store
-
-processors.render[math_char] = function(pointer)
- local attr = pointer[a_mathrendering]
- if attr and attr > 0 then
- local char = pointer.char
- local renderset = rendersets[attr]
- if renderset then
- local newchar = renderset[char]
- if newchar then
- local fam = pointer.fam
- local id = font_of_family(fam)
- local characters = fontcharacters[id]
- if characters and characters[newchar] then
- pointer.char = newchar
- pointer[a_exportstatus] = char
- end
- end
- end
- end
-end
-
-function handlers.render(head,style,penalties)
- processnoads(head,processors.render,"render")
- return true
-end
-
--- some resize options (this works ok because the content is
--- empty and no larger next will be forced)
---
--- beware: we don't use \delcode but \Udelcode and as such have
--- no large_fam; also, we need to check for subtype and/or
--- small_fam not being 0 because \. sits in 0,0 by default
---
--- todo: just replace the character by an ord noad
--- and remove the right delimiter as well
-
-local mathsize = attributes.private("mathsize")
-
-local resize = { } processors.resize = resize
-
-resize[math_fence] = function(pointer)
- if pointer.subtype == left_fence_code then
- local a = pointer[mathsize]
- if a and a > 0 then
- pointer[mathsize] = 0
- local d = pointer.delim
- local df = d.small_fam
- local id = font_of_family(df)
- if id > 0 then
- local ch = d.small_char
- d.small_char = mathematics.big(fontdata[id],ch,a)
- end
- end
- end
-end
-
-function handlers.resize(head,style,penalties)
- processnoads(head,resize,"resize")
- return true
-end
-
--- respacing
-
--- local mathpunctuation = attributes.private("mathpunctuation")
---
--- local respace = { } processors.respace = respace
-
--- only [nd,ll,ul][po][nd,ll,ul]
-
--- respace[math_char] = function(pointer,what,n,parent) -- not math_noad .. math_char ... and then parent
--- pointer = parent
--- if pointer and pointer.subtype == noad_ord then
--- local a = pointer[mathpunctuation]
--- if a and a > 0 then
--- pointer[mathpunctuation] = 0
--- local current_nucleus = pointer.nucleus
--- if current_nucleus.id == math_char then
--- local current_char = current_nucleus.char
--- local fc = chardata[current_char]
--- fc = fc and fc.category
--- if fc == "nd" or fc == "ll" or fc == "lu" then
--- local next_noad = pointer.next
--- if next_noad and next_noad.id == math_noad and next_noad.subtype == noad_punct then
--- local next_nucleus = next_noad.nucleus
--- if next_nucleus.id == math_char then
--- local next_char = next_nucleus.char
--- local nc = chardata[next_char]
--- nc = nc and nc.category
--- if nc == "po" then
--- local last_noad = next_noad.next
--- if last_noad and last_noad.id == math_noad and last_noad.subtype == noad_ord then
--- local last_nucleus = last_noad.nucleus
--- if last_nucleus.id == math_char then
--- local last_char = last_nucleus.char
--- local lc = chardata[last_char]
--- lc = lc and lc.category
--- if lc == "nd" or lc == "ll" or lc == "lu" then
--- local ord = new_node(math_noad) -- todo: pool
--- ord.subtype, ord.nucleus, ord.sub, ord.sup, ord.attr = noad_ord, next_noad.nucleus, next_noad.sub, next_noad.sup, next_noad.attr
--- -- next_noad.nucleus, next_noad.sub, next_noad.sup, next_noad.attr = nil, nil, nil, nil
--- next_noad.nucleus, next_noad.sub, next_noad.sup = nil, nil, nil -- else crash with attributes ref count
--- --~ next_noad.attr = nil
--- ord.next = last_noad
--- pointer.next = ord
--- free_node(next_noad)
--- end
--- end
--- end
--- end
--- end
--- end
--- end
--- end
--- end
--- end
--- end
-
--- local comma = 0x002C
--- local period = 0x002E
---
--- respace[math_char] = function(pointer,what,n,parent)
--- pointer = parent
--- if pointer and pointer.subtype == noad_punct then
--- local current_nucleus = pointer.nucleus
--- if current_nucleus.id == math_char then
--- local current_nucleus = pointer.nucleus
--- if current_nucleus.id == math_char then
--- local current_char = current_nucleus.char
--- local a = pointer[mathpunctuation]
--- if not a or a == 0 then
--- if current_char == comma then
--- -- default tex: 2,5 or 2, 5 --> 2, 5
--- elseif current_char == period then
--- -- default tex: 2.5 or 2. 5 --> 2.5
--- pointer.subtype = noad_ord
--- end
--- elseif a == 1 then
--- local next_noad = pointer.next
--- if next_noad and next_noad.id == math_noad then
--- local next_nucleus = next_noad.nucleus
--- if next_nucleus.id == math_char and next_nucleus.char == 0 then
--- nodes.remove(pointer,next_noad,true)
--- end
--- if current_char == comma then
--- -- default tex: 2,5 or 2, 5 --> 2, 5
--- elseif current_char == period then
--- -- default tex: 2.5 or 2. 5 --> 2.5
--- pointer.subtype = noad_ord
--- end
--- end
--- elseif a == 2 then
--- if current_char == comma or current_char == period then
--- local next_noad = pointer.next
--- if next_noad and next_noad.id == math_noad then
--- local next_nucleus = next_noad.nucleus
--- if next_nucleus.id == math_char and next_nucleus.char == 0 then
--- if current_char == comma then
--- -- adaptive: 2, 5 --> 2, 5
--- elseif current_char == period then
--- -- adaptive: 2. 5 --> 2. 5
--- end
--- nodes.remove(pointer,next_noad,true)
--- else
--- if current_char == comma then
--- -- adaptive: 2,5 --> 2,5
--- pointer.subtype = noad_ord
--- elseif current_char == period then
--- -- adaptive: 2.5 --> 2.5
--- pointer.subtype = noad_ord
--- end
--- end
--- end
--- end
--- end
--- end
--- end
--- end
--- end
---
--- function handlers.respace(head,style,penalties)
--- processnoads(head,respace,"respace")
--- return true
--- end
-
--- The following code is dedicated to Luigi Scarso who pointed me
--- to the fact that \not= is not producing valid pdf-a code.
--- The code does not solve this for virtual characters but it does
--- a decent job on collapsing so that fonts that have the right
--- glyph will have a decent unicode point. In the meantime this code
--- has been moved elsewhere.
-
-local collapse = { } processors.collapse = collapse
-
-local mathpairs = characters.mathpairs
-
-mathpairs[0x2032] = { [0x2032] = 0x2033, [0x2033] = 0x2034 } -- (prime,prime) (prime,doubleprime)
-mathpairs[0x2033] = { [0x2032] = 0x2034 } -- (doubleprime,prime)
-
-mathpairs[0x222B] = { [0x222B] = 0x222C, [0x222C] = 0x222D }
-mathpairs[0x222C] = { [0x222B] = 0x222D }
-
-mathpairs[0x007C] = { [0x007C] = 0x2016 } -- double bars
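-
--- so a run of two primes (U+2032 U+2032) collapses into U+2033 and, recursively, a
--- third prime turns that into U+2034; the scripts of the absorbed noad are carried
--- over to the surviving one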
-
-local validpair = {
- [noad_rel] = true,
- [noad_ord] = true,
- [noad_opdisplaylimits] = true,
- [noad_oplimits] = true,
- [noad_opnolimits] = true,
-}
-
-local function collapsepair(pointer,what,n,parent) -- todo: switch to turn in on and off
- if parent then
- if validpair[parent.subtype] then
- local current_nucleus = parent.nucleus
- if not parent.sub and not parent.sup and current_nucleus.id == math_char then
- local current_char = current_nucleus.char
- local mathpair = mathpairs[current_char]
- if mathpair then
- local next_noad = parent.next
- if next_noad and next_noad.id == math_noad then
- if validpair[next_noad.subtype] then
- local next_nucleus = next_noad.nucleus
- if next_nucleus.id == math_char then
- local next_char = next_nucleus.char
- local newchar = mathpair[next_char]
- if newchar then
- local fam = current_nucleus.fam
- local id = font_of_family(fam)
- local characters = fontcharacters[id]
- if characters and characters[newchar] then
- if trace_collapsing then
- report_collapsing("%U + %U => %U",current_char,next_char,newchar)
- end
- current_nucleus.char = newchar
- local next_next_noad = next_noad.next
- if next_next_noad then
- parent.next = next_next_noad
- next_next_noad.prev = parent
- else
- parent.next = nil
- end
- parent.sup = next_noad.sup
- parent.sub = next_noad.sub
- next_noad.sup = nil
- next_noad.sub = nil
- free_node(next_noad)
- collapsepair(pointer,what,n,parent)
- end
- end
- end
- end
- end
- end
- end
- end
- end
-end
-
-collapse[math_char] = collapsepair
-
-function noads.handlers.collapse(head,style,penalties)
- processnoads(head,collapse,"collapse")
- return true
-end
-
--- normalize scripts
-
-local unscript = { } noads.processors.unscript = unscript
-
-local superscripts = characters.superscripts
-local subscripts = characters.subscripts
-
-local replaced = { }
-
-local function replace(pointer,what,n,parent)
- pointer = parent -- we're following the parent list (chars trigger this)
- local next = pointer.next
- local start_super, stop_super, start_sub, stop_sub
- local mode = "unset"
- while next and next.id == math_noad do
- local nextnucleus = next.nucleus
- if nextnucleus and nextnucleus.id == math_char and not next.sub and not next.sup then
- local char = nextnucleus.char
- local s = superscripts[char]
- if s then
- if not start_super then
- start_super = next
- mode = "super"
- elseif mode == "sub" then
- break
- end
- stop_super = next
- next = next.next
- nextnucleus.char = s
- replaced[char] = (replaced[char] or 0) + 1
- if trace_normalizing then
- report_normalizing("superscript %C becomes %C",char,s)
- end
- else
- local s = subscripts[char]
- if s then
- if not start_sub then
- start_sub = next
- mode = "sub"
- elseif mode == "super" then
- break
- end
- stop_sub = next
- next = next.next
- nextnucleus.char = s
- replaced[char] = (replaced[char] or 0) + 1
- if trace_normalizing then
- report_normalizing("subscript %C becomes %C",char,s)
- end
- else
- break
- end
- end
- else
- break
- end
- end
- if start_super then
- if start_super == stop_super then
- pointer.sup = start_super.nucleus
- else
- local list = new_node(math_sub) -- todo attr
- list.head = start_super
- pointer.sup = list
- end
- if mode == "super" then
- pointer.next = stop_super.next
- end
- stop_super.next = nil
- end
- if start_sub then
- if start_sub == stop_sub then
- pointer.sub = start_sub.nucleus
- else
- local list = new_node(math_sub) -- todo attr
- list.head = start_sub
- pointer.sub = list
- end
- if mode == "sub" then
- pointer.next = stop_sub.next
- end
- stop_sub.next = nil
- end
- -- we could return stop
-end
-
-unscript[math_char] = replace -- not noads as we need to recurse
-
-function handlers.unscript(head,style,penalties)
- processnoads(head,unscript,"unscript")
- return true
-end
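-
--- so an x followed by a literal superscript two (U+00B2) ends up as x with a regular
--- superscript digit 2, the characters.superscripts table providing the base character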
-
-statistics.register("math script replacements", function()
- if next(replaced) then
- local n, t = 0, { }
- for k, v in table.sortedpairs(replaced) do
- n = n + v
- t[#t+1] = formatters["%C"](k)
- end
- return formatters["% t (n=%s)"](t,n)
- end
-end)
-
--- math alternates: (in xits lgf: $ABC$ $\cal ABC$ $\mathalternate{cal}\cal ABC$)
--- math alternates: (in lucidanova lgf: $ABC \mathalternate{italic} ABC$)
-
--- todo: set alternate for specific symbols
-
-local function initializemathalternates(tfmdata)
- local goodies = tfmdata.goodies
- if goodies then
- local shared = tfmdata.shared
- for i=1,#goodies do
- -- first one counts
- -- we can consider sharing the attributes ... todo (only once scan)
- local mathgoodies = goodies[i].mathematics
- local alternates = mathgoodies and mathgoodies.alternates
- if alternates then
- if trace_goodies then
- report_goodies("loading alternates for font %a",tfmdata.properties.name)
- end
- local lastattribute, attributes = 0, { }
- for k, v in next, alternates do
- lastattribute = lastattribute + 1
- v.attribute = lastattribute
- attributes[lastattribute] = v
- end
- shared.mathalternates = alternates -- to be checked if shared is ok here
- shared.mathalternatesattributes = attributes -- to be checked if shared is ok here
- return
- end
- end
- end
-end
-
-registerotffeature {
- name = "mathalternates",
- description = "additional math alternative shapes",
- initializers = {
- base = initializemathalternates,
- node = initializemathalternates,
- }
-}
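-
--- sketch of a goodie fragment feeding this feature (the feature/value pair is made
--- up for illustration):
---
--- mathematics = { alternates = { cal = { feature = 'ss01', value = 1 } } }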
-
-local getalternate = otf.getalternate
-
-local a_mathalternate = attributes.private("mathalternate")
-
-local alternate = { } -- processors.alternate = alternate
-
-function mathematics.setalternate(fam,tag)
- local id = font_of_family(fam)
- local tfmdata = fontdata[id]
- local mathalternates = tfmdata.shared and tfmdata.shared.mathalternates
- if mathalternates then
- local m = mathalternates[tag]
- tex.attribute[a_mathalternate] = m and m.attribute or unsetvalue
- end
-end
-
-alternate[math_char] = function(pointer)
- local a = pointer[a_mathalternate]
- if a and a > 0 then
- pointer[a_mathalternate] = 0
- local tfmdata = fontdata[font_of_family(pointer.fam)] -- we can also have a famdata
- local mathalternatesattributes = tfmdata.shared.mathalternatesattributes
- if mathalternatesattributes then
- local what = mathalternatesattributes[a]
- local alt = getalternate(tfmdata,pointer.char,what.feature,what.value)
- if alt then
- if trace_alternates then
- report_alternates("alternate %a, value %a, replacing glyph %U by glyph %U",
- tostring(what.feature),tostring(what.value),pointer.char,alt)
- end
- pointer.char = alt
- end
- end
- end
-end
-
-function handlers.check(head,style,penalties)
- processnoads(head,alternate,"check")
- return true
-end
-
--- italics: we assume that only characters matter
---
--- = we check for correction first because accessing nodes is slower
--- = the actual glyph is not that important (we can control it with numbers)
-
-local a_mathitalics = attributes.private("mathitalics")
-
-local italics = { }
-local default_factor = 1/20
-
-local function getcorrection(method,font,char) -- -- or character.italic -- (this one is for tex)
-
- local correction, fromvisual
-
- if method == 1 then
- -- only font data triggered by fontitalics
- local italics = fontitalics[font]
- if italics then
- local character = fontcharacters[font][char]
- if character then
- correction = character.italic_correction
- if correction and correction ~= 0 then
- return correction, false
- end
- end
- end
- elseif method == 2 then
- -- only font data triggered by fontdata
- local character = fontcharacters[font][char]
- if character then
- correction = character.italic_correction
- if correction and correction ~= 0 then
- return correction, false
- end
- end
- elseif method == 3 then
- -- only quad based by selective
- local visual = chardata[char].visual
- if not visual then
- -- skip
- elseif visual == "it" or visual == "bi" then
- correction = fontproperties[font].mathitalic_defaultvalue or default_factor*fontemwidths[font]
- if correction and correction ~= 0 then
- return correction, true
- end
- end
- elseif method == 4 then
- -- combination of 1 and 3
- local italics = fontitalics[font]
- if italics then
- local character = fontcharacters[font][char]
- if character then
- correction = character.italic_correction
- if correction and correction ~= 0 then
- return correction, false
- end
- end
- end
- if not correction then
- local visual = chardata[char].visual
- if not visual then
- -- skip
- elseif visual == "it" or visual == "bi" then
- correction = fontproperties[font].mathitalic_defaultvalue or default_factor*fontemwidths[font]
- if correction and correction ~= 0 then
- return correction, true
- end
- end
- end
- end
-
-end
-
-local function insert_kern(current,kern)
- local sub = new_node(math_sub) -- todo: pool
- local noad = new_node(math_noad) -- todo: pool
- sub.head = kern
- kern.next = noad
- noad.nucleus = current
- return sub
-end
-
-local setcolor = nodes.tracers.colors.set
-local italic_kern = new_kern
-local c_positive_d = "trace:db"
-local c_negative_d = "trace:dr"
-
-trackers.register("math.italics", function(v)
- if v then
- italic_kern = function(k,font)
- local ex = 1.5 * fontexheights[font]
- if k > 0 then
- return setcolor(new_rule(k,ex,ex),c_positive_d)
- else
- return concat_nodes {
- new_kern(k),
- setcolor(new_rule(-k,ex,ex),c_negative_d),
- new_kern(k),
- }
- end
- end
- else
- italic_kern = new_kern
- end
-end)
-
-italics[math_char] = function(pointer,what,n,parent)
- local method = pointer[a_mathitalics]
- if method and method > 0 then
- local char = pointer.char
- local font = font_of_family(pointer.fam) -- todo: table
- local correction, visual = getcorrection(method,font,char)
- if correction then
- local pid = parent.id
- local sub, sup
- if pid == math_noad then
- sup = parent.sup
- sub = parent.sub
- end
- if sup or sub then
- local subtype = parent.subtype
- if subtype == noad_oplimits then
- if sup then
- parent.sup = insert_kern(sup,italic_kern(correction,font))
- if trace_italics then
- report_italics("method %a, adding %p italic correction for upper limit of %C",method,correction,char)
- end
- end
- if sub then
- local correction = - correction
- parent.sub = insert_kern(sub,italic_kern(correction,font))
- if trace_italics then
- report_italics("method %a, adding %p italic correction for lower limit of %C",method,correction,char)
- end
- end
- else
- if sup then
- parent.sup = insert_kern(sup,italic_kern(correction,font))
- if trace_italics then
- report_italics("method %a, adding %p italic correction before superscript after %C",method,correction,char)
- end
- end
- end
- else
- local next_noad = parent.next
- if not next_noad then
- if n== 1 then -- only at the outer level .. will become an option (always,endonly,none)
- if trace_italics then
- report_italics("method %a, adding %p italic correction between %C and end math",method,correctio,char)
- end
- insert_node_after(parent,parent,italic_kern(correction,font))
- end
- elseif next_noad.id == math_noad then
- local next_subtype = next_noad.subtype
- if next_subtype == noad_punct or next_subtype == noad_ord then
- local next_nucleus = next_noad.nucleus
- if next_nucleus.id == math_char then
- local next_char = next_nucleus.char
- local next_data = chardata[next_char]
- local visual = next_data.visual
- if visual == "it" or visual == "bi" then
- -- if trace_italics then
- -- report_italics("method %a, skipping %p italic correction between italic %C and italic %C",method,correction,char,next_char)
- -- end
- else
- local category = next_data.category
- if category == "nd" or category == "ll" or category == "lu" then
- if trace_italics then
- report_italics("method %a, adding %p italic correction between italic %C and non italic %C",method,correction,char,next_char)
- end
- insert_node_after(parent,parent,italic_kern(correction,font))
- -- elseif next_data.height > (fontexheights[font]/2) then
- -- if trace_italics then
- -- report_italics("method %a, adding %p italic correction between %C and ascending %C",method,correction,char,next_char)
- -- end
- -- insert_node_after(parent,parent,italic_kern(correction,font))
- -- elseif trace_italics then
- -- -- report_italics("method %a, skipping %p italic correction between %C and %C",method,correction,char,next_char)
- end
- end
- end
- end
- end
- end
- end
- end
-end
-
-function handlers.italics(head,style,penalties)
- processnoads(head,italics,"italics")
- return true
-end
-
-local enable
-
-enable = function()
- tasks.enableaction("math", "noads.handlers.italics")
- if trace_italics then
- report_italics("enabling math italics")
- end
- enable = false
-end
-
--- best do this only on math mode (less overhead)
-
-function mathematics.setitalics(n)
- if enable then
- enable()
- end
- if n == variables.reset then
- texattribute[a_mathitalics] = unsetvalue
- else
- texattribute[a_mathitalics] = tonumber(n) or unsetvalue
- end
-end
-
-function mathematics.resetitalics()
- texattribute[a_mathitalics] = unsetvalue
-end
-
--- variants
-
-local variants = { }
-
-local validvariants = { -- fast check on valid
- [0x2229] = 0xFE00, [0x222A] = 0xFE00,
- [0x2268] = 0xFE00, [0x2269] = 0xFE00,
- [0x2272] = 0xFE00, [0x2273] = 0xFE00,
- [0x228A] = 0xFE00, [0x228B] = 0xFE00,
- [0x2293] = 0xFE00, [0x2294] = 0xFE00,
- [0x2295] = 0xFE00,
- [0x2297] = 0xFE00,
- [0x229C] = 0xFE00,
- [0x22DA] = 0xFE00, [0x22DB] = 0xFE00,
- [0x2A3C] = 0xFE00, [0x2A3D] = 0xFE00,
- [0x2A9D] = 0xFE00, [0x2A9E] = 0xFE00,
- [0x2AAC] = 0xFE00, [0x2AAD] = 0xFE00,
- [0x2ACB] = 0xFE00, [0x2ACC] = 0xFE00,
-}
-
-variants[math_char] = function(pointer,what,n,parent) -- also set export value
- local char = pointer.char
- local selector = validvariants[char]
- if selector then
- local next = parent.next
- if next and next.id == math_noad then
- local nucleus = next.nucleus
- if nucleus and nucleus.id == math_char and nucleus.char == selector then
- local variant
- local tfmdata = fontdata[font_of_family(pointer.fam)] -- we can also have a famdata
- local mathvariants = tfmdata.resources.variants -- and variantdata
- if mathvariants then
- mathvariants = mathvariants[selector]
- if mathvariants then
- variant = mathvariants[char]
- end
- end
- if variant then
- pointer.char = variant
- pointer[a_exportstatus] = char -- we don't export the variant as it's visual markup
- if trace_variants then
- report_variants("variant (%U,%U) replaced by %U",char,selector,variant)
- end
- else
- if trace_variants then
- report_variants("no variant (%U,%U)",char,selector)
- end
- end
- next.prev = pointer
- parent.next = next.next
- free_node(next)
- end
- end
- end
-end
-
-function handlers.variants(head,style,penalties)
- processnoads(head,variants,"unicode variant")
- return true
-end
-
--- the normal builder
-
-function builders.kernel.mlist_to_hlist(head,style,penalties)
- return mlist_to_hlist(head,style,penalties), true
-end
-
--- function builders.kernel.mlist_to_hlist(head,style,penalties)
--- print("!!!!!!! BEFORE",penalties)
--- for n in node.traverse(head) do print(n) end
--- print("!!!!!!!")
--- head = mlist_to_hlist(head,style,penalties)
--- print("!!!!!!! AFTER")
--- for n in node.traverse(head) do print(n) end
--- print("!!!!!!!")
--- return head, true
--- end
-
-tasks.new {
- name = "math",
- arguments = 2,
- processor = utilities.sequencers.nodeprocessor,
- sequence = {
- "before",
- "normalizers",
- "builders",
- "after",
- },
-}
-
-tasks.freezegroup("math", "normalizers") -- experimental
-tasks.freezegroup("math", "builders") -- experimental
-
-local actions = tasks.actions("math") -- head, style, penalties
-
-local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming
-
-function processors.mlist_to_hlist(head,style,penalties)
- starttiming(noads)
- local head, done = actions(head,style,penalties)
- stoptiming(noads)
- return head, done
-end
-
-callbacks.register('mlist_to_hlist',processors.mlist_to_hlist,"preprocessing math list")
-
--- tracing
-
-statistics.register("math processing time", function()
- return statistics.elapsedseconds(noads)
-end)
-
--- interface
-
-commands.setmathalternate = mathematics.setalternate
-commands.setmathitalics = mathematics.setitalics
-commands.resetmathitalics = mathematics.resetitalics
+if not modules then modules = { } end modules ['math-noa'] = {
+ version = 1.001,
+ comment = "companion to math-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- beware: this is experimental code and there will be a more
+-- generic (attribute value driven) interface too but for the
+-- moment this is ok
+--
+-- we will also make dedicated processors (faster)
+--
+-- beware: names will change as we will make noads.xxx.handler i.e. xxx
+-- subnamespaces
+
+-- 20D6 -> 2190
+-- 20D7 -> 2192
+
+local utfchar, utfbyte = utf.char, utf.byte
+local formatters = string.formatters
+
+local fonts, nodes, node, mathematics = fonts, nodes, node, mathematics
+
+local otf = fonts.handlers.otf
+local otffeatures = fonts.constructors.newfeatures("otf")
+local registerotffeature = otffeatures.register
+
+local trace_remapping = false trackers.register("math.remapping", function(v) trace_remapping = v end)
+local trace_processing = false trackers.register("math.processing", function(v) trace_processing = v end)
+local trace_analyzing = false trackers.register("math.analyzing", function(v) trace_analyzing = v end)
+local trace_normalizing = false trackers.register("math.normalizing", function(v) trace_normalizing = v end)
+local trace_collapsing = false trackers.register("math.collapsing", function(v) trace_collapsing = v end)
+local trace_goodies = false trackers.register("math.goodies", function(v) trace_goodies = v end)
+local trace_variants = false trackers.register("math.variants", function(v) trace_variants = v end)
+local trace_alternates = false trackers.register("math.alternates", function(v) trace_alternates = v end)
+local trace_italics = false trackers.register("math.italics", function(v) trace_italics = v end)
+local trace_families = false trackers.register("math.families", function(v) trace_families = v end)
+
+local check_coverage = true directives.register("math.checkcoverage", function(v) check_coverage = v end)
+
+local report_processing = logs.reporter("mathematics","processing")
+local report_remapping = logs.reporter("mathematics","remapping")
+local report_normalizing = logs.reporter("mathematics","normalizing")
+local report_collapsing = logs.reporter("mathematics","collapsing")
+local report_goodies = logs.reporter("mathematics","goodies")
+local report_variants = logs.reporter("mathematics","variants")
+local report_alternates = logs.reporter("mathematics","alternates")
+local report_italics = logs.reporter("mathematics","italics")
+local report_families = logs.reporter("mathematics","families")
+
+local a_mathrendering = attributes.private("mathrendering")
+local a_exportstatus = attributes.private("exportstatus")
+
+local mlist_to_hlist = node.mlist_to_hlist
+local font_of_family = node.family_font
+local insert_node_after = node.insert_after
+local insert_node_before = node.insert_before
+local free_node = node.free
+local new_node = node.new -- todo: pool: math_noad math_sub
+
+local new_kern = nodes.pool.kern
+local new_rule = nodes.pool.rule
+local concat_nodes = nodes.concat
+
+local topoints = number.points
+
+local fonthashes = fonts.hashes
+local fontdata = fonthashes.identifiers
+local fontcharacters = fonthashes.characters
+local fontproperties = fonthashes.properties
+local fontitalics = fonthashes.italics
+local fontemwidths = fonthashes.emwidths
+local fontexheights = fonthashes.exheights
+
+local variables = interfaces.variables
+local texattribute = tex.attribute
+local unsetvalue = attributes.unsetvalue
+
+local chardata = characters.data
+
+noads = noads or { } -- todo: only here
+local noads = noads
+
+noads.processors = noads.processors or { }
+local processors = noads.processors
+
+noads.handlers = noads.handlers or { }
+local handlers = noads.handlers
+
+local tasks = nodes.tasks
+
+local nodecodes = nodes.nodecodes
+local noadcodes = nodes.noadcodes
+
+local noad_ord = noadcodes.ord
+local noad_rel = noadcodes.rel
+local noad_punct = noadcodes.punct
+local noad_opdisplaylimits= noadcodes.opdisplaylimits
+local noad_oplimits = noadcodes.oplimits
+local noad_opnolimits = noadcodes.opnolimits
+
+local math_noad = nodecodes.noad -- attr nucleus sub sup
+local math_accent = nodecodes.accent -- attr nucleus sub sup accent
+local math_radical = nodecodes.radical -- attr nucleus sub sup left degree
+local math_fraction = nodecodes.fraction -- attr nucleus sub sup left right
+local math_box = nodecodes.subbox -- attr list
+local math_sub = nodecodes.submlist -- attr list
+local math_char = nodecodes.mathchar -- attr fam char
+local math_textchar = nodecodes.mathtextchar -- attr fam char
+local math_delim = nodecodes.delim -- attr small_fam small_char large_fam large_char
+local math_style = nodecodes.style -- attr style
+local math_choice = nodecodes.choice -- attr display text script scriptscript
+local math_fence = nodecodes.fence -- attr subtype
+
+local hlist_code = nodecodes.hlist
+local glyph_code = nodecodes.glyph
+
+local left_fence_code = 1
+
+local function process(start,what,n,parent)
+ if n then n = n + 1 else n = 0 end
+ while start do
+ local id = start.id
+ if trace_processing then
+ if id == math_noad then
+ report_processing("%w%S, class %a",n*2,start,noadcodes[start.subtype])
+ elseif id == math_char then
+ local char = start.char
+ local fam = start.fam
+ local font = font_of_family(fam)
+ report_processing("%w%S, family %a, font %a, char %a, shape %c",n*2,start,fam,font,char,char)
+ else
+ report_processing("%w%S",n*2,start)
+ end
+ end
+ local proc = what[id]
+ if proc then
+ -- report_processing("start processing")
+ local done, newstart = proc(start,what,n,parent) -- prev is bugged: or start.prev
+ if newstart then
+ start = newstart
+ -- report_processing("stop processing (new start)")
+ else
+ -- report_processing("stop processing")
+ end
+ elseif id == math_char or id == math_textchar or id == math_delim then
+ break
+ elseif id == math_noad then
+ local noad = start.nucleus if noad then process(noad,what,n,start) end -- list
+ noad = start.sup if noad then process(noad,what,n,start) end -- list
+ noad = start.sub if noad then process(noad,what,n,start) end -- list
+ elseif id == math_box or id == math_sub then
+ -- local noad = start.list if noad then process(noad,what,n,start) end -- list
+ local noad = start.head if noad then process(noad,what,n,start) end -- list
+ elseif id == math_fraction then
+ local noad = start.num if noad then process(noad,what,n,start) end -- list
+ noad = start.denom if noad then process(noad,what,n,start) end -- list
+ noad = start.left if noad then process(noad,what,n,start) end -- delimiter
+ noad = start.right if noad then process(noad,what,n,start) end -- delimiter
+ elseif id == math_choice then
+ local noad = start.display if noad then process(noad,what,n,start) end -- list
+ noad = start.text if noad then process(noad,what,n,start) end -- list
+ noad = start.script if noad then process(noad,what,n,start) end -- list
+ noad = start.scriptscript if noad then process(noad,what,n,start) end -- list
+ elseif id == math_fence then
+ local noad = start.delim if noad then process(noad,what,n,start) end -- delimiter
+ elseif id == math_radical then
+ local noad = start.nucleus if noad then process(noad,what,n,start) end -- list
+ noad = start.sup if noad then process(noad,what,n,start) end -- list
+ noad = start.sub if noad then process(noad,what,n,start) end -- list
+ noad = start.left if noad then process(noad,what,n,start) end -- delimiter
+ noad = start.degree if noad then process(noad,what,n,start) end -- list
+ elseif id == math_accent then
+ local noad = start.nucleus if noad then process(noad,what,n,start) end -- list
+ noad = start.sup if noad then process(noad,what,n,start) end -- list
+ noad = start.sub if noad then process(noad,what,n,start) end -- list
+ noad = start.accent if noad then process(noad,what,n,start) end -- list
+ noad = start.bot_accent if noad then process(noad,what,n,start) end -- list
+ elseif id == math_style then
+ -- has a next
+ else
+ -- glue, penalty, etc
+ end
+ start = start.next
+ end
+end
+
+local function processnoads(head,actions,banner)
+ if trace_processing then
+ report_processing("start %a",banner)
+ process(head,actions)
+ report_processing("stop %a",banner)
+ else
+ process(head,actions)
+ end
+end
+
+noads.process = processnoads
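+
+-- A minimal sketch (not part of the module) of how such an action table is
+-- wired up: the table is keyed by node id, each entry receives the node, the
+-- table itself, the nesting depth and the parent noad, and a handler simply
+-- feeds the table to processnoads. The name "demo" and the replacement below
+-- are made up for illustration only.
+--
+-- local demo = { }
+--
+-- demo[math_char] = function(pointer,what,n,parent)
+--     if pointer.char == 0x2212 then
+--         pointer.char = 0x002D -- just an example of tweaking a math char
+--     end
+-- end
+--
+-- function handlers.demo(head,style,penalties)
+--     processnoads(head,demo,"demo")
+--     return true
+-- end
+--
+-- -- enabling could then go via something like:
+-- -- tasks.appendaction("math","normalizers","noads.handlers.demo")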
+
+-- experiment (when not present fall back to fam 0) -- needs documentation
+
+-- 0-2 regular
+-- 3-5 bold
+-- 6-8 pseudobold
+
+-- this could best be integrated in the remapper, and if we run into problems, we
+-- might as well do this
+
+local families = { }
+local a_mathfamily = attributes.private("mathfamily")
+local boldmap = mathematics.boldmap
+
+local familymap = { [0] =
+ "regular",
+ "regular",
+ "regular",
+ "bold",
+ "bold",
+ "bold",
+ "pseudobold",
+ "pseudobold",
+ "pseudobold",
+}
+
+families[math_char] = function(pointer)
+ if pointer.fam == 0 then
+ local a = pointer[a_mathfamily]
+ if a and a > 0 then
+ pointer[a_mathfamily] = 0
+ if a > 5 then
+ local char = pointer.char
+ local bold = boldmap[char]
+ local newa = a - 3
+ if bold then
+ pointer[a_exportstatus] = char
+ pointer.char = bold
+ if trace_families then
+ report_families("replacing %C by bold %C, family %s with remap %s becomes %s with remap %s",char,bold,a,familymap[a],newa,familymap[newa])
+ end
+ else
+ if trace_families then
+ report_families("no bold replacement for %C, family %s with remap %s becomes %s with remap %s",char,a,familymap[a],newa,familymap[newa])
+ end
+ end
+ pointer.fam = newa
+ else
+ if trace_families then
+ local char = pointer.char
+ report_families("family of %C becomes %s with remap %s",char,a,familymap[a])
+ end
+ pointer.fam = a
+ end
+ else
+ -- pointer.fam = 0
+ end
+ end
+end
+
+families[math_delim] = function(pointer)
+ if pointer.small_fam == 0 then
+ local a = pointer[a_mathfamily]
+ if a and a > 0 then
+ pointer[a_mathfamily] = 0
+ if a > 5 then
+ -- no bold delimiters in unicode
+ a = a - 3
+ end
+ pointer.small_fam = a
+ pointer.large_fam = a
+ else
+ pointer.small_fam = 0
+ pointer.large_fam = 0
+ end
+ end
+end
+
+families[math_textchar] = families[math_char]
+
+function handlers.families(head,style,penalties)
+ processnoads(head,families,"families")
+ return true
+end
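+
+-- A worked example (a sketch, assuming the usual boldmap entries): with the
+-- mathfamily attribute set to 7 (pseudobold) an "A" (U+0041) is replaced by
+-- mathematical bold "A" and the family drops by 3 into the bold range:
+--
+-- local a    = 7                -- pseudobold request
+-- local bold = boldmap[0x0041]  -- 0x1D400 with the default map
+-- -- pointer.char becomes bold and pointer.fam becomes a - 3 == 4 ("bold");
+-- -- characters without a boldmap entry only get the lowered family number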
+
+-- character remapping
+
+local a_mathalphabet = attributes.private("mathalphabet")
+local a_mathgreek = attributes.private("mathgreek")
+
+processors.relocate = { }
+
+local function report_remap(tag,id,old,new,extra)
+ report_remapping("remapping %s in font %s from %C to %C%s",tag,id,old,new,extra)
+end
+
+local remapalphabets = mathematics.remapalphabets
+local fallbackstyleattr = mathematics.fallbackstyleattr
+local setnodecolor = nodes.tracers.colors.set
+
+local function checked(pointer)
+ local char = pointer.char
+ local fam = pointer.fam
+ local id = font_of_family(fam)
+ local tc = fontcharacters[id]
+ if not tc[char] then
+ local specials = characters.data[char].specials
+ if specials and (specials[1] == "char" or specials[1] == "font") then
+ local newchar = specials[#specials]
+ if trace_remapping then
+ report_remap("fallback",id,char,newchar)
+ end
+ if trace_analyzing then
+ setnodecolor(pointer,"font:isol")
+ end
+ pointer[a_exportstatus] = char -- testcase: exponentiale
+ pointer.char = newchar
+ return true
+ end
+ end
+end
+
+processors.relocate[math_char] = function(pointer)
+ local g = pointer[a_mathgreek] or 0
+ local a = pointer[a_mathalphabet] or 0
+ if a > 0 or g > 0 then
+ if a > 0 then
+ pointer[a_mathgreek] = 0
+ end
+ if g > 0 then
+ pointer[a_mathalphabet] = 0
+ end
+ local char = pointer.char
+ local newchar = remapalphabets(char,a,g)
+ if newchar then
+ local fam = pointer.fam
+ local id = font_of_family(fam)
+ local characters = fontcharacters[id]
+ if characters[newchar] then
+ if trace_remapping then
+ report_remap("char",id,char,newchar)
+ end
+ if trace_analyzing then
+ setnodecolor(pointer,"font:isol")
+ end
+ pointer.char = newchar
+ return true
+ else
+ local fallback = fallbackstyleattr(a)
+ if fallback then
+ local newchar = remapalphabets(char,fallback,g)
+ if newchar then
+ if characters[newchar] then
+ if trace_remapping then
+ report_remap("char",id,char,newchar," (fallback remapping used)")
+ end
+ if trace_analyzing then
+ setnodecolor(pointer,"font:isol")
+ end
+ pointer.char = newchar
+ return true
+ elseif trace_remapping then
+ report_remap("char",id,char,newchar," fails (no fallback character)")
+ end
+ elseif trace_remapping then
+ report_remap("char",id,char,newchar," fails (no fallback remap character)")
+ end
+ elseif trace_remapping then
+ report_remap("char",id,char,newchar," fails (no fallback style)")
+ end
+ end
+ end
+ end
+ if trace_analyzing then
+ setnodecolor(pointer,"font:medi")
+ end
+ if check_coverage then
+ return checked(pointer)
+ end
+end
+
+processors.relocate[math_textchar] = function(pointer)
+ if trace_analyzing then
+ setnodecolor(pointer,"font:init")
+ end
+end
+
+processors.relocate[math_delim] = function(pointer)
+ if trace_analyzing then
+ setnodecolor(pointer,"font:fina")
+ end
+end
+
+function handlers.relocate(head,style,penalties)
+ processnoads(head,processors.relocate,"relocate")
+ return true
+end
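+
+-- For instance (a sketch, not actual module code): with the mathalphabet
+-- attribute requesting the bold alphabet, a plain "x" (U+0078) is remapped to
+-- mathematical bold "x" (U+1D431) when the target font has that slot; when it
+-- does not, the fallback style is tried and finally the coverage check above
+-- may substitute the special's base character. The name boldattribute below
+-- is hypothetical:
+--
+-- local newchar = remapalphabets(0x0078,boldattribute,0)
+-- -- newchar == 0x1D431 when the remapper knows the bold alphabet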
+
+-- rendering (beware, not exported)
+
+processors.render = { }
+
+local rendersets = mathematics.renderings.numbers or { } -- store
+
+processors.render[math_char] = function(pointer)
+ local attr = pointer[a_mathrendering]
+ if attr and attr > 0 then
+ local char = pointer.char
+ local renderset = rendersets[attr]
+ if renderset then
+ local newchar = renderset[char]
+ if newchar then
+ local fam = pointer.fam
+ local id = font_of_family(fam)
+ local characters = fontcharacters[id]
+ if characters and characters[newchar] then
+ pointer.char = newchar
+ pointer[a_exportstatus] = char
+ end
+ end
+ end
+ end
+end
+
+function handlers.render(head,style,penalties)
+ processnoads(head,processors.render,"render")
+ return true
+end
+
+-- some resize options (this works ok because the content is
+-- empty and no larger next will be forced)
+--
+-- beware: we don't use \delcode but \Udelcode and as such have
+-- no large_fam; also, we need to check for subtype and/or
+-- small_fam not being 0 because \. sits in 0,0 by default
+--
+-- todo: just replace the character by an ord noad
+-- and remove the right delimiter as well
+
+local mathsize = attributes.private("mathsize")
+
+local resize = { } processors.resize = resize
+
+resize[math_fence] = function(pointer)
+ if pointer.subtype == left_fence_code then
+ local a = pointer[mathsize]
+ if a and a > 0 then
+ pointer[mathsize] = 0
+ local d = pointer.delim
+ local df = d.small_fam
+ local id = font_of_family(df)
+ if id > 0 then
+ local ch = d.small_char
+ d.small_char = mathematics.big(fontdata[id],ch,a)
+ end
+ end
+ end
+end
+
+function handlers.resize(head,style,penalties)
+ processnoads(head,resize,"resize")
+ return true
+end
+
+-- respacing
+
+-- local mathpunctuation = attributes.private("mathpunctuation")
+--
+-- local respace = { } processors.respace = respace
+
+-- only [nd,ll,ul][po][nd,ll,ul]
+
+-- respace[math_char] = function(pointer,what,n,parent) -- not math_noad .. math_char ... and then parent
+-- pointer = parent
+-- if pointer and pointer.subtype == noad_ord then
+-- local a = pointer[mathpunctuation]
+-- if a and a > 0 then
+-- pointer[mathpunctuation] = 0
+-- local current_nucleus = pointer.nucleus
+-- if current_nucleus.id == math_char then
+-- local current_char = current_nucleus.char
+-- local fc = chardata[current_char]
+-- fc = fc and fc.category
+-- if fc == "nd" or fc == "ll" or fc == "lu" then
+-- local next_noad = pointer.next
+-- if next_noad and next_noad.id == math_noad and next_noad.subtype == noad_punct then
+-- local next_nucleus = next_noad.nucleus
+-- if next_nucleus.id == math_char then
+-- local next_char = next_nucleus.char
+-- local nc = chardata[next_char]
+-- nc = nc and nc.category
+-- if nc == "po" then
+-- local last_noad = next_noad.next
+-- if last_noad and last_noad.id == math_noad and last_noad.subtype == noad_ord then
+-- local last_nucleus = last_noad.nucleus
+-- if last_nucleus.id == math_char then
+-- local last_char = last_nucleus.char
+-- local lc = chardata[last_char]
+-- lc = lc and lc.category
+-- if lc == "nd" or lc == "ll" or lc == "lu" then
+-- local ord = new_node(math_noad) -- todo: pool
+-- ord.subtype, ord.nucleus, ord.sub, ord.sup, ord.attr = noad_ord, next_noad.nucleus, next_noad.sub, next_noad.sup, next_noad.attr
+-- -- next_noad.nucleus, next_noad.sub, next_noad.sup, next_noad.attr = nil, nil, nil, nil
+-- next_noad.nucleus, next_noad.sub, next_noad.sup = nil, nil, nil -- else crash with attributes ref count
+-- --~ next_noad.attr = nil
+-- ord.next = last_noad
+-- pointer.next = ord
+-- free_node(next_noad)
+-- end
+-- end
+-- end
+-- end
+-- end
+-- end
+-- end
+-- end
+-- end
+-- end
+-- end
+
+-- local comma = 0x002C
+-- local period = 0x002E
+--
+-- respace[math_char] = function(pointer,what,n,parent)
+-- pointer = parent
+-- if pointer and pointer.subtype == noad_punct then
+-- local current_nucleus = pointer.nucleus
+-- if current_nucleus.id == math_char then
+-- local current_nucleus = pointer.nucleus
+-- if current_nucleus.id == math_char then
+-- local current_char = current_nucleus.char
+-- local a = pointer[mathpunctuation]
+-- if not a or a == 0 then
+-- if current_char == comma then
+-- -- default tex: 2,5 or 2, 5 --> 2, 5
+-- elseif current_char == period then
+-- -- default tex: 2.5 or 2. 5 --> 2.5
+-- pointer.subtype = noad_ord
+-- end
+-- elseif a == 1 then
+-- local next_noad = pointer.next
+-- if next_noad and next_noad.id == math_noad then
+-- local next_nucleus = next_noad.nucleus
+-- if next_nucleus.id == math_char and next_nucleus.char == 0 then
+-- nodes.remove(pointer,next_noad,true)
+-- end
+-- if current_char == comma then
+-- -- default tex: 2,5 or 2, 5 --> 2, 5
+-- elseif current_char == period then
+-- -- default tex: 2.5 or 2. 5 --> 2.5
+-- pointer.subtype = noad_ord
+-- end
+-- end
+-- elseif a == 2 then
+-- if current_char == comma or current_char == period then
+-- local next_noad = pointer.next
+-- if next_noad and next_noad.id == math_noad then
+-- local next_nucleus = next_noad.nucleus
+-- if next_nucleus.id == math_char and next_nucleus.char == 0 then
+-- if current_char == comma then
+-- -- adaptive: 2, 5 --> 2, 5
+-- elseif current_char == period then
+-- -- adaptive: 2. 5 --> 2. 5
+-- end
+-- nodes.remove(pointer,next_noad,true)
+-- else
+-- if current_char == comma then
+-- -- adaptive: 2,5 --> 2,5
+-- pointer.subtype = noad_ord
+-- elseif current_char == period then
+-- -- adaptive: 2.5 --> 2.5
+-- pointer.subtype = noad_ord
+-- end
+-- end
+-- end
+-- end
+-- end
+-- end
+-- end
+-- end
+-- end
+--
+-- function handlers.respace(head,style,penalties)
+-- processnoads(head,respace,"respace")
+-- return true
+-- end
+
+-- The following code is dedicated to Luigi Scarso who pointed me
+-- to the fact that \not= is not producing valid pdf-a code.
+-- The code does not solve this for virtual characters but it does
+-- a decent job on collapsing so that fonts that have the right
+-- glyph will have a decent unicode point. In the meantime this code
+-- has been moved elsewhere.
+
+local collapse = { } processors.collapse = collapse
+
+local mathpairs = characters.mathpairs
+
+mathpairs[0x2032] = { [0x2032] = 0x2033, [0x2033] = 0x2034 } -- (prime,prime) (prime,doubleprime)
+mathpairs[0x2033] = { [0x2032] = 0x2034 } -- (doubleprime,prime)
+
+mathpairs[0x222B] = { [0x222B] = 0x222C, [0x222C] = 0x222D }
+mathpairs[0x222C] = { [0x222B] = 0x222D }
+
+mathpairs[0x007C] = { [0x007C] = 0x2016 } -- double bars
+
+local validpair = {
+ [noad_rel] = true,
+ [noad_ord] = true,
+ [noad_opdisplaylimits] = true,
+ [noad_oplimits] = true,
+ [noad_opnolimits] = true,
+}
+
+local function collapsepair(pointer,what,n,parent) -- todo: switch to turn in on and off
+ if parent then
+ if validpair[parent.subtype] then
+ local current_nucleus = parent.nucleus
+ if not parent.sub and not parent.sup and current_nucleus.id == math_char then
+ local current_char = current_nucleus.char
+ local mathpair = mathpairs[current_char]
+ if mathpair then
+ local next_noad = parent.next
+ if next_noad and next_noad.id == math_noad then
+ if validpair[next_noad.subtype] then
+ local next_nucleus = next_noad.nucleus
+ if next_nucleus.id == math_char then
+ local next_char = next_nucleus.char
+ local newchar = mathpair[next_char]
+ if newchar then
+ local fam = current_nucleus.fam
+ local id = font_of_family(fam)
+ local characters = fontcharacters[id]
+ if characters and characters[newchar] then
+ if trace_collapsing then
+ report_collapsing("%U + %U => %U",current_char,next_char,newchar)
+ end
+ current_nucleus.char = newchar
+ local next_next_noad = next_noad.next
+ if next_next_noad then
+ parent.next = next_next_noad
+ next_next_noad.prev = parent
+ else
+ parent.next = nil
+ end
+ parent.sup = next_noad.sup
+ parent.sub = next_noad.sub
+ next_noad.sup = nil
+ next_noad.sub = nil
+ free_node(next_noad)
+ collapsepair(pointer,what,n,parent)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
+collapse[math_char] = collapsepair
+
+function noads.handlers.collapse(head,style,penalties)
+ processnoads(head,collapse,"collapse")
+ return true
+end
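+
+-- Example of what the collapser does (a sketch): two adjacent prime noads
+-- (U+2032 U+2032) become one double prime noad (U+2033), provided the font
+-- has the combined character and the first noad carries no scripts; the
+-- scripts of the removed noad are moved to the surviving one. The mathpairs
+-- table drives this, so a hypothetical extra entry like
+--
+-- mathpairs[0x003C] = { [0x003C] = 0x226A }
+--
+-- would collapse "<<" into U+226A (assuming the noad subtypes qualify and the
+-- font provides the character).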
+
+-- normalize scripts
+
+local unscript = { } noads.processors.unscript = unscript
+
+local superscripts = characters.superscripts
+local subscripts = characters.subscripts
+
+local replaced = { }
+
+local function replace(pointer,what,n,parent)
+ pointer = parent -- we're following the parent list (chars trigger this)
+ local next = pointer.next
+ local start_super, stop_super, start_sub, stop_sub
+ local mode = "unset"
+ while next and next.id == math_noad do
+ local nextnucleus = next.nucleus
+ if nextnucleus and nextnucleus.id == math_char and not next.sub and not next.sup then
+ local char = nextnucleus.char
+ local s = superscripts[char]
+ if s then
+ if not start_super then
+ start_super = next
+ mode = "super"
+ elseif mode == "sub" then
+ break
+ end
+ stop_super = next
+ next = next.next
+ nextnucleus.char = s
+ replaced[char] = (replaced[char] or 0) + 1
+ if trace_normalizing then
+ report_normalizing("superscript %C becomes %C",char,s)
+ end
+ else
+ local s = subscripts[char]
+ if s then
+ if not start_sub then
+ start_sub = next
+ mode = "sub"
+ elseif mode == "super" then
+ break
+ end
+ stop_sub = next
+ next = next.next
+ nextnucleus.char = s
+ replaced[char] = (replaced[char] or 0) + 1
+ if trace_normalizing then
+ report_normalizing("subscript %C becomes %C",char,s)
+ end
+ else
+ break
+ end
+ end
+ else
+ break
+ end
+ end
+ if start_super then
+ if start_super == stop_super then
+ pointer.sup = start_super.nucleus
+ else
+ local list = new_node(math_sub) -- todo attr
+ list.head = start_super
+ pointer.sup = list
+ end
+ if mode == "super" then
+ pointer.next = stop_super.next
+ end
+ stop_super.next = nil
+ end
+ if start_sub then
+ if start_sub == stop_sub then
+ pointer.sub = start_sub.nucleus
+ else
+ local list = new_node(math_sub) -- todo attr
+ list.head = start_sub
+ pointer.sub = list
+ end
+ if mode == "sub" then
+ pointer.next = stop_sub.next
+ end
+ stop_sub.next = nil
+ end
+ -- we could return stop
+end
+
+unscript[math_char] = replace -- not noads as we need to recurse
+
+function handlers.unscript(head,style,penalties)
+ processnoads(head,unscript,"unscript")
+ return true
+end
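+
+-- Example of the normalization (a sketch): a run of superscript characters
+-- after a base is turned into a real superscript, so "x" followed by U+00B2
+-- and U+00B3 ends up as x^{23}; subscript characters end up in the sub field
+-- the same way, and a run stops at the first change of direction. This relies
+-- on the character data, where presumably superscripts[0x00B2] == 0x0032 and
+-- superscripts[0x00B3] == 0x0033.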
+
+statistics.register("math script replacements", function()
+ if next(replaced) then
+ local n, t = 0, { }
+ for k, v in table.sortedpairs(replaced) do
+ n = n + v
+ t[#t+1] = formatters["%C"](k)
+ end
+ return formatters["% t (n=%s)"](t,n)
+ end
+end)
+
+-- math alternates: (in xits lfg: $ABC$ $\cal ABC$ $\mathalternate{cal}\cal ABC$)
+-- math alternates: (in lucidanova lfg: $ABC \mathalternate{italic} ABC$)
+
+-- todo: set alternate for specific symbols
+
+local function initializemathalternates(tfmdata)
+ local goodies = tfmdata.goodies
+ if goodies then
+ local shared = tfmdata.shared
+ for i=1,#goodies do
+ -- first one counts
+ -- we can consider sharing the attributes ... todo (only once scan)
+ local mathgoodies = goodies[i].mathematics
+ local alternates = mathgoodies and mathgoodies.alternates
+ if alternates then
+ if trace_goodies then
+ report_goodies("loading alternates for font %a",tfmdata.properties.name)
+ end
+ local lastattribute, attributes = 0, { }
+ for k, v in next, alternates do
+ lastattribute = lastattribute + 1
+ v.attribute = lastattribute
+ attributes[lastattribute] = v
+ end
+ shared.mathalternates = alternates -- to be checked if shared is ok here
+ shared.mathalternatesattributes = attributes -- to be checked if shared is ok here
+ return
+ end
+ end
+ end
+end
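+
+-- The alternates come from a goodies (lfg) file; a minimal sketch of such a
+-- specification (the name and values below are illustrative, not taken from
+-- an actual goodie file):
+--
+-- return {
+--     name        = "demo",
+--     mathematics = {
+--         alternates = {
+--             cal = { feature = 'ss01', value = 1, comment = "calligraphic alphabet" },
+--         },
+--     },
+-- }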
+
+registerotffeature {
+ name = "mathalternates",
+ description = "additional math alternative shapes",
+ initializers = {
+ base = initializemathalternates,
+ node = initializemathalternates,
+ }
+}
+
+local getalternate = otf.getalternate
+
+local a_mathalternate = attributes.private("mathalternate")
+
+local alternate = { } -- processors.alternate = alternate
+
+function mathematics.setalternate(fam,tag)
+ local id = font_of_family(fam)
+ local tfmdata = fontdata[id]
+ local mathalternates = tfmdata.shared and tfmdata.shared.mathalternates
+ if mathalternates then
+ local m = mathalternates[tag]
+ tex.attribute[a_mathalternate] = m and m.attribute or unsetvalue
+ end
+end
+
+alternate[math_char] = function(pointer)
+ local a = pointer[a_mathalternate]
+ if a and a > 0 then
+ pointer[a_mathalternate] = 0
+ local tfmdata = fontdata[font_of_family(pointer.fam)] -- we can also have a famdata
+ local mathalternatesattributes = tfmdata.shared.mathalternatesattributes
+ if mathalternatesattributes then
+ local what = mathalternatesattributes[a]
+ local alt = getalternate(tfmdata,pointer.char,what.feature,what.value)
+ if alt then
+ if trace_alternates then
+ report_alternates("alternate %a, value %a, replacing glyph %U by glyph %U",
+ tostring(what.feature),tostring(what.value),pointer.char,alt)
+ end
+ pointer.char = alt
+ end
+ end
+ end
+end
+
+function handlers.check(head,style,penalties)
+ processnoads(head,alternate,"check")
+ return true
+end
+
+-- italics: we assume that only characters matter
+--
+-- = we check for correction first because accessing nodes is slower
+-- = the actual glyph is not that important (we can control it with numbers)
+
+local a_mathitalics = attributes.private("mathitalics")
+
+local italics = { }
+local default_factor = 1/20
+
+local function getcorrection(method,font,char) -- or character.italic (this one is for tex)
+
+ local correction, fromvisual
+
+ if method == 1 then
+ -- only font data triggered by fontitalics
+ local italics = fontitalics[font]
+ if italics then
+ local character = fontcharacters[font][char]
+ if character then
+ correction = character.italic_correction
+ if correction and correction ~= 0 then
+ return correction, false
+ end
+ end
+ end
+ elseif method == 2 then
+ -- only font data triggered by fontdata
+ local character = fontcharacters[font][char]
+ if character then
+ correction = character.italic_correction
+ if correction and correction ~= 0 then
+ return correction, false
+ end
+ end
+ elseif method == 3 then
+ -- only quad based by selective
+ local visual = chardata[char].visual
+ if not visual then
+ -- skip
+ elseif visual == "it" or visual == "bi" then
+ correction = fontproperties[font].mathitalic_defaultvalue or default_factor*fontemwidths[font]
+ if correction and correction ~= 0 then
+ return correction, true
+ end
+ end
+ elseif method == 4 then
+ -- combination of 1 and 3
+ local italics = fontitalics[font]
+ if italics then
+ local character = fontcharacters[font][char]
+ if character then
+ correction = character.italic_correction
+ if correction and correction ~= 0 then
+ return correction, false
+ end
+ end
+ end
+ if not correction then
+ local visual = chardata[char].visual
+ if not visual then
+ -- skip
+ elseif visual == "it" or visual == "bi" then
+ correction = fontproperties[font].mathitalic_defaultvalue or default_factor*fontemwidths[font]
+ if correction and correction ~= 0 then
+ return correction, true
+ end
+ end
+ end
+ end
+
+end
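+
+-- Summary of the methods above: 1 uses the italic correction from the font
+-- but only when the font registered italics, 2 always uses the font data,
+-- 3 ignores the font and takes a fraction of the em width for characters that
+-- look italic, and 4 tries 1 first and falls back on 3. A hypothetical quick
+-- check could look like this:
+--
+-- local correction, fromvisual = getcorrection(4,font.current(),utfbyte("f"))
+-- -- correction is nil, a value from the font (fromvisual == false) or a quad
+-- -- based guess (fromvisual == true)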
+
+local function insert_kern(current,kern)
+ local sub = new_node(math_sub) -- todo: pool
+ local noad = new_node(math_noad) -- todo: pool
+ sub.head = kern
+ kern.next = noad
+ noad.nucleus = current
+ return sub
+end
+
+local setcolor = nodes.tracers.colors.set
+local italic_kern = new_kern
+local c_positive_d = "trace:db"
+local c_negative_d = "trace:dr"
+
+trackers.register("math.italics", function(v)
+ if v then
+ italic_kern = function(k,font)
+ local ex = 1.5 * fontexheights[font]
+ if k > 0 then
+ return setcolor(new_rule(k,ex,ex),c_positive_d)
+ else
+ return concat_nodes {
+ new_kern(k),
+ setcolor(new_rule(-k,ex,ex),c_negative_d),
+ new_kern(k),
+ }
+ end
+ end
+ else
+ italic_kern = new_kern
+ end
+end)
+
+italics[math_char] = function(pointer,what,n,parent)
+ local method = pointer[a_mathitalics]
+ if method and method > 0 then
+ local char = pointer.char
+ local font = font_of_family(pointer.fam) -- todo: table
+ local correction, visual = getcorrection(method,font,char)
+ if correction then
+ local pid = parent.id
+ local sub, sup
+ if pid == math_noad then
+ sup = parent.sup
+ sub = parent.sub
+ end
+ if sup or sub then
+ local subtype = parent.subtype
+ if subtype == noad_oplimits then
+ if sup then
+ parent.sup = insert_kern(sup,italic_kern(correction,font))
+ if trace_italics then
+ report_italics("method %a, adding %p italic correction for upper limit of %C",method,correction,char)
+ end
+ end
+ if sub then
+ local correction = - correction
+ parent.sub = insert_kern(sub,italic_kern(correction,font))
+ if trace_italics then
+ report_italics("method %a, adding %p italic correction for lower limit of %C",method,correction,char)
+ end
+ end
+ else
+ if sup then
+ parent.sup = insert_kern(sup,italic_kern(correction,font))
+ if trace_italics then
+ report_italics("method %a, adding %p italic correction before superscript after %C",method,correction,char)
+ end
+ end
+ end
+ else
+ local next_noad = parent.next
+ if not next_noad then
+ if n == 1 then -- only at the outer level .. will become an option (always,endonly,none)
+ if trace_italics then
+ report_italics("method %a, adding %p italic correction between %C and end math",method,correction,char)
+ end
+ insert_node_after(parent,parent,italic_kern(correction,font))
+ end
+ elseif next_noad.id == math_noad then
+ local next_subtype = next_noad.subtype
+ if next_subtype == noad_punct or next_subtype == noad_ord then
+ local next_nucleus = next_noad.nucleus
+ if next_nucleus.id == math_char then
+ local next_char = next_nucleus.char
+ local next_data = chardata[next_char]
+ local visual = next_data.visual
+ if visual == "it" or visual == "bi" then
+ -- if trace_italics then
+ -- report_italics("method %a, skipping %p italic correction between italic %C and italic %C",method,correction,char,next_char)
+ -- end
+ else
+ local category = next_data.category
+ if category == "nd" or category == "ll" or category == "lu" then
+ if trace_italics then
+ report_italics("method %a, adding %p italic correction between italic %C and non italic %C",method,correction,char,next_char)
+ end
+ insert_node_after(parent,parent,italic_kern(correction,font))
+ -- elseif next_data.height > (fontexheights[font]/2) then
+ -- if trace_italics then
+ -- report_italics("method %a, adding %p italic correction between %C and ascending %C",method,correction,char,next_char)
+ -- end
+ -- insert_node_after(parent,parent,italic_kern(correction,font))
+ -- elseif trace_italics then
+ -- -- report_italics("method %a, skipping %p italic correction between %C and %C",method,correction,char,next_char)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+
+function handlers.italics(head,style,penalties)
+ processnoads(head,italics,"italics")
+ return true
+end
+
+local enable
+
+enable = function()
+ tasks.enableaction("math", "noads.handlers.italics")
+ if trace_italics then
+ report_italics("enabling math italics")
+ end
+ enable = false
+end
+
+-- best do this only in math mode (less overhead)
+
+function mathematics.setitalics(n)
+ if enable then
+ enable()
+ end
+ if n == variables.reset then
+ texattribute[a_mathitalics] = unsetvalue
+ else
+ texattribute[a_mathitalics] = tonumber(n) or unsetvalue
+ end
+end
+
+function mathematics.resetitalics()
+ texattribute[a_mathitalics] = unsetvalue
+end
+
+-- variants
+
+local variants = { }
+
+local validvariants = { -- fast check on valid
+ [0x2229] = 0xFE00, [0x222A] = 0xFE00,
+ [0x2268] = 0xFE00, [0x2269] = 0xFE00,
+ [0x2272] = 0xFE00, [0x2273] = 0xFE00,
+ [0x228A] = 0xFE00, [0x228B] = 0xFE00,
+ [0x2293] = 0xFE00, [0x2294] = 0xFE00,
+ [0x2295] = 0xFE00,
+ [0x2297] = 0xFE00,
+ [0x229C] = 0xFE00,
+ [0x22DA] = 0xFE00, [0x22DB] = 0xFE00,
+ [0x2A3C] = 0xFE00, [0x2A3D] = 0xFE00,
+ [0x2A9D] = 0xFE00, [0x2A9E] = 0xFE00,
+ [0x2AAC] = 0xFE00, [0x2AAD] = 0xFE00,
+ [0x2ACB] = 0xFE00, [0x2ACC] = 0xFE00,
+}
+
+variants[math_char] = function(pointer,what,n,parent) -- also set export value
+ local char = pointer.char
+ local selector = validvariants[char]
+ if selector then
+ local next = parent.next
+ if next and next.id == math_noad then
+ local nucleus = next.nucleus
+ if nucleus and nucleus.id == math_char and nucleus.char == selector then
+ local variant
+ local tfmdata = fontdata[font_of_family(pointer.fam)] -- we can also have a famdata
+ local mathvariants = tfmdata.resources.variants -- and variantdata
+ if mathvariants then
+ mathvariants = mathvariants[selector]
+ if mathvariants then
+ variant = mathvariants[char]
+ end
+ end
+ if variant then
+ pointer.char = variant
+ pointer[a_exportstatus] = char -- we don't export the variant as it's visual markup
+ if trace_variants then
+ report_variants("variant (%U,%U) replaced by %U",char,selector,variant)
+ end
+ else
+ if trace_variants then
+ report_variants("no variant (%U,%U)",char,selector)
+ end
+ end
+ next.prev = pointer
+ parent.next = next.next
+ free_node(next)
+ end
+ end
+ end
+end
+
+function handlers.variants(head,style,penalties)
+ processnoads(head,variants,"unicode variant")
+ return true
+end
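+
+-- Example (a sketch): the input U+2268 followed by the variation selector
+-- U+FE00 is replaced by the variant glyph registered in the font, and the
+-- selector noad is removed; when the font has no such variant the selector is
+-- still removed so it cannot end up in the output. The lookup boils down to:
+--
+-- local variant = tfmdata.resources.variants[0xFE00][0x2268] -- assuming the font provides one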
+
+-- the normal builder
+
+function builders.kernel.mlist_to_hlist(head,style,penalties)
+ return mlist_to_hlist(head,style,penalties), true
+end
+
+-- function builders.kernel.mlist_to_hlist(head,style,penalties)
+-- print("!!!!!!! BEFORE",penalties)
+-- for n in node.traverse(head) do print(n) end
+-- print("!!!!!!!")
+-- head = mlist_to_hlist(head,style,penalties)
+-- print("!!!!!!! AFTER")
+-- for n in node.traverse(head) do print(n) end
+-- print("!!!!!!!")
+-- return head, true
+-- end
+
+tasks.new {
+ name = "math",
+ arguments = 2,
+ processor = utilities.sequencers.nodeprocessor,
+ sequence = {
+ "before",
+ "normalizers",
+ "builders",
+ "after",
+ },
+}
+
+tasks.freezegroup("math", "normalizers") -- experimental
+tasks.freezegroup("math", "builders") -- experimental
+
+local actions = tasks.actions("math") -- head, style, penalties
+
+local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming
+
+function processors.mlist_to_hlist(head,style,penalties)
+ starttiming(noads)
+ local head, done = actions(head,style,penalties)
+ stoptiming(noads)
+ return head, done
+end
+
+callbacks.register('mlist_to_hlist',processors.mlist_to_hlist,"preprocessing math list")
+
+-- tracing
+
+statistics.register("math processing time", function()
+ return statistics.elapsedseconds(noads)
+end)
+
+-- interface
+
+commands.setmathalternate = mathematics.setalternate
+commands.setmathitalics = mathematics.setitalics
+commands.resetmathitalics = mathematics.resetitalics
diff --git a/tex/context/base/math-ren.lua b/tex/context/base/math-ren.lua
index 2e7dba13d..348d8a2d9 100644
--- a/tex/context/base/math-ren.lua
+++ b/tex/context/base/math-ren.lua
@@ -1,69 +1,69 @@
-if not modules then modules = { } end modules ['math-ren'] = {
- version = 1.001,
- comment = "companion to math-ren.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local next = next
-local gsub = string.gsub
-
-local settings_to_array = utilities.parsers.settings_to_array
-local allocate = storage.allocate
-
-local renderings = { }
-mathematics.renderings = renderings
-
-local mappings = allocate()
-renderings.mappings = mappings
-
-local numbers = allocate()
-renderings.numbers = numbers
-
-local sets = allocate()
-renderings.sets = sets
-
-mappings["blackboard-to-bold"] = {
- [0x1D538] = 0x1D400, [0x1D539] = 0x1D401, [0x02102] = 0x1D402, [0x1D53B] = 0x1D403, [0x1D53C] = 0x1D404,
- [0x1D53D] = 0x1D405, [0x1D53E] = 0x1D406, [0x0210D] = 0x1D407, [0x1D540] = 0x1D408, [0x1D541] = 0x1D409,
- [0x1D542] = 0x1D40A, [0x1D543] = 0x1D40B, [0x1D544] = 0x1D40C, [0x02115] = 0x1D40D, [0x1D546] = 0x1D40E,
- [0x02119] = 0x1D40F, [0x0211A] = 0x1D410, [0x0211D] = 0x1D411, [0x1D54A] = 0x1D412, [0x1D54B] = 0x1D413,
- [0x1D54C] = 0x1D414, [0x1D54D] = 0x1D415, [0x1D54E] = 0x1D416, [0x1D54F] = 0x1D417, [0x1D550] = 0x1D418,
- [0x02124] = 0x1D419,
-}
-
-local function renderset(list) -- order matters
- local tag = gsub(list," ","")
- local n = sets[tag]
- if not n then
- local list = settings_to_array(tag)
- local mapping = { }
- for i=1,#list do
- local m = mappings[list[i]]
- if m then
- for k, v in next, m do
- mapping[k] = v
- end
- end
- end
- if next(mapping) then
- n = #numbers + 1
- numbers[n] = mapping
- else
- n = attributes.unsetvalue
- end
- sets[tag] = n
- end
- return n
-end
-
-mathematics.renderset = renderset
-
-function commands.mathrenderset(list)
- context(renderset(list))
-end
-
--- function commands.setmatrendering(list)
--- tex.setattribute(renderset(list))
--- end
+if not modules then modules = { } end modules ['math-ren'] = {
+ version = 1.001,
+ comment = "companion to math-ren.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local next = next
+local gsub = string.gsub
+
+local settings_to_array = utilities.parsers.settings_to_array
+local allocate = storage.allocate
+
+local renderings = { }
+mathematics.renderings = renderings
+
+local mappings = allocate()
+renderings.mappings = mappings
+
+local numbers = allocate()
+renderings.numbers = numbers
+
+local sets = allocate()
+renderings.sets = sets
+
+mappings["blackboard-to-bold"] = {
+ [0x1D538] = 0x1D400, [0x1D539] = 0x1D401, [0x02102] = 0x1D402, [0x1D53B] = 0x1D403, [0x1D53C] = 0x1D404,
+ [0x1D53D] = 0x1D405, [0x1D53E] = 0x1D406, [0x0210D] = 0x1D407, [0x1D540] = 0x1D408, [0x1D541] = 0x1D409,
+ [0x1D542] = 0x1D40A, [0x1D543] = 0x1D40B, [0x1D544] = 0x1D40C, [0x02115] = 0x1D40D, [0x1D546] = 0x1D40E,
+ [0x02119] = 0x1D40F, [0x0211A] = 0x1D410, [0x0211D] = 0x1D411, [0x1D54A] = 0x1D412, [0x1D54B] = 0x1D413,
+ [0x1D54C] = 0x1D414, [0x1D54D] = 0x1D415, [0x1D54E] = 0x1D416, [0x1D54F] = 0x1D417, [0x1D550] = 0x1D418,
+ [0x02124] = 0x1D419,
+}
+
+local function renderset(list) -- order matters
+ local tag = gsub(list," ","")
+ local n = sets[tag]
+ if not n then
+ local list = settings_to_array(tag)
+ local mapping = { }
+ for i=1,#list do
+ local m = mappings[list[i]]
+ if m then
+ for k, v in next, m do
+ mapping[k] = v
+ end
+ end
+ end
+ if next(mapping) then
+ n = #numbers + 1
+ numbers[n] = mapping
+ else
+ n = attributes.unsetvalue
+ end
+ sets[tag] = n
+ end
+ return n
+end
+
+mathematics.renderset = renderset
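+
+-- Typical use (a sketch): asking for a rendering set yields an attribute
+-- value that the render processor in math-noa.lua picks up; names that are
+-- not in mappings simply contribute nothing:
+--
+-- local n = renderset("blackboard-to-bold")
+-- -- n indexes renderings.numbers, or equals attributes.unsetvalue when the
+-- -- list yields an empty mapping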
+
+function commands.mathrenderset(list)
+ context(renderset(list))
+end
+
+-- function commands.setmatrendering(list)
+-- tex.setattribute(renderset(list))
+-- end
diff --git a/tex/context/base/math-tag.lua b/tex/context/base/math-tag.lua
index ab5902dd4..3dafaaa2f 100644
--- a/tex/context/base/math-tag.lua
+++ b/tex/context/base/math-tag.lua
@@ -1,345 +1,345 @@
-if not modules then modules = { } end modules ['math-tag'] = {
- version = 1.001,
- comment = "companion to math-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- use lpeg matchers
-
-local find, match = string.find, string.match
-local insert, remove = table.insert, table.remove
-
-local attributes, nodes = attributes, nodes
-
-local set_attributes = nodes.setattributes
-local traverse_nodes = node.traverse
-
-local nodecodes = nodes.nodecodes
-
-local math_noad_code = nodecodes.noad -- attr nucleus sub sup
-local math_accent_code = nodecodes.accent -- attr nucleus sub sup accent
-local math_radical_code = nodecodes.radical -- attr nucleus sub sup left degree
-local math_fraction_code = nodecodes.fraction -- attr nucleus sub sup left right
-local math_box_code = nodecodes.subbox -- attr list
-local math_sub_code = nodecodes.submlist -- attr list
-local math_char_code = nodecodes.mathchar -- attr fam char
-local math_textchar_code = nodecodes.mathtextchar -- attr fam char
-local math_delim_code = nodecodes.delim -- attr small_fam small_char large_fam large_char
-local math_style_code = nodecodes.style -- attr style
-local math_choice_code = nodecodes.choice -- attr display text script scriptscript
-local math_fence_code = nodecodes.fence -- attr subtype
-
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local glyph_code = nodecodes.glyph
-local glue_code = nodecodes.glue
-
-local a_tagged = attributes.private('tagged')
-local a_exportstatus = attributes.private('exportstatus')
-local a_mathcategory = attributes.private('mathcategory')
-local a_mathmode = attributes.private('mathmode')
-
-local tags = structures.tags
-
-local start_tagged = tags.start
-local restart_tagged = tags.restart
-local stop_tagged = tags.stop
-local taglist = tags.taglist
-
-local chardata = characters.data
-
-local getmathcode = tex.getmathcode
-local mathcodes = mathematics.codes
-local ordinary_code = mathcodes.ordinary
-local variable_code = mathcodes.variable
-
-local process
-
-local function processsubsup(start)
- -- At some point we might need to add an attribute signaling the
- -- super- and subscripts because TeX and MathML use a different
- -- order.
- local nucleus, sup, sub = start.nucleus, start.sup, start.sub
- if sub then
- if sup then
- start[a_tagged] = start_tagged("msubsup")
- process(nucleus)
- process(sub)
- process(sup)
- stop_tagged()
- else
- start[a_tagged] = start_tagged("msub")
- process(nucleus)
- process(sub)
- stop_tagged()
- end
- elseif sup then
- start[a_tagged] = start_tagged("msup")
- process(nucleus)
- process(sup)
- stop_tagged()
- else
- process(nucleus)
- end
-end
-
--- todo: check function here and keep attribute the same
-
--- todo: variants -> original
-
-local actionstack = { }
-
-process = function(start) -- we cannot use the processor as we have no finalizers (yet)
- while start do
- local id = start.id
- if id == math_char_code then
- local char = start.char
- -- check for code
- local a = start[a_mathcategory]
- if a then
- a = { detail = a }
- end
- local code = getmathcode(char)
- if code then
- code = code[1]
- end
- local tag
- if code == ordinary_code or code == variable_code then
- local ch = chardata[char]
- local mc = ch and ch.mathclass
- if mc == "number" then
- tag = "mn"
- elseif mc == "variable" or not mc then -- variable is default
- tag = "mi"
- else
- tag = "mo"
- end
- else
- tag = "mo"
- end
- start[a_tagged] = start_tagged(tag,a)
- stop_tagged()
- break -- okay?
- elseif id == math_textchar_code then
- -- check for code
- local a = start[a_mathcategory]
- if a then
- start[a_tagged] = start_tagged("ms",{ detail = a })
- else
- start[a_tagged] = start_tagged("ms")
- end
- stop_tagged()
- break
- elseif id == math_delim_code then
- -- check for code
- start[a_tagged] = start_tagged("mo")
- stop_tagged()
- break
- elseif id == math_style_code then
- -- has a next
- elseif id == math_noad_code then
- processsubsup(start)
- elseif id == math_box_code or id == hlist_code or id == vlist_code then
- -- keep an eye on math_box_code and see what ends up in there
- local attr = start[a_tagged]
- local last = attr and taglist[attr]
- if last and find(last[#last],"formulacaption[:%-]") then
- -- leave alone, will nicely move to the outer level
- else
- local text = start_tagged("mtext")
- start[a_tagged] = text
- local list = start.list
- if not list then
- -- empty list
- elseif not attr then
- -- box comes from strange place
- set_attributes(list,a_tagged,text)
- else
- -- Beware, the first node in list is the actual list so we definitely
- -- need to nest. This approach is a hack, maybe I'll make a proper
- -- nesting feature to deal with this at another level. Here we just
- -- fake structure by enforcing the inner one.
- local tagdata = taglist[attr]
- local common = #tagdata + 1
- local function runner(list) -- quite inefficient
- local cache = { } -- we can have nested unboxed mess so best local to runner
- for n in traverse_nodes(list) do
- local id = n.id
- local aa = n[a_tagged]
- if aa then
- local ac = cache[aa]
- if not ac then
- local tagdata = taglist[aa]
- local extra = #tagdata
- if common <= extra then
- for i=common,extra do
- ac = restart_tagged(tagdata[i]) -- can be made faster
- end
- for i=common,extra do
- stop_tagged() -- can be made faster
- end
- else
- ac = text
- end
- cache[aa] = ac
- end
- n[a_tagged] = ac
- else
- n[a_tagged] = text
- end
- if id == hlist_code or id == vlist_code then
- runner(n.list)
- end
- end
- end
- runner(list)
- end
- stop_tagged()
- end
- elseif id == math_sub_code then
- local list = start.list
- if list then
- local attr = start[a_tagged]
- local last = attr and taglist[attr]
- local action = last and match(last[#last],"maction:(.-)%-")
- if action and action ~= "" then
- if actionstack[#actionstack] == action then
- start[a_tagged] = start_tagged("mrow")
- process(list)
- stop_tagged()
- else
- insert(actionstack,action)
- start[a_tagged] = start_tagged("mrow",{ detail = action })
- process(list)
- stop_tagged()
- remove(actionstack)
- end
- else
- start[a_tagged] = start_tagged("mrow")
- process(list)
- stop_tagged()
- end
- end
- elseif id == math_fraction_code then
- local num, denom, left, right = start.num, start.denom, start.left, start.right
- if left then
- left[a_tagged] = start_tagged("mo")
- process(left)
- stop_tagged()
- end
- start[a_tagged] = start_tagged("mfrac")
- process(num)
- process(denom)
- stop_tagged()
- if right then
- right[a_tagged] = start_tagged("mo")
- process(right)
- stop_tagged()
- end
- elseif id == math_choice_code then
- local display, text, script, scriptscript = start.display, start.text, start.script, start.scriptscript
- if display then
- process(display)
- end
- if text then
- process(text)
- end
- if script then
- process(script)
- end
- if scriptscript then
- process(scriptscript)
- end
- elseif id == math_fence_code then
- local delim = start.delim
- local subtype = start.subtype
- if subtype == 1 then
- -- left
- start[a_tagged] = start_tagged("mfenced")
- if delim then
- start[a_tagged] = start_tagged("mleft")
- process(delim)
- stop_tagged()
- end
- elseif subtype == 2 then
- -- middle
- if delim then
- start[a_tagged] = start_tagged("mmiddle")
- process(delim)
- stop_tagged()
- end
- elseif subtype == 3 then
- if delim then
- start[a_tagged] = start_tagged("mright")
- process(delim)
- stop_tagged()
- end
- stop_tagged()
- else
- -- can't happen
- end
- elseif id == math_radical_code then
- local left, degree = start.left, start.degree
- if left then
- start_tagged("")
- process(left) -- root symbol, ignored
- stop_tagged()
- end
- if degree then -- not good enough, can be empty mlist
- start[a_tagged] = start_tagged("mroot")
- processsubsup(start)
- process(degree)
- stop_tagged()
- else
- start[a_tagged] = start_tagged("msqrt")
- processsubsup(start)
- stop_tagged()
- end
- elseif id == math_accent_code then
- local accent, bot_accent = start.accent, start.bot_accent
- if bot_accent then
- if accent then
- start[a_tagged] = start_tagged("munderover",{ detail = "accent" })
- processsubsup(start)
- process(bot_accent)
- process(accent)
- stop_tagged()
- else
- start[a_tagged] = start_tagged("munder",{ detail = "accent" })
- processsubsup(start)
- process(bot_accent)
- stop_tagged()
- end
- elseif accent then
- start[a_tagged] = start_tagged("mover",{ detail = "accent" })
- processsubsup(start)
- process(accent)
- stop_tagged()
- else
- processsubsup(start)
- end
- elseif id == glue_code then
- start[a_tagged] = start_tagged("mspace")
- stop_tagged()
- else
- start[a_tagged] = start_tagged("merror", { detail = nodecodes[i] })
- stop_tagged()
- end
- start = start.next
- end
-end
-
-function noads.handlers.tags(head,style,penalties)
- local v_math = start_tagged("math")
- local v_mrow = start_tagged("mrow")
- local v_mode = head[a_mathmode]
- head[a_tagged] = v_math
- head[a_tagged] = v_mrow
- tags.setattributehash(v_math,"mode",v_mode == 1 and "display" or "inline")
- process(head)
- stop_tagged()
- stop_tagged()
- return true
-end
+if not modules then modules = { } end modules ['math-tag'] = {
+ version = 1.001,
+ comment = "companion to math-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- use lpeg matchers
+
+local find, match = string.find, string.match
+local insert, remove = table.insert, table.remove
+
+local attributes, nodes = attributes, nodes
+
+local set_attributes = nodes.setattributes
+local traverse_nodes = node.traverse
+
+local nodecodes = nodes.nodecodes
+
+local math_noad_code = nodecodes.noad -- attr nucleus sub sup
+local math_accent_code = nodecodes.accent -- attr nucleus sub sup accent
+local math_radical_code = nodecodes.radical -- attr nucleus sub sup left degree
+local math_fraction_code = nodecodes.fraction -- attr nucleus sub sup left right
+local math_box_code = nodecodes.subbox -- attr list
+local math_sub_code = nodecodes.submlist -- attr list
+local math_char_code = nodecodes.mathchar -- attr fam char
+local math_textchar_code = nodecodes.mathtextchar -- attr fam char
+local math_delim_code = nodecodes.delim -- attr small_fam small_char large_fam large_char
+local math_style_code = nodecodes.style -- attr style
+local math_choice_code = nodecodes.choice -- attr display text script scriptscript
+local math_fence_code = nodecodes.fence -- attr subtype
+
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local glyph_code = nodecodes.glyph
+local glue_code = nodecodes.glue
+
+local a_tagged = attributes.private('tagged')
+local a_exportstatus = attributes.private('exportstatus')
+local a_mathcategory = attributes.private('mathcategory')
+local a_mathmode = attributes.private('mathmode')
+
+local tags = structures.tags
+
+local start_tagged = tags.start
+local restart_tagged = tags.restart
+local stop_tagged = tags.stop
+local taglist = tags.taglist
+
+local chardata = characters.data
+
+local getmathcode = tex.getmathcode
+local mathcodes = mathematics.codes
+local ordinary_code = mathcodes.ordinary
+local variable_code = mathcodes.variable
+
+local process
+
+local function processsubsup(start)
+ -- At some point we might need to add an attribute signaling the
+ -- super- and subscripts because TeX and MathML use a different
+ -- order.
+ local nucleus, sup, sub = start.nucleus, start.sup, start.sub
+ if sub then
+ if sup then
+ start[a_tagged] = start_tagged("msubsup")
+ process(nucleus)
+ process(sub)
+ process(sup)
+ stop_tagged()
+ else
+ start[a_tagged] = start_tagged("msub")
+ process(nucleus)
+ process(sub)
+ stop_tagged()
+ end
+ elseif sup then
+ start[a_tagged] = start_tagged("msup")
+ process(nucleus)
+ process(sup)
+ stop_tagged()
+ else
+ process(nucleus)
+ end
+end
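+
+-- Note that an msubsup element takes its children in the order base, subscript,
+-- superscript, so processing nucleus, sub, sup above already matches the MathML
+-- order no matter in which order the scripts were given in the TeX input.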
+
+-- todo: check function here and keep attribute the same
+
+-- todo: variants -> original
+
+local actionstack = { }
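+-- the stack keeps track of maction details so that a nested occurrence of the
+-- same action only gets a plain mrow (see the math_sub_code branch below)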
+
+process = function(start) -- we cannot use the processor as we have no finalizers (yet)
+ while start do
+ local id = start.id
+ if id == math_char_code then
+ local char = start.char
+ -- check for code
+ local a = start[a_mathcategory]
+ if a then
+ a = { detail = a }
+ end
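+ -- tex.getmathcode returns a (class, family, slot) triplet; only the class
+ -- is needed to decide between mn, mi and mo below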
+ local code = getmathcode(char)
+ if code then
+ code = code[1]
+ end
+ local tag
+ if code == ordinary_code or code == variable_code then
+ local ch = chardata[char]
+ local mc = ch and ch.mathclass
+ if mc == "number" then
+ tag = "mn"
+ elseif mc == "variable" or not mc then -- variable is default
+ tag = "mi"
+ else
+ tag = "mo"
+ end
+ else
+ tag = "mo"
+ end
+ start[a_tagged] = start_tagged(tag,a)
+ stop_tagged()
+ break -- okay?
+ elseif id == math_textchar_code then
+ -- check for code
+ local a = start[a_mathcategory]
+ if a then
+ start[a_tagged] = start_tagged("ms",{ detail = a })
+ else
+ start[a_tagged] = start_tagged("ms")
+ end
+ stop_tagged()
+ break
+ elseif id == math_delim_code then
+ -- check for code
+ start[a_tagged] = start_tagged("mo")
+ stop_tagged()
+ break
+ elseif id == math_style_code then
+ -- has a next
+ elseif id == math_noad_code then
+ processsubsup(start)
+ elseif id == math_box_code or id == hlist_code or id == vlist_code then
+ -- keep an eye on math_box_code and see what ends up in there
+ local attr = start[a_tagged]
+ local last = attr and taglist[attr]
+ if last and find(last[#last],"formulacaption[:%-]") then
+ -- leave alone, will nicely move to the outer level
+ else
+ local text = start_tagged("mtext")
+ start[a_tagged] = text
+ local list = start.list
+ if not list then
+ -- empty list
+ elseif not attr then
+ -- box comes from strange place
+ set_attributes(list,a_tagged,text)
+ else
+ -- Beware, the first node in list is the actual list so we definitely
+ -- need to nest. This approach is a hack, maybe I'll make a proper
+ -- nesting feature to deal with this at another level. Here we just
+ -- fake structure by enforcing the inner one.
+ local tagdata = taglist[attr]
+ local common = #tagdata + 1
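+ -- tag levels up to #tagdata are shared with the box itself; deeper levels
+ -- found on embedded nodes are restarted inside runner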
+ local function runner(list) -- quite inefficient
+ local cache = { } -- we can have nested unboxed mess so best local to runner
+ for n in traverse_nodes(list) do
+ local id = n.id
+ local aa = n[a_tagged]
+ if aa then
+ local ac = cache[aa]
+ if not ac then
+ local tagdata = taglist[aa]
+ local extra = #tagdata
+ if common <= extra then
+ for i=common,extra do
+ ac = restart_tagged(tagdata[i]) -- can be made faster
+ end
+ for i=common,extra do
+ stop_tagged() -- can be made faster
+ end
+ else
+ ac = text
+ end
+ cache[aa] = ac
+ end
+ n[a_tagged] = ac
+ else
+ n[a_tagged] = text
+ end
+ if id == hlist_code or id == vlist_code then
+ runner(n.list)
+ end
+ end
+ end
+ runner(list)
+ end
+ stop_tagged()
+ end
+ elseif id == math_sub_code then
+ local list = start.list
+ if list then
+ local attr = start[a_tagged]
+ local last = attr and taglist[attr]
+ local action = last and match(last[#last],"maction:(.-)%-")
+ if action and action ~= "" then
+ if actionstack[#actionstack] == action then
+ start[a_tagged] = start_tagged("mrow")
+ process(list)
+ stop_tagged()
+ else
+ insert(actionstack,action)
+ start[a_tagged] = start_tagged("mrow",{ detail = action })
+ process(list)
+ stop_tagged()
+ remove(actionstack)
+ end
+ else
+ start[a_tagged] = start_tagged("mrow")
+ process(list)
+ stop_tagged()
+ end
+ end
+ elseif id == math_fraction_code then
+ local num, denom, left, right = start.num, start.denom, start.left, start.right
+ if left then
+ left[a_tagged] = start_tagged("mo")
+ process(left)
+ stop_tagged()
+ end
+ start[a_tagged] = start_tagged("mfrac")
+ process(num)
+ process(denom)
+ stop_tagged()
+ if right then
+ right[a_tagged] = start_tagged("mo")
+ process(right)
+ stop_tagged()
+ end
+ elseif id == math_choice_code then
+ local display, text, script, scriptscript = start.display, start.text, start.script, start.scriptscript
+ if display then
+ process(display)
+ end
+ if text then
+ process(text)
+ end
+ if script then
+ process(script)
+ end
+ if scriptscript then
+ process(scriptscript)
+ end
+ elseif id == math_fence_code then
+ local delim = start.delim
+ local subtype = start.subtype
+ if subtype == 1 then
+ -- left
+ start[a_tagged] = start_tagged("mfenced")
+ if delim then
+ start[a_tagged] = start_tagged("mleft")
+ process(delim)
+ stop_tagged()
+ end
+ elseif subtype == 2 then
+ -- middle
+ if delim then
+ start[a_tagged] = start_tagged("mmiddle")
+ process(delim)
+ stop_tagged()
+ end
+ elseif subtype == 3 then
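+ -- right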
+ if delim then
+ start[a_tagged] = start_tagged("mright")
+ process(delim)
+ stop_tagged()
+ end
+ stop_tagged()
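+ -- this second stop closes the "mfenced" opened when the left fence was seen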
+ else
+ -- can't happen
+ end
+ elseif id == math_radical_code then
+ local left, degree = start.left, start.degree
+ if left then
+ start_tagged("")
+ process(left) -- root symbol, ignored
+ stop_tagged()
+ end
+ if degree then -- not good enough, can be empty mlist
+ start[a_tagged] = start_tagged("mroot")
+ processsubsup(start)
+ process(degree)
+ stop_tagged()
+ else
+ start[a_tagged] = start_tagged("msqrt")
+ processsubsup(start)
+ stop_tagged()
+ end
+ elseif id == math_accent_code then
+ local accent, bot_accent = start.accent, start.bot_accent
+ if bot_accent then
+ if accent then
+ start[a_tagged] = start_tagged("munderover",{ detail = "accent" })
+ processsubsup(start)
+ process(bot_accent)
+ process(accent)
+ stop_tagged()
+ else
+ start[a_tagged] = start_tagged("munder",{ detail = "accent" })
+ processsubsup(start)
+ process(bot_accent)
+ stop_tagged()
+ end
+ elseif accent then
+ start[a_tagged] = start_tagged("mover",{ detail = "accent" })
+ processsubsup(start)
+ process(accent)
+ stop_tagged()
+ else
+ processsubsup(start)
+ end
+ elseif id == glue_code then
+ start[a_tagged] = start_tagged("mspace")
+ stop_tagged()
+ else
+ start[a_tagged] = start_tagged("merror", { detail = nodecodes[i] })
+ stop_tagged()
+ end
+ start = start.next
+ end
+end
+
+function noads.handlers.tags(head,style,penalties)
+ local v_math = start_tagged("math")
+ local v_mrow = start_tagged("mrow")
+ local v_mode = head[a_mathmode]
+ head[a_tagged] = v_math
+ head[a_tagged] = v_mrow
+ tags.setattributehash(v_math,"mode",v_mode == 1 and "display" or "inline")
+ process(head)
+ stop_tagged()
+ stop_tagged()
+ return true
+end
diff --git a/tex/context/base/math-ttv.lua b/tex/context/base/math-ttv.lua
index 1f644e788..e5548c730 100644
--- a/tex/context/base/math-ttv.lua
+++ b/tex/context/base/math-ttv.lua
@@ -1,801 +1,801 @@
-if not modules then modules = { } end modules ['math-ttv'] = {
- version = 1.001,
- comment = "traditional tex vectors, companion to math-vfu.lua",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
- dataonly = true,
-}
-
-local vfmath = fonts.handlers.vf.math
-local setletters = vfmath.setletters
-local setdigits = vfmath.setdigits
-
-local mathencodings = fonts.encodings.math
-
--- varphi is part of the alphabet, contrary to the other var*s'
-
-mathencodings["large-to-small"] = {
- [0x00028] = 0x00, -- (
- [0x00029] = 0x01, -- )
- [0x0005B] = 0x02, -- [
- [0x0005D] = 0x03, -- ]
- [0x0230A] = 0x04, -- lfloor
- [0x0230B] = 0x05, -- rfloor
- [0x02308] = 0x06, -- lceil
- [0x02309] = 0x07, -- rceil
- [0x0007B] = 0x08, -- {
- [0x0007D] = 0x09, -- }
- [0x027E8] = 0x0A, -- <
- [0x027E9] = 0x0B, -- >
- [0x0007C] = 0x0C, -- |
- -- [0x0] = 0x0D, -- lVert rVert Vert
- -- [0x0002F] = 0x0E, -- /
- [0x0005C] = 0x0F, -- \
- -- [0x0] = 0x3A, -- lgroup
- -- [0x0] = 0x3B, -- rgroup
- -- [0x0] = 0x3C, -- arrowvert
- -- [0x0] = 0x3D, -- Arrowvert
- [0x02195] = 0x3F, -- updownarrow
- -- [0x0] = 0x40, -- lmoustache
- -- [0x0] = 0x41, -- rmoustache
- [0x0221A] = 0x70, -- sqrt
- [0x021D5] = 0x77, -- Updownarrow
- [0x02191] = 0x78, -- uparrow
- [0x02193] = 0x79, -- downarrow
- [0x021D1] = 0x7E, -- Uparrow
- [0x021D3] = 0x7F, -- Downarrow
- [0x0220F] = 0x59, -- prod
- [0x02210] = 0x61, -- coprod
- [0x02211] = 0x58, -- sum
- [0x0222B] = 0x5A, -- intop
- [0x0222E] = 0x49, -- ointop
- -- [0xFE302] = 0x62, -- widehat
- -- [0xFE303] = 0x65, -- widetilde
- [0x00302] = 0x62, -- widehat
- [0x00303] = 0x65, -- widetilde
- [0x022C0] = 0x5E, -- bigwedge
- [0x022C1] = 0x5F, -- bigvee
- [0x022C2] = 0x5C, -- bigcap
- [0x022C3] = 0x5B, -- bigcup
- [0x02044] = 0x0E, -- /
-}
-
--- Beware: these are (in cm/lm) below the baseline due to limitations
--- in the tfm format, but the engine (combined with the mathclass) takes
--- care of it. If we need them in textmode, we should make them virtual
--- and move them up but we're in no hurry with that.
-
-mathencodings["tex-ex"] = {
- [0x0220F] = 0x51, -- prod
- [0x02210] = 0x60, -- coprod
- [0x02211] = 0x50, -- sum
- [0x0222B] = 0x52, -- intop
- [0x0222E] = 0x48, -- ointop
- [0x022C0] = 0x56, -- bigwedge
- [0x022C1] = 0x57, -- bigvee
- [0x022C2] = 0x54, -- bigcap
- [0x022C3] = 0x53, -- bigcup
- [0x02A00] = 0x4A, -- bigodot -- fixed BJ
- [0x02A01] = 0x4C, -- bigoplus
- [0x02A02] = 0x4E, -- bigotimes
- -- [0x02A03] = , -- bigudot --
- [0x02A04] = 0x55, -- biguplus
- [0x02A06] = 0x46, -- bigsqcup
-}
-
--- only math stuff is needed, since we always use an lm or gyre
--- font as main font
-
-mathencodings["tex-mr"] = {
- [0x00393] = 0x00, -- Gamma
- [0x00394] = 0x01, -- Delta
- [0x00398] = 0x02, -- Theta
- [0x0039B] = 0x03, -- Lambda
- [0x0039E] = 0x04, -- Xi
- [0x003A0] = 0x05, -- Pi
- [0x003A3] = 0x06, -- Sigma
- [0x003A5] = 0x07, -- Upsilon
- [0x003A6] = 0x08, -- Phi
- [0x003A8] = 0x09, -- Psi
- [0x003A9] = 0x0A, -- Omega
--- [0x00060] = 0x12, -- [math]grave
--- [0x000B4] = 0x13, -- [math]acute
--- [0x002C7] = 0x14, -- [math]check
--- [0x002D8] = 0x15, -- [math]breve
--- [0x000AF] = 0x16, -- [math]bar
--- [0x00021] = 0x21, -- !
--- [0x00028] = 0x28, -- (
--- [0x00029] = 0x29, -- )
--- [0x0002B] = 0x2B, -- +
--- [0x0002F] = 0x2F, -- /
--- [0x0003A] = 0x3A, -- :
--- [0x02236] = 0x3A, -- colon
--- [0x0003B] = 0x3B, -- ;
--- [0x0003C] = 0x3C, -- <
--- [0x0003D] = 0x3D, -- =
--- [0x0003E] = 0x3E, -- >
--- [0x0003F] = 0x3F, -- ?
- [0x00391] = 0x41, -- Alpha
- [0x00392] = 0x42, -- Beta
- [0x02145] = 0x44,
- [0x00395] = 0x45, -- Epsilon
- [0x00397] = 0x48, -- Eta
- [0x00399] = 0x49, -- Iota
- [0x0039A] = 0x4B, -- Kappa
- [0x0039C] = 0x4D, -- Mu
- [0x0039D] = 0x4E, -- Nu
- [0x0039F] = 0x4F, -- Omicron
- [0x003A1] = 0x52, -- Rho
- [0x003A4] = 0x54, -- Tau
- [0x003A7] = 0x58, -- Chi
- [0x00396] = 0x5A, -- Zeta
--- [0x0005B] = 0x5B, -- [
--- [0x0005D] = 0x5D, -- ]
--- [0x0005E] = 0x5E, -- [math]hat -- the text one
- [0x00302] = 0x5E, -- [math]hat -- the real math one
--- [0x002D9] = 0x5F, -- [math]dot
- [0x02146] = 0x64,
- [0x02147] = 0x65,
--- [0x002DC] = 0x7E, -- [math]tilde -- the text one
- [0x00303] = 0x7E, -- [math]tilde -- the real one
--- [0x000A8] = 0x7F, -- [math]ddot
-}
-
-mathencodings["tex-mr-missing"] = {
- [0x02236] = 0x3A, -- colon
-}
-
-mathencodings["tex-mi"] = {
- [0x1D6E4] = 0x00, -- Gamma
- [0x1D6E5] = 0x01, -- Delta
- [0x1D6E9] = 0x02, -- Theta
- [0x1D6F3] = 0x02, -- varTheta (not present in TeX)
- [0x1D6EC] = 0x03, -- Lambda
- [0x1D6EF] = 0x04, -- Xi
- [0x1D6F1] = 0x05, -- Pi
- [0x1D6F4] = 0x06, -- Sigma
- [0x1D6F6] = 0x07, -- Upsilon
- [0x1D6F7] = 0x08, -- Phi
- [0x1D6F9] = 0x09, -- Psi
- [0x1D6FA] = 0x0A, -- Omega
- [0x1D6FC] = 0x0B, -- alpha
- [0x1D6FD] = 0x0C, -- beta
- [0x1D6FE] = 0x0D, -- gamma
- [0x1D6FF] = 0x0E, -- delta
- [0x1D716] = 0x0F, -- epsilon TODO: 1D716
- [0x1D701] = 0x10, -- zeta
- [0x1D702] = 0x11, -- eta
- [0x1D703] = 0x12, -- theta TODO: 1D703
- [0x1D704] = 0x13, -- iota
- [0x1D705] = 0x14, -- kappa
- [0x1D718] = 0x14, -- varkappa, not in tex fonts
- [0x1D706] = 0x15, -- lambda
- [0x1D707] = 0x16, -- mu
- [0x1D708] = 0x17, -- nu
- [0x1D709] = 0x18, -- xi
- [0x1D70B] = 0x19, -- pi
- [0x1D70C] = 0x1A, -- rho
- [0x1D70E] = 0x1B, -- sigma
- [0x1D70F] = 0x1C, -- tau
- [0x1D710] = 0x1D, -- upsilon
- [0x1D719] = 0x1E, -- phi
- [0x1D712] = 0x1F, -- chi
- [0x1D713] = 0x20, -- psi
- [0x1D714] = 0x21, -- omega
- [0x1D700] = 0x22, -- varepsilon (the other way around)
- [0x1D717] = 0x23, -- vartheta
- [0x1D71B] = 0x24, -- varpi
- [0x1D71A] = 0x25, -- varrho
- [0x1D70D] = 0x26, -- varsigma
- [0x1D711] = 0x27, -- varphi (the other way around)
- [0x021BC] = 0x28, -- leftharpoonup
- [0x021BD] = 0x29, -- leftharpoondown
- [0x021C0] = 0x2A, -- rightharpoonup
- [0x021C1] = 0x2B, -- rightharpoondown
- [0xFE322] = 0x2C, -- lhook (hook for combining arrows)
- [0xFE323] = 0x2D, -- rhook (hook for combining arrows)
- [0x025B7] = 0x2E, -- triangleright : cf lmmath / BJ
- [0x025C1] = 0x2F, -- triangleleft : cf lmmath / BJ
- [0x022B3] = 0x2E, -- triangleright : cf lmmath, this is a cramped triangle / BJ / see *
- [0x022B2] = 0x2F, -- triangleleft : cf lmmath, this is a cramped triangle / BJ / see *
--- [0x00041] = 0x30, -- 0
--- [0x00041] = 0x31, -- 1
--- [0x00041] = 0x32, -- 2
--- [0x00041] = 0x33, -- 3
--- [0x00041] = 0x34, -- 4
--- [0x00041] = 0x35, -- 5
--- [0x00041] = 0x36, -- 6
--- [0x00041] = 0x37, -- 7
--- [0x00041] = 0x38, -- 8
--- [0x00041] = 0x39, -- 9
---~ [0x0002E] = 0x3A, -- .
- [0x0002C] = 0x3B, -- ,
- [0x0003C] = 0x3C, -- <
--- [0x0002F] = 0x3D, -- /, slash, solidus
- [0x02044] = 0x3D, -- / AM: Not sure
- [0x0003E] = 0x3E, -- >
- [0x022C6] = 0x3F, -- star
- [0x02202] = 0x40, -- partial
---
- [0x0266D] = 0x5B, -- flat
- [0x0266E] = 0x5C, -- natural
- [0x0266F] = 0x5D, -- sharp
- [0x02323] = 0x5E, -- smile
- [0x02322] = 0x5F, -- frown
- [0x02113] = 0x60, -- ell
---
- [0x1D6A4] = 0x7B, -- imath (TODO: also 0131)
- [0x1D6A5] = 0x7C, -- jmath (TODO: also 0237)
- [0x02118] = 0x7D, -- wp
- [0x020D7] = 0x7E, -- vec (TODO: not sure)
--- 0x7F, -- (no idea what that could be)
-}
-
-mathencodings["tex-it"] = {
--- [0x1D434] = 0x41, -- A
- [0x1D6E2] = 0x41, -- Alpha
--- [0x1D435] = 0x42, -- B
- [0x1D6E3] = 0x42, -- Beta
--- [0x1D436] = 0x43, -- C
--- [0x1D437] = 0x44, -- D
--- [0x1D438] = 0x45, -- E
- [0x1D6E6] = 0x45, -- Epsilon
--- [0x1D439] = 0x46, -- F
--- [0x1D43A] = 0x47, -- G
--- [0x1D43B] = 0x48, -- H
- [0x1D6E8] = 0x48, -- Eta
--- [0x1D43C] = 0x49, -- I
- [0x1D6EA] = 0x49, -- Iota
--- [0x1D43D] = 0x4A, -- J
--- [0x1D43E] = 0x4B, -- K
- [0x1D6EB] = 0x4B, -- Kappa
--- [0x1D43F] = 0x4C, -- L
--- [0x1D440] = 0x4D, -- M
- [0x1D6ED] = 0x4D, -- Mu
--- [0x1D441] = 0x4E, -- N
- [0x1D6EE] = 0x4E, -- Nu
--- [0x1D442] = 0x4F, -- O
- [0x1D6F0] = 0x4F, -- Omicron
--- [0x1D443] = 0x50, -- P
- [0x1D6F2] = 0x50, -- Rho
--- [0x1D444] = 0x51, -- Q
--- [0x1D445] = 0x52, -- R
--- [0x1D446] = 0x53, -- S
--- [0x1D447] = 0x54, -- T
- [0x1D6F5] = 0x54, -- Tau
--- [0x1D448] = 0x55, -- U
--- [0x1D449] = 0x56, -- V
--- [0x1D44A] = 0x57, -- W
--- [0x1D44B] = 0x58, -- X
- [0x1D6F8] = 0x58, -- Chi
--- [0x1D44C] = 0x59, -- Y
--- [0x1D44D] = 0x5A, -- Z
---
--- [0x1D44E] = 0x61, -- a
--- [0x1D44F] = 0x62, -- b
--- [0x1D450] = 0x63, -- c
--- [0x1D451] = 0x64, -- d
--- [0x1D452] = 0x65, -- e
--- [0x1D453] = 0x66, -- f
--- [0x1D454] = 0x67, -- g
--- [0x1D455] = 0x68, -- h
- [0x0210E] = 0x68, -- Planck constant (h)
--- [0x1D456] = 0x69, -- i
--- [0x1D457] = 0x6A, -- j
--- [0x1D458] = 0x6B, -- k
--- [0x1D459] = 0x6C, -- l
--- [0x1D45A] = 0x6D, -- m
--- [0x1D45B] = 0x6E, -- n
--- [0x1D45C] = 0x6F, -- o
- [0x1D70A] = 0x6F, -- omicron
--- [0x1D45D] = 0x70, -- p
--- [0x1D45E] = 0x71, -- q
--- [0x1D45F] = 0x72, -- r
--- [0x1D460] = 0x73, -- s
--- [0x1D461] = 0x74, -- t
--- [0x1D462] = 0x75, -- u
--- [0x1D463] = 0x76, -- v
--- [0x1D464] = 0x77, -- w
--- [0x1D465] = 0x78, -- x
--- [0x1D466] = 0x79, -- y
--- [0x1D467] = 0x7A, -- z
-}
-
-mathencodings["tex-ss"] = { }
-mathencodings["tex-tt"] = { }
-mathencodings["tex-bf"] = { }
-mathencodings["tex-bi"] = { }
-mathencodings["tex-fraktur"] = { }
-mathencodings["tex-fraktur-bold"] = { }
-
-mathencodings["tex-sy"] = {
- [0x0002D] = 0x00, -- -
- [0x02212] = 0x00, -- -
--- [0x02201] = 0x00, -- complement
--- [0x02206] = 0x00, -- increment
--- [0x02204] = 0x00, -- not exists
--- [0x000B7] = 0x01, -- cdot
- [0x022C5] = 0x01, -- cdot
- [0x000D7] = 0x02, -- times
- [0x0002A] = 0x03, -- *
- [0x02217] = 0x03, -- *
- [0x000F7] = 0x04, -- div
- [0x022C4] = 0x05, -- diamond
- [0x000B1] = 0x06, -- pm
- [0x02213] = 0x07, -- mp
- [0x02295] = 0x08, -- oplus
- [0x02296] = 0x09, -- ominus
- [0x02297] = 0x0A, -- otimes
- [0x02298] = 0x0B, -- oslash
- [0x02299] = 0x0C, -- odot
- [0x025EF] = 0x0D, -- bigcirc, Orb (either 25EF or 25CB) -- todo
- [0x02218] = 0x0E, -- circ
- [0x02219] = 0x0F, -- bullet
- [0x02022] = 0x0F, -- bullet
- [0x0224D] = 0x10, -- asymp
- [0x02261] = 0x11, -- equiv
- [0x02286] = 0x12, -- subseteq
- [0x02287] = 0x13, -- supseteq
- [0x02264] = 0x14, -- leq
- [0x02265] = 0x15, -- geq
- [0x02AAF] = 0x16, -- preceq
--- [0x0227C] = 0x16, -- preceq, AM:No see 2AAF
- [0x02AB0] = 0x17, -- succeq
--- [0x0227D] = 0x17, -- succeq, AM:No see 2AB0
- [0x0223C] = 0x18, -- sim
- [0x02248] = 0x19, -- approx
- [0x02282] = 0x1A, -- subset
- [0x02283] = 0x1B, -- supset
- [0x0226A] = 0x1C, -- ll
- [0x0226B] = 0x1D, -- gg
- [0x0227A] = 0x1E, -- prec
- [0x0227B] = 0x1F, -- succ
- [0x02190] = 0x20, -- leftarrow
- [0x02192] = 0x21, -- rightarrow
---~ [0xFE190] = 0x20, -- leftarrow
---~ [0xFE192] = 0x21, -- rightarrow
- [0x02191] = 0x22, -- uparrow
- [0x02193] = 0x23, -- downarrow
- [0x02194] = 0x24, -- leftrightarrow
- [0x02197] = 0x25, -- nearrow
- [0x02198] = 0x26, -- searrow
- [0x02243] = 0x27, -- simeq
- [0x021D0] = 0x28, -- Leftarrow
- [0x021D2] = 0x29, -- Rightarrow
- [0x021D1] = 0x2A, -- Uparrow
- [0x021D3] = 0x2B, -- Downarrow
- [0x021D4] = 0x2C, -- Leftrightarrow
- [0x02196] = 0x2D, -- nwarrow
- [0x02199] = 0x2E, -- swarrow
- [0x0221D] = 0x2F, -- propto
- [0x02032] = 0x30, -- prime
- [0x0221E] = 0x31, -- infty
- [0x02208] = 0x32, -- in
- [0x0220B] = 0x33, -- ni
- [0x025B3] = 0x34, -- triangle, bigtriangleup
- [0x025BD] = 0x35, -- bigtriangledown
- [0x00338] = 0x36, -- not
--- 0x37, -- (beginning of arrow)
- [0x02200] = 0x38, -- forall
- [0x02203] = 0x39, -- exists
- [0x000AC] = 0x3A, -- neg, lnot
- [0x02205] = 0x3B, -- empty set
- [0x0211C] = 0x3C, -- Re
- [0x02111] = 0x3D, -- Im
- [0x022A4] = 0x3E, -- top
- [0x022A5] = 0x3F, -- bot, perp
- [0x02135] = 0x40, -- aleph
- [0x1D49C] = 0x41, -- script A
- [0x0212C] = 0x42, -- script B
- [0x1D49E] = 0x43, -- script C
- [0x1D49F] = 0x44, -- script D
- [0x02130] = 0x45, -- script E
- [0x02131] = 0x46, -- script F
- [0x1D4A2] = 0x47, -- script G
- [0x0210B] = 0x48, -- script H
- [0x02110] = 0x49, -- script I
- [0x1D4A5] = 0x4A, -- script J
- [0x1D4A6] = 0x4B, -- script K
- [0x02112] = 0x4C, -- script L
- [0x02133] = 0x4D, -- script M
- [0x1D4A9] = 0x4E, -- script N
- [0x1D4AA] = 0x4F, -- script O
- [0x1D4AB] = 0x50, -- script P
- [0x1D4AC] = 0x51, -- script Q
- [0x0211B] = 0x52, -- script R
- [0x1D4AE] = 0x53, -- script S
- [0x1D4AF] = 0x54, -- script T
- [0x1D4B0] = 0x55, -- script U
- [0x1D4B1] = 0x56, -- script V
- [0x1D4B2] = 0x57, -- script W
- [0x1D4B3] = 0x58, -- script X
- [0x1D4B4] = 0x59, -- script Y
- [0x1D4B5] = 0x5A, -- script Z
- [0x0222A] = 0x5B, -- cup
- [0x02229] = 0x5C, -- cap
- [0x0228E] = 0x5D, -- uplus
- [0x02227] = 0x5E, -- wedge, land
- [0x02228] = 0x5F, -- vee, lor
- [0x022A2] = 0x60, -- vdash
- [0x022A3] = 0x61, -- dashv
- [0x0230A] = 0x62, -- lfloor
- [0x0230B] = 0x63, -- rfloor
- [0x02308] = 0x64, -- lceil
- [0x02309] = 0x65, -- rceil
- [0x0007B] = 0x66, -- {, lbrace
- [0x0007D] = 0x67, -- }, rbrace
- [0x027E8] = 0x68, -- <, langle
- [0x027E9] = 0x69, -- >, rangle
- [0x0007C] = 0x6A, -- |, mid, lvert, rvert
- [0x02225] = 0x6B, -- parallel
- -- [0x0 ] = 0x00, -- Vert, lVert, rVert, arrowvert, Arrowvert
- [0x02195] = 0x6C, -- updownarrow
- [0x021D5] = 0x6D, -- Updownarrow
- [0x0005C] = 0x6E, -- \, backslash, setminus
- [0x02216] = 0x6E, -- setminus
- [0x02240] = 0x6F, -- wr
- [0x0221A] = 0x70, -- sqrt. AM: Check surd??
- [0x02A3F] = 0x71, -- amalg
- [0x1D6FB] = 0x72, -- nabla
--- [0x0222B] = 0x73, -- smallint (TODO: what about intop?)
- [0x02294] = 0x74, -- sqcup
- [0x02293] = 0x75, -- sqcap
- [0x02291] = 0x76, -- sqsubseteq
- [0x02292] = 0x77, -- sqsupseteq
- [0x000A7] = 0x78, -- S
- [0x02020] = 0x79, -- dagger, dag
- [0x02021] = 0x7A, -- ddagger, ddag
- [0x000B6] = 0x7B, -- P
- [0x02663] = 0x7C, -- clubsuit
- [0x02662] = 0x7D, -- diamondsuit
- [0x02661] = 0x7E, -- heartsuit
- [0x02660] = 0x7F, -- spadesuit
- [0xFE321] = 0x37, -- mapstochar
-
- [0xFE325] = 0x30, -- prime 0x02032
-}
-
--- The names in masm10.enc can be trusted best and are shown in the first
--- column, while in the second column we show the tex/ams names. As usual
--- it costs hours to figure out such a table.
-
-mathencodings["tex-ma"] = {
- [0x022A1] = 0x00, -- squaredot \boxdot
- [0x0229E] = 0x01, -- squareplus \boxplus
- [0x022A0] = 0x02, -- squaremultiply \boxtimes
- [0x025A1] = 0x03, -- square \square \Box
- [0x025A0] = 0x04, -- squaresolid \blacksquare
- [0x025AA] = 0x05, -- squaresmallsolid \centerdot
- [0x022C4] = 0x06, -- diamond \Diamond \lozenge
- [0x02666] = 0x07, -- diamondsolid \blacklozenge
- [0x021BB] = 0x08, -- clockwise \circlearrowright
- [0x021BA] = 0x09, -- anticlockwise \circlearrowleft
- [0x021CC] = 0x0A, -- harpoonleftright \rightleftharpoons
- [0x021CB] = 0x0B, -- harpoonrightleft \leftrightharpoons
- [0x0229F] = 0x0C, -- squareminus \boxminus
- [0x022A9] = 0x0D, -- forces \Vdash
- [0x022AA] = 0x0E, -- forcesbar \Vvdash
- [0x022A8] = 0x0F, -- satisfies \vDash
- [0x021A0] = 0x10, -- dblarrowheadright \twoheadrightarrow
- [0x0219E] = 0x11, -- dblarrowheadleft \twoheadleftarrow
- [0x021C7] = 0x12, -- dblarrowleft \leftleftarrows
- [0x021C9] = 0x13, -- dblarrowright \rightrightarrows
- [0x021C8] = 0x14, -- dblarrowup \upuparrows
- [0x021CA] = 0x15, -- dblarrowdwn \downdownarrows
- [0x021BE] = 0x16, -- harpoonupright \upharpoonright \restriction
- [0x021C2] = 0x17, -- harpoondownright \downharpoonright
- [0x021BF] = 0x18, -- harpoonupleft \upharpoonleft
- [0x021C3] = 0x19, -- harpoondownleft \downharpoonleft
- [0x021A3] = 0x1A, -- arrowtailright \rightarrowtail
- [0x021A2] = 0x1B, -- arrowtailleft \leftarrowtail
- [0x021C6] = 0x1C, -- arrowparrleftright \leftrightarrows
--- [0x021C5] = 0x00, -- \updownarrows (missing in lm)
- [0x021C4] = 0x1D, -- arrowparrrightleft \rightleftarrows
- [0x021B0] = 0x1E, -- shiftleft \Lsh
- [0x021B1] = 0x1F, -- shiftright \Rsh
- [0x021DD] = 0x20, -- squiggleright \leadsto \rightsquigarrow
- [0x021AD] = 0x21, -- squiggleleftright \leftrightsquigarrow
- [0x021AB] = 0x22, -- curlyleft \looparrowleft
- [0x021AC] = 0x23, -- curlyright \looparrowright
- [0x02257] = 0x24, -- circleequal \circeq
- [0x0227F] = 0x25, -- followsorequal \succsim
- [0x02273] = 0x26, -- greaterorsimilar \gtrsim
- [0x02A86] = 0x27, -- greaterorapproxeql \gtrapprox
- [0x022B8] = 0x28, -- multimap \multimap
- [0x02234] = 0x29, -- therefore \therefore
- [0x02235] = 0x2A, -- because \because
- [0x02251] = 0x2B, -- equalsdots \Doteq \doteqdot
- [0x0225C] = 0x2C, -- defines \triangleq
- [0x0227E] = 0x2D, -- precedesorequal \precsim
- [0x02272] = 0x2E, -- lessorsimilar \lesssim
- [0x02A85] = 0x2F, -- lessorapproxeql \lessapprox
- [0x02A95] = 0x30, -- equalorless \eqslantless
- [0x02A96] = 0x31, -- equalorgreater \eqslantgtr
- [0x022DE] = 0x32, -- equalorprecedes \curlyeqprec
- [0x022DF] = 0x33, -- equalorfollows \curlyeqsucc
- [0x0227C] = 0x34, -- precedesorcurly \preccurlyeq
- [0x02266] = 0x35, -- lessdblequal \leqq
- [0x02A7D] = 0x36, -- lessorequalslant \leqslant
- [0x02276] = 0x37, -- lessorgreater \lessgtr
- [0x02035] = 0x38, -- primereverse \backprime
- -- [0x0] = 0x39, -- axisshort \dabar
- [0x02253] = 0x3A, -- equaldotrightleft \risingdotseq
- [0x02252] = 0x3B, -- equaldotleftright \fallingdotseq
- [0x0227D] = 0x3C, -- followsorcurly \succcurlyeq
- [0x02267] = 0x3D, -- greaterdblequal \geqq
- [0x02A7E] = 0x3E, -- greaterorequalslant \geqslant
- [0x02277] = 0x3F, -- greaterorless \gtrless
- [0x0228F] = 0x40, -- squareimage \sqsubset
- [0x02290] = 0x41, -- squareoriginal \sqsupset
- -- wrong: see **
- -- [0x022B3] = 0x42, -- triangleright \rhd \vartriangleright
- -- [0x022B2] = 0x43, -- triangleleft \lhd \vartriangleleft
- -- cf lm
- [0x022B5] = 0x44, -- trianglerightequal \unrhd \trianglerighteq
- [0x022B4] = 0x45, -- triangleleftequal \unlhd \trianglelefteq
- --
- [0x02605] = 0x46, -- star \bigstar
- [0x0226C] = 0x47, -- between \between
- [0x025BC] = 0x48, -- triangledownsld \blacktriangledown
- [0x025B6] = 0x49, -- trianglerightsld \blacktriangleright
- [0x025C0] = 0x4A, -- triangleleftsld \blacktriangleleft
- -- [0x0] = 0x4B, -- arrowaxisright
- -- [0x0] = 0x4C, -- arrowaxisleft
- [0x025B2] = 0x4D, -- triangle \triangleup \vartriangle
- [0x025B2] = 0x4E, -- trianglesolid \blacktriangle
- [0x025BD] = 0x4F, -- triangleinv \triangledown
- [0x02256] = 0x50, -- ringinequal \eqcirc
- [0x022DA] = 0x51, -- lessequalgreater \lesseqgtr
- [0x022DB] = 0x52, -- greaterlessequal \gtreqless
- [0x02A8B] = 0x53, -- lessdbleqlgreater \lesseqqgtr
- [0x02A8C] = 0x54, -- greaterdbleqlless \gtreqqless
- [0x000A5] = 0x55, -- Yen \yen
- [0x021DB] = 0x56, -- arrowtripleright \Rrightarrow
- [0x021DA] = 0x57, -- arrowtripleleft \Lleftarrow
- [0x02713] = 0x58, -- check \checkmark
- [0x022BB] = 0x59, -- orunderscore \veebar
- [0x022BC] = 0x5A, -- nand \barwedge
- [0x02306] = 0x5B, -- perpcorrespond \doublebarwedge
- [0x02220] = 0x5C, -- angle \angle
- [0x02221] = 0x5D, -- measuredangle \measuredangle
- [0x02222] = 0x5E, -- sphericalangle \sphericalangle
- -- [0x0] = 0x5F, -- proportional \varpropto
- -- [0x0] = 0x60, -- smile \smallsmile
- -- [0x0] = 0x61, -- frown \smallfrown
- [0x022D0] = 0x62, -- subsetdbl \Subset
- [0x022D1] = 0x63, -- supersetdbl \Supset
- [0x022D3] = 0x64, -- uniondbl \doublecup \Cup
- [0x022D2] = 0x65, -- intersectiondbl \doublecap \Cap
- [0x022CF] = 0x66, -- uprise \curlywedge
- [0x022CE] = 0x67, -- downfall \curlyvee
- [0x022CB] = 0x68, -- multiopenleft \leftthreetimes
- [0x022CC] = 0x69, -- multiopenright \rightthreetimes
- [0x02AC5] = 0x6A, -- subsetdblequal \subseteqq
- [0x02AC6] = 0x6B, -- supersetdblequal \supseteqq
- [0x0224F] = 0x6C, -- difference \bumpeq
- [0x0224E] = 0x6D, -- geomequivalent \Bumpeq
- [0x022D8] = 0x6E, -- muchless \lll \llless
- [0x022D9] = 0x6F, -- muchgreater \ggg \gggtr
- [0x0231C] = 0x70, -- rightanglenw \ulcorner
- [0x0231D] = 0x71, -- rightanglene \urcorner
- [0x024C7] = 0x72, -- circleR \circledR
- [0x024C8] = 0x73, -- circleS \circledS
- [0x022D4] = 0x74, -- fork \pitchfork
- [0x02214] = 0x75, -- dotplus \dotplus
- [0x0223D] = 0x76, -- revsimilar \backsim
- [0x022CD] = 0x77, -- revasymptequal \backsimeq -- AM: Check this! I mapped it to simeq.
- [0x0231E] = 0x78, -- rightanglesw \llcorner
- [0x0231F] = 0x79, -- rightanglese \lrcorner
- [0x02720] = 0x7A, -- maltesecross \maltese
- [0x02201] = 0x7B, -- complement \complement
- [0x022BA] = 0x7C, -- intercal \intercal
- [0x0229A] = 0x7D, -- circlering \circledcirc
- [0x0229B] = 0x7E, -- circleasterisk \circledast
- [0x0229D] = 0x7F, -- circleminus \circleddash
-}
-
-mathencodings["tex-mb"] = {
- -- [0x0] = 0x00, -- lessornotequal \lvertneqq
- -- [0x0] = 0x01, -- greaterornotequal \gvertneqq
- [0x02270] = 0x02, -- notlessequal \nleq
- [0x02271] = 0x03, -- notgreaterequal \ngeq
- [0x0226E] = 0x04, -- notless \nless
- [0x0226F] = 0x05, -- notgreater \ngtr
- [0x02280] = 0x06, -- notprecedes \nprec
- [0x02281] = 0x07, -- notfollows \nsucc
- [0x02268] = 0x08, -- lessornotdbleql \lneqq
- [0x02269] = 0x09, -- greaterornotdbleql \gneqq
- -- [0x0] = 0x0A, -- notlessorslnteql \nleqslant
- -- [0x0] = 0x0B, -- notgreaterorslnteql \ngeqslant
- [0x02A87] = 0x0C, -- lessnotequal \lneq
- [0x02A88] = 0x0D, -- greaternotequal \gneq
- -- [0x0] = 0x0E, -- notprecedesoreql \npreceq
- -- [0x0] = 0x0F, -- notfollowsoreql \nsucceq
- [0x022E8] = 0x10, -- precedeornoteqvlnt \precnsim
- [0x022E9] = 0x11, -- followornoteqvlnt \succnsim
- [0x022E6] = 0x12, -- lessornotsimilar \lnsim
- [0x022E7] = 0x13, -- greaterornotsimilar \gnsim
- -- [0x0] = 0x14, -- notlessdblequal \nleqq
- -- [0x0] = 0x15, -- notgreaterdblequal \ngeqq
- [0x02AB5] = 0x16, -- precedenotslnteql \precneqq
- [0x02AB6] = 0x17, -- follownotslnteql \succneqq
- [0x02AB9] = 0x18, -- precedenotdbleqv \precnapprox
- [0x02ABA] = 0x19, -- follownotdbleqv \succnapprox
- [0x02A89] = 0x1A, -- lessnotdblequal \lnapprox
- [0x02A8A] = 0x1B, -- greaternotdblequal \gnapprox
- [0x02241] = 0x1C, -- notsimilar \nsim
- [0x02247] = 0x1D, -- notapproxequal \ncong
- -- [0x0] = 0x1E, -- upslope \diagup
- -- [0x0] = 0x1F, -- downslope \diagdown
- -- [0x0] = 0x20, -- notsubsetoreql \varsubsetneq
- -- [0x0] = 0x21, -- notsupersetoreql \varsupsetneq
- -- [0x0] = 0x22, -- notsubsetordbleql \nsubseteqq
- -- [0x0] = 0x23, -- notsupersetordbleql \nsupseteqq
- [0x02ACB] = 0x24, -- subsetornotdbleql \subsetneqq
- [0x02ACC] = 0x25, -- supersetornotdbleql \supsetneqq
- -- [0x0] = 0x26, -- subsetornoteql \varsubsetneqq
- -- [0x0] = 0x27, -- supersetornoteql \varsupsetneqq
- [0x0228A] = 0x28, -- subsetnoteql \subsetneq
- [0x0228B] = 0x29, -- supersetnoteql \supsetneq
- [0x02288] = 0x2A, -- notsubseteql \nsubseteq
- [0x02289] = 0x2B, -- notsuperseteql \nsupseteq
- [0x02226] = 0x2C, -- notparallel \nparallel
- [0x02224] = 0x2D, -- notbar \nmid \ndivides
- -- [0x0] = 0x2E, -- notshortbar \nshortmid
- -- [0x0] = 0x2F, -- notshortparallel \nshortparallel
- [0x022AC] = 0x30, -- notturnstile \nvdash
- [0x022AE] = 0x31, -- notforces \nVdash
- [0x022AD] = 0x32, -- notsatisfies \nvDash
- [0x022AF] = 0x33, -- notforcesextra \nVDash
- [0x022ED] = 0x34, -- nottriangeqlright \ntrianglerighteq
- [0x022EC] = 0x35, -- nottriangeqlleft \ntrianglelefteq
- [0x022EA] = 0x36, -- nottriangleleft \ntriangleleft
- [0x022EB] = 0x37, -- nottriangleright \ntriangleright
- [0x0219A] = 0x38, -- notarrowleft \nleftarrow
- [0x0219B] = 0x39, -- notarrowright \nrightarrow
- [0x021CD] = 0x3A, -- notdblarrowleft \nLeftarrow
- [0x021CF] = 0x3B, -- notdblarrowright \nRightarrow
- [0x021CE] = 0x3C, -- notdblarrowboth \nLeftrightarrow
- [0x021AE] = 0x3D, -- notarrowboth \nleftrightarrow
- [0x022C7] = 0x3E, -- dividemultiply \divideontimes
- [0x02300] = 0x3F, -- diametersign \varnothing
- [0x02204] = 0x40, -- notexistential \nexists
- [0x1D538] = 0x41, -- A (blackboard A)
- [0x1D539] = 0x42, -- B
- [0x02102] = 0x43, -- C
- [0x1D53B] = 0x44, -- D
- [0x1D53C] = 0x45, -- E
- [0x1D53D] = 0x46, -- F
- [0x1D53E] = 0x47, -- G
- [0x0210D] = 0x48, -- H
- [0x1D540] = 0x49, -- I
- [0x1D541] = 0x4A, -- J
- [0x1D542] = 0x4B, -- K
- [0x1D543] = 0x4C, -- L
- [0x1D544] = 0x4D, -- M
- [0x02115] = 0x4E, -- N
- [0x1D546] = 0x4F, -- O
- [0x02119] = 0x50, -- P
- [0x0211A] = 0x51, -- Q
- [0x0211D] = 0x52, -- R
- [0x1D54A] = 0x53, -- S
- [0x1D54B] = 0x54, -- T
- [0x1D54C] = 0x55, -- U
- [0x1D54D] = 0x56, -- V
- [0x1D54E] = 0x57, -- W
- [0x1D54F] = 0x58, -- X
- [0x1D550] = 0x59, -- Y
- [0x02124] = 0x5A, -- Z (blackboard Z)
- [0x02132] = 0x60, -- finv \Finv
- [0x02141] = 0x61, -- fmir \Game
- -- [0x0] = 0x62, tildewide
- -- [0x0] = 0x63, tildewider
- -- [0x0] = 0x64, Finv
- -- [0x0] = 0x65, Gmir
- [0x02127] = 0x66, -- Omegainv \mho
- [0x000F0] = 0x67, -- eth \eth
- [0x02242] = 0x68, -- equalorsimilar \eqsim
- [0x02136] = 0x69, -- beth \beth
- [0x02137] = 0x6A, -- gimel \gimel
- [0x02138] = 0x6B, -- daleth \daleth
- [0x022D6] = 0x6C, -- lessdot \lessdot
- [0x022D7] = 0x6D, -- greaterdot \gtrdot
- [0x022C9] = 0x6E, -- multicloseleft \ltimes
- [0x022CA] = 0x6F, -- multicloseright \rtimes
- -- [0x0] = 0x70, -- barshort \shortmid
- -- [0x0] = 0x71, -- parallelshort \shortparallel
- -- [0x02216] = 0x72, -- integerdivide \smallsetminus (2216 already part of tex-sy)
- -- [0x0] = 0x73, -- similar \thicksim
- -- [0x0] = 0x74, -- approxequal \thickapprox
- [0x0224A] = 0x75, -- approxorequal \approxeq
- [0x02AB8] = 0x76, -- followsorequal \succapprox
- [0x02AB7] = 0x77, -- precedesorequal \precapprox
- [0x021B6] = 0x78, -- archleftdown \curvearrowleft
- [0x021B7] = 0x79, -- archrightdown \curvearrowright
- [0x003DC] = 0x7A, -- Digamma \digamma
- [0x003F0] = 0x7B, -- kappa \varkappa
- [0x1D55C] = 0x7C, -- k \Bbbk (blackboard k)
- [0x0210F] = 0x7D, -- planckover2pi \hslash % 0x7D
- [0x00127] = 0x7E, -- planckover2pi1 \hbar % 0x7E
- [0x003F6] = 0x7F, -- epsiloninv \backepsilon
-}
-
-mathencodings["tex-mc"] = {
- -- this file has no tfm so it gets mapped in the private space
- [0xFE324] = "mapsfromchar",
-}
-
-mathencodings["tex-fraktur"] = {
--- [0x1D504] = 0x41, -- A (fraktur A)
--- [0x1D505] = 0x42, -- B
- [0x0212D] = 0x43, -- C
--- [0x1D507] = 0x44, -- D
--- [0x1D508] = 0x45, -- E
--- [0x1D509] = 0x46, -- F
--- [0x1D50A] = 0x47, -- G
- [0x0210C] = 0x48, -- H
- [0x02111] = 0x49, -- I
--- [0x1D50D] = 0x4A, -- J
--- [0x1D50E] = 0x4B, -- K
--- [0x1D50F] = 0x4C, -- L
--- [0x1D510] = 0x4D, -- M
--- [0x1D511] = 0x4E, -- N
--- [0x1D512] = 0x4F, -- O
--- [0x1D513] = 0x50, -- P
--- [0x1D514] = 0x51, -- Q
- [0x0211C] = 0x52, -- R
--- [0x1D516] = 0x53, -- S
--- [0x1D517] = 0x54, -- T
--- [0x1D518] = 0x55, -- U
--- [0x1D519] = 0x56, -- V
--- [0x1D51A] = 0x57, -- W
--- [0x1D51B] = 0x58, -- X
--- [0x1D51C] = 0x59, -- Y
- [0x02128] = 0x5A, -- Z (fraktur Z)
--- [0x1D51E] = 0x61, -- a (fraktur a)
--- [0x1D51F] = 0x62, -- b
--- [0x1D520] = 0x63, -- c
--- [0x1D521] = 0x64, -- d
--- [0x1D522] = 0x65, -- e
--- [0x1D523] = 0x66, -- f
--- [0x1D524] = 0x67, -- g
--- [0x1D525] = 0x68, -- h
--- [0x1D526] = 0x69, -- i
--- [0x1D527] = 0x6A, -- j
--- [0x1D528] = 0x6B, -- k
--- [0x1D529] = 0x6C, -- l
--- [0x1D52A] = 0x6D, -- m
--- [0x1D52B] = 0x6E, -- n
--- [0x1D52C] = 0x6F, -- o
--- [0x1D52D] = 0x70, -- p
--- [0x1D52E] = 0x71, -- q
--- [0x1D52F] = 0x72, -- r
--- [0x1D530] = 0x73, -- s
--- [0x1D531] = 0x74, -- t
--- [0x1D532] = 0x75, -- u
--- [0x1D533] = 0x76, -- v
--- [0x1D534] = 0x77, -- w
--- [0x1D535] = 0x78, -- x
--- [0x1D536] = 0x79, -- y
--- [0x1D537] = 0x7A, -- z
-}
-
--- now that all other vectors are defined ...
-
-setletters(mathencodings, "tex-it", 0x1D434, 0x1D44E)
-setletters(mathencodings, "tex-ss", 0x1D5A0, 0x1D5BA)
-setletters(mathencodings, "tex-tt", 0x1D670, 0x1D68A)
-setletters(mathencodings, "tex-bf", 0x1D400, 0x1D41A)
-setletters(mathencodings, "tex-bi", 0x1D468, 0x1D482)
-setletters(mathencodings, "tex-fraktur", 0x1D504, 0x1D51E)
-setletters(mathencodings, "tex-fraktur-bold", 0x1D56C, 0x1D586)
-
-setdigits (mathencodings, "tex-ss", 0x1D7E2)
-setdigits (mathencodings, "tex-tt", 0x1D7F6)
-setdigits (mathencodings, "tex-bf", 0x1D7CE)
-
--- setdigits (mathencodings, "tex-bi", 0x1D7CE)
-
--- todo: add ss, tt, bf etc vectors
--- todo: we can make ss tt etc an option
+if not modules then modules = { } end modules ['math-ttv'] = {
+ version = 1.001,
+ comment = "traditional tex vectors, companion to math-vfu.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+ dataonly = true,
+}
+
+local vfmath = fonts.handlers.vf.math
+local setletters = vfmath.setletters
+local setdigits = vfmath.setdigits
+
+local mathencodings = fonts.encodings.math
+
+-- varphi is part of the alphabet, contrary to the other var*s'
+
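+-- Each vector below maps Unicode math code points onto the (7-bit) slots of a
+-- traditional TeX math font; these tables are used by math-vfu.lua when virtual
+-- Unicode math fonts are assembled from such fonts.
+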
+mathencodings["large-to-small"] = {
+ [0x00028] = 0x00, -- (
+ [0x00029] = 0x01, -- )
+ [0x0005B] = 0x02, -- [
+ [0x0005D] = 0x03, -- ]
+ [0x0230A] = 0x04, -- lfloor
+ [0x0230B] = 0x05, -- rfloor
+ [0x02308] = 0x06, -- lceil
+ [0x02309] = 0x07, -- rceil
+ [0x0007B] = 0x08, -- {
+ [0x0007D] = 0x09, -- }
+ [0x027E8] = 0x0A, -- <
+ [0x027E9] = 0x0B, -- >
+ [0x0007C] = 0x0C, -- |
+ -- [0x0] = 0x0D, -- lVert rVert Vert
+ -- [0x0002F] = 0x0E, -- /
+ [0x0005C] = 0x0F, -- \
+ -- [0x0] = 0x3A, -- lgroup
+ -- [0x0] = 0x3B, -- rgroup
+ -- [0x0] = 0x3C, -- arrowvert
+ -- [0x0] = 0x3D, -- Arrowvert
+ [0x02195] = 0x3F, -- updownarrow
+ -- [0x0] = 0x40, -- lmoustache
+ -- [0x0] = 0x41, -- rmoustache
+ [0x0221A] = 0x70, -- sqrt
+ [0x021D5] = 0x77, -- Updownarrow
+ [0x02191] = 0x78, -- uparrow
+ [0x02193] = 0x79, -- downarrow
+ [0x021D1] = 0x7E, -- Uparrow
+ [0x021D3] = 0x7F, -- Downarrow
+ [0x0220F] = 0x59, -- prod
+ [0x02210] = 0x61, -- coprod
+ [0x02211] = 0x58, -- sum
+ [0x0222B] = 0x5A, -- intop
+ [0x0222E] = 0x49, -- ointop
+ -- [0xFE302] = 0x62, -- widehat
+ -- [0xFE303] = 0x65, -- widetilde
+ [0x00302] = 0x62, -- widehat
+ [0x00303] = 0x65, -- widetilde
+ [0x022C0] = 0x5E, -- bigwedge
+ [0x022C1] = 0x5F, -- bigvee
+ [0x022C2] = 0x5C, -- bigcap
+ [0x022C3] = 0x5B, -- bigcup
+ [0x02044] = 0x0E, -- /
+}
+
+-- Beware: these are (in cm/lm) below the baseline due to limitations
+-- in the tfm format, but the engine (combined with the mathclass) takes
+-- care of it. If we need them in textmode, we should make them virtual
+-- and move them up but we're in no hurry with that.
+
+mathencodings["tex-ex"] = {
+ [0x0220F] = 0x51, -- prod
+ [0x02210] = 0x60, -- coprod
+ [0x02211] = 0x50, -- sum
+ [0x0222B] = 0x52, -- intop
+ [0x0222E] = 0x48, -- ointop
+ [0x022C0] = 0x56, -- bigwedge
+ [0x022C1] = 0x57, -- bigvee
+ [0x022C2] = 0x54, -- bigcap
+ [0x022C3] = 0x53, -- bigcup
+ [0x02A00] = 0x4A, -- bigodot -- fixed BJ
+ [0x02A01] = 0x4C, -- bigoplus
+ [0x02A02] = 0x4E, -- bigotimes
+ -- [0x02A03] = , -- bigudot --
+ [0x02A04] = 0x55, -- biguplus
+ [0x02A06] = 0x46, -- bigsqcup
+}
+
+-- only math stuff is needed, since we always use an lm or gyre
+-- font as main font
+
+mathencodings["tex-mr"] = {
+ [0x00393] = 0x00, -- Gamma
+ [0x00394] = 0x01, -- Delta
+ [0x00398] = 0x02, -- Theta
+ [0x0039B] = 0x03, -- Lambda
+ [0x0039E] = 0x04, -- Xi
+ [0x003A0] = 0x05, -- Pi
+ [0x003A3] = 0x06, -- Sigma
+ [0x003A5] = 0x07, -- Upsilon
+ [0x003A6] = 0x08, -- Phi
+ [0x003A8] = 0x09, -- Psi
+ [0x003A9] = 0x0A, -- Omega
+-- [0x00060] = 0x12, -- [math]grave
+-- [0x000B4] = 0x13, -- [math]acute
+-- [0x002C7] = 0x14, -- [math]check
+-- [0x002D8] = 0x15, -- [math]breve
+-- [0x000AF] = 0x16, -- [math]bar
+-- [0x00021] = 0x21, -- !
+-- [0x00028] = 0x28, -- (
+-- [0x00029] = 0x29, -- )
+-- [0x0002B] = 0x2B, -- +
+-- [0x0002F] = 0x2F, -- /
+-- [0x0003A] = 0x3A, -- :
+-- [0x02236] = 0x3A, -- colon
+-- [0x0003B] = 0x3B, -- ;
+-- [0x0003C] = 0x3C, -- <
+-- [0x0003D] = 0x3D, -- =
+-- [0x0003E] = 0x3E, -- >
+-- [0x0003F] = 0x3F, -- ?
+ [0x00391] = 0x41, -- Alpha
+ [0x00392] = 0x42, -- Beta
+ [0x02145] = 0x44,
+ [0x00395] = 0x45, -- Epsilon
+ [0x00397] = 0x48, -- Eta
+ [0x00399] = 0x49, -- Iota
+ [0x0039A] = 0x4B, -- Kappa
+ [0x0039C] = 0x4D, -- Mu
+ [0x0039D] = 0x4E, -- Nu
+ [0x0039F] = 0x4F, -- Omicron
+ [0x003A1] = 0x52, -- Rho
+ [0x003A4] = 0x54, -- Tau
+ [0x003A7] = 0x58, -- Chi
+ [0x00396] = 0x5A, -- Zeta
+-- [0x0005B] = 0x5B, -- [
+-- [0x0005D] = 0x5D, -- ]
+-- [0x0005E] = 0x5E, -- [math]hat -- the text one
+ [0x00302] = 0x5E, -- [math]hat -- the real math one
+-- [0x002D9] = 0x5F, -- [math]dot
+ [0x02146] = 0x64,
+ [0x02147] = 0x65,
+-- [0x002DC] = 0x7E, -- [math]tilde -- the text one
+ [0x00303] = 0x7E, -- [math]tilde -- the real one
+-- [0x000A8] = 0x7F, -- [math]ddot
+}
+
+mathencodings["tex-mr-missing"] = {
+ [0x02236] = 0x3A, -- colon
+}
+
+mathencodings["tex-mi"] = {
+ [0x1D6E4] = 0x00, -- Gamma
+ [0x1D6E5] = 0x01, -- Delta
+ [0x1D6E9] = 0x02, -- Theta
+ [0x1D6F3] = 0x02, -- varTheta (not present in TeX)
+ [0x1D6EC] = 0x03, -- Lambda
+ [0x1D6EF] = 0x04, -- Xi
+ [0x1D6F1] = 0x05, -- Pi
+ [0x1D6F4] = 0x06, -- Sigma
+ [0x1D6F6] = 0x07, -- Upsilon
+ [0x1D6F7] = 0x08, -- Phi
+ [0x1D6F9] = 0x09, -- Psi
+ [0x1D6FA] = 0x0A, -- Omega
+ [0x1D6FC] = 0x0B, -- alpha
+ [0x1D6FD] = 0x0C, -- beta
+ [0x1D6FE] = 0x0D, -- gamma
+ [0x1D6FF] = 0x0E, -- delta
+ [0x1D716] = 0x0F, -- epsilon TODO: 1D716
+ [0x1D701] = 0x10, -- zeta
+ [0x1D702] = 0x11, -- eta
+ [0x1D703] = 0x12, -- theta TODO: 1D703
+ [0x1D704] = 0x13, -- iota
+ [0x1D705] = 0x14, -- kappa
+ [0x1D718] = 0x14, -- varkappa, not in tex fonts
+ [0x1D706] = 0x15, -- lambda
+ [0x1D707] = 0x16, -- mu
+ [0x1D708] = 0x17, -- nu
+ [0x1D709] = 0x18, -- xi
+ [0x1D70B] = 0x19, -- pi
+ [0x1D70C] = 0x1A, -- rho
+ [0x1D70E] = 0x1B, -- sigma
+ [0x1D70F] = 0x1C, -- tau
+ [0x1D710] = 0x1D, -- upsilon
+ [0x1D719] = 0x1E, -- phi
+ [0x1D712] = 0x1F, -- chi
+ [0x1D713] = 0x20, -- psi
+ [0x1D714] = 0x21, -- omega
+ [0x1D700] = 0x22, -- varepsilon (the other way around)
+ [0x1D717] = 0x23, -- vartheta
+ [0x1D71B] = 0x24, -- varpi
+ [0x1D71A] = 0x25, -- varrho
+ [0x1D70D] = 0x26, -- varsigma
+ [0x1D711] = 0x27, -- varphi (the other way around)
+ [0x021BC] = 0x28, -- leftharpoonup
+ [0x021BD] = 0x29, -- leftharpoondown
+ [0x021C0] = 0x2A, -- rightharpoonup
+ [0x021C1] = 0x2B, -- rightharpoondown
+ [0xFE322] = 0x2C, -- lhook (hook for combining arrows)
+ [0xFE323] = 0x2D, -- rhook (hook for combining arrows)
+ [0x025B7] = 0x2E, -- triangleright : cf lmmath / BJ
+ [0x025C1] = 0x2F, -- triangleleft : cf lmmath / BJ
+ [0x022B3] = 0x2E, -- triangleright : cf lmmath, this is a cramped triangle / BJ / see *
+ [0x022B2] = 0x2F, -- triangleleft : cf lmmath, this is a cramped triangle / BJ / see *
+-- [0x00041] = 0x30, -- 0
+-- [0x00041] = 0x31, -- 1
+-- [0x00041] = 0x32, -- 2
+-- [0x00041] = 0x33, -- 3
+-- [0x00041] = 0x34, -- 4
+-- [0x00041] = 0x35, -- 5
+-- [0x00041] = 0x36, -- 6
+-- [0x00041] = 0x37, -- 7
+-- [0x00041] = 0x38, -- 8
+-- [0x00041] = 0x39, -- 9
+--~ [0x0002E] = 0x3A, -- .
+ [0x0002C] = 0x3B, -- ,
+ [0x0003C] = 0x3C, -- <
+-- [0x0002F] = 0x3D, -- /, slash, solidus
+ [0x02044] = 0x3D, -- / AM: Not sure
+ [0x0003E] = 0x3E, -- >
+ [0x022C6] = 0x3F, -- star
+ [0x02202] = 0x40, -- partial
+--
+ [0x0266D] = 0x5B, -- flat
+ [0x0266E] = 0x5C, -- natural
+ [0x0266F] = 0x5D, -- sharp
+ [0x02323] = 0x5E, -- smile
+ [0x02322] = 0x5F, -- frown
+ [0x02113] = 0x60, -- ell
+--
+ [0x1D6A4] = 0x7B, -- imath (TODO: also 0131)
+ [0x1D6A5] = 0x7C, -- jmath (TODO: also 0237)
+ [0x02118] = 0x7D, -- wp
+ [0x020D7] = 0x7E, -- vec (TODO: not sure)
+-- 0x7F, -- (no idea what that could be)
+}
+
+mathencodings["tex-it"] = {
+-- [0x1D434] = 0x41, -- A
+ [0x1D6E2] = 0x41, -- Alpha
+-- [0x1D435] = 0x42, -- B
+ [0x1D6E3] = 0x42, -- Beta
+-- [0x1D436] = 0x43, -- C
+-- [0x1D437] = 0x44, -- D
+-- [0x1D438] = 0x45, -- E
+ [0x1D6E6] = 0x45, -- Epsilon
+-- [0x1D439] = 0x46, -- F
+-- [0x1D43A] = 0x47, -- G
+-- [0x1D43B] = 0x48, -- H
+ [0x1D6E8] = 0x48, -- Eta
+-- [0x1D43C] = 0x49, -- I
+ [0x1D6EA] = 0x49, -- Iota
+-- [0x1D43D] = 0x4A, -- J
+-- [0x1D43E] = 0x4B, -- K
+ [0x1D6EB] = 0x4B, -- Kappa
+-- [0x1D43F] = 0x4C, -- L
+-- [0x1D440] = 0x4D, -- M
+ [0x1D6ED] = 0x4D, -- Mu
+-- [0x1D441] = 0x4E, -- N
+ [0x1D6EE] = 0x4E, -- Nu
+-- [0x1D442] = 0x4F, -- O
+ [0x1D6F0] = 0x4F, -- Omicron
+-- [0x1D443] = 0x50, -- P
+ [0x1D6F2] = 0x50, -- Rho
+-- [0x1D444] = 0x51, -- Q
+-- [0x1D445] = 0x52, -- R
+-- [0x1D446] = 0x53, -- S
+-- [0x1D447] = 0x54, -- T
+ [0x1D6F5] = 0x54, -- Tau
+-- [0x1D448] = 0x55, -- U
+-- [0x1D449] = 0x56, -- V
+-- [0x1D44A] = 0x57, -- W
+-- [0x1D44B] = 0x58, -- X
+ [0x1D6F8] = 0x58, -- Chi
+-- [0x1D44C] = 0x59, -- Y
+-- [0x1D44D] = 0x5A, -- Z
+--
+-- [0x1D44E] = 0x61, -- a
+-- [0x1D44F] = 0x62, -- b
+-- [0x1D450] = 0x63, -- c
+-- [0x1D451] = 0x64, -- d
+-- [0x1D452] = 0x65, -- e
+-- [0x1D453] = 0x66, -- f
+-- [0x1D454] = 0x67, -- g
+-- [0x1D455] = 0x68, -- h
+ [0x0210E] = 0x68, -- Planck constant (h)
+-- [0x1D456] = 0x69, -- i
+-- [0x1D457] = 0x6A, -- j
+-- [0x1D458] = 0x6B, -- k
+-- [0x1D459] = 0x6C, -- l
+-- [0x1D45A] = 0x6D, -- m
+-- [0x1D45B] = 0x6E, -- n
+-- [0x1D45C] = 0x6F, -- o
+ [0x1D70A] = 0x6F, -- omicron
+-- [0x1D45D] = 0x70, -- p
+-- [0x1D45E] = 0x71, -- q
+-- [0x1D45F] = 0x72, -- r
+-- [0x1D460] = 0x73, -- s
+-- [0x1D461] = 0x74, -- t
+-- [0x1D462] = 0x75, -- u
+-- [0x1D463] = 0x76, -- v
+-- [0x1D464] = 0x77, -- w
+-- [0x1D465] = 0x78, -- x
+-- [0x1D466] = 0x79, -- y
+-- [0x1D467] = 0x7A, -- z
+}
+
+mathencodings["tex-ss"] = { }
+mathencodings["tex-tt"] = { }
+mathencodings["tex-bf"] = { }
+mathencodings["tex-bi"] = { }
+mathencodings["tex-fraktur"] = { }
+mathencodings["tex-fraktur-bold"] = { }
+
+mathencodings["tex-sy"] = {
+ [0x0002D] = 0x00, -- -
+ [0x02212] = 0x00, -- -
+-- [0x02201] = 0x00, -- complement
+-- [0x02206] = 0x00, -- increment
+-- [0x02204] = 0x00, -- not exists
+-- [0x000B7] = 0x01, -- cdot
+ [0x022C5] = 0x01, -- cdot
+ [0x000D7] = 0x02, -- times
+ [0x0002A] = 0x03, -- *
+ [0x02217] = 0x03, -- *
+ [0x000F7] = 0x04, -- div
+ [0x022C4] = 0x05, -- diamond
+ [0x000B1] = 0x06, -- pm
+ [0x02213] = 0x07, -- mp
+ [0x02295] = 0x08, -- oplus
+ [0x02296] = 0x09, -- ominus
+ [0x02297] = 0x0A, -- otimes
+ [0x02298] = 0x0B, -- oslash
+ [0x02299] = 0x0C, -- odot
+ [0x025EF] = 0x0D, -- bigcirc, Orb (either 25EF or 25CB) -- todo
+ [0x02218] = 0x0E, -- circ
+ [0x02219] = 0x0F, -- bullet
+ [0x02022] = 0x0F, -- bullet
+ [0x0224D] = 0x10, -- asymp
+ [0x02261] = 0x11, -- equiv
+ [0x02286] = 0x12, -- subseteq
+ [0x02287] = 0x13, -- supseteq
+ [0x02264] = 0x14, -- leq
+ [0x02265] = 0x15, -- geq
+ [0x02AAF] = 0x16, -- preceq
+-- [0x0227C] = 0x16, -- preceq, AM:No see 2AAF
+ [0x02AB0] = 0x17, -- succeq
+-- [0x0227D] = 0x17, -- succeq, AM:No see 2AB0
+ [0x0223C] = 0x18, -- sim
+ [0x02248] = 0x19, -- approx
+ [0x02282] = 0x1A, -- subset
+ [0x02283] = 0x1B, -- supset
+ [0x0226A] = 0x1C, -- ll
+ [0x0226B] = 0x1D, -- gg
+ [0x0227A] = 0x1E, -- prec
+ [0x0227B] = 0x1F, -- succ
+ [0x02190] = 0x20, -- leftarrow
+ [0x02192] = 0x21, -- rightarrow
+--~ [0xFE190] = 0x20, -- leftarrow
+--~ [0xFE192] = 0x21, -- rightarrow
+ [0x02191] = 0x22, -- uparrow
+ [0x02193] = 0x23, -- downarrow
+ [0x02194] = 0x24, -- leftrightarrow
+ [0x02197] = 0x25, -- nearrow
+ [0x02198] = 0x26, -- searrow
+ [0x02243] = 0x27, -- simeq
+ [0x021D0] = 0x28, -- Leftarrow
+ [0x021D2] = 0x29, -- Rightarrow
+ [0x021D1] = 0x2A, -- Uparrow
+ [0x021D3] = 0x2B, -- Downarrow
+ [0x021D4] = 0x2C, -- Leftrightarrow
+ [0x02196] = 0x2D, -- nwarrow
+ [0x02199] = 0x2E, -- swarrow
+ [0x0221D] = 0x2F, -- propto
+ [0x02032] = 0x30, -- prime
+ [0x0221E] = 0x31, -- infty
+ [0x02208] = 0x32, -- in
+ [0x0220B] = 0x33, -- ni
+ [0x025B3] = 0x34, -- triangle, bigtriangleup
+ [0x025BD] = 0x35, -- bigtriangledown
+ [0x00338] = 0x36, -- not
+-- 0x37, -- (beginning of arrow)
+ [0x02200] = 0x38, -- forall
+ [0x02203] = 0x39, -- exists
+ [0x000AC] = 0x3A, -- neg, lnot
+ [0x02205] = 0x3B, -- empty set
+ [0x0211C] = 0x3C, -- Re
+ [0x02111] = 0x3D, -- Im
+ [0x022A4] = 0x3E, -- top
+ [0x022A5] = 0x3F, -- bot, perp
+ [0x02135] = 0x40, -- aleph
+ [0x1D49C] = 0x41, -- script A
+ [0x0212C] = 0x42, -- script B
+ [0x1D49E] = 0x43, -- script C
+ [0x1D49F] = 0x44, -- script D
+ [0x02130] = 0x45, -- script E
+ [0x02131] = 0x46, -- script F
+ [0x1D4A2] = 0x47, -- script G
+ [0x0210B] = 0x48, -- script H
+ [0x02110] = 0x49, -- script I
+ [0x1D4A5] = 0x4A, -- script J
+ [0x1D4A6] = 0x4B, -- script K
+ [0x02112] = 0x4C, -- script L
+ [0x02133] = 0x4D, -- script M
+ [0x1D4A9] = 0x4E, -- script N
+ [0x1D4AA] = 0x4F, -- script O
+ [0x1D4AB] = 0x50, -- script P
+ [0x1D4AC] = 0x51, -- script Q
+ [0x0211B] = 0x52, -- script R
+ [0x1D4AE] = 0x53, -- script S
+ [0x1D4AF] = 0x54, -- script T
+ [0x1D4B0] = 0x55, -- script U
+ [0x1D4B1] = 0x56, -- script V
+ [0x1D4B2] = 0x57, -- script W
+ [0x1D4B3] = 0x58, -- script X
+ [0x1D4B4] = 0x59, -- script Y
+ [0x1D4B5] = 0x5A, -- script Z
+ [0x0222A] = 0x5B, -- cup
+ [0x02229] = 0x5C, -- cap
+ [0x0228E] = 0x5D, -- uplus
+ [0x02227] = 0x5E, -- wedge, land
+ [0x02228] = 0x5F, -- vee, lor
+ [0x022A2] = 0x60, -- vdash
+ [0x022A3] = 0x61, -- dashv
+ [0x0230A] = 0x62, -- lfloor
+ [0x0230B] = 0x63, -- rfloor
+ [0x02308] = 0x64, -- lceil
+ [0x02309] = 0x65, -- rceil
+ [0x0007B] = 0x66, -- {, lbrace
+ [0x0007D] = 0x67, -- }, rbrace
+ [0x027E8] = 0x68, -- <, langle
+ [0x027E9] = 0x69, -- >, rangle
+ [0x0007C] = 0x6A, -- |, mid, lvert, rvert
+ [0x02225] = 0x6B, -- parallel
+ -- [0x0 ] = 0x00, -- Vert, lVert, rVert, arrowvert, Arrowvert
+ [0x02195] = 0x6C, -- updownarrow
+ [0x021D5] = 0x6D, -- Updownarrow
+ [0x0005C] = 0x6E, -- \, backslash, setminus
+ [0x02216] = 0x6E, -- setminus
+ [0x02240] = 0x6F, -- wr
+ [0x0221A] = 0x70, -- sqrt. AM: Check surd??
+ [0x02A3F] = 0x71, -- amalg
+ [0x1D6FB] = 0x72, -- nabla
+-- [0x0222B] = 0x73, -- smallint (TODO: what about intop?)
+ [0x02294] = 0x74, -- sqcup
+ [0x02293] = 0x75, -- sqcap
+ [0x02291] = 0x76, -- sqsubseteq
+ [0x02292] = 0x77, -- sqsupseteq
+ [0x000A7] = 0x78, -- S
+ [0x02020] = 0x79, -- dagger, dag
+ [0x02021] = 0x7A, -- ddagger, ddag
+ [0x000B6] = 0x7B, -- P
+ [0x02663] = 0x7C, -- clubsuit
+ [0x02662] = 0x7D, -- diamondsuit
+ [0x02661] = 0x7E, -- heartsuit
+ [0x02660] = 0x7F, -- spadesuit
+ [0xFE321] = 0x37, -- mapstochar
+
+ [0xFE325] = 0x30, -- prime 0x02032
+}
+
+-- The names in masm10.enc can be trusted best and are shown in the first
+-- column, while in the second column we show the tex/ams names. As usual
+-- it costs hours to figure out such a table.
+
+mathencodings["tex-ma"] = {
+ [0x022A1] = 0x00, -- squaredot \boxdot
+ [0x0229E] = 0x01, -- squareplus \boxplus
+ [0x022A0] = 0x02, -- squaremultiply \boxtimes
+ [0x025A1] = 0x03, -- square \square \Box
+ [0x025A0] = 0x04, -- squaresolid \blacksquare
+ [0x025AA] = 0x05, -- squaresmallsolid \centerdot
+ [0x022C4] = 0x06, -- diamond \Diamond \lozenge
+ [0x02666] = 0x07, -- diamondsolid \blacklozenge
+ [0x021BB] = 0x08, -- clockwise \circlearrowright
+ [0x021BA] = 0x09, -- anticlockwise \circlearrowleft
+ [0x021CC] = 0x0A, -- harpoonleftright \rightleftharpoons
+ [0x021CB] = 0x0B, -- harpoonrightleft \leftrightharpoons
+ [0x0229F] = 0x0C, -- squareminus \boxminus
+ [0x022A9] = 0x0D, -- forces \Vdash
+ [0x022AA] = 0x0E, -- forcesbar \Vvdash
+ [0x022A8] = 0x0F, -- satisfies \vDash
+ [0x021A0] = 0x10, -- dblarrowheadright \twoheadrightarrow
+ [0x0219E] = 0x11, -- dblarrowheadleft \twoheadleftarrow
+ [0x021C7] = 0x12, -- dblarrowleft \leftleftarrows
+ [0x021C9] = 0x13, -- dblarrowright \rightrightarrows
+ [0x021C8] = 0x14, -- dblarrowup \upuparrows
+ [0x021CA] = 0x15, -- dblarrowdwn \downdownarrows
+ [0x021BE] = 0x16, -- harpoonupright \upharpoonright \restriction
+ [0x021C2] = 0x17, -- harpoondownright \downharpoonright
+ [0x021BF] = 0x18, -- harpoonupleft \upharpoonleft
+ [0x021C3] = 0x19, -- harpoondownleft \downharpoonleft
+ [0x021A3] = 0x1A, -- arrowtailright \rightarrowtail
+ [0x021A2] = 0x1B, -- arrowtailleft \leftarrowtail
+ [0x021C6] = 0x1C, -- arrowparrleftright \leftrightarrows
+-- [0x021C5] = 0x00, -- \updownarrows (missing in lm)
+ [0x021C4] = 0x1D, -- arrowparrrightleft \rightleftarrows
+ [0x021B0] = 0x1E, -- shiftleft \Lsh
+ [0x021B1] = 0x1F, -- shiftright \Rsh
+ [0x021DD] = 0x20, -- squiggleright \leadsto \rightsquigarrow
+ [0x021AD] = 0x21, -- squiggleleftright \leftrightsquigarrow
+ [0x021AB] = 0x22, -- curlyleft \looparrowleft
+ [0x021AC] = 0x23, -- curlyright \looparrowright
+ [0x02257] = 0x24, -- circleequal \circeq
+ [0x0227F] = 0x25, -- followsorequal \succsim
+ [0x02273] = 0x26, -- greaterorsimilar \gtrsim
+ [0x02A86] = 0x27, -- greaterorapproxeql \gtrapprox
+ [0x022B8] = 0x28, -- multimap \multimap
+ [0x02234] = 0x29, -- therefore \therefore
+ [0x02235] = 0x2A, -- because \because
+ [0x02251] = 0x2B, -- equalsdots \Doteq \doteqdot
+ [0x0225C] = 0x2C, -- defines \triangleq
+ [0x0227E] = 0x2D, -- precedesorequal \precsim
+ [0x02272] = 0x2E, -- lessorsimilar \lesssim
+ [0x02A85] = 0x2F, -- lessorapproxeql \lessapprox
+ [0x02A95] = 0x30, -- equalorless \eqslantless
+ [0x02A96] = 0x31, -- equalorgreater \eqslantgtr
+ [0x022DE] = 0x32, -- equalorprecedes \curlyeqprec
+ [0x022DF] = 0x33, -- equalorfollows \curlyeqsucc
+ [0x0227C] = 0x34, -- precedesorcurly \preccurlyeq
+ [0x02266] = 0x35, -- lessdblequal \leqq
+ [0x02A7D] = 0x36, -- lessorequalslant \leqslant
+ [0x02276] = 0x37, -- lessorgreater \lessgtr
+ [0x02035] = 0x38, -- primereverse \backprime
+ -- [0x0] = 0x39, -- axisshort \dabar
+ [0x02253] = 0x3A, -- equaldotrightleft \risingdotseq
+ [0x02252] = 0x3B, -- equaldotleftright \fallingdotseq
+ [0x0227D] = 0x3C, -- followsorcurly \succcurlyeq
+ [0x02267] = 0x3D, -- greaterdblequal \geqq
+ [0x02A7E] = 0x3E, -- greaterorequalslant \geqslant
+ [0x02277] = 0x3F, -- greaterorless \gtrless
+ [0x0228F] = 0x40, -- squareimage \sqsubset
+ [0x02290] = 0x41, -- squareoriginal \sqsupset
+ -- wrong: see **
+ -- [0x022B3] = 0x42, -- triangleright \rhd \vartriangleright
+ -- [0x022B2] = 0x43, -- triangleleft \lhd \vartriangleleft
+ -- cf lm
+ [0x022B5] = 0x44, -- trianglerightequal \unrhd \trianglerighteq
+ [0x022B4] = 0x45, -- triangleleftequal \unlhd \trianglelefteq
+ --
+ [0x02605] = 0x46, -- star \bigstar
+ [0x0226C] = 0x47, -- between \between
+ [0x025BC] = 0x48, -- triangledownsld \blacktriangledown
+ [0x025B6] = 0x49, -- trianglerightsld \blacktriangleright
+ [0x025C0] = 0x4A, -- triangleleftsld \blacktriangleleft
+ -- [0x0] = 0x4B, -- arrowaxisright
+ -- [0x0] = 0x4C, -- arrowaxisleft
+ [0x025B2] = 0x4D, -- triangle \triangleup \vartriangle
+ [0x025B2] = 0x4E, -- trianglesolid \blacktriangle
+ [0x025BD] = 0x4F, -- triangleinv \triangledown
+ [0x02256] = 0x50, -- ringinequal \eqcirc
+ [0x022DA] = 0x51, -- lessequalgreater \lesseqgtr
+ [0x022DB] = 0x52, -- greaterlessequal \gtreqless
+ [0x02A8B] = 0x53, -- lessdbleqlgreater \lesseqqgtr
+ [0x02A8C] = 0x54, -- greaterdbleqlless \gtreqqless
+ [0x000A5] = 0x55, -- Yen \yen
+ [0x021DB] = 0x56, -- arrowtripleright \Rrightarrow
+ [0x021DA] = 0x57, -- arrowtripleleft \Lleftarrow
+ [0x02713] = 0x58, -- check \checkmark
+ [0x022BB] = 0x59, -- orunderscore \veebar
+ [0x022BC] = 0x5A, -- nand \barwedge
+ [0x02306] = 0x5B, -- perpcorrespond \doublebarwedge
+ [0x02220] = 0x5C, -- angle \angle
+ [0x02221] = 0x5D, -- measuredangle \measuredangle
+ [0x02222] = 0x5E, -- sphericalangle \sphericalangle
+ -- [0x0] = 0x5F, -- proportional \varpropto
+ -- [0x0] = 0x60, -- smile \smallsmile
+ -- [0x0] = 0x61, -- frown \smallfrown
+ [0x022D0] = 0x62, -- subsetdbl \Subset
+ [0x022D1] = 0x63, -- supersetdbl \Supset
+ [0x022D3] = 0x64, -- uniondbl \doublecup \Cup
+ [0x022D2] = 0x65, -- intersectiondbl \doublecap \Cap
+ [0x022CF] = 0x66, -- uprise \curlywedge
+ [0x022CE] = 0x67, -- downfall \curlyvee
+ [0x022CB] = 0x68, -- multiopenleft \leftthreetimes
+ [0x022CC] = 0x69, -- multiopenright \rightthreetimes
+ [0x02AC5] = 0x6A, -- subsetdblequal \subseteqq
+ [0x02AC6] = 0x6B, -- supersetdblequal \supseteqq
+ [0x0224F] = 0x6C, -- difference \bumpeq
+ [0x0224E] = 0x6D, -- geomequivalent \Bumpeq
+ [0x022D8] = 0x6E, -- muchless \lll \llless
+ [0x022D9] = 0x6F, -- muchgreater \ggg \gggtr
+ [0x0231C] = 0x70, -- rightanglenw \ulcorner
+ [0x0231D] = 0x71, -- rightanglene \urcorner
+ [0x024C7] = 0x72, -- circleR \circledR
+ [0x024C8] = 0x73, -- circleS \circledS
+ [0x022D4] = 0x74, -- fork \pitchfork
+ [0x02214] = 0x75, -- dotplus \dotplus
+ [0x0223D] = 0x76, -- revsimilar \backsim
+ [0x022CD] = 0x77, -- revasymptequal \backsimeq -- AM: Check this! I mapped it to simeq.
+ [0x0231E] = 0x78, -- rightanglesw \llcorner
+ [0x0231F] = 0x79, -- rightanglese \lrcorner
+ [0x02720] = 0x7A, -- maltesecross \maltese
+ [0x02201] = 0x7B, -- complement \complement
+ [0x022BA] = 0x7C, -- intercal \intercal
+ [0x0229A] = 0x7D, -- circlering \circledcirc
+ [0x0229B] = 0x7E, -- circleasterisk \circledast
+ [0x0229D] = 0x7F, -- circleminus \circleddash
+}
+
+mathencodings["tex-mb"] = {
+ -- [0x0] = 0x00, -- lessornotequal \lvertneqq
+ -- [0x0] = 0x01, -- greaterornotequal \gvertneqq
+ [0x02270] = 0x02, -- notlessequal \nleq
+ [0x02271] = 0x03, -- notgreaterequal \ngeq
+ [0x0226E] = 0x04, -- notless \nless
+ [0x0226F] = 0x05, -- notgreater \ngtr
+ [0x02280] = 0x06, -- notprecedes \nprec
+ [0x02281] = 0x07, -- notfollows \nsucc
+ [0x02268] = 0x08, -- lessornotdbleql \lneqq
+ [0x02269] = 0x09, -- greaterornotdbleql \gneqq
+ -- [0x0] = 0x0A, -- notlessorslnteql \nleqslant
+ -- [0x0] = 0x0B, -- notgreaterorslnteql \ngeqslant
+ [0x02A87] = 0x0C, -- lessnotequal \lneq
+ [0x02A88] = 0x0D, -- greaternotequal \gneq
+ -- [0x0] = 0x0E, -- notprecedesoreql \npreceq
+ -- [0x0] = 0x0F, -- notfollowsoreql \nsucceq
+ [0x022E8] = 0x10, -- precedeornoteqvlnt \precnsim
+ [0x022E9] = 0x11, -- followornoteqvlnt \succnsim
+ [0x022E6] = 0x12, -- lessornotsimilar \lnsim
+ [0x022E7] = 0x13, -- greaterornotsimilar \gnsim
+ -- [0x0] = 0x14, -- notlessdblequal \nleqq
+ -- [0x0] = 0x15, -- notgreaterdblequal \ngeqq
+ [0x02AB5] = 0x16, -- precedenotslnteql \precneqq
+ [0x02AB6] = 0x17, -- follownotslnteql \succneqq
+ [0x02AB9] = 0x18, -- precedenotdbleqv \precnapprox
+ [0x02ABA] = 0x19, -- follownotdbleqv \succnapprox
+ [0x02A89] = 0x1A, -- lessnotdblequal \lnapprox
+ [0x02A8A] = 0x1B, -- greaternotdblequal \gnapprox
+ [0x02241] = 0x1C, -- notsimilar \nsim
+ [0x02247] = 0x1D, -- notapproxequal \ncong
+ -- [0x0] = 0x1E, -- upslope \diagup
+ -- [0x0] = 0x1F, -- downslope \diagdown
+ -- [0x0] = 0x20, -- notsubsetoreql \varsubsetneq
+ -- [0x0] = 0x21, -- notsupersetoreql \varsupsetneq
+ -- [0x0] = 0x22, -- notsubsetordbleql \nsubseteqq
+ -- [0x0] = 0x23, -- notsupersetordbleql \nsupseteqq
+ [0x02ACB] = 0x24, -- subsetornotdbleql \subsetneqq
+ [0x02ACC] = 0x25, -- supersetornotdbleql \supsetneqq
+ -- [0x0] = 0x26, -- subsetornoteql \varsubsetneqq
+ -- [0x0] = 0x27, -- supersetornoteql \varsupsetneqq
+ [0x0228A] = 0x28, -- subsetnoteql \subsetneq
+ [0x0228B] = 0x29, -- supersetnoteql \supsetneq
+ [0x02288] = 0x2A, -- notsubseteql \nsubseteq
+ [0x02289] = 0x2B, -- notsuperseteql \nsupseteq
+ [0x02226] = 0x2C, -- notparallel \nparallel
+ [0x02224] = 0x2D, -- notbar \nmid \ndivides
+ -- [0x0] = 0x2E, -- notshortbar \nshortmid
+ -- [0x0] = 0x2F, -- notshortparallel \nshortparallel
+ [0x022AC] = 0x30, -- notturnstile \nvdash
+ [0x022AE] = 0x31, -- notforces \nVdash
+ [0x022AD] = 0x32, -- notsatisfies \nvDash
+ [0x022AF] = 0x33, -- notforcesextra \nVDash
+ [0x022ED] = 0x34, -- nottriangeqlright \ntrianglerighteq
+ [0x022EC] = 0x35, -- nottriangeqlleft \ntrianglelefteq
+ [0x022EA] = 0x36, -- nottriangleleft \ntriangleleft
+ [0x022EB] = 0x37, -- nottriangleright \ntriangleright
+ [0x0219A] = 0x38, -- notarrowleft \nleftarrow
+ [0x0219B] = 0x39, -- notarrowright \nrightarrow
+ [0x021CD] = 0x3A, -- notdblarrowleft \nLeftarrow
+ [0x021CF] = 0x3B, -- notdblarrowright \nRightarrow
+ [0x021CE] = 0x3C, -- notdblarrowboth \nLeftrightarrow
+ [0x021AE] = 0x3D, -- notarrowboth \nleftrightarrow
+ [0x022C7] = 0x3E, -- dividemultiply \divideontimes
+ [0x02300] = 0x3F, -- diametersign \varnothing
+ [0x02204] = 0x40, -- notexistential \nexists
+ [0x1D538] = 0x41, -- A (blackboard A)
+ [0x1D539] = 0x42, -- B
+ [0x02102] = 0x43, -- C
+ [0x1D53B] = 0x44, -- D
+ [0x1D53C] = 0x45, -- E
+ [0x1D53D] = 0x46, -- F
+ [0x1D53E] = 0x47, -- G
+ [0x0210D] = 0x48, -- H
+ [0x1D540] = 0x49, -- I
+ [0x1D541] = 0x4A, -- J
+ [0x1D542] = 0x4B, -- K
+ [0x1D543] = 0x4C, -- L
+ [0x1D544] = 0x4D, -- M
+ [0x02115] = 0x4E, -- N
+ [0x1D546] = 0x4F, -- O
+ [0x02119] = 0x50, -- P
+ [0x0211A] = 0x51, -- Q
+ [0x0211D] = 0x52, -- R
+ [0x1D54A] = 0x53, -- S
+ [0x1D54B] = 0x54, -- T
+ [0x1D54C] = 0x55, -- U
+ [0x1D54D] = 0x56, -- V
+ [0x1D54E] = 0x57, -- W
+ [0x1D54F] = 0x58, -- X
+ [0x1D550] = 0x59, -- Y
+ [0x02124] = 0x5A, -- Z (blackboard Z)
+ [0x02132] = 0x60, -- finv \Finv
+ [0x02141] = 0x61, -- fmir \Game
+ -- [0x0] = 0x62, tildewide
+ -- [0x0] = 0x63, tildewider
+ -- [0x0] = 0x64, Finv
+ -- [0x0] = 0x65, Gmir
+ [0x02127] = 0x66, -- Omegainv \mho
+ [0x000F0] = 0x67, -- eth \eth
+ [0x02242] = 0x68, -- equalorsimilar \eqsim
+ [0x02136] = 0x69, -- beth \beth
+ [0x02137] = 0x6A, -- gimel \gimel
+ [0x02138] = 0x6B, -- daleth \daleth
+ [0x022D6] = 0x6C, -- lessdot \lessdot
+ [0x022D7] = 0x6D, -- greaterdot \gtrdot
+ [0x022C9] = 0x6E, -- multicloseleft \ltimes
+ [0x022CA] = 0x6F, -- multicloseright \rtimes
+ -- [0x0] = 0x70, -- barshort \shortmid
+ -- [0x0] = 0x71, -- parallelshort \shortparallel
+ -- [0x02216] = 0x72, -- integerdivide \smallsetminus (0x2216 is already part of tex-sy)
+ -- [0x0] = 0x73, -- similar \thicksim
+ -- [0x0] = 0x74, -- approxequal \thickapprox
+ [0x0224A] = 0x75, -- approxorequal \approxeq
+ [0x02AB8] = 0x76, -- followsorequal \succapprox
+ [0x02AB7] = 0x77, -- precedesorequal \precapprox
+ [0x021B6] = 0x78, -- archleftdown \curvearrowleft
+ [0x021B7] = 0x79, -- archrightdown \curvearrowright
+ [0x003DC] = 0x7A, -- Digamma \digamma
+ [0x003F0] = 0x7B, -- kappa \varkappa
+ [0x1D55C] = 0x7C, -- k \Bbbk (blackboard k)
+ [0x0210F] = 0x7D, -- planckover2pi \hslash % 0x7D
+ [0x00127] = 0x7E, -- planckover2pi1 \hbar % 0x7E
+ [0x003F6] = 0x7F, -- epsiloninv \backepsilon
+}
+
+mathencodings["tex-mc"] = {
+ -- this file has no tfm so it gets mapped in the private space
+ [0xFE324] = "mapsfromchar",
+}
+
+mathencodings["tex-fraktur"] = {
+-- [0x1D504] = 0x41, -- A (fraktur A)
+-- [0x1D505] = 0x42, -- B
+ [0x0212D] = 0x43, -- C
+-- [0x1D507] = 0x44, -- D
+-- [0x1D508] = 0x45, -- E
+-- [0x1D509] = 0x46, -- F
+-- [0x1D50A] = 0x47, -- G
+ [0x0210C] = 0x48, -- H
+ [0x02111] = 0x49, -- I
+-- [0x1D50D] = 0x4A, -- J
+-- [0x1D50E] = 0x4B, -- K
+-- [0x1D50F] = 0x4C, -- L
+-- [0x1D510] = 0x4D, -- M
+-- [0x1D511] = 0x4E, -- N
+-- [0x1D512] = 0x4F, -- O
+-- [0x1D513] = 0x50, -- P
+-- [0x1D514] = 0x51, -- Q
+ [0x0211C] = 0x52, -- R
+-- [0x1D516] = 0x53, -- S
+-- [0x1D517] = 0x54, -- T
+-- [0x1D518] = 0x55, -- U
+-- [0x1D519] = 0x56, -- V
+-- [0x1D51A] = 0x57, -- W
+-- [0x1D51B] = 0x58, -- X
+-- [0x1D51C] = 0x59, -- Y
+ [0x02128] = 0x5A, -- Z (fraktur Z)
+-- [0x1D51E] = 0x61, -- a (fraktur a)
+-- [0x1D51F] = 0x62, -- b
+-- [0x1D520] = 0x63, -- c
+-- [0x1D521] = 0x64, -- d
+-- [0x1D522] = 0x65, -- e
+-- [0x1D523] = 0x66, -- f
+-- [0x1D524] = 0x67, -- g
+-- [0x1D525] = 0x68, -- h
+-- [0x1D526] = 0x69, -- i
+-- [0x1D527] = 0x6A, -- j
+-- [0x1D528] = 0x6B, -- k
+-- [0x1D529] = 0x6C, -- l
+-- [0x1D52A] = 0x6D, -- m
+-- [0x1D52B] = 0x6E, -- n
+-- [0x1D52C] = 0x6F, -- o
+-- [0x1D52D] = 0x70, -- p
+-- [0x1D52E] = 0x71, -- q
+-- [0x1D52F] = 0x72, -- r
+-- [0x1D530] = 0x73, -- s
+-- [0x1D531] = 0x74, -- t
+-- [0x1D532] = 0x75, -- u
+-- [0x1D533] = 0x76, -- v
+-- [0x1D534] = 0x77, -- w
+-- [0x1D535] = 0x78, -- x
+-- [0x1D536] = 0x79, -- y
+-- [0x1D537] = 0x7A, -- z
+}
+
+-- now that all other vectors are defined ...
+
+setletters(mathencodings, "tex-it", 0x1D434, 0x1D44E)
+setletters(mathencodings, "tex-ss", 0x1D5A0, 0x1D5BA)
+setletters(mathencodings, "tex-tt", 0x1D670, 0x1D68A)
+setletters(mathencodings, "tex-bf", 0x1D400, 0x1D41A)
+setletters(mathencodings, "tex-bi", 0x1D468, 0x1D482)
+setletters(mathencodings, "tex-fraktur", 0x1D504, 0x1D51E)
+setletters(mathencodings, "tex-fraktur-bold", 0x1D56C, 0x1D586)
+
+setdigits (mathencodings, "tex-ss", 0x1D7E2)
+setdigits (mathencodings, "tex-tt", 0x1D7F6)
+setdigits (mathencodings, "tex-bf", 0x1D7CE)
+
+-- setdigits (mathencodings, "tex-bi", 0x1D7CE)
+
+-- todo: add ss, tt, bf etc vectors
+-- todo: we can make ss tt etc an option
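+
+-- A rough sketch (for illustration only) of what the setletters/setdigits
+-- helpers used above are assumed to do: fill an encoding vector with the 26
+-- uppercase and 26 lowercase letters, respectively the 10 digits, starting at
+-- the given Unicode base points. The real helpers defined earlier in this
+-- file may differ in detail:
+--
+-- local function setletters(encodings,name,uppercase,lowercase)
+--     local vector = encodings[name]
+--     for i=0,25 do
+--         vector[uppercase+i] = 0x41 + i -- A .. Z
+--         vector[lowercase+i] = 0x61 + i -- a .. z
+--     end
+-- end
+--
+-- local function setdigits(encodings,name,digits)
+--     local vector = encodings[name]
+--     for i=0,9 do
+--         vector[digits+i] = 0x30 + i -- 0 .. 9
+--     end
+-- end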
diff --git a/tex/context/base/meta-fun.lua b/tex/context/base/meta-fun.lua
index 78ee25baf..e12298e8b 100644
--- a/tex/context/base/meta-fun.lua
+++ b/tex/context/base/meta-fun.lua
@@ -1,57 +1,57 @@
-if not modules then modules = { } end modules ['meta-fun'] = {
- version = 1.001,
- comment = "companion to meta-fun.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- very experimental, actually a joke ... see metafun manual for usage
-
-local format, load, type = string.format, load, type
-
-local metapost = metapost
-
-metapost.metafun = metapost.metafun or { }
-local metafun = metapost.metafun
-
-function metafun.topath(t,connector)
- context("(")
- if #t > 0 then
- for i=1,#t do
- if i > 1 then
- context(connector or "..")
- end
- local ti = t[i]
- if type(ti) == "string" then
- context(ti)
- else
- context("(%s,%s)",ti.x or ti[1] or 0,ti.y or ti[2] or 0)
- end
- end
- else
- context("origin")
- end
- context(")")
-end
-
-function metafun.interpolate(f,b,e,s,c)
- local done = false
- context("(")
- for i=b,e,(e-b)/s do
- local d = load(format("return function(x) return %s end",f))
- if d then
- d = d()
- if done then
- context(c or "...")
- else
- done = true
- end
- context("(%s,%s)",i,d(i))
- end
- end
- if not done then
- context("origin")
- end
- context(")")
-end
+if not modules then modules = { } end modules ['meta-fun'] = {
+ version = 1.001,
+ comment = "companion to meta-fun.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- very experimental, actually a joke ... see metafun manual for usage
+
+local format, load, type = string.format, load, type
+
+local metapost = metapost
+
+metapost.metafun = metapost.metafun or { }
+local metafun = metapost.metafun
+
+function metafun.topath(t,connector)
+ context("(")
+ if #t > 0 then
+ for i=1,#t do
+ if i > 1 then
+ context(connector or "..")
+ end
+ local ti = t[i]
+ if type(ti) == "string" then
+ context(ti)
+ else
+ context("(%s,%s)",ti.x or ti[1] or 0,ti.y or ti[2] or 0)
+ end
+ end
+ else
+ context("origin")
+ end
+ context(")")
+end
+
+function metafun.interpolate(f,b,e,s,c)
+ local done = false
+ context("(")
+ for i=b,e,(e-b)/s do
+ local d = load(format("return function(x) return %s end",f))
+ if d then
+ d = d()
+ if done then
+ context(c or "...")
+ else
+ done = true
+ end
+ context("(%s,%s)",i,d(i))
+ end
+ end
+ if not done then
+ context("origin")
+ end
+ context(")")
+end
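+
+-- For illustration only (not part of the module): tracing the code above,
+-- metafun.interpolate("x^2",0,1,4) samples x^2 at steps of (e-b)/s = 0.25 and
+-- pipes (roughly) the path
+--
+-- ((0,0)...(0.25,0.0625)...(0.5,0.25)...(0.75,0.5625)...(1,1))
+--
+-- while metafun.topath({ {1,2}, {3,4} },"--") produces ((1,2)--(3,4)).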
diff --git a/tex/context/base/meta-ini.lua b/tex/context/base/meta-ini.lua
index 713ba3d5d..460738930 100644
--- a/tex/context/base/meta-ini.lua
+++ b/tex/context/base/meta-ini.lua
@@ -1,165 +1,165 @@
-if not modules then modules = { } end modules ['meta-ini'] = {
- version = 1.001,
- comment = "companion to meta-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local tonumber = tonumber
-local format = string.format
-local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-local P, Cs, R, S, C, Cc = lpeg.P, lpeg.Cs, lpeg.R, lpeg.S, lpeg.C, lpeg.Cc
-
-local context = context
-
-metapost = metapost or { }
-
--- for the moment downward compatible
-
-local report_metapost = logs.reporter ("metapost")
-local status_metapost = logs.messenger("metapost")
-
-local patterns = { "meta-imp-%s.mkiv", "meta-imp-%s.tex", "meta-%s.mkiv", "meta-%s.tex" } -- we are compatible
-
-local function action(name,foundname)
- status_metapost("library %a is loaded",name)
- context.startreadingfile()
- context.input(foundname)
- context.stopreadingfile()
-end
-
-local function failure(name)
- report_metapost("library %a is unknown or invalid",name)
-end
-
-function commands.useMPlibrary(name)
- commands.uselibrary {
- name = name,
- patterns = patterns,
- action = action,
- failure = failure,
- onlyonce = true,
- }
-end
-
--- experimental
-
-local colorhash = attributes.list[attributes.private('color')]
-
-local textype = tex.type
-local MPcolor = context.MPcolor
-
--- local validdimen = lpegpatterns.validdimen * P(-1)
---
--- function commands.prepareMPvariable(v) -- slow but ok
--- if v == "" then
--- MPcolor("black")
--- else
--- local typ, var = match(v,"(.):(.*)")
--- if not typ then
--- -- parse
--- if colorhash[v] then
--- MPcolor(v)
--- elseif tonumber(v) then
--- context(v)
--- elseif lpegmatch(validdimen,v) then
--- return context("\\the\\dimexpr %s",v)
--- else
--- for s in gmatch(v,"\\([a-zA-Z]+)") do -- can have trailing space
--- local t = textype(s)
--- if t == "dimen" then
--- return context("\\the\\dimexpr %s",v)
--- elseif t == "count" then
--- return context("\\the\\numexpr %s",v)
--- end
--- end
--- context("\\number %s",v) -- 0.4 ...
--- end
--- elseif typ == "d" then -- to be documented
--- -- dimension
--- context("\\the\\dimexpr %s",var)
--- elseif typ == "n" then -- to be documented
--- -- number
--- context("\\the\\numexpr %s",var)
--- elseif typ == "s" then -- to be documented
--- -- string
--- context(var)
--- elseif typ == "c" then -- to be documented
--- -- color
--- MPcolor(var)
--- else
--- context(var)
--- end
--- end
--- end
-
--- we can actually get the dimen/count values here
-
-local dimenorname =
- lpegpatterns.validdimen / function(s)
- context("\\the\\dimexpr %s",s)
- end
- + (C(lpegpatterns.float) + Cc(1)) * lpegpatterns.space^0 * P("\\") * C(lpegpatterns.letter^1) / function(f,s)
- local t = textype(s)
- if t == "dimen" then
- context("\\the\\dimexpr %s\\%s",f,s)
- elseif t == "count" then
- context("\\the\\numexpr \\%s * %s\\relax",s,f) -- \scratchcounter is not permitted
- end
- end
-
-local splitter = lpeg.splitat(":",true)
-
-function commands.prepareMPvariable(v) -- slow but ok
- if v == "" then
- MPcolor("black")
- else
- local typ, var = lpegmatch(splitter,v)
- if not var then
- -- parse
- if colorhash[v] then
- MPcolor(v)
- elseif tonumber(v) then
- context(v)
- elseif not lpegmatch(dimenorname,v) then
- context("\\number %s",v) -- 0.4 ...
- end
- elseif typ == "d" then -- to be documented
- -- dimension
- context("\\the\\dimexpr %s",var)
- elseif typ == "n" then -- to be documented
- -- number
- context("\\the\\numexpr %s",var)
- elseif typ == "s" then -- to be documented
- -- string
- context(var)
- elseif typ == "c" then -- to be documented
- -- color
- MPcolor(var)
- else
- context(var)
- end
- end
-end
-
--- function metapost.formatnumber(f,n) -- just lua format
--- f = gsub(f,"@(%d)","%%.%1")
--- f = gsub(f,"@","%%")
--- f = format(f,tonumber(n) or 0)
--- f = gsub(f,"e([%+%-%d]+)",function(s)
--- return format("\\times10^{%s}",tonumber(s) or s) -- strips leading zeros
--- end)
--- context.mathematics(f)
--- end
-
--- formatters["\\times10^{%N}"](s) -- strips leading zeros too
-
-local one = Cs((P("@")/"%%." * (R("09")^1) + P("@")/"%%" + 1)^0)
-local two = Cs((P("e")/"" * ((S("+-")^0 * R("09")^1) / function(s) return format("\\times10^{%s}",tonumber(s) or s) end) + 1)^1)
-
--- local two = Cs((P("e")/"" * ((S("+-")^0 * R("09")^1) / formatters["\\times10^{%N}"]) + 1)^1)
-
-function metapost.formatnumber(fmt,n) -- just lua format
- context.mathematics(lpegmatch(two,format(lpegmatch(one,fmt),n)))
-end
+if not modules then modules = { } end modules ['meta-ini'] = {
+ version = 1.001,
+ comment = "companion to meta-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local tonumber = tonumber
+local format = string.format
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local P, Cs, R, S, C, Cc = lpeg.P, lpeg.Cs, lpeg.R, lpeg.S, lpeg.C, lpeg.Cc
+
+local context = context
+
+metapost = metapost or { }
+
+-- for the moment downward compatible
+
+local report_metapost = logs.reporter ("metapost")
+local status_metapost = logs.messenger("metapost")
+
+local patterns = { "meta-imp-%s.mkiv", "meta-imp-%s.tex", "meta-%s.mkiv", "meta-%s.tex" } -- we are compatible
+
+local function action(name,foundname)
+ status_metapost("library %a is loaded",name)
+ context.startreadingfile()
+ context.input(foundname)
+ context.stopreadingfile()
+end
+
+local function failure(name)
+ report_metapost("library %a is unknown or invalid",name)
+end
+
+function commands.useMPlibrary(name)
+ commands.uselibrary {
+ name = name,
+ patterns = patterns,
+ action = action,
+ failure = failure,
+ onlyonce = true,
+ }
+end
+
+-- experimental
+
+local colorhash = attributes.list[attributes.private('color')]
+
+local textype = tex.type
+local MPcolor = context.MPcolor
+
+-- local validdimen = lpegpatterns.validdimen * P(-1)
+--
+-- function commands.prepareMPvariable(v) -- slow but ok
+-- if v == "" then
+-- MPcolor("black")
+-- else
+-- local typ, var = match(v,"(.):(.*)")
+-- if not typ then
+-- -- parse
+-- if colorhash[v] then
+-- MPcolor(v)
+-- elseif tonumber(v) then
+-- context(v)
+-- elseif lpegmatch(validdimen,v) then
+-- return context("\\the\\dimexpr %s",v)
+-- else
+-- for s in gmatch(v,"\\([a-zA-Z]+)") do -- can have trailing space
+-- local t = textype(s)
+-- if t == "dimen" then
+-- return context("\\the\\dimexpr %s",v)
+-- elseif t == "count" then
+-- return context("\\the\\numexpr %s",v)
+-- end
+-- end
+-- context("\\number %s",v) -- 0.4 ...
+-- end
+-- elseif typ == "d" then -- to be documented
+-- -- dimension
+-- context("\\the\\dimexpr %s",var)
+-- elseif typ == "n" then -- to be documented
+-- -- number
+-- context("\\the\\numexpr %s",var)
+-- elseif typ == "s" then -- to be documented
+-- -- string
+-- context(var)
+-- elseif typ == "c" then -- to be documented
+-- -- color
+-- MPcolor(var)
+-- else
+-- context(var)
+-- end
+-- end
+-- end
+
+-- we can actually get the dimen/count values here
+
+local dimenorname =
+ lpegpatterns.validdimen / function(s)
+ context("\\the\\dimexpr %s",s)
+ end
+ + (C(lpegpatterns.float) + Cc(1)) * lpegpatterns.space^0 * P("\\") * C(lpegpatterns.letter^1) / function(f,s)
+ local t = textype(s)
+ if t == "dimen" then
+ context("\\the\\dimexpr %s\\%s",f,s)
+ elseif t == "count" then
+ context("\\the\\numexpr \\%s * %s\\relax",s,f) -- \scratchcounter is not permitted
+ end
+ end
+
+local splitter = lpeg.splitat(":",true)
+
+function commands.prepareMPvariable(v) -- slow but ok
+ if v == "" then
+ MPcolor("black")
+ else
+ local typ, var = lpegmatch(splitter,v)
+ if not var then
+ -- parse
+ if colorhash[v] then
+ MPcolor(v)
+ elseif tonumber(v) then
+ context(v)
+ elseif not lpegmatch(dimenorname,v) then
+ context("\\number %s",v) -- 0.4 ...
+ end
+ elseif typ == "d" then -- to be documented
+ -- dimension
+ context("\\the\\dimexpr %s",var)
+ elseif typ == "n" then -- to be documented
+ -- number
+ context("\\the\\numexpr %s",var)
+ elseif typ == "s" then -- to be documented
+ -- string
+ context(var)
+ elseif typ == "c" then -- to be documented
+ -- color
+ MPcolor(var)
+ else
+ context(var)
+ end
+ end
+end
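+
+-- Illustration (not part of the module): the splitter above dispatches on the
+-- prefix of the value that comes from the TeX end, for instance
+--
+-- commands.prepareMPvariable("d:10pt") -- pipes \the\dimexpr 10pt
+-- commands.prepareMPvariable("n:123")  -- pipes \the\numexpr 123
+-- commands.prepareMPvariable("s:foo")  -- pipes the string foo
+-- commands.prepareMPvariable("c:red")  -- pipes an MPcolor for red
+--
+-- while an unprefixed value falls through to the color hash, the number check
+-- and the dimen-or-name pattern.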
+
+-- function metapost.formatnumber(f,n) -- just lua format
+-- f = gsub(f,"@(%d)","%%.%1")
+-- f = gsub(f,"@","%%")
+-- f = format(f,tonumber(n) or 0)
+-- f = gsub(f,"e([%+%-%d]+)",function(s)
+-- return format("\\times10^{%s}",tonumber(s) or s) -- strips leading zeros
+-- end)
+-- context.mathematics(f)
+-- end
+
+-- formatters["\\times10^{%N}"](s) -- strips leading zeros too
+
+local one = Cs((P("@")/"%%." * (R("09")^1) + P("@")/"%%" + 1)^0)
+local two = Cs((P("e")/"" * ((S("+-")^0 * R("09")^1) / function(s) return format("\\times10^{%s}",tonumber(s) or s) end) + 1)^1)
+
+-- local two = Cs((P("e")/"" * ((S("+-")^0 * R("09")^1) / formatters["\\times10^{%N}"]) + 1)^1)
+
+function metapost.formatnumber(fmt,n) -- just lua format
+ context.mathematics(lpegmatch(two,format(lpegmatch(one,fmt),n)))
+end
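+
+-- Illustration (not part of the module): the two patterns above first turn the
+-- @-template into a Lua format and then rewrite the exponent, so for instance
+-- metapost.formatnumber("@3e",123.45678) formats to "1.235e+02" and is typeset
+-- as 1.235\times10^{2}.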
diff --git a/tex/context/base/meta-pdf.lua b/tex/context/base/meta-pdf.lua
index 32e48902a..15211b560 100644
--- a/tex/context/base/meta-pdf.lua
+++ b/tex/context/base/meta-pdf.lua
@@ -1,567 +1,567 @@
-if not modules then modules = { } end modules ['meta-pdf'] = {
- version = 1.001,
- comment = "companion to meta-pdf.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- Finally we used an optimized version. The test code can be found in
--- meta-pdh.lua but since we no longer want to overload functione we use
--- more locals now. This module keeps changing as it is also a testbed.
---
--- We can make it even more efficient if needed, but as we don't use this
--- code often in \MKIV\ it makes no sense.
-
-local concat, unpack = table.concat, table.unpack
-local gsub, find, byte, gmatch, match = string.gsub, string.find, string.byte, string.gmatch, string.match
-local lpegmatch = lpeg.match
-local round = math.round
-local formatters, format = string.formatters, string.format
-
-local report_mptopdf = logs.reporter("graphics","mptopdf")
-
-local mplib, metapost, lpdf, context = mplib, metapost, lpdf, context
-
-local pdfrgbcode = lpdf.rgbcode
-local pdfcmykcode = lpdf.cmykcode
-local pdfgraycode = lpdf.graycode
-local pdfspotcode = lpdf.spotcode
-local pdftransparencycode = lpdf.transparencycode
-local pdffinishtransparencycode = lpdf.finishtransparencycode
-local pdfliteral = nodes.pool.pdfliteral
-
-metapost.mptopdf = metapost.mptopdf or { }
-local mptopdf = metapost.mptopdf
-
-mptopdf.nofconverted = 0
-
-local f_translate = formatters["1 0 0 0 1 %f %f cm"] -- no %s due to 1e-035 issues
-local f_concat = formatters["%f %f %f %f %f %f cm"] -- no %s due to 1e-035 issues
-
-local m_path, m_stack, m_texts, m_version, m_date, m_shortcuts = { }, { }, { }, 0, 0, false
-
-local m_stack_close, m_stack_path, m_stack_concat = false, { }, nil
-local extra_path_code, ignore_path = nil, false
-local specials = { }
-
-local function resetpath()
- m_stack_close, m_stack_path, m_stack_concat = false, { }, nil
-end
-
-local function resetall()
- m_path, m_stack, m_texts, m_version, m_shortcuts = { }, { }, { }, 0, false
- extra_path_code, ignore_path = nil, false
- specials = { }
- resetpath()
-end
-
-resetall()
-
--- -- this does not work as expected (displacement of text) beware, needs another
--- -- comment hack
---
--- local function pdfcode(str)
--- context(pdfliteral(str))
--- end
-
-local pdfcode = context.pdfliteral
-
-local function mpscode(str)
- if ignore_path then
- pdfcode("h W n")
- if extra_path_code then
- pdfcode(extra_path_code)
- extra_path_code = nil
- end
- ignore_path = false
- else
- pdfcode(str)
- end
-end
-
--- auxiliary functions
-
-local function flushconcat()
- if m_stack_concat then
- mpscode(f_concatm(unpack(m_stack_concat)))
- m_stack_concat = nil
- end
-end
-
-local function flushpath(cmd)
- if #m_stack_path > 0 then
- local path = { }
- if m_stack_concat then
- local sx, sy = m_stack_concat[1], m_stack_concat[4]
- local rx, ry = m_stack_concat[2], m_stack_concat[3]
- local tx, ty = m_stack_concat[5], m_stack_concat[6]
- local d = (sx*sy) - (rx*ry)
- for k=1,#m_stack_path do
- local v = m_stack_path[k]
- local px, py = v[1], v[2] ; v[1], v[2] = (sy*(px-tx)-ry*(py-ty))/d, (sx*(py-ty)-rx*(px-tx))/d -- mpconcat(v[1],v[2])
- if #v == 7 then
- local px, py = v[3], v[4] ; v[3], v[4] = (sy*(px-tx)-ry*(py-ty))/d, (sx*(py-ty)-rx*(px-tx))/d -- mpconcat(v[3],v[4])
- local px, py = v[5], v[6] ; v[5], v[6] = (sy*(px-tx)-ry*(py-ty))/d, (sx*(py-ty)-rx*(px-tx))/d -- mpconcat(v[5],v[6])
- end
- path[k] = concat(v," ")
- end
- else
- for k=1,#m_stack_path do
- path[k] = concat(m_stack_path[k]," ")
- end
- end
- flushconcat()
- pdfcode(concat(path," "))
- if m_stack_close then
- mpscode("h " .. cmd)
- else
- mpscode(cmd)
- end
- end
- resetpath()
-end
-
--- mp interface
-
-local mps = { }
-
-function mps.creator(a, b, c)
- m_version = tonumber(b)
-end
-
-function mps.creationdate(a)
- m_date = a
-end
-
-function mps.newpath()
- m_stack_path = { }
-end
-
-function mps.boundingbox(llx, lly, urx, ury)
- context.setMPboundingbox(llx,lly,urx,ury)
-end
-
-function mps.moveto(x,y)
- m_stack_path[#m_stack_path+1] = { x, y, "m" }
-end
-
-function mps.curveto(ax, ay, bx, by, cx, cy)
- m_stack_path[#m_stack_path+1] = { ax, ay, bx, by, cx, cy, "c" }
-end
-
-function mps.lineto(x,y)
- m_stack_path[#m_stack_path+1] = { x, y, "l" }
-end
-
-function mps.rlineto(x,y)
- local dx, dy = 0, 0
- local topofstack = #m_stack_path
- if topofstack > 0 then
- local msp = m_stack_path[topofstack]
- dx = msp[1]
- dy = msp[2]
- end
- m_stack_path[topofstack+1] = {dx,dy,"l"}
-end
-
-function mps.translate(tx,ty)
- mpscode(f_translate(tx,ty)
-end
-
-function mps.scale(sx,sy)
- m_stack_concat = {sx,0,0,sy,0,0}
-end
-
-function mps.concat(sx, rx, ry, sy, tx, ty)
- m_stack_concat = {sx,rx,ry,sy,tx,ty}
-end
-
-function mps.setlinejoin(d)
- mpscode(d .. " j")
-end
-
-function mps.setlinecap(d)
- mpscode(d .. " J")
-end
-
-function mps.setmiterlimit(d)
- mpscode(d .. " M")
-end
-
-function mps.gsave()
- mpscode("q")
-end
-
-function mps.grestore()
- mpscode("Q")
-end
-
-function mps.setdash(...) -- can be made faster, operate on t = { ... }
- local n = select("#",...)
- mpscode("[" .. concat({...}," ",1,n-1) .. "] " .. select(n,...) .. " d")
- -- mpscode("[" .. concat({select(1,n-1)}," ") .. "] " .. select(n,...) .. " d")
-end
-
-function mps.resetdash()
- mpscode("[ ] 0 d")
-end
-
-function mps.setlinewidth(d)
- mpscode(d .. " w")
-end
-
-function mps.closepath()
- m_stack_close = true
-end
-
-function mps.fill()
- flushpath('f')
-end
-
-function mps.stroke()
- flushpath('S')
-end
-
-function mps.both()
- flushpath('B')
-end
-
-function mps.clip()
- flushpath('W n')
-end
-
-function mps.textext(font, scale, str) -- old parser
- local dx, dy = 0, 0
- if #m_stack_path > 0 then
- dx, dy = m_stack_path[1][1], m_stack_path[1][2]
- end
- flushconcat()
- context.MPtextext(font,scale,str,dx,dy)
- resetpath()
-end
-
-local handlers = { }
-
-handlers[1] = function(s)
- pdfcode(pdffinishtransparencycode())
- pdfcode(pdfcmykcode(mps.colormodel,s[3],s[4],s[5],s[6]))
-end
-handlers[2] = function(s)
- pdfcode(pdffinishtransparencycode())
- pdfcode(pdfspotcode(mps.colormodel,s[3],s[4],s[5],s[6]))
-end
-handlers[3] = function(s)
- pdfcode(pdfrgbcode(mps.colormodel,s[4],s[5],s[6]))
- pdfcode(pdftransparencycode(s[2],s[3]))
-end
-handlers[4] = function(s)
- pdfcode(pdfcmykcode(mps.colormodel,s[4],s[5],s[6],s[7]))
- pdfcode(pdftransparencycode(s[2],s[3]))
-end
-handlers[5] = function(s)
- pdfcode(pdfspotcode(mps.colormodel,s[4],s[5],s[6],s[7]))
- pdfcode(pdftransparencycode(s[2],s[3]))
-end
-
--- todo: color conversion
-
-local nofshades, tn = 0, tonumber
-
-local function linearshade(colorspace,domain,ca,cb,coordinates)
- pdfcode(pdffinishtransparencycode())
- nofshades = nofshades + 1
- local name = formatters["MpsSh%s"](nofshades)
- lpdf.linearshade(name,domain,ca,cb,1,colorspace,coordinates)
- extra_path_code, ignore_path = formatters["/%s sh Q"](name), true
- pdfcode("q /Pattern cs")
-end
-
-local function circularshade(colorspace,domain,ca,cb,coordinates)
- pdfcode(pdffinishtransparencycode())
- nofshades = nofshades + 1
- local name = formatters["MpsSh%s"](nofshades)
- lpdf.circularshade(name,domain,ca,cb,1,colorspace,coordinates)
- extra_path_code, ignore_path = formatters["/%s sh Q"](name), true
- pdfcode("q /Pattern cs")
-end
-
-handlers[30] = function(s)
- linearshade("DeviceRGB", { tn(s[ 2]), tn(s[ 3]) },
- { tn(s[ 5]), tn(s[ 6]), tn(s[ 7]) }, { tn(s[10]), tn(s[11]), tn(s[12]) },
- { tn(s[ 8]), tn(s[ 9]), tn(s[13]), tn(s[14]) } )
-end
-
-handlers[31] = function(s)
- circularshade("DeviceRGB", { tn(s[ 2]), tn(s[ 3]) },
- { tn(s[ 5]), tn(s[ 6]), tn(s[ 7]) }, { tn(s[11]), tn(s[12]), tn(s[13]) },
- { tn(s[ 8]), tn(s[ 9]), tn(s[10]), tn(s[14]), tn(s[15]), tn(s[16]) } )
-end
-
-handlers[32] = function(s)
- linearshade("DeviceCMYK", { tn(s[ 2]), tn(s[ 3]) },
- { tn(s[ 5]), tn(s[ 6]), tn(s[ 7]), tn(s[ 8]) }, { tn(s[11]), tn(s[12]), tn(s[13]), tn(s[14]) },
- { tn(s[ 9]), tn(s[10]), tn(s[15]), tn(s[16]) } )
-end
-
-handlers[33] = function(s)
- circularshade("DeviceCMYK", { tn(s[ 2]), tn(s[ 3]) },
- { tn(s[ 5]), tn(s[ 6]), tn(s[ 7]), tn(s[ 8]) }, { tn(s[12]), tn(s[13]), tn(s[14]), tn(s[15]) },
- { tn(s[ 9]), tn(s[10]), tn(s[11]), tn(s[16]), tn(s[17]), tn(s[18]) } )
-end
-
-handlers[34] = function(s) -- todo (after further cleanup)
- linearshade("DeviceGray", { tn(s[ 2]), tn(s[ 3]) }, { 0 }, { 1 }, { tn(s[9]), tn(s[10]), tn(s[15]), tn(s[16]) } )
-end
-
-handlers[35] = function(s) -- todo (after further cleanup)
- circularshade("DeviceGray", { tn(s[ 2]), tn(s[ 3]) }, { 0 }, { 1 }, { tn(s[9]), tn(s[10]), tn(s[15]), tn(s[16]) } )
-end
-
--- not supported in mkiv , use mplib instead
-
-handlers[10] = function() report_mptopdf("skipping special %s",10) end
-handlers[20] = function() report_mptopdf("skipping special %s",20) end
-handlers[50] = function() report_mptopdf("skipping special %s",50) end
-
---end of not supported
-
-function mps.setrgbcolor(r,g,b) -- extra check
- r, g = tonumber(r), tonumber(g) -- needed when we use lpeg
- if r == 0.0123 and g < 0.1 then
- g, b = round(g*10000), round(b*10000)
- local s = specials[b]
- local h = round(s[#s])
- local handler = handlers[h]
- if handler then
- handler(s)
- else
- report_mptopdf("unknown special handler %s (1)",h)
- end
- elseif r == 0.123 and g < 0.1 then
- g, b = round(g*1000), round(b*1000)
- local s = specials[b]
- local h = round(s[#s])
- local handler = handlers[h]
- if handler then
- handler(s)
- else
- report_mptopdf("unknown special handler %s (2)",h)
- end
- else
- pdfcode(pdffinishtransparencycode())
- pdfcode(pdfrgbcode(mps.colormodel,r,g,b))
- end
-end
-
-function mps.setcmykcolor(c,m,y,k)
- pdfcode(pdffinishtransparencycode())
- pdfcode(pdfcmykcode(mps.colormodel,c,m,y,k))
-end
-
-function mps.setgray(s)
- pdfcode(pdffinishtransparencycode())
- pdfcode(pdfgraycode(mps.colormodel,s))
-end
-
-function mps.specials(version,signal,factor) -- 2.0 123 1000
-end
-
-function mps.special(...) -- 7 1 0.5 1 0 0 1 3
- local t = { ... }
- local n = tonumber(t[#t-1])
- specials[n] = t
-end
-
-function mps.begindata()
-end
-
-function mps.enddata()
-end
-
-function mps.showpage()
-end
-
--- lpeg parser
-
--- The lpeg based parser is rather optimized for the kind of output
--- that MetaPost produces. It's my first real lpeg code, which may
--- show. Because the parser binds to functions, we define it last.
-
-local lpegP, lpegR, lpegS, lpegC, lpegCc, lpegCs = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cc, lpeg.Cs
-
-local digit = lpegR("09")
-local eol = lpegS('\r\n')^1
-local sp = lpegP(' ')^1
-local space = lpegS(' \r\n')^1
-local number = lpegS('0123456789.-+')^1
-local nonspace = lpegP(1-lpegS(' \r\n'))^1
-
-local spec = digit^2 * lpegP("::::") * digit^2
-local text = lpegCc("{") * (
- lpegP("\\") * ( (digit * digit * digit) / function(n) return "c" .. tonumber(n,8) end) +
- lpegP(" ") / function(n) return "\\c32" end + -- never in new mp
- lpegP(1) / function(n) return "\\c" .. byte(n) end
- ) * lpegCc("}")
-local package = lpegCs(spec + text^0)
-
-function mps.fshow(str,font,scale) -- lpeg parser
- mps.textext(font,scale,lpegmatch(package,str))
-end
-
-local cnumber = lpegC(number)
-local cstring = lpegC(nonspace)
-
-local specials = (lpegP("%%MetaPostSpecials:") * sp * (cstring * sp^0)^0 * eol) / mps.specials
-local special = (lpegP("%%MetaPostSpecial:") * sp * (cstring * sp^0)^0 * eol) / mps.special
-local boundingbox = (lpegP("%%BoundingBox:") * sp * (cnumber * sp^0)^4 * eol) / mps.boundingbox
-local highresboundingbox = (lpegP("%%HiResBoundingBox:") * sp * (cnumber * sp^0)^4 * eol) / mps.boundingbox
-
-local setup = lpegP("%%BeginSetup") * (1 - lpegP("%%EndSetup") )^1
-local prolog = lpegP("%%BeginProlog") * (1 - lpegP("%%EndProlog"))^1
-local comment = lpegP('%')^1 * (1 - eol)^1
-
-local curveto = ((cnumber * sp)^6 * lpegP("curveto") ) / mps.curveto
-local lineto = ((cnumber * sp)^2 * lpegP("lineto") ) / mps.lineto
-local rlineto = ((cnumber * sp)^2 * lpegP("rlineto") ) / mps.rlineto
-local moveto = ((cnumber * sp)^2 * lpegP("moveto") ) / mps.moveto
-local setrgbcolor = ((cnumber * sp)^3 * lpegP("setrgbcolor") ) / mps.setrgbcolor
-local setcmykcolor = ((cnumber * sp)^4 * lpegP("setcmykcolor") ) / mps.setcmykcolor
-local setgray = ((cnumber * sp)^1 * lpegP("setgray") ) / mps.setgray
-local newpath = ( lpegP("newpath") ) / mps.newpath
-local closepath = ( lpegP("closepath") ) / mps.closepath
-local fill = ( lpegP("fill") ) / mps.fill
-local stroke = ( lpegP("stroke") ) / mps.stroke
-local clip = ( lpegP("clip") ) / mps.clip
-local both = ( lpegP("gsave fill grestore")) / mps.both
-local showpage = ( lpegP("showpage") )
-local setlinejoin = ((cnumber * sp)^1 * lpegP("setlinejoin") ) / mps.setlinejoin
-local setlinecap = ((cnumber * sp)^1 * lpegP("setlinecap") ) / mps.setlinecap
-local setmiterlimit = ((cnumber * sp)^1 * lpegP("setmiterlimit") ) / mps.setmiterlimit
-local gsave = ( lpegP("gsave") ) / mps.gsave
-local grestore = ( lpegP("grestore") ) / mps.grestore
-
-local setdash = (lpegP("[") * (cnumber * sp^0)^0 * lpegP("]") * sp * cnumber * sp * lpegP("setdash")) / mps.setdash
-local concat = (lpegP("[") * (cnumber * sp^0)^6 * lpegP("]") * sp * lpegP("concat") ) / mps.concat
-local scale = ( (cnumber * sp^0)^6 * sp * lpegP("concat") ) / mps.concat
-
-local fshow = (lpegP("(") * lpegC((1-lpegP(")"))^1) * lpegP(")") * space * cstring * space * cnumber * space * lpegP("fshow")) / mps.fshow
-local fshow = (lpegP("(") * lpegCs( ( lpegP("\\(")/"\\050" + lpegP("\\)")/"\\051" + (1-lpegP(")")) )^1 )
- * lpegP(")") * space * cstring * space * cnumber * space * lpegP("fshow")) / mps.fshow
-
-local setlinewidth_x = (lpegP("0") * sp * cnumber * sp * lpegP("dtransform truncate idtransform setlinewidth pop")) / mps.setlinewidth
-local setlinewidth_y = (cnumber * sp * lpegP("0 dtransform exch truncate exch idtransform pop setlinewidth") ) / mps.setlinewidth
-
-local c = ((cnumber * sp)^6 * lpegP("c") ) / mps.curveto -- ^6 very inefficient, ^1 ok too
-local l = ((cnumber * sp)^2 * lpegP("l") ) / mps.lineto
-local r = ((cnumber * sp)^2 * lpegP("r") ) / mps.rlineto
-local m = ((cnumber * sp)^2 * lpegP("m") ) / mps.moveto
-local vlw = ((cnumber * sp)^1 * lpegP("vlw")) / mps.setlinewidth
-local hlw = ((cnumber * sp)^1 * lpegP("hlw")) / mps.setlinewidth
-
-local R = ((cnumber * sp)^3 * lpegP("R") ) / mps.setrgbcolor
-local C = ((cnumber * sp)^4 * lpegP("C") ) / mps.setcmykcolor
-local G = ((cnumber * sp)^1 * lpegP("G") ) / mps.setgray
-
-local lj = ((cnumber * sp)^1 * lpegP("lj") ) / mps.setlinejoin
-local ml = ((cnumber * sp)^1 * lpegP("ml") ) / mps.setmiterlimit
-local lc = ((cnumber * sp)^1 * lpegP("lc") ) / mps.setlinecap
-
-local n = lpegP("n") / mps.newpath
-local p = lpegP("p") / mps.closepath
-local S = lpegP("S") / mps.stroke
-local F = lpegP("F") / mps.fill
-local B = lpegP("B") / mps.both
-local W = lpegP("W") / mps.clip
-local P = lpegP("P") / mps.showpage
-
-local q = lpegP("q") / mps.gsave
-local Q = lpegP("Q") / mps.grestore
-
-local sd = (lpegP("[") * (cnumber * sp^0)^0 * lpegP("]") * sp * cnumber * sp * lpegP("sd")) / mps.setdash
-local rd = ( lpegP("rd")) / mps.resetdash
-
-local s = ( (cnumber * sp^0)^2 * lpegP("s") ) / mps.scale
-local t = (lpegP("[") * (cnumber * sp^0)^6 * lpegP("]") * sp * lpegP("t") ) / mps.concat
-
--- experimental
-
-local preamble = (
- prolog + setup +
- boundingbox + highresboundingbox + specials + special +
- comment
-)
-
-local procset = (
- lj + ml + lc +
- c + l + m + n + p + r +
- R + C + G +
- S + F + B + W +
- vlw + hlw +
- Q + q +
- sd + rd +
- t + s +
- fshow +
- P
-)
-
-local verbose = (
- curveto + lineto + moveto + newpath + closepath + rlineto +
- setrgbcolor + setcmykcolor + setgray +
- setlinejoin + setmiterlimit + setlinecap +
- stroke + fill + clip + both +
- setlinewidth_x + setlinewidth_y +
- gsave + grestore +
- concat + scale +
- fshow +
- setdash + -- no resetdash
- showpage
-)
-
--- order matters in terms of speed / we could check for procset first
-
-local captures_old = ( space + verbose + preamble )^0
-local captures_new = ( space + verbose + procset + preamble )^0
-
-local function parse(m_data)
- if find(m_data,"%%%%BeginResource: procset mpost") then
- lpegmatch(captures_new,m_data)
- else
- lpegmatch(captures_old,m_data)
- end
-end
-
--- main converter
-
-local a_colorspace = attributes.private('colormodel')
-
-function mptopdf.convertmpstopdf(name)
- resetall()
- local ok, m_data, n = resolvers.loadbinfile(name, 'tex') -- we need a binary load !
- if ok then
- mps.colormodel = tex.attribute[a_colorspace]
- statistics.starttiming(mptopdf)
- mptopdf.nofconverted = mptopdf.nofconverted + 1
- pdfcode(formatters["\\letterpercent\\space mptopdf begin: n=%s, file=%s"](mptopdf.nofconverted,file.basename(name)))
- pdfcode("q 1 0 0 1 0 0 cm")
- parse(m_data)
- pdfcode(pdffinishtransparencycode())
- pdfcode("Q")
- pdfcode("\\letterpercent\\space mptopdf end")
- resetall()
- statistics.stoptiming(mptopdf)
- else
- report_mptopdf("file %a not found",name)
- end
-end
-
--- status info
-
-statistics.register("mps conversion time",function()
- local n = mptopdf.nofconverted
- if n > 0 then
- return format("%s seconds, %s conversions", statistics.elapsedtime(mptopdf),n)
- else
- return nil
- end
-end)
+if not modules then modules = { } end modules ['meta-pdf'] = {
+ version = 1.001,
+ comment = "companion to meta-pdf.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- Finally we used an optimized version. The test code can be found in
+-- meta-pdh.lua but since we no longer want to overload functions we use
+-- more locals now. This module keeps changing as it is also a testbed.
+--
+-- We can make it even more efficient if needed, but as we don't use this
+-- code often in \MKIV\ it makes no sense.
+
+local concat, unpack = table.concat, table.unpack
+local gsub, find, byte, gmatch, match = string.gsub, string.find, string.byte, string.gmatch, string.match
+local lpegmatch = lpeg.match
+local round = math.round
+local formatters, format = string.formatters, string.format
+
+local report_mptopdf = logs.reporter("graphics","mptopdf")
+
+local mplib, metapost, lpdf, context = mplib, metapost, lpdf, context
+
+local pdfrgbcode = lpdf.rgbcode
+local pdfcmykcode = lpdf.cmykcode
+local pdfgraycode = lpdf.graycode
+local pdfspotcode = lpdf.spotcode
+local pdftransparencycode = lpdf.transparencycode
+local pdffinishtransparencycode = lpdf.finishtransparencycode
+local pdfliteral = nodes.pool.pdfliteral
+
+metapost.mptopdf = metapost.mptopdf or { }
+local mptopdf = metapost.mptopdf
+
+mptopdf.nofconverted = 0
+
+local f_translate = formatters["1 0 0 0 1 %f %f cm"] -- no %s due to 1e-035 issues
+local f_concat = formatters["%f %f %f %f %f %f cm"] -- no %s due to 1e-035 issues
+
+local m_path, m_stack, m_texts, m_version, m_date, m_shortcuts = { }, { }, { }, 0, 0, false
+
+local m_stack_close, m_stack_path, m_stack_concat = false, { }, nil
+local extra_path_code, ignore_path = nil, false
+local specials = { }
+
+local function resetpath()
+ m_stack_close, m_stack_path, m_stack_concat = false, { }, nil
+end
+
+local function resetall()
+ m_path, m_stack, m_texts, m_version, m_shortcuts = { }, { }, { }, 0, false
+ extra_path_code, ignore_path = nil, false
+ specials = { }
+ resetpath()
+end
+
+resetall()
+
+-- -- this does not work as expected (displacement of text) beware, needs another
+-- -- comment hack
+--
+-- local function pdfcode(str)
+-- context(pdfliteral(str))
+-- end
+
+local pdfcode = context.pdfliteral
+
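+-- When one of the shading handlers further down has set ignore_path, the next
+-- painting operator is replaced: the pending path is closed and used as a clip
+-- ("h W n") and the stored "/MpsSh<n> sh Q" literal is flushed instead.
+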
+local function mpscode(str)
+ if ignore_path then
+ pdfcode("h W n")
+ if extra_path_code then
+ pdfcode(extra_path_code)
+ extra_path_code = nil
+ end
+ ignore_path = false
+ else
+ pdfcode(str)
+ end
+end
+
+-- auxiliary functions
+
+local function flushconcat()
+ if m_stack_concat then
+ mpscode(f_concat(unpack(m_stack_concat)))
+ m_stack_concat = nil
+ end
+end
+
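+-- flushpath undoes a pending concat: with matrix [sx rx ry sy tx ty] and
+-- determinant d = sx*sy - rx*ry, every recorded point (px,py) is mapped back
+-- through the inverse to ((sy*(px-tx)-ry*(py-ty))/d,(sx*(py-ty)-rx*(px-tx))/d),
+-- so the path comes out right once the "cm" emitted by flushconcat is applied.
+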
+local function flushpath(cmd)
+ if #m_stack_path > 0 then
+ local path = { }
+ if m_stack_concat then
+ local sx, sy = m_stack_concat[1], m_stack_concat[4]
+ local rx, ry = m_stack_concat[2], m_stack_concat[3]
+ local tx, ty = m_stack_concat[5], m_stack_concat[6]
+ local d = (sx*sy) - (rx*ry)
+ for k=1,#m_stack_path do
+ local v = m_stack_path[k]
+ local px, py = v[1], v[2] ; v[1], v[2] = (sy*(px-tx)-ry*(py-ty))/d, (sx*(py-ty)-rx*(px-tx))/d -- mpconcat(v[1],v[2])
+ if #v == 7 then
+ local px, py = v[3], v[4] ; v[3], v[4] = (sy*(px-tx)-ry*(py-ty))/d, (sx*(py-ty)-rx*(px-tx))/d -- mpconcat(v[3],v[4])
+ local px, py = v[5], v[6] ; v[5], v[6] = (sy*(px-tx)-ry*(py-ty))/d, (sx*(py-ty)-rx*(px-tx))/d -- mpconcat(v[5],v[6])
+ end
+ path[k] = concat(v," ")
+ end
+ else
+ for k=1,#m_stack_path do
+ path[k] = concat(m_stack_path[k]," ")
+ end
+ end
+ flushconcat()
+ pdfcode(concat(path," "))
+ if m_stack_close then
+ mpscode("h " .. cmd)
+ else
+ mpscode(cmd)
+ end
+ end
+ resetpath()
+end
+
+-- mp interface
+
+local mps = { }
+
+function mps.creator(a, b, c)
+ m_version = tonumber(b)
+end
+
+function mps.creationdate(a)
+ m_date = a
+end
+
+function mps.newpath()
+ m_stack_path = { }
+end
+
+function mps.boundingbox(llx, lly, urx, ury)
+ context.setMPboundingbox(llx,lly,urx,ury)
+end
+
+function mps.moveto(x,y)
+ m_stack_path[#m_stack_path+1] = { x, y, "m" }
+end
+
+function mps.curveto(ax, ay, bx, by, cx, cy)
+ m_stack_path[#m_stack_path+1] = { ax, ay, bx, by, cx, cy, "c" }
+end
+
+function mps.lineto(x,y)
+ m_stack_path[#m_stack_path+1] = { x, y, "l" }
+end
+
+function mps.rlineto(x,y)
+ local dx, dy = 0, 0
+ local topofstack = #m_stack_path
+ if topofstack > 0 then
+ local msp = m_stack_path[topofstack]
+ dx = msp[1]
+ dy = msp[2]
+ end
+ m_stack_path[topofstack+1] = {dx,dy,"l"}
+end
+
+function mps.translate(tx,ty)
+ mpscode(f_translate(tx,ty))
+end
+
+function mps.scale(sx,sy)
+ m_stack_concat = {sx,0,0,sy,0,0}
+end
+
+function mps.concat(sx, rx, ry, sy, tx, ty)
+ m_stack_concat = {sx,rx,ry,sy,tx,ty}
+end
+
+function mps.setlinejoin(d)
+ mpscode(d .. " j")
+end
+
+function mps.setlinecap(d)
+ mpscode(d .. " J")
+end
+
+function mps.setmiterlimit(d)
+ mpscode(d .. " M")
+end
+
+function mps.gsave()
+ mpscode("q")
+end
+
+function mps.grestore()
+ mpscode("Q")
+end
+
+function mps.setdash(...) -- can be made faster, operate on t = { ... }
+ local n = select("#",...)
+ mpscode("[" .. concat({...}," ",1,n-1) .. "] " .. select(n,...) .. " d")
+ -- mpscode("[" .. concat({select(1,n-1)}," ") .. "] " .. select(n,...) .. " d")
+end
+
+function mps.resetdash()
+ mpscode("[ ] 0 d")
+end
+
+function mps.setlinewidth(d)
+ mpscode(d .. " w")
+end
+
+function mps.closepath()
+ m_stack_close = true
+end
+
+function mps.fill()
+ flushpath('f')
+end
+
+function mps.stroke()
+ flushpath('S')
+end
+
+function mps.both()
+ flushpath('B')
+end
+
+function mps.clip()
+ flushpath('W n')
+end
+
+function mps.textext(font, scale, str) -- old parser
+ local dx, dy = 0, 0
+ if #m_stack_path > 0 then
+ dx, dy = m_stack_path[1][1], m_stack_path[1][2]
+ end
+ flushconcat()
+ context.MPtextext(font,scale,str,dx,dy)
+ resetpath()
+end
+
+local handlers = { }
+
+handlers[1] = function(s)
+ pdfcode(pdffinishtransparencycode())
+ pdfcode(pdfcmykcode(mps.colormodel,s[3],s[4],s[5],s[6]))
+end
+handlers[2] = function(s)
+ pdfcode(pdffinishtransparencycode())
+ pdfcode(pdfspotcode(mps.colormodel,s[3],s[4],s[5],s[6]))
+end
+handlers[3] = function(s)
+ pdfcode(pdfrgbcode(mps.colormodel,s[4],s[5],s[6]))
+ pdfcode(pdftransparencycode(s[2],s[3]))
+end
+handlers[4] = function(s)
+ pdfcode(pdfcmykcode(mps.colormodel,s[4],s[5],s[6],s[7]))
+ pdfcode(pdftransparencycode(s[2],s[3]))
+end
+handlers[5] = function(s)
+ pdfcode(pdfspotcode(mps.colormodel,s[4],s[5],s[6],s[7]))
+ pdfcode(pdftransparencycode(s[2],s[3]))
+end
+
+-- todo: color conversion
+
+local nofshades, tn = 0, tonumber
+
+local function linearshade(colorspace,domain,ca,cb,coordinates)
+ pdfcode(pdffinishtransparencycode())
+ nofshades = nofshades + 1
+ local name = formatters["MpsSh%s"](nofshades)
+ lpdf.linearshade(name,domain,ca,cb,1,colorspace,coordinates)
+ extra_path_code, ignore_path = formatters["/%s sh Q"](name), true
+ pdfcode("q /Pattern cs")
+end
+
+local function circularshade(colorspace,domain,ca,cb,coordinates)
+ pdfcode(pdffinishtransparencycode())
+ nofshades = nofshades + 1
+ local name = formatters["MpsSh%s"](nofshades)
+ lpdf.circularshade(name,domain,ca,cb,1,colorspace,coordinates)
+ extra_path_code, ignore_path = formatters["/%s sh Q"](name), true
+ pdfcode("q /Pattern cs")
+end
+
+handlers[30] = function(s)
+ linearshade("DeviceRGB", { tn(s[ 2]), tn(s[ 3]) },
+ { tn(s[ 5]), tn(s[ 6]), tn(s[ 7]) }, { tn(s[10]), tn(s[11]), tn(s[12]) },
+ { tn(s[ 8]), tn(s[ 9]), tn(s[13]), tn(s[14]) } )
+end
+
+handlers[31] = function(s)
+ circularshade("DeviceRGB", { tn(s[ 2]), tn(s[ 3]) },
+ { tn(s[ 5]), tn(s[ 6]), tn(s[ 7]) }, { tn(s[11]), tn(s[12]), tn(s[13]) },
+ { tn(s[ 8]), tn(s[ 9]), tn(s[10]), tn(s[14]), tn(s[15]), tn(s[16]) } )
+end
+
+handlers[32] = function(s)
+ linearshade("DeviceCMYK", { tn(s[ 2]), tn(s[ 3]) },
+ { tn(s[ 5]), tn(s[ 6]), tn(s[ 7]), tn(s[ 8]) }, { tn(s[11]), tn(s[12]), tn(s[13]), tn(s[14]) },
+ { tn(s[ 9]), tn(s[10]), tn(s[15]), tn(s[16]) } )
+end
+
+handlers[33] = function(s)
+ circularshade("DeviceCMYK", { tn(s[ 2]), tn(s[ 3]) },
+ { tn(s[ 5]), tn(s[ 6]), tn(s[ 7]), tn(s[ 8]) }, { tn(s[12]), tn(s[13]), tn(s[14]), tn(s[15]) },
+ { tn(s[ 9]), tn(s[10]), tn(s[11]), tn(s[16]), tn(s[17]), tn(s[18]) } )
+end
+
+handlers[34] = function(s) -- todo (after further cleanup)
+ linearshade("DeviceGray", { tn(s[ 2]), tn(s[ 3]) }, { 0 }, { 1 }, { tn(s[9]), tn(s[10]), tn(s[15]), tn(s[16]) } )
+end
+
+handlers[35] = function(s) -- todo (after further cleanup)
+ circularshade("DeviceGray", { tn(s[ 2]), tn(s[ 3]) }, { 0 }, { 1 }, { tn(s[9]), tn(s[10]), tn(s[15]), tn(s[16]) } )
+end
+
+-- not supported in mkiv, use mplib instead
+
+handlers[10] = function() report_mptopdf("skipping special %s",10) end
+handlers[20] = function() report_mptopdf("skipping special %s",20) end
+handlers[50] = function() report_mptopdf("skipping special %s",50) end
+
+-- end of not supported
+
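+-- The magic values below implement the specials channel: the old mps output
+-- encodes a special as an rgb color where r equals 0.0123 (or 0.123) and g
+-- stays below 0.1, while b*10000 (resp. b*1000) is the key of a special that
+-- was stored earlier by mps.special (keyed on its next-to-last number); the
+-- last number of that special then selects the handler (colors,
+-- transparencies, shades).
+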
+function mps.setrgbcolor(r,g,b) -- extra check
+ r, g = tonumber(r), tonumber(g) -- needed when we use lpeg
+ if r == 0.0123 and g < 0.1 then
+ g, b = round(g*10000), round(b*10000)
+ local s = specials[b]
+ local h = round(s[#s])
+ local handler = handlers[h]
+ if handler then
+ handler(s)
+ else
+ report_mptopdf("unknown special handler %s (1)",h)
+ end
+ elseif r == 0.123 and g < 0.1 then
+ g, b = round(g*1000), round(b*1000)
+ local s = specials[b]
+ local h = round(s[#s])
+ local handler = handlers[h]
+ if handler then
+ handler(s)
+ else
+ report_mptopdf("unknown special handler %s (2)",h)
+ end
+ else
+ pdfcode(pdffinishtransparencycode())
+ pdfcode(pdfrgbcode(mps.colormodel,r,g,b))
+ end
+end
+
+function mps.setcmykcolor(c,m,y,k)
+ pdfcode(pdffinishtransparencycode())
+ pdfcode(pdfcmykcode(mps.colormodel,c,m,y,k))
+end
+
+function mps.setgray(s)
+ pdfcode(pdffinishtransparencycode())
+ pdfcode(pdfgraycode(mps.colormodel,s))
+end
+
+function mps.specials(version,signal,factor) -- 2.0 123 1000
+end
+
+function mps.special(...) -- 7 1 0.5 1 0 0 1 3
+ local t = { ... }
+ local n = tonumber(t[#t-1])
+ specials[n] = t
+end
+
+function mps.begindata()
+end
+
+function mps.enddata()
+end
+
+function mps.showpage()
+end
+
+-- lpeg parser
+
+-- The lpeg based parser is rather optimized for the kind of output
+-- that MetaPost produces. It's my first real lpeg code, which may
+-- show. Because the parser binds to functions, we define it last.
+
+local lpegP, lpegR, lpegS, lpegC, lpegCc, lpegCs = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cc, lpeg.Cs
+
+local digit = lpegR("09")
+local eol = lpegS('\r\n')^1
+local sp = lpegP(' ')^1
+local space = lpegS(' \r\n')^1
+local number = lpegS('0123456789.-+')^1
+local nonspace = lpegP(1-lpegS(' \r\n'))^1
+
+local spec = digit^2 * lpegP("::::") * digit^2
+local text = lpegCc("{") * (
+ lpegP("\\") * ( (digit * digit * digit) / function(n) return "c" .. tonumber(n,8) end) +
+ lpegP(" ") / function(n) return "\\c32" end + -- never in new mp
+ lpegP(1) / function(n) return "\\c" .. byte(n) end
+ ) * lpegCc("}")
+local package = lpegCs(spec + text^0)
+
+function mps.fshow(str,font,scale) -- lpeg parser
+ mps.textext(font,scale,lpegmatch(package,str))
+end
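+
+-- Illustration (not part of the module): the package pattern rewrites an fshow
+-- string character by character, so "Ab" becomes "{\c65}{\c98}" and an octal
+-- escape like \101 becomes "{\c65}", while strings of the form dd::::dd pass
+-- through untouched.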
+
+local cnumber = lpegC(number)
+local cstring = lpegC(nonspace)
+
+local specials = (lpegP("%%MetaPostSpecials:") * sp * (cstring * sp^0)^0 * eol) / mps.specials
+local special = (lpegP("%%MetaPostSpecial:") * sp * (cstring * sp^0)^0 * eol) / mps.special
+local boundingbox = (lpegP("%%BoundingBox:") * sp * (cnumber * sp^0)^4 * eol) / mps.boundingbox
+local highresboundingbox = (lpegP("%%HiResBoundingBox:") * sp * (cnumber * sp^0)^4 * eol) / mps.boundingbox
+
+local setup = lpegP("%%BeginSetup") * (1 - lpegP("%%EndSetup") )^1
+local prolog = lpegP("%%BeginProlog") * (1 - lpegP("%%EndProlog"))^1
+local comment = lpegP('%')^1 * (1 - eol)^1
+
+local curveto = ((cnumber * sp)^6 * lpegP("curveto") ) / mps.curveto
+local lineto = ((cnumber * sp)^2 * lpegP("lineto") ) / mps.lineto
+local rlineto = ((cnumber * sp)^2 * lpegP("rlineto") ) / mps.rlineto
+local moveto = ((cnumber * sp)^2 * lpegP("moveto") ) / mps.moveto
+local setrgbcolor = ((cnumber * sp)^3 * lpegP("setrgbcolor") ) / mps.setrgbcolor
+local setcmykcolor = ((cnumber * sp)^4 * lpegP("setcmykcolor") ) / mps.setcmykcolor
+local setgray = ((cnumber * sp)^1 * lpegP("setgray") ) / mps.setgray
+local newpath = ( lpegP("newpath") ) / mps.newpath
+local closepath = ( lpegP("closepath") ) / mps.closepath
+local fill = ( lpegP("fill") ) / mps.fill
+local stroke = ( lpegP("stroke") ) / mps.stroke
+local clip = ( lpegP("clip") ) / mps.clip
+local both = ( lpegP("gsave fill grestore")) / mps.both
+local showpage = ( lpegP("showpage") )
+local setlinejoin = ((cnumber * sp)^1 * lpegP("setlinejoin") ) / mps.setlinejoin
+local setlinecap = ((cnumber * sp)^1 * lpegP("setlinecap") ) / mps.setlinecap
+local setmiterlimit = ((cnumber * sp)^1 * lpegP("setmiterlimit") ) / mps.setmiterlimit
+local gsave = ( lpegP("gsave") ) / mps.gsave
+local grestore = ( lpegP("grestore") ) / mps.grestore
+
+local setdash = (lpegP("[") * (cnumber * sp^0)^0 * lpegP("]") * sp * cnumber * sp * lpegP("setdash")) / mps.setdash
+local concat = (lpegP("[") * (cnumber * sp^0)^6 * lpegP("]") * sp * lpegP("concat") ) / mps.concat
+local scale = ( (cnumber * sp^0)^6 * sp * lpegP("concat") ) / mps.concat
+
+local fshow = (lpegP("(") * lpegC((1-lpegP(")"))^1) * lpegP(")") * space * cstring * space * cnumber * space * lpegP("fshow")) / mps.fshow
+local fshow = (lpegP("(") * lpegCs( ( lpegP("\\(")/"\\050" + lpegP("\\)")/"\\051" + (1-lpegP(")")) )^1 )
+ * lpegP(")") * space * cstring * space * cnumber * space * lpegP("fshow")) / mps.fshow
+
+local setlinewidth_x = (lpegP("0") * sp * cnumber * sp * lpegP("dtransform truncate idtransform setlinewidth pop")) / mps.setlinewidth
+local setlinewidth_y = (cnumber * sp * lpegP("0 dtransform exch truncate exch idtransform pop setlinewidth") ) / mps.setlinewidth
+
+local c = ((cnumber * sp)^6 * lpegP("c") ) / mps.curveto -- ^6 very inefficient, ^1 ok too
+local l = ((cnumber * sp)^2 * lpegP("l") ) / mps.lineto
+local r = ((cnumber * sp)^2 * lpegP("r") ) / mps.rlineto
+local m = ((cnumber * sp)^2 * lpegP("m") ) / mps.moveto
+local vlw = ((cnumber * sp)^1 * lpegP("vlw")) / mps.setlinewidth
+local hlw = ((cnumber * sp)^1 * lpegP("hlw")) / mps.setlinewidth
+
+local R = ((cnumber * sp)^3 * lpegP("R") ) / mps.setrgbcolor
+local C = ((cnumber * sp)^4 * lpegP("C") ) / mps.setcmykcolor
+local G = ((cnumber * sp)^1 * lpegP("G") ) / mps.setgray
+
+local lj = ((cnumber * sp)^1 * lpegP("lj") ) / mps.setlinejoin
+local ml = ((cnumber * sp)^1 * lpegP("ml") ) / mps.setmiterlimit
+local lc = ((cnumber * sp)^1 * lpegP("lc") ) / mps.setlinecap
+
+local n = lpegP("n") / mps.newpath
+local p = lpegP("p") / mps.closepath
+local S = lpegP("S") / mps.stroke
+local F = lpegP("F") / mps.fill
+local B = lpegP("B") / mps.both
+local W = lpegP("W") / mps.clip
+local P = lpegP("P") / mps.showpage
+
+local q = lpegP("q") / mps.gsave
+local Q = lpegP("Q") / mps.grestore
+
+local sd = (lpegP("[") * (cnumber * sp^0)^0 * lpegP("]") * sp * cnumber * sp * lpegP("sd")) / mps.setdash
+local rd = ( lpegP("rd")) / mps.resetdash
+
+local s = ( (cnumber * sp^0)^2 * lpegP("s") ) / mps.scale
+local t = (lpegP("[") * (cnumber * sp^0)^6 * lpegP("]") * sp * lpegP("t") ) / mps.concat
+
+-- experimental
+
+local preamble = (
+ prolog + setup +
+ boundingbox + highresboundingbox + specials + special +
+ comment
+)
+
+local procset = (
+ lj + ml + lc +
+ c + l + m + n + p + r +
+ R + C + G +
+ S + F + B + W +
+ vlw + hlw +
+ Q + q +
+ sd + rd +
+ t + s +
+ fshow +
+ P
+)
+
+local verbose = (
+ curveto + lineto + moveto + newpath + closepath + rlineto +
+ setrgbcolor + setcmykcolor + setgray +
+ setlinejoin + setmiterlimit + setlinecap +
+ stroke + fill + clip + both +
+ setlinewidth_x + setlinewidth_y +
+ gsave + grestore +
+ concat + scale +
+ fshow +
+ setdash + -- no resetdash
+ showpage
+)
+
+-- order matters in terms of speed / we could check for procset first
+
+local captures_old = ( space + verbose + preamble )^0
+local captures_new = ( space + verbose + procset + preamble )^0
+
+local function parse(m_data)
+ if find(m_data,"%%%%BeginResource: procset mpost") then
+ lpegmatch(captures_new,m_data)
+ else
+ lpegmatch(captures_old,m_data)
+ end
+end
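+
+-- parse sniffs the preamble: output that declares "%%BeginResource: procset
+-- mpost" uses the abbreviated one and two letter operators (c, l, m, R, S,
+-- ...), so the grammar that includes the procset shortcuts is used; otherwise
+-- only the verbose operators are matched.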
+
+-- main converter
+
+local a_colorspace = attributes.private('colormodel')
+
+function mptopdf.convertmpstopdf(name)
+ resetall()
+ local ok, m_data, n = resolvers.loadbinfile(name, 'tex') -- we need a binary load !
+ if ok then
+ mps.colormodel = tex.attribute[a_colorspace]
+ statistics.starttiming(mptopdf)
+ mptopdf.nofconverted = mptopdf.nofconverted + 1
+ pdfcode(formatters["\\letterpercent\\space mptopdf begin: n=%s, file=%s"](mptopdf.nofconverted,file.basename(name)))
+ pdfcode("q 1 0 0 1 0 0 cm")
+ parse(m_data)
+ pdfcode(pdffinishtransparencycode())
+ pdfcode("Q")
+ pdfcode("\\letterpercent\\space mptopdf end")
+ resetall()
+ statistics.stoptiming(mptopdf)
+ else
+ report_mptopdf("file %a not found",name)
+ end
+end
+
+-- status info
+
+statistics.register("mps conversion time",function()
+ local n = mptopdf.nofconverted
+ if n > 0 then
+ return format("%s seconds, %s conversions", statistics.elapsedtime(mptopdf),n)
+ else
+ return nil
+ end
+end)
diff --git a/tex/context/base/meta-pdh.lua b/tex/context/base/meta-pdh.lua
index 5040715c4..10fbad141 100644
--- a/tex/context/base/meta-pdh.lua
+++ b/tex/context/base/meta-pdh.lua
@@ -1,610 +1,610 @@
-if not modules then modules = { } end modules ['meta-pdf'] = {
- version = 1.001,
- comment = "companion to meta-pdf.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-if true then
- return -- or os.exit()
-end
-
--- This file contains the history of the converter. We keep it around as it
--- relates to the development of luatex.
-
--- This is the third version. Version 1 converted to Lua code,
--- version 2 gsubbed the file into TeX code, and version 3 uses
--- the new lpeg functionality and streams the result into TeX.
-
--- We will move old stuff to edu.
-
---~ old lpeg 0.4 lpeg 0.5
---~ 100 times test graphic 2.45 (T:1.07) 0.72 (T:0.24) 0.580 (0.560 no table) -- 0.54 optimized for one space (T:0.19)
---~ 100 times big graphic 10.44 4.30/3.35 nogb 2.914 (2.050 no table) -- 1.99 optimized for one space (T:0.85)
---~ 500 times test graphic T:1.29 T:1.16 (T:1.10 no table) -- T:1.10
-
--- only needed for mp output on disk
-
-local concat, format, find, gsub, gmatch = table.concat, string.format, string.find, string.gsub, string.gmatch
-local tostring, tonumber, select = tostring, tonumber, select
-local lpegmatch = lpeg.match
-
-local metapost = metapost
-
-metapost.mptopdf = metapost.mptopdf or { }
-local mptopdf = metapost.mptopdf
-
-mptopdf.parsers = { }
-mptopdf.parser = 'none'
-mptopdf.nofconverted = 0
-
-function mptopdf.reset()
- mptopdf.data = ""
- mptopdf.path = { }
- mptopdf.stack = { }
- mptopdf.texts = { }
- mptopdf.version = 0
- mptopdf.shortcuts = false
- mptopdf.resetpath()
-end
-
-function mptopdf.resetpath()
- mptopdf.stack.close = false
- mptopdf.stack.path = { }
- mptopdf.stack.concat = nil
- mptopdf.stack.special = false
-end
-
-mptopdf.reset()
-
-function mptopdf.parsers.none()
- -- no parser set
-end
-
-function mptopdf.parse()
- mptopdf.parsers[mptopdf.parser]()
-end
-
--- old code
-
-mptopdf.steps = { }
-
-mptopdf.descapes = {
- ['('] = "\\\\char40 ",
- [')'] = "\\\\char41 ",
- ['"'] = "\\\\char92 "
-}
-
-function mptopdf.descape(str)
- str = gsub(str,"\\(%d%d%d)",function(n)
- return "\\char" .. tonumber(n,8) .. " "
- end)
- return gsub(str,"\\([%(%)\\])",mptopdf.descapes)
-end
-
-function mptopdf.steps.descape(str)
- str = gsub(str,"\\(%d%d%d)",function(n)
- return "\\\\char" .. tonumber(n,8) .. " "
- end)
- return gsub(str,"\\([%(%)\\])",mptopdf.descapes)
-end
-
-function mptopdf.steps.strip() -- .3 per expr
- mptopdf.data = gsub(mptopdf.data,"^(.-)%%+Page:.-%c+(.*)%s+%a+%s+%%+EOF.*$", function(preamble, graphic)
- local bbox = "0 0 0 0"
- for b in gmatch(preamble,"%%%%%a+oundingBox: +(.-)%c+") do
- bbox = b
- end
- local name, version = string.match(preamble,"%%%%Creator: +(.-) +(.-) ")
- mptopdf.version = tostring(version or "0")
- if find(preamble,"/hlw{0 dtransform") then
- mptopdf.shortcuts = true
- end
- -- the boundingbox specification needs to come before data, well, not really
- return bbox .. " boundingbox\n" .. "\nbegindata\n" .. graphic .. "\nenddata\n"
- end, 1)
- mptopdf.data = gsub(mptopdf.data,"%%%%MetaPostSpecials: +(.-)%c+", "%1 specials\n", 1)
- mptopdf.data = gsub(mptopdf.data,"%%%%MetaPostSpecial: +(.-)%c+", "%1 special\n")
- mptopdf.data = gsub(mptopdf.data,"%%.-%c+", "")
-end
-
-function mptopdf.steps.cleanup()
- if not mptopdf.shortcuts then
- mptopdf.data = gsub(mptopdf.data,"gsave%s+fill%s+grestore%s+stroke", "both")
- mptopdf.data = gsub(mptopdf.data,"([%d%.]+)%s+([%d%.]+)%s+dtransform%s+exch%s+truncate%s+exch%s+idtransform%s+pop%s+setlinewidth", function(wx,wy)
- if tonumber(wx) > 0 then return wx .. " setlinewidth" else return wy .. " setlinewidth" end
- end)
- mptopdf.data = gsub(mptopdf.data,"([%d%.]+)%s+([%d%.]+)%s+dtransform%s+truncate%s+idtransform%s+setlinewidth%s+pop", function(wx,wy)
- if tonumber(wx) > 0 then return wx .. " setlinewidth" else return wy .. " setlinewidth" end
- end)
- end
-end
-
-function mptopdf.steps.convert()
- mptopdf.data = gsub(mptopdf.data,"%c%((.-)%) (.-) (.-) fshow", function(str,font,scale)
- mptopdf.texts[#mptopdf.texts+1] = {mptopdf.steps.descape(str), font, scale}
- return "\n" .. #mptopdf.texts .. " textext"
- end)
- mptopdf.data = gsub(mptopdf.data,"%[%s*(.-)%s*%]", function(str)
- return gsub(str,"%s+"," ")
- end)
- local t
- mptopdf.data = gsub(mptopdf.data,"%s*([^%a]-)%s*(%a+)", function(args,cmd)
- if cmd == "textext" then
- t = mptopdf.texts[tonumber(args)]
- return "metapost.mps.textext(" .. "\"" .. t[2] .. "\"," .. t[3] .. ",\"" .. t[1] .. "\")\n"
- else
- return "metapost.mps." .. cmd .. "(" .. gsub(args," +",",") .. ")\n"
- end
- end)
-end
-
-function mptopdf.steps.process()
- assert(loadstring(mptopdf.data))() -- () runs the loaded chunk
-end
-
-function mptopdf.parsers.gsub()
- mptopdf.steps.strip()
- mptopdf.steps.cleanup()
- mptopdf.steps.convert()
- mptopdf.steps.process()
-end
-
--- end of old code
-
--- from lua to tex
-
-function mptopdf.pdfcode(str)
- context.pdfliteral(str) -- \\MPScode
-end
-
-function mptopdf.texcode(str)
- context(str)
-end
-
--- auxiliary functions
-
-function mptopdf.flushconcat()
- if mptopdf.stack.concat then
- mptopdf.pdfcode(concat(mptopdf.stack.concat," ") .. " cm")
- mptopdf.stack.concat = nil
- end
-end
-
-function mptopdf.flushpath(cmd)
- -- faster: no local function and loop
- if #mptopdf.stack.path > 0 then
- local path = { }
- if mptopdf.stack.concat then
- local sx, sy = mptopdf.stack.concat[1], mptopdf.stack.concat[4]
- local rx, ry = mptopdf.stack.concat[2], mptopdf.stack.concat[3]
- local tx, ty = mptopdf.stack.concat[5], mptopdf.stack.concat[6]
- local d = (sx*sy) - (rx*ry)
- local function mpconcat(px, py)
- return (sy*(px-tx)-ry*(py-ty))/d, (sx*(py-ty)-rx*(px-tx))/d
- end
- local stackpath = mptopdf.stack.path
- for k=1,#stackpath do
- local v = stackpath[k]
- v[1],v[2] = mpconcat(v[1],v[2])
- if #v == 7 then
- v[3],v[4] = mpconcat(v[3],v[4])
- v[5],v[6] = mpconcat(v[5],v[6])
- end
- path[#path+1] = concat(v," ")
- end
- else
- local stackpath = mptopdf.stack.path
- for k=1,#stackpath do
- path[#path+1] = concat(stackpath[k]," ")
- end
- end
- mptopdf.flushconcat()
- mptopdf.texcode("\\MPSpath{" .. concat(path," ") .. "}")
- if mptopdf.stack.close then
- mptopdf.texcode("\\MPScode{h " .. cmd .. "}")
- else
- mptopdf.texcode("\\MPScode{" .. cmd .."}")
- end
- end
- mptopdf.resetpath()
-end
-
-function mptopdf.loaded(name)
- local ok, n
- mptopdf.reset()
- ok, mptopdf.data, n = resolvers.loadbinfile(name, 'tex') -- we need a binary load !
- return ok
-end
-
-if not mptopdf.parse then
- function mptopdf.parse() end -- forward declaration
-end
-
-function mptopdf.convertmpstopdf(name)
- if mptopdf.loaded(name) then
- mptopdf.nofconverted = mptopdf.nofconverted + 1
- statistics.starttiming(mptopdf)
- mptopdf.parse()
- mptopdf.reset()
- statistics.stoptiming(mptopdf)
- else
- context("file " .. name .. " not found")
- end
-end
-
--- mp interface
-
-metapost.mps = metapost.mps or { }
-local mps = metapost.mps or { }
-
-function mps.creator(a, b, c)
- mptopdf.version = tonumber(b)
-end
-
-function mps.creationdate(a)
- mptopdf.date = a
-end
-
-function mps.newpath()
- mptopdf.stack.path = { }
-end
-
-function mps.boundingbox(llx, lly, urx, ury)
- mptopdf.texcode("\\MPSboundingbox{" .. llx .. "}{" .. lly .. "}{" .. urx .. "}{" .. ury .. "}")
-end
-
-function mps.moveto(x,y)
- mptopdf.stack.path[#mptopdf.stack.path+1] = {x,y,"m"}
-end
-
-function mps.curveto(ax, ay, bx, by, cx, cy)
- mptopdf.stack.path[#mptopdf.stack.path+1] = {ax,ay,bx,by,cx,cy,"c"}
-end
-
-function mps.lineto(x,y)
- mptopdf.stack.path[#mptopdf.stack.path+1] = {x,y,"l"}
-end
-
-function mps.rlineto(x,y)
- local dx, dy = 0, 0
- if #mptopdf.stack.path > 0 then
- dx, dy = mptopdf.stack.path[#mptopdf.stack.path][1], mptopdf.stack.path[#mptopdf.stack.path][2]
- end
- mptopdf.stack.path[#mptopdf.stack.path+1] = {dx,dy,"l"}
-end
-
-function mps.translate(tx,ty)
- mptopdf.pdfcode("1 0 0 0 1 " .. tx .. " " .. ty .. " cm")
-end
-
-function mps.scale(sx,sy)
- mptopdf.stack.concat = {sx,0,0,sy,0,0}
-end
-
-function mps.concat(sx, rx, ry, sy, tx, ty)
- mptopdf.stack.concat = {sx,rx,ry,sy,tx,ty}
-end
-
-function mps.setlinejoin(d)
- mptopdf.pdfcode(d .. " j")
-end
-
-function mps.setlinecap(d)
- mptopdf.pdfcode(d .. " J")
-end
-
-function mps.setmiterlimit(d)
- mptopdf.pdfcode(d .. " M")
-end
-
-function mps.gsave()
- mptopdf.pdfcode("q")
-end
-
-function mps.grestore()
- mptopdf.pdfcode("Q")
-end
-
-function mps.setdash(...)
- local n = select("#",...)
- mptopdf.pdfcode("[" .. concat({...}," ",1,n-1) .. "] " .. select(n,...) .. " d")
-end
-
-function mps.resetdash()
- mptopdf.pdfcode("[ ] 0 d")
-end
-
-function mps.setlinewidth(d)
- mptopdf.pdfcode(d .. " w")
-end
-
-function mps.closepath()
- mptopdf.stack.close = true
-end
-
-function mps.fill()
- mptopdf.flushpath('f')
-end
-
-function mps.stroke()
- mptopdf.flushpath('S')
-end
-
-function mps.both()
- mptopdf.flushpath('B')
-end
-
-function mps.clip()
- mptopdf.flushpath('W n')
-end
-
-function mps.textext(font, scale, str) -- old parser
- local dx, dy = 0, 0
- if #mptopdf.stack.path > 0 then
- dx, dy = mptopdf.stack.path[1][1], mptopdf.stack.path[1][2]
- end
- mptopdf.flushconcat()
- mptopdf.texcode("\\MPStextext{"..font.."}{"..scale.."}{"..str.."}{"..dx.."}{"..dy.."}")
- mptopdf.resetpath()
-end
-
---~ function mps.handletext(font,scale,str,dx,dy)
---~ local one, two = string.match(str, "^(%d+)::::(%d+)")
---~ if one and two then
---~ mptopdf.texcode("\\MPTOPDFtextext{"..font.."}{"..scale.."}{"..one.."}{"..two.."}{"..dx.."}{"..dy.."}")
---~ else
---~ mptopdf.texcode("\\MPTOPDFtexcode{"..font.."}{"..scale.."}{"..str.."}{"..dx.."}{"..dy.."}")
---~ end
---~ end
-
-function mps.setrgbcolor(r,g,b) -- extra check
- r, g = tonumber(r), tonumber(g) -- needed when we use lpeg
- if r == 0.0123 and g < 0.1 then
- mptopdf.texcode("\\MPSspecial{" .. g*10000 .. "}{" .. b*10000 .. "}")
- elseif r == 0.123 and g < 0.1 then
- mptopdf.texcode("\\MPSspecial{" .. g* 1000 .. "}{" .. b* 1000 .. "}")
- else
- mptopdf.texcode("\\MPSrgb{" .. r .. "}{" .. g .. "}{" .. b .. "}")
- end
-end
-
-function mps.setcmykcolor(c,m,y,k)
- mptopdf.texcode("\\MPScmyk{" .. c .. "}{" .. m .. "}{" .. y .. "}{" .. k .. "}")
-end
-
-function mps.setgray(s)
- mptopdf.texcode("\\MPSgray{" .. s .. "}")
-end
-
-function mps.specials(version,signal,factor) -- 2.0 123 1000
-end
-
-function mps.special(...) -- 7 1 0.5 1 0 0 1 3
- local n = select("#",...)
- mptopdf.texcode("\\MPSbegin\\MPSset{" .. concat({...},"}\\MPSset{",2,n) .. "}\\MPSend")
-end
-
-function mps.begindata()
-end
-
-function mps.enddata()
-end
-
-function mps.showpage()
-end
-
-mps.n = mps.newpath -- n
-mps.p = mps.closepath -- h
-mps.l = mps.lineto -- l
-mps.r = mps.rlineto -- r
-mps.m = mps.moveto -- m
-mps.c = mps.curveto -- c
-mps.hlw = mps.setlinewidth
-mps.vlw = mps.setlinewidth
-
-mps.C = mps.setcmykcolor -- k
-mps.G = mps.setgray -- g
-mps.R = mps.setrgbcolor -- rg
-
-mps.lj = mps.setlinejoin -- j
-mps.ml = mps.setmiterlimit -- M
-mps.lc = mps.setlinecap -- J
-mps.sd = mps.setdash -- d
-mps.rd = mps.resetdash
-
-mps.S = mps.stroke -- S
-mps.F = mps.fill -- f
-mps.B = mps.both -- B
-mps.W = mps.clip -- W
-
-mps.q = mps.gsave -- q
-mps.Q = mps.grestore -- Q
-
-mps.s = mps.scale -- (not in pdf)
-mps.t = mps.concat -- (not the same as pdf anyway)
-
-mps.P = mps.showpage
-
--- experimental
-
-function mps.attribute(id,value)
- mptopdf.texcode("\\attribute " .. id .. "=" .. value .. " ")
--- mptopdf.texcode("\\dompattribute{" .. id .. "}{" .. value .. "}")
-end
-
--- lpeg parser
-
--- The lpeg based parser is rather optimized for the kind of output
--- that MetaPost produces. It's my first real lpeg code, which may
--- show. Because the parser binds to functions, we define it last.
-
-do -- assumes \let\c\char
-
- local byte = string.byte
- local digit = lpeg.R("09")
- local spec = digit^2 * lpeg.P("::::") * digit^2
- local text = lpeg.Cc("{") * (
- lpeg.P("\\") * ( (digit * digit * digit) / function(n) return "c" .. tonumber(n,8) end) +
- lpeg.P(" ") / function(n) return "\\c32" end + -- never in new mp
- lpeg.P(1) / function(n) return "\\c" .. byte(n) end
- ) * lpeg.Cc("}")
- local package = lpeg.Cs(spec + text^0)
-
- function mps.fshow(str,font,scale) -- lpeg parser
- mps.textext(font,scale,lpegmatch(package,str))
- end
-
-end
-
-do
-
- local eol = lpeg.S('\r\n')^1
- local sp = lpeg.P(' ')^1
- local space = lpeg.S(' \r\n')^1
- local number = lpeg.S('0123456789.-+')^1
- local nonspace = lpeg.P(1-lpeg.S(' \r\n'))^1
-
- local cnumber = lpeg.C(number)
- local cstring = lpeg.C(nonspace)
-
- local specials = (lpeg.P("%%MetaPostSpecials:") * sp * (cstring * sp^0)^0 * eol) / mps.specials
- local special = (lpeg.P("%%MetaPostSpecial:") * sp * (cstring * sp^0)^0 * eol) / mps.special
- local boundingbox = (lpeg.P("%%BoundingBox:") * sp * (cnumber * sp^0)^4 * eol) / mps.boundingbox
- local highresboundingbox = (lpeg.P("%%HiResBoundingBox:") * sp * (cnumber * sp^0)^4 * eol) / mps.boundingbox
-
- local setup = lpeg.P("%%BeginSetup") * (1 - lpeg.P("%%EndSetup") )^1
- local prolog = lpeg.P("%%BeginProlog") * (1 - lpeg.P("%%EndProlog"))^1
- local comment = lpeg.P('%')^1 * (1 - eol)^1
-
- local curveto = ((cnumber * sp)^6 * lpeg.P("curveto") ) / mps.curveto
- local lineto = ((cnumber * sp)^2 * lpeg.P("lineto") ) / mps.lineto
- local rlineto = ((cnumber * sp)^2 * lpeg.P("rlineto") ) / mps.rlineto
- local moveto = ((cnumber * sp)^2 * lpeg.P("moveto") ) / mps.moveto
- local setrgbcolor = ((cnumber * sp)^3 * lpeg.P("setrgbcolor") ) / mps.setrgbcolor
- local setcmykcolor = ((cnumber * sp)^4 * lpeg.P("setcmykcolor") ) / mps.setcmykcolor
- local setgray = ((cnumber * sp)^1 * lpeg.P("setgray") ) / mps.setgray
- local newpath = ( lpeg.P("newpath") ) / mps.newpath
- local closepath = ( lpeg.P("closepath") ) / mps.closepath
- local fill = ( lpeg.P("fill") ) / mps.fill
- local stroke = ( lpeg.P("stroke") ) / mps.stroke
- local clip = ( lpeg.P("clip") ) / mps.clip
- local both = ( lpeg.P("gsave fill grestore")) / mps.both
- local showpage = ( lpeg.P("showpage") )
- local setlinejoin = ((cnumber * sp)^1 * lpeg.P("setlinejoin") ) / mps.setlinejoin
- local setlinecap = ((cnumber * sp)^1 * lpeg.P("setlinecap") ) / mps.setlinecap
- local setmiterlimit = ((cnumber * sp)^1 * lpeg.P("setmiterlimit") ) / mps.setmiterlimit
- local gsave = ( lpeg.P("gsave") ) / mps.gsave
- local grestore = ( lpeg.P("grestore") ) / mps.grestore
-
- local setdash = (lpeg.P("[") * (cnumber * sp^0)^0 * lpeg.P("]") * sp * cnumber * sp * lpeg.P("setdash")) / mps.setdash
- local concat = (lpeg.P("[") * (cnumber * sp^0)^6 * lpeg.P("]") * sp * lpeg.P("concat") ) / mps.concat
- local scale = ( (cnumber * sp^0)^6 * sp * lpeg.P("concat") ) / mps.concat
-
- local fshow = (lpeg.P("(") * lpeg.C((1-lpeg.P(")"))^1) * lpeg.P(")") * space * cstring * space * cnumber * space * lpeg.P("fshow")) / mps.fshow
- local fshow = (lpeg.P("(") *
- lpeg.Cs( ( lpeg.P("\\(")/"\\050" + lpeg.P("\\)")/"\\051" + (1-lpeg.P(")")) )^1 )
- * lpeg.P(")") * space * cstring * space * cnumber * space * lpeg.P("fshow")) / mps.fshow
-
- local setlinewidth_x = (lpeg.P("0") * sp * cnumber * sp * lpeg.P("dtransform truncate idtransform setlinewidth pop")) / mps.setlinewidth
- local setlinewidth_y = (cnumber * sp * lpeg.P("0 dtransform exch truncate exch idtransform pop setlinewidth") ) / mps.setlinewidth
-
- local c = ((cnumber * sp)^6 * lpeg.P("c") ) / mps.curveto -- ^6 very inefficient, ^1 ok too
- local l = ((cnumber * sp)^2 * lpeg.P("l") ) / mps.lineto
- local r = ((cnumber * sp)^2 * lpeg.P("r") ) / mps.rlineto
- local m = ((cnumber * sp)^2 * lpeg.P("m") ) / mps.moveto
- local vlw = ((cnumber * sp)^1 * lpeg.P("vlw")) / mps.setlinewidth
- local hlw = ((cnumber * sp)^1 * lpeg.P("hlw")) / mps.setlinewidth
-
- local R = ((cnumber * sp)^3 * lpeg.P("R") ) / mps.setrgbcolor
- local C = ((cnumber * sp)^4 * lpeg.P("C") ) / mps.setcmykcolor
- local G = ((cnumber * sp)^1 * lpeg.P("G") ) / mps.setgray
-
- local lj = ((cnumber * sp)^1 * lpeg.P("lj") ) / mps.setlinejoin
- local ml = ((cnumber * sp)^1 * lpeg.P("ml") ) / mps.setmiterlimit
- local lc = ((cnumber * sp)^1 * lpeg.P("lc") ) / mps.setlinecap
-
- local n = lpeg.P("n") / mps.newpath
- local p = lpeg.P("p") / mps.closepath
- local S = lpeg.P("S") / mps.stroke
- local F = lpeg.P("F") / mps.fill
- local B = lpeg.P("B") / mps.both
- local W = lpeg.P("W") / mps.clip
- local P = lpeg.P("P") / mps.showpage
-
- local q = lpeg.P("q") / mps.gsave
- local Q = lpeg.P("Q") / mps.grestore
-
- local sd = (lpeg.P("[") * (cnumber * sp^0)^0 * lpeg.P("]") * sp * cnumber * sp * lpeg.P("sd")) / mps.setdash
- local rd = ( lpeg.P("rd")) / mps.resetdash
-
- local s = ( (cnumber * sp^0)^2 * lpeg.P("s") ) / mps.scale
- local t = (lpeg.P("[") * (cnumber * sp^0)^6 * lpeg.P("]") * sp * lpeg.P("t") ) / mps.concat
-
- -- experimental
-
- local attribute = ((cnumber * sp)^2 * lpeg.P("attribute")) / mps.attribute
- local A = ((cnumber * sp)^2 * lpeg.P("A")) / mps.attribute
-
- local preamble = (
- prolog + setup +
- boundingbox + highresboundingbox + specials + special +
- comment
- )
-
- local procset = (
- lj + ml + lc +
- c + l + m + n + p + r +
- A +
- R + C + G +
- S + F + B + W +
- vlw + hlw +
- Q + q +
- sd + rd +
- t + s +
- fshow +
- P
- )
-
- local verbose = (
- curveto + lineto + moveto + newpath + closepath + rlineto +
- setrgbcolor + setcmykcolor + setgray +
- attribute +
- setlinejoin + setmiterlimit + setlinecap +
- stroke + fill + clip + both +
- setlinewidth_x + setlinewidth_y +
- gsave + grestore +
- concat + scale +
- fshow +
- setdash + -- no resetdash
- showpage
- )
-
- -- order matters in terms of speed / we could check for procset first
-
- local captures_old = ( space + verbose + preamble )^0
- local captures_new = ( space + procset + preamble + verbose )^0
-
- function mptopdf.parsers.lpeg()
- if find(mptopdf.data,"%%%%BeginResource: procset mpost") then
- lpegmatch(captures_new,mptopdf.data)
- else
- lpegmatch(captures_old,mptopdf.data)
- end
- end
-
-end
-
-mptopdf.parser = 'lpeg'
-
--- status info
-
-statistics.register("mps conversion time",function()
- local n = mptopdf.nofconverted
- if n > 0 then
- return format("%s seconds, %s conversions", statistics.elapsedtime(mptopdf),n)
- else
- return nil
- end
-end)
+if not modules then modules = { } end modules ['meta-pdf'] = {
+ version = 1.001,
+ comment = "companion to meta-pdf.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if true then
+ return -- or os.exit()
+end
+
+-- This file contains the history of the converter. We keep it around as it
+-- relates to the development of luatex.
+
+-- This is the third version. Version 1 converted to Lua code,
+-- version 2 gsubbed the file into TeX code, and version 3 uses
+-- the new lpeg functionality and streams the result into TeX.
+
+-- We will move old stuff to edu.
+
+--~ old lpeg 0.4 lpeg 0.5
+--~ 100 times test graphic 2.45 (T:1.07) 0.72 (T:0.24) 0.580 (0.560 no table) -- 0.54 optimized for one space (T:0.19)
+--~ 100 times big graphic 10.44 4.30/3.35 nogb 2.914 (2.050 no table) -- 1.99 optimized for one space (T:0.85)
+--~ 500 times test graphic T:1.29 T:1.16 (T:1.10 no table) -- T:1.10
+
+-- only needed for mp output on disk
+
+local concat, format, find, gsub, gmatch = table.concat, string.format, string.find, string.gsub, string.gmatch
+local tostring, tonumber, select = tostring, tonumber, select
+local lpegmatch = lpeg.match
+
+local metapost = metapost
+
+metapost.mptopdf = metapost.mptopdf or { }
+local mptopdf = metapost.mptopdf
+
+mptopdf.parsers = { }
+mptopdf.parser = 'none'
+mptopdf.nofconverted = 0
+
+function mptopdf.reset()
+ mptopdf.data = ""
+ mptopdf.path = { }
+ mptopdf.stack = { }
+ mptopdf.texts = { }
+ mptopdf.version = 0
+ mptopdf.shortcuts = false
+ mptopdf.resetpath()
+end
+
+function mptopdf.resetpath()
+ mptopdf.stack.close = false
+ mptopdf.stack.path = { }
+ mptopdf.stack.concat = nil
+ mptopdf.stack.special = false
+end
+
+mptopdf.reset()
+
+function mptopdf.parsers.none()
+ -- no parser set
+end
+
+function mptopdf.parse()
+ mptopdf.parsers[mptopdf.parser]()
+end
+
+-- old code
+
+mptopdf.steps = { }
+
+mptopdf.descapes = {
+ ['('] = "\\\\char40 ",
+ [')'] = "\\\\char41 ",
+ ['"'] = "\\\\char92 "
+}
+
+function mptopdf.descape(str)
+ str = gsub(str,"\\(%d%d%d)",function(n)
+ return "\\char" .. tonumber(n,8) .. " "
+ end)
+ return gsub(str,"\\([%(%)\\])",mptopdf.descapes)
+end
+
+function mptopdf.steps.descape(str)
+ str = gsub(str,"\\(%d%d%d)",function(n)
+ return "\\\\char" .. tonumber(n,8) .. " "
+ end)
+ return gsub(str,"\\([%(%)\\])",mptopdf.descapes)
+end
+
+function mptopdf.steps.strip() -- .3 per expr
+ mptopdf.data = gsub(mptopdf.data,"^(.-)%%+Page:.-%c+(.*)%s+%a+%s+%%+EOF.*$", function(preamble, graphic)
+ local bbox = "0 0 0 0"
+ for b in gmatch(preamble,"%%%%%a+oundingBox: +(.-)%c+") do
+ bbox = b
+ end
+ local name, version = string.match(preamble,"%%%%Creator: +(.-) +(.-) ")
+ mptopdf.version = tostring(version or "0")
+ if find(preamble,"/hlw{0 dtransform") then
+ mptopdf.shortcuts = true
+ end
+ -- the boundingbox specification needs to come before data, well, not really
+ return bbox .. " boundingbox\n" .. "\nbegindata\n" .. graphic .. "\nenddata\n"
+ end, 1)
+ mptopdf.data = gsub(mptopdf.data,"%%%%MetaPostSpecials: +(.-)%c+", "%1 specials\n", 1)
+ mptopdf.data = gsub(mptopdf.data,"%%%%MetaPostSpecial: +(.-)%c+", "%1 special\n")
+ mptopdf.data = gsub(mptopdf.data,"%%.-%c+", "")
+end
+
+function mptopdf.steps.cleanup()
+ if not mptopdf.shortcuts then
+ mptopdf.data = gsub(mptopdf.data,"gsave%s+fill%s+grestore%s+stroke", "both")
+ mptopdf.data = gsub(mptopdf.data,"([%d%.]+)%s+([%d%.]+)%s+dtransform%s+exch%s+truncate%s+exch%s+idtransform%s+pop%s+setlinewidth", function(wx,wy)
+ if tonumber(wx) > 0 then return wx .. " setlinewidth" else return wy .. " setlinewidth" end
+ end)
+ mptopdf.data = gsub(mptopdf.data,"([%d%.]+)%s+([%d%.]+)%s+dtransform%s+truncate%s+idtransform%s+setlinewidth%s+pop", function(wx,wy)
+ if tonumber(wx) > 0 then return wx .. " setlinewidth" else return wy .. " setlinewidth" end
+ end)
+ end
+end
+
+function mptopdf.steps.convert()
+ mptopdf.data = gsub(mptopdf.data,"%c%((.-)%) (.-) (.-) fshow", function(str,font,scale)
+ mptopdf.texts[#mptopdf.texts+1] = {mptopdf.steps.descape(str), font, scale}
+ return "\n" .. #mptopdf.texts .. " textext"
+ end)
+ mptopdf.data = gsub(mptopdf.data,"%[%s*(.-)%s*%]", function(str)
+ return gsub(str,"%s+"," ")
+ end)
+ local t
+ mptopdf.data = gsub(mptopdf.data,"%s*([^%a]-)%s*(%a+)", function(args,cmd)
+ if cmd == "textext" then
+ t = mptopdf.texts[tonumber(args)]
+ return "metapost.mps.textext(" .. "\"" .. t[2] .. "\"," .. t[3] .. ",\"" .. t[1] .. "\")\n"
+ else
+ return "metapost.mps." .. cmd .. "(" .. gsub(args," +",",") .. ")\n"
+ end
+ end)
+end
+
+function mptopdf.steps.process()
+ assert(loadstring(mptopdf.data))() -- () runs the loaded chunk
+end
+
+function mptopdf.parsers.gsub()
+ mptopdf.steps.strip()
+ mptopdf.steps.cleanup()
+ mptopdf.steps.convert()
+ mptopdf.steps.process()
+end
+
+-- end of old code
+
+-- from lua to tex
+
+function mptopdf.pdfcode(str)
+ context.pdfliteral(str) -- \\MPScode
+end
+
+function mptopdf.texcode(str)
+ context(str)
+end
+
+-- auxiliary functions
+
+function mptopdf.flushconcat()
+ if mptopdf.stack.concat then
+ mptopdf.pdfcode(concat(mptopdf.stack.concat," ") .. " cm")
+ mptopdf.stack.concat = nil
+ end
+end
+
+function mptopdf.flushpath(cmd)
+ -- faster: no local function and loop
+ if #mptopdf.stack.path > 0 then
+ local path = { }
+ if mptopdf.stack.concat then
+ local sx, sy = mptopdf.stack.concat[1], mptopdf.stack.concat[4]
+ local rx, ry = mptopdf.stack.concat[2], mptopdf.stack.concat[3]
+ local tx, ty = mptopdf.stack.concat[5], mptopdf.stack.concat[6]
+ local d = (sx*sy) - (rx*ry)
+ local function mpconcat(px, py)
+ return (sy*(px-tx)-ry*(py-ty))/d, (sx*(py-ty)-rx*(px-tx))/d
+ end
+ local stackpath = mptopdf.stack.path
+ for k=1,#stackpath do
+ local v = stackpath[k]
+ v[1],v[2] = mpconcat(v[1],v[2])
+ if #v == 7 then
+ v[3],v[4] = mpconcat(v[3],v[4])
+ v[5],v[6] = mpconcat(v[5],v[6])
+ end
+ path[#path+1] = concat(v," ")
+ end
+ else
+ local stackpath = mptopdf.stack.path
+ for k=1,#stackpath do
+ path[#path+1] = concat(stackpath[k]," ")
+ end
+ end
+ mptopdf.flushconcat()
+ mptopdf.texcode("\\MPSpath{" .. concat(path," ") .. "}")
+ if mptopdf.stack.close then
+ mptopdf.texcode("\\MPScode{h " .. cmd .. "}")
+ else
+ mptopdf.texcode("\\MPScode{" .. cmd .."}")
+ end
+ end
+ mptopdf.resetpath()
+end
+
+function mptopdf.loaded(name)
+ local ok, n
+ mptopdf.reset()
+ ok, mptopdf.data, n = resolvers.loadbinfile(name, 'tex') -- we need a binary load !
+ return ok
+end
+
+if not mptopdf.parse then
+ function mptopdf.parse() end -- forward declaration
+end
+
+function mptopdf.convertmpstopdf(name)
+ if mptopdf.loaded(name) then
+ mptopdf.nofconverted = mptopdf.nofconverted + 1
+ statistics.starttiming(mptopdf)
+ mptopdf.parse()
+ mptopdf.reset()
+ statistics.stoptiming(mptopdf)
+ else
+ context("file " .. name .. " not found")
+ end
+end
+
+-- mp interface
+
+metapost.mps = metapost.mps or { }
+local mps = metapost.mps or { }
+
+function mps.creator(a, b, c)
+ mptopdf.version = tonumber(b)
+end
+
+function mps.creationdate(a)
+ mptopdf.date = a
+end
+
+function mps.newpath()
+ mptopdf.stack.path = { }
+end
+
+function mps.boundingbox(llx, lly, urx, ury)
+ mptopdf.texcode("\\MPSboundingbox{" .. llx .. "}{" .. lly .. "}{" .. urx .. "}{" .. ury .. "}")
+end
+
+function mps.moveto(x,y)
+ mptopdf.stack.path[#mptopdf.stack.path+1] = {x,y,"m"}
+end
+
+function mps.curveto(ax, ay, bx, by, cx, cy)
+ mptopdf.stack.path[#mptopdf.stack.path+1] = {ax,ay,bx,by,cx,cy,"c"}
+end
+
+function mps.lineto(x,y)
+ mptopdf.stack.path[#mptopdf.stack.path+1] = {x,y,"l"}
+end
+
+function mps.rlineto(x,y)
+ local dx, dy = 0, 0
+ if #mptopdf.stack.path > 0 then
+ dx, dy = mptopdf.stack.path[#mptopdf.stack.path][1], mptopdf.stack.path[#mptopdf.stack.path][2]
+ end
+ mptopdf.stack.path[#mptopdf.stack.path+1] = {dx,dy,"l"}
+end
+
+function mps.translate(tx,ty)
+ mptopdf.pdfcode("1 0 0 0 1 " .. tx .. " " .. ty .. " cm")
+end
+
+function mps.scale(sx,sy)
+ mptopdf.stack.concat = {sx,0,0,sy,0,0}
+end
+
+function mps.concat(sx, rx, ry, sy, tx, ty)
+ mptopdf.stack.concat = {sx,rx,ry,sy,tx,ty}
+end
+
+function mps.setlinejoin(d)
+ mptopdf.pdfcode(d .. " j")
+end
+
+function mps.setlinecap(d)
+ mptopdf.pdfcode(d .. " J")
+end
+
+function mps.setmiterlimit(d)
+ mptopdf.pdfcode(d .. " M")
+end
+
+function mps.gsave()
+ mptopdf.pdfcode("q")
+end
+
+function mps.grestore()
+ mptopdf.pdfcode("Q")
+end
+
+function mps.setdash(...)
+ local n = select("#",...)
+ mptopdf.pdfcode("[" .. concat({...}," ",1,n-1) .. "] " .. select(n,...) .. " d")
+end
+
+function mps.resetdash()
+ mptopdf.pdfcode("[ ] 0 d")
+end
+
+function mps.setlinewidth(d)
+ mptopdf.pdfcode(d .. " w")
+end
+
+function mps.closepath()
+ mptopdf.stack.close = true
+end
+
+function mps.fill()
+ mptopdf.flushpath('f')
+end
+
+function mps.stroke()
+ mptopdf.flushpath('S')
+end
+
+function mps.both()
+ mptopdf.flushpath('B')
+end
+
+function mps.clip()
+ mptopdf.flushpath('W n')
+end
+
+function mps.textext(font, scale, str) -- old parser
+ local dx, dy = 0, 0
+ if #mptopdf.stack.path > 0 then
+ dx, dy = mptopdf.stack.path[1][1], mptopdf.stack.path[1][2]
+ end
+ mptopdf.flushconcat()
+ mptopdf.texcode("\\MPStextext{"..font.."}{"..scale.."}{"..str.."}{"..dx.."}{"..dy.."}")
+ mptopdf.resetpath()
+end
+
+--~ function mps.handletext(font,scale,str,dx,dy)
+--~ local one, two = string.match(str, "^(%d+)::::(%d+)")
+--~ if one and two then
+--~ mptopdf.texcode("\\MPTOPDFtextext{"..font.."}{"..scale.."}{"..one.."}{"..two.."}{"..dx.."}{"..dy.."}")
+--~ else
+--~ mptopdf.texcode("\\MPTOPDFtexcode{"..font.."}{"..scale.."}{"..str.."}{"..dx.."}{"..dy.."}")
+--~ end
+--~ end
+
+function mps.setrgbcolor(r,g,b) -- extra check
+ r, g = tonumber(r), tonumber(g) -- needed when we use lpeg
+ if r == 0.0123 and g < 0.1 then
+ mptopdf.texcode("\\MPSspecial{" .. g*10000 .. "}{" .. b*10000 .. "}")
+ elseif r == 0.123 and g < 0.1 then
+ mptopdf.texcode("\\MPSspecial{" .. g* 1000 .. "}{" .. b* 1000 .. "}")
+ else
+ mptopdf.texcode("\\MPSrgb{" .. r .. "}{" .. g .. "}{" .. b .. "}")
+ end
+end
+
+function mps.setcmykcolor(c,m,y,k)
+ mptopdf.texcode("\\MPScmyk{" .. c .. "}{" .. m .. "}{" .. y .. "}{" .. k .. "}")
+end
+
+function mps.setgray(s)
+ mptopdf.texcode("\\MPSgray{" .. s .. "}")
+end
+
+function mps.specials(version,signal,factor) -- 2.0 123 1000
+end
+
+function mps.special(...) -- 7 1 0.5 1 0 0 1 3
+ local n = select("#",...)
+ mptopdf.texcode("\\MPSbegin\\MPSset{" .. concat({...},"}\\MPSset{",2,n) .. "}\\MPSend")
+end
+
+function mps.begindata()
+end
+
+function mps.enddata()
+end
+
+function mps.showpage()
+end
+
+mps.n = mps.newpath -- n
+mps.p = mps.closepath -- h
+mps.l = mps.lineto -- l
+mps.r = mps.rlineto -- r
+mps.m = mps.moveto -- m
+mps.c = mps.curveto -- c
+mps.hlw = mps.setlinewidth
+mps.vlw = mps.setlinewidth
+
+mps.C = mps.setcmykcolor -- k
+mps.G = mps.setgray -- g
+mps.R = mps.setrgbcolor -- rg
+
+mps.lj = mps.setlinejoin -- j
+mps.ml = mps.setmiterlimit -- M
+mps.lc = mps.setlinecap -- J
+mps.sd = mps.setdash -- d
+mps.rd = mps.resetdash
+
+mps.S = mps.stroke -- S
+mps.F = mps.fill -- f
+mps.B = mps.both -- B
+mps.W = mps.clip -- W
+
+mps.q = mps.gsave -- q
+mps.Q = mps.grestore -- Q
+
+mps.s = mps.scale -- (not in pdf)
+mps.t = mps.concat -- (not the same as pdf anyway)
+
+mps.P = mps.showpage
+
+-- experimental
+
+function mps.attribute(id,value)
+ mptopdf.texcode("\\attribute " .. id .. "=" .. value .. " ")
+-- mptopdf.texcode("\\dompattribute{" .. id .. "}{" .. value .. "}")
+end
+
+-- lpeg parser
+
+-- The lpeg based parser is rather optimized for the kind of output
+-- that MetaPost produces. It's my first real lpeg code, which may
+-- show. Because the parser binds to functions, we define it last.
+
+do -- assumes \let\c\char
+
+ local byte = string.byte
+ local digit = lpeg.R("09")
+ local spec = digit^2 * lpeg.P("::::") * digit^2
+ local text = lpeg.Cc("{") * (
+ lpeg.P("\\") * ( (digit * digit * digit) / function(n) return "c" .. tonumber(n,8) end) +
+ lpeg.P(" ") / function(n) return "\\c32" end + -- never in new mp
+ lpeg.P(1) / function(n) return "\\c" .. byte(n) end
+ ) * lpeg.Cc("}")
+ local package = lpeg.Cs(spec + text^0)
+
+ function mps.fshow(str,font,scale) -- lpeg parser
+ mps.textext(font,scale,lpegmatch(package,str))
+ end
+
+end
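
Editorial sketch, not part of the patch: the do block above packages an fshow string so that every byte becomes \c<byte> (relying on \let\c\char on the TeX side, as the comment says) while a nn::::nn text reference passes through untouched. Rebuilt standalone so it can be tried in isolation (the explicit space branch is folded into the catch-all here):

local lpeg  = lpeg or require("lpeg")
local P, R, Cc, Cs = lpeg.P, lpeg.R, lpeg.Cc, lpeg.Cs
local byte  = string.byte

local digit = R("09")
local spec  = digit^2 * P("::::") * digit^2
local text  = Cc("{") * (
    P("\\") * ((digit * digit * digit) / function(n) return "c" .. tonumber(n,8) end) +
    P(1) / function(n) return "\\c" .. byte(n) end
) * Cc("}")
local package = Cs(spec + text^0)

print(lpeg.match(package,"Ab"))        --> {\c65}{\c98}
print(lpeg.match(package,"\\101"))     --> {\c65}  (octal 101 = decimal 65)
print(lpeg.match(package,"12::::34"))  --> 12::::34 (text reference, left alone)
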
+
+do
+
+ local eol = lpeg.S('\r\n')^1
+ local sp = lpeg.P(' ')^1
+ local space = lpeg.S(' \r\n')^1
+ local number = lpeg.S('0123456789.-+')^1
+ local nonspace = lpeg.P(1-lpeg.S(' \r\n'))^1
+
+ local cnumber = lpeg.C(number)
+ local cstring = lpeg.C(nonspace)
+
+ local specials = (lpeg.P("%%MetaPostSpecials:") * sp * (cstring * sp^0)^0 * eol) / mps.specials
+ local special = (lpeg.P("%%MetaPostSpecial:") * sp * (cstring * sp^0)^0 * eol) / mps.special
+ local boundingbox = (lpeg.P("%%BoundingBox:") * sp * (cnumber * sp^0)^4 * eol) / mps.boundingbox
+ local highresboundingbox = (lpeg.P("%%HiResBoundingBox:") * sp * (cnumber * sp^0)^4 * eol) / mps.boundingbox
+
+ local setup = lpeg.P("%%BeginSetup") * (1 - lpeg.P("%%EndSetup") )^1
+ local prolog = lpeg.P("%%BeginProlog") * (1 - lpeg.P("%%EndProlog"))^1
+ local comment = lpeg.P('%')^1 * (1 - eol)^1
+
+ local curveto = ((cnumber * sp)^6 * lpeg.P("curveto") ) / mps.curveto
+ local lineto = ((cnumber * sp)^2 * lpeg.P("lineto") ) / mps.lineto
+ local rlineto = ((cnumber * sp)^2 * lpeg.P("rlineto") ) / mps.rlineto
+ local moveto = ((cnumber * sp)^2 * lpeg.P("moveto") ) / mps.moveto
+ local setrgbcolor = ((cnumber * sp)^3 * lpeg.P("setrgbcolor") ) / mps.setrgbcolor
+ local setcmykcolor = ((cnumber * sp)^4 * lpeg.P("setcmykcolor") ) / mps.setcmykcolor
+ local setgray = ((cnumber * sp)^1 * lpeg.P("setgray") ) / mps.setgray
+ local newpath = ( lpeg.P("newpath") ) / mps.newpath
+ local closepath = ( lpeg.P("closepath") ) / mps.closepath
+ local fill = ( lpeg.P("fill") ) / mps.fill
+ local stroke = ( lpeg.P("stroke") ) / mps.stroke
+ local clip = ( lpeg.P("clip") ) / mps.clip
+ local both = ( lpeg.P("gsave fill grestore")) / mps.both
+ local showpage = ( lpeg.P("showpage") )
+ local setlinejoin = ((cnumber * sp)^1 * lpeg.P("setlinejoin") ) / mps.setlinejoin
+ local setlinecap = ((cnumber * sp)^1 * lpeg.P("setlinecap") ) / mps.setlinecap
+ local setmiterlimit = ((cnumber * sp)^1 * lpeg.P("setmiterlimit") ) / mps.setmiterlimit
+ local gsave = ( lpeg.P("gsave") ) / mps.gsave
+ local grestore = ( lpeg.P("grestore") ) / mps.grestore
+
+ local setdash = (lpeg.P("[") * (cnumber * sp^0)^0 * lpeg.P("]") * sp * cnumber * sp * lpeg.P("setdash")) / mps.setdash
+ local concat = (lpeg.P("[") * (cnumber * sp^0)^6 * lpeg.P("]") * sp * lpeg.P("concat") ) / mps.concat
+ local scale = ( (cnumber * sp^0)^6 * sp * lpeg.P("concat") ) / mps.concat
+
+ local fshow = (lpeg.P("(") * lpeg.C((1-lpeg.P(")"))^1) * lpeg.P(")") * space * cstring * space * cnumber * space * lpeg.P("fshow")) / mps.fshow
+ local fshow = (lpeg.P("(") *
+ lpeg.Cs( ( lpeg.P("\\(")/"\\050" + lpeg.P("\\)")/"\\051" + (1-lpeg.P(")")) )^1 )
+ * lpeg.P(")") * space * cstring * space * cnumber * space * lpeg.P("fshow")) / mps.fshow
+
+ local setlinewidth_x = (lpeg.P("0") * sp * cnumber * sp * lpeg.P("dtransform truncate idtransform setlinewidth pop")) / mps.setlinewidth
+ local setlinewidth_y = (cnumber * sp * lpeg.P("0 dtransform exch truncate exch idtransform pop setlinewidth") ) / mps.setlinewidth
+
+ local c = ((cnumber * sp)^6 * lpeg.P("c") ) / mps.curveto -- ^6 very inefficient, ^1 ok too
+ local l = ((cnumber * sp)^2 * lpeg.P("l") ) / mps.lineto
+ local r = ((cnumber * sp)^2 * lpeg.P("r") ) / mps.rlineto
+ local m = ((cnumber * sp)^2 * lpeg.P("m") ) / mps.moveto
+ local vlw = ((cnumber * sp)^1 * lpeg.P("vlw")) / mps.setlinewidth
+ local hlw = ((cnumber * sp)^1 * lpeg.P("hlw")) / mps.setlinewidth
+
+ local R = ((cnumber * sp)^3 * lpeg.P("R") ) / mps.setrgbcolor
+ local C = ((cnumber * sp)^4 * lpeg.P("C") ) / mps.setcmykcolor
+ local G = ((cnumber * sp)^1 * lpeg.P("G") ) / mps.setgray
+
+ local lj = ((cnumber * sp)^1 * lpeg.P("lj") ) / mps.setlinejoin
+ local ml = ((cnumber * sp)^1 * lpeg.P("ml") ) / mps.setmiterlimit
+ local lc = ((cnumber * sp)^1 * lpeg.P("lc") ) / mps.setlinecap
+
+ local n = lpeg.P("n") / mps.newpath
+ local p = lpeg.P("p") / mps.closepath
+ local S = lpeg.P("S") / mps.stroke
+ local F = lpeg.P("F") / mps.fill
+ local B = lpeg.P("B") / mps.both
+ local W = lpeg.P("W") / mps.clip
+ local P = lpeg.P("P") / mps.showpage
+
+ local q = lpeg.P("q") / mps.gsave
+ local Q = lpeg.P("Q") / mps.grestore
+
+ local sd = (lpeg.P("[") * (cnumber * sp^0)^0 * lpeg.P("]") * sp * cnumber * sp * lpeg.P("sd")) / mps.setdash
+ local rd = ( lpeg.P("rd")) / mps.resetdash
+
+ local s = ( (cnumber * sp^0)^2 * lpeg.P("s") ) / mps.scale
+ local t = (lpeg.P("[") * (cnumber * sp^0)^6 * lpeg.P("]") * sp * lpeg.P("t") ) / mps.concat
+
+ -- experimental
+
+ local attribute = ((cnumber * sp)^2 * lpeg.P("attribute")) / mps.attribute
+ local A = ((cnumber * sp)^2 * lpeg.P("A")) / mps.attribute
+
+ local preamble = (
+ prolog + setup +
+ boundingbox + highresboundingbox + specials + special +
+ comment
+ )
+
+ local procset = (
+ lj + ml + lc +
+ c + l + m + n + p + r +
+ A +
+ R + C + G +
+ S + F + B + W +
+ vlw + hlw +
+ Q + q +
+ sd + rd +
+ t + s +
+ fshow +
+ P
+ )
+
+ local verbose = (
+ curveto + lineto + moveto + newpath + closepath + rlineto +
+ setrgbcolor + setcmykcolor + setgray +
+ attribute +
+ setlinejoin + setmiterlimit + setlinecap +
+ stroke + fill + clip + both +
+ setlinewidth_x + setlinewidth_y +
+ gsave + grestore +
+ concat + scale +
+ fshow +
+ setdash + -- no resetdash
+ showpage
+ )
+
+ -- order matters in terms of speed / we could check for procset first
+
+ local captures_old = ( space + verbose + preamble )^0
+ local captures_new = ( space + procset + preamble + verbose )^0
+
+ function mptopdf.parsers.lpeg()
+ if find(mptopdf.data,"%%%%BeginResource: procset mpost") then
+ lpegmatch(captures_new,mptopdf.data)
+ else
+ lpegmatch(captures_old,mptopdf.data)
+ end
+ end
+
+end
+
+mptopdf.parser = 'lpeg'
+
+-- status info
+
+statistics.register("mps conversion time",function()
+ local n = mptopdf.nofconverted
+ if n > 0 then
+ return format("%s seconds, %s conversions", statistics.elapsedtime(mptopdf),n)
+ else
+ return nil
+ end
+end)
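
Editorial sketch, not part of the patch: in the grammars above each recognized operator is a pattern of the form ((cnumber * sp)^n * P("op")) / handler, so the captured numbers arrive as the handler's arguments (mps.m pushes onto the path stack, mps.hlw emits a PDF literal, and so on). The same shape, shown standalone with a hypothetical moveto printer instead of the real mps.moveto:

local lpeg = lpeg or require("lpeg")
local P, S, C = lpeg.P, lpeg.S, lpeg.C

local sp      = P(" ")^1
local number  = S("0123456789.-+")^1
local cnumber = C(number)

local function moveto(x,y)
    print("moveto",x,y) -- stand-in for mps.moveto / mps.m
end

-- "x y m", the abbreviated moveto of the mpost procset
local m = ((cnumber * sp)^2 * P("m")) / moveto

lpeg.match(m,"10.5 -3 m") --> moveto 10.5 -3
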
diff --git a/tex/context/base/meta-tex.lua b/tex/context/base/meta-tex.lua
index c29498ad1..117d604b3 100644
--- a/tex/context/base/meta-tex.lua
+++ b/tex/context/base/meta-tex.lua
@@ -1,38 +1,38 @@
-if not modules then modules = { } end modules ['meta-tex'] = {
- version = 1.001,
- comment = "companion to meta-tex.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---~ local P, C, lpegmatch = lpeg.P, lpeg.C, lpeg.match
-
--- local left = P("[")
--- local right = P("]")
--- local space = P(" ")
--- local argument = left * C((1-right)^1) * right
--- local pattern = (argument + space)^0
-
--- function metapost.sometxt(optional,str)
--- if optional == "" then
--- context.sometxta(str)
--- else
--- local one, two = lpegmatch(pattern,optional)
--- if two then
--- context.sometxtc(one,two,str)
--- elseif one then
--- context.sometxtb(one,str)
--- else
--- context.sometxta(str)
--- end
--- end
--- end
-
-local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
-
-local pattern = Cs((P([[\"]]) + P([["]])/"\\quotedbl{}" + P(1))^0) -- or \char
-
-function metapost.escaped(str)
- context(lpegmatch(pattern,str))
-end
+if not modules then modules = { } end modules ['meta-tex'] = {
+ version = 1.001,
+ comment = "companion to meta-tex.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--~ local P, C, lpegmatch = lpeg.P, lpeg.C, lpeg.match
+
+-- local left = P("[")
+-- local right = P("]")
+-- local space = P(" ")
+-- local argument = left * C((1-right)^1) * right
+-- local pattern = (argument + space)^0
+
+-- function metapost.sometxt(optional,str)
+-- if optional == "" then
+-- context.sometxta(str)
+-- else
+-- local one, two = lpegmatch(pattern,optional)
+-- if two then
+-- context.sometxtc(one,two,str)
+-- elseif one then
+-- context.sometxtb(one,str)
+-- else
+-- context.sometxta(str)
+-- end
+-- end
+-- end
+
+local P, Cs, lpegmatch = lpeg.P, lpeg.Cs, lpeg.match
+
+local pattern = Cs((P([[\"]]) + P([["]])/"\\quotedbl{}" + P(1))^0) -- or \char
+
+function metapost.escaped(str)
+ context(lpegmatch(pattern,str))
+end
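
Editorial sketch, not part of the patch: the meta-tex.lua pattern above only protects bare double quotes before the string is handed back to TeX (an already escaped \" passes through, a bare " becomes \quotedbl{}). Its effect on a small input:

local lpeg = lpeg or require("lpeg")
local P, Cs = lpeg.P, lpeg.Cs

local pattern = Cs((P([[\"]]) + P([["]])/"\\quotedbl{}" + P(1))^0)

print(lpeg.match(pattern,[[a "quoted" word]]))
--> a \quotedbl{}quoted\quotedbl{} word
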
diff --git a/tex/context/base/mlib-ctx.lua b/tex/context/base/mlib-ctx.lua
index 04e0efcb4..8d6d7aa3e 100644
--- a/tex/context/base/mlib-ctx.lua
+++ b/tex/context/base/mlib-ctx.lua
@@ -1,178 +1,178 @@
-if not modules then modules = { } end modules ['mlib-ctx'] = {
- version = 1.001,
- comment = "companion to mlib-ctx.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- todo
-
-local format, concat = string.format, table.concat
-local settings_to_hash = utilities.parsers.settings_to_hash
-
-local report_metapost = logs.reporter("metapost")
-
-local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming
-
-local mplib = mplib
-
-metapost = metapost or {}
-local metapost = metapost
-
-local v_no = interfaces.variables.no
-
-metapost.defaultformat = "metafun"
-metapost.defaultinstance = "metafun"
-metapost.defaultmethod = "default"
-
-local function setmpsformat(specification)
- local instance = specification.instance
- local format = specification.format
- local method = specification.method
- if not instance or instance == "" then
- instance = metapost.defaultinstance
- specification.instance = instance
- end
- if not format or format == "" then
- format = metapost.defaultformat
- specification.format = format
- end
- if not method or method == "" then
- method = metapost.defaultmethod
- specification.method = method
- end
- specification.mpx = metapost.format(instance,format,method)
-end
-
-local extensiondata = metapost.extensiondata or storage.allocate { }
-metapost.extensiondata = extensiondata
-
-storage.register("metapost/extensiondata",extensiondata,"metapost.extensiondata")
-
-function metapost.setextensions(instances,data)
- if data and data ~= "" then
- extensiondata[#extensiondata+1] = {
- usedinall = not instances or instances == "",
- instances = settings_to_hash(instances or ""),
- extensions = data,
- }
- end
-end
-
-function metapost.getextensions(instance,state)
- if state and state == v_no then
- return ""
- else
- local t = { }
- for i=1,#extensiondata do
- local e = extensiondata[i]
- local status = e.instances[instance]
- if (status ~= true) and (e.usedinall or status) then
- t[#t+1] = e.extensions
- e.instances[instance] = true
- end
- end
- return concat(t," ")
- end
-end
-
-function commands.getmpextensions(instance,state)
- context(metapost.getextensions(instance,state))
-end
-
-function metapost.graphic(specification)
- setmpsformat(specification)
- metapost.graphic_base_pass(specification)
-end
-
-function metapost.getclippath(specification) -- why not a special instance for this
- setmpsformat(specification)
- local mpx = specification.mpx
- local data = specification.data or ""
- if mpx and data ~= "" then
- starttiming(metapost)
- starttiming(metapost.exectime)
- local result = mpx:execute ( format ( "%s;%s;beginfig(1);%s;%s;endfig;",
- specification.extensions or "",
- specification.inclusions or "",
- specification.initializations or "",
- data
- ) )
- stoptiming(metapost.exectime)
- if result.status > 0 then
- report_metapost("%s: %s", result.status, result.error or result.term or result.log)
- result = nil
- else
- result = metapost.filterclippath(result)
- end
- stoptiming(metapost)
- return result
- end
-end
-
-function metapost.filterclippath(result)
- if result then
- local figures = result.fig
- if figures and #figures > 0 then
- local figure = figures[1]
- local objects = figure:objects()
- if objects then
- local lastclippath
- for o=1,#objects do
- local object = objects[o]
- if object.type == "start_clip" then
- lastclippath = object.path
- end
- end
- return lastclippath
- end
- end
- end
-end
-
-function metapost.theclippath(...)
- local result = metapost.getclippath(...)
- if result then -- we could just print the table
- result = concat(metapost.flushnormalpath(result),"\n")
- context(result)
- end
-end
-
-statistics.register("metapost processing time", function()
- local n = metapost.n
- if n and n > 0 then
- local nofconverted = metapost.makempy.nofconverted
- local elapsedtime = statistics.elapsedtime
- local elapsed = statistics.elapsed
- local str = format("%s seconds, loading: %s, execution: %s, n: %s, average: %s",
- elapsedtime(metapost), elapsedtime(mplib), elapsedtime(metapost.exectime), n,
- elapsedtime((elapsed(metapost) + elapsed(mplib) + elapsed(metapost.exectime)) / n))
- if nofconverted > 0 then
- return format("%s, external: %s (%s calls)",
- str, elapsedtime(metapost.makempy), nofconverted)
- else
- return str
- end
- else
- return nil
- end
-end)
-
--- only used in graphictexts
-
-metapost.tex = metapost.tex or { }
-
-local environments = { }
-
-function metapost.tex.set(str)
- environments[#environments+1] = str
-end
-
-function metapost.tex.reset()
- environments = { }
-end
-
-function metapost.tex.get()
- return concat(environments,"\n")
-end
+if not modules then modules = { } end modules ['mlib-ctx'] = {
+ version = 1.001,
+ comment = "companion to mlib-ctx.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- todo
+
+local format, concat = string.format, table.concat
+local settings_to_hash = utilities.parsers.settings_to_hash
+
+local report_metapost = logs.reporter("metapost")
+
+local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming
+
+local mplib = mplib
+
+metapost = metapost or {}
+local metapost = metapost
+
+local v_no = interfaces.variables.no
+
+metapost.defaultformat = "metafun"
+metapost.defaultinstance = "metafun"
+metapost.defaultmethod = "default"
+
+local function setmpsformat(specification)
+ local instance = specification.instance
+ local format = specification.format
+ local method = specification.method
+ if not instance or instance == "" then
+ instance = metapost.defaultinstance
+ specification.instance = instance
+ end
+ if not format or format == "" then
+ format = metapost.defaultformat
+ specification.format = format
+ end
+ if not method or method == "" then
+ method = metapost.defaultmethod
+ specification.method = method
+ end
+ specification.mpx = metapost.format(instance,format,method)
+end
+
+local extensiondata = metapost.extensiondata or storage.allocate { }
+metapost.extensiondata = extensiondata
+
+storage.register("metapost/extensiondata",extensiondata,"metapost.extensiondata")
+
+function metapost.setextensions(instances,data)
+ if data and data ~= "" then
+ extensiondata[#extensiondata+1] = {
+ usedinall = not instances or instances == "",
+ instances = settings_to_hash(instances or ""),
+ extensions = data,
+ }
+ end
+end
+
+function metapost.getextensions(instance,state)
+ if state and state == v_no then
+ return ""
+ else
+ local t = { }
+ for i=1,#extensiondata do
+ local e = extensiondata[i]
+ local status = e.instances[instance]
+ if (status ~= true) and (e.usedinall or status) then
+ t[#t+1] = e.extensions
+ e.instances[instance] = true
+ end
+ end
+ return concat(t," ")
+ end
+end
+
+function commands.getmpextensions(instance,state)
+ context(metapost.getextensions(instance,state))
+end
+
+function metapost.graphic(specification)
+ setmpsformat(specification)
+ metapost.graphic_base_pass(specification)
+end
+
+function metapost.getclippath(specification) -- why not a special instance for this
+ setmpsformat(specification)
+ local mpx = specification.mpx
+ local data = specification.data or ""
+ if mpx and data ~= "" then
+ starttiming(metapost)
+ starttiming(metapost.exectime)
+ local result = mpx:execute ( format ( "%s;%s;beginfig(1);%s;%s;endfig;",
+ specification.extensions or "",
+ specification.inclusions or "",
+ specification.initializations or "",
+ data
+ ) )
+ stoptiming(metapost.exectime)
+ if result.status > 0 then
+ report_metapost("%s: %s", result.status, result.error or result.term or result.log)
+ result = nil
+ else
+ result = metapost.filterclippath(result)
+ end
+ stoptiming(metapost)
+ return result
+ end
+end
+
+function metapost.filterclippath(result)
+ if result then
+ local figures = result.fig
+ if figures and #figures > 0 then
+ local figure = figures[1]
+ local objects = figure:objects()
+ if objects then
+ local lastclippath
+ for o=1,#objects do
+ local object = objects[o]
+ if object.type == "start_clip" then
+ lastclippath = object.path
+ end
+ end
+ return lastclippath
+ end
+ end
+ end
+end
+
+function metapost.theclippath(...)
+ local result = metapost.getclippath(...)
+ if result then -- we could just print the table
+ result = concat(metapost.flushnormalpath(result),"\n")
+ context(result)
+ end
+end
+
+statistics.register("metapost processing time", function()
+ local n = metapost.n
+ if n and n > 0 then
+ local nofconverted = metapost.makempy.nofconverted
+ local elapsedtime = statistics.elapsedtime
+ local elapsed = statistics.elapsed
+ local str = format("%s seconds, loading: %s, execution: %s, n: %s, average: %s",
+ elapsedtime(metapost), elapsedtime(mplib), elapsedtime(metapost.exectime), n,
+ elapsedtime((elapsed(metapost) + elapsed(mplib) + elapsed(metapost.exectime)) / n))
+ if nofconverted > 0 then
+ return format("%s, external: %s (%s calls)",
+ str, elapsedtime(metapost.makempy), nofconverted)
+ else
+ return str
+ end
+ else
+ return nil
+ end
+end)
+
+-- only used in graphictexts
+
+metapost.tex = metapost.tex or { }
+
+local environments = { }
+
+function metapost.tex.set(str)
+ environments[#environments+1] = str
+end
+
+function metapost.tex.reset()
+ environments = { }
+end
+
+function metapost.tex.get()
+ return concat(environments,"\n")
+end
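
Editorial sketch, not part of the patch: metapost.getclippath above wraps the snippet it receives in a one-figure job before handing it to mpx:execute. What that assembled string looks like, with hypothetical specification fields (fullsquare is metafun, used here only as plausible clip data):

local format = string.format

local specification = {
    extensions      = "", -- optional extension code for this instance
    inclusions      = "", -- optional user inclusions
    initializations = "", -- optional per-graphic setup
    data            = "clip currentpicture to fullsquare scaled 2cm ;",
}

print(format("%s;%s;beginfig(1);%s;%s;endfig;",
    specification.extensions, specification.inclusions,
    specification.initializations, specification.data))
--> ;;beginfig(1);;clip currentpicture to fullsquare scaled 2cm ;;endfig;
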
diff --git a/tex/context/base/mlib-pdf.lua b/tex/context/base/mlib-pdf.lua
index 963309951..6ca50a12f 100644
--- a/tex/context/base/mlib-pdf.lua
+++ b/tex/context/base/mlib-pdf.lua
@@ -1,530 +1,530 @@
-if not modules then modules = { } end modules ['mlib-pdf'] = {
- version = 1.001,
- comment = "companion to mlib-ctx.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- maybe %s is better than %f
-
-local format, concat, gsub = string.format, table.concat, string.gsub
-local abs, sqrt, round = math.abs, math.sqrt, math.round
-local setmetatable = setmetatable
-local Cf, C, Cg, Ct, P, S, lpegmatch = lpeg.Cf, lpeg.C, lpeg.Cg, lpeg.Ct, lpeg.P, lpeg.S, lpeg.match
-local formatters = string.formatters
-
-local report_metapost = logs.reporter("metapost")
-
-local mplib, context = mplib, context
-
-local allocate = utilities.storage.allocate
-
-local copy_node = node.copy
-local write_node = node.write
-
-metapost = metapost or { }
-local metapost = metapost
-
-metapost.flushers = metapost.flushers or { }
-local pdfflusher = { }
-metapost.flushers.pdf = pdfflusher
-
-metapost.multipass = false
-metapost.n = 0
-metapost.optimize = true -- false
-
-local experiment = true -- uses context(node) that already does delayed nodes
-
-local savedliterals = nil -- needs checking
-local mpsliteral = nodes.pool.register(node.new("whatsit",nodes.whatsitcodes.pdfliteral)) -- pdfliteral.mode = 1
-
-local pdfliteral = function(s)
- local literal = copy_node(mpsliteral)
- literal.data = s
- return literal
-end
-
--- Because in MKiV we always have two passes, we save the objects. When an extra
--- mp run is done (due to for instance texts identifier in the parse pass), we
--- get a new result table and the stored objects are forgotten. Otherwise they
--- are reused.
-
-local function getobjects(result,figure,f)
- if metapost.optimize then
- local objects = result.objects
- if not objects then
- result.objects = { }
- end
- objects = result.objects[f]
- if not objects then
- objects = figure:objects()
- result.objects[f] = objects
- end
- return objects
- else
- return figure:objects()
- end
-end
-
-function metapost.convert(result, trialrun, flusher, multipass, askedfig)
- if trialrun then
- metapost.multipass = false
- metapost.parse(result, askedfig)
- if multipass and not metapost.multipass and metapost.optimize then
- metapost.flush(result, flusher, askedfig) -- saves a run
- else
- return false
- end
- else
- metapost.flush(result, flusher, askedfig)
- end
- return true -- done
-end
-
-function metapost.flushliteral(d)
- if savedliterals then
- local literal = copy_node(mpsliteral)
- literal.data = savedliterals[d]
- write_node(literal)
- else
- report_metapost("problem flushing literal %a",d)
- end
-end
-
-function metapost.flushreset() -- will become obsolete and internal
- savedliterals = nil
-end
-
-function pdfflusher.comment(message)
- if message then
- message = formatters["%% mps graphic %s: %s"](metapost.n,message)
- if experiment then
- context(pdfliteral(message))
- else
- if savedliterals then
- local last = #savedliterals + 1
- savedliterals[last] = message
- context.MPLIBtoPDF(last)
- else
- savedliterals = { message }
- context.MPLIBtoPDF(1)
- end
- end
- end
-end
-
-function pdfflusher.startfigure(n,llx,lly,urx,ury,message)
- savedliterals = nil
- metapost.n = metapost.n + 1
- context.startMPLIBtoPDF(llx,lly,urx,ury)
- if message then pdfflusher.comment(message) end
-end
-
-function pdfflusher.stopfigure(message)
- if message then pdfflusher.comment(message) end
- context.stopMPLIBtoPDF()
- context.MPLIBflushreset() -- maybe just at the beginning
-end
-
-function pdfflusher.flushfigure(pdfliterals) -- table
- if #pdfliterals > 0 then
- pdfliterals = concat(pdfliterals,"\n")
- if experiment then
- context(pdfliteral(pdfliterals))
- else
- if savedliterals then
- local last = #savedliterals + 1
- savedliterals[last] = pdfliterals
- context.MPLIBtoPDF(last)
- else
- savedliterals = { pdfliterals }
- context.MPLIBtoPDF(1)
- end
- end
- end
-end
-
-function pdfflusher.textfigure(font,size,text,width,height,depth) -- we could save the factor
- text = gsub(text,".","\\hbox{%1}") -- kerning happens in metapost (i have to check if this is true for mplib)
- context.MPtextext(font,size,text,0,-number.dimenfactors.bp*depth)
-end
-
-local bend_tolerance = 131/65536
-
-local rx, sx, sy, ry, tx, ty, divider = 1, 0, 0, 1, 0, 0, 1
-
-local pen_info = mplib.pen_info
-
-local function pen_characteristics(object)
- local t = pen_info(object)
- rx, ry, sx, sy, tx, ty = t.rx, t.ry, t.sx, t.sy, t.tx, t.ty
- divider = sx*sy - rx*ry
- return not (sx==1 and rx==0 and ry==0 and sy==1 and tx==0 and ty==0), t.width
-end
-
-local function mpconcat(px, py) -- no tx, ty here / we can move this one inline if needed
- return (sy*px-ry*py)/divider,(sx*py-rx*px)/divider
-end
-
-local function curved(ith,pth)
- local d = pth.left_x - ith.right_x
- if abs(ith.right_x - ith.x_coord - d) <= bend_tolerance and abs(pth.x_coord - pth.left_x - d) <= bend_tolerance then
- d = pth.left_y - ith.right_y
- if abs(ith.right_y - ith.y_coord - d) <= bend_tolerance and abs(pth.y_coord - pth.left_y - d) <= bend_tolerance then
- return false
- end
- end
- return true
-end
-
-local function flushnormalpath(path, t, open)
- local pth, ith, nt
- if t then
- nt = #t
- else
- t = { }
- nt = 0
- end
- for i=1,#path do
- nt = nt + 1
- pth = path[i]
- if not ith then
- t[nt] = formatters["%f %f m"](pth.x_coord,pth.y_coord)
- elseif curved(ith,pth) then
- t[nt] = formatters["%f %f %f %f %f %f c"](ith.right_x,ith.right_y,pth.left_x,pth.left_y,pth.x_coord,pth.y_coord)
- else
- t[nt] = formatters["%f %f l"](pth.x_coord,pth.y_coord)
- end
- ith = pth
- end
- if not open then
- nt = nt + 1
- local one = path[1]
- if curved(pth,one) then
- t[nt] = formatters["%f %f %f %f %f %f c"](pth.right_x,pth.right_y,one.left_x,one.left_y,one.x_coord,one.y_coord )
- else
- t[nt] = formatters["%f %f l"](one.x_coord,one.y_coord)
- end
- elseif #path == 1 then
- -- special case .. draw point
- local one = path[1]
- nt = nt + 1
- t[nt] = formatters["%f %f l"](one.x_coord,one.y_coord)
- end
- return t
-end
-
-local function flushconcatpath(path, t, open)
- local pth, ith, nt
- if t then
- nt = #t
- else
- t = { }
- nt = 0
- end
- nt = nt + 1
- t[nt] = formatters["%f %f %f %f %f %f cm"](sx,rx,ry,sy,tx,ty)
- for i=1,#path do
- nt = nt + 1
- pth = path[i]
- if not ith then
- t[nt] = formatters["%f %f m"](mpconcat(pth.x_coord,pth.y_coord))
- elseif curved(ith,pth) then
- local a, b = mpconcat(ith.right_x,ith.right_y)
- local c, d = mpconcat(pth.left_x,pth.left_y)
- t[nt] = formatters["%f %f %f %f %f %f c"](a,b,c,d,mpconcat(pth.x_coord,pth.y_coord))
- else
- t[nt] = formatters["%f %f l"](mpconcat(pth.x_coord, pth.y_coord))
- end
- ith = pth
- end
- if not open then
- nt = nt + 1
- local one = path[1]
- if curved(pth,one) then
- local a, b = mpconcat(pth.right_x,pth.right_y)
- local c, d = mpconcat(one.left_x,one.left_y)
- t[nt] = formatters["%f %f %f %f %f %f c"](a,b,c,d,mpconcat(one.x_coord, one.y_coord))
- else
- t[nt] = formatters["%f %f l"](mpconcat(one.x_coord,one.y_coord))
- end
- elseif #path == 1 then
- -- special case .. draw point
- nt = nt + 1
- local one = path[1]
- t[nt] = formatters["%f %f l"](mpconcat(one.x_coord,one.y_coord))
- end
- return t
-end
-
-metapost.flushnormalpath = flushnormalpath
-
--- The flusher is pdf based, if another backend is used, we need to overload the
--- flusher; this is beta code, the organization will change (already upgraded in
--- sync with mplib)
---
--- We can avoid the before table but I like symmetry. There is of course a small
--- performance penalty, but so is passing extra arguments (result, flusher, after)
--- and returning stuff.
-
-local function ignore() end
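
Editorial sketch, not part of the patch: as the comment above says, the pdf flusher can be overloaded for another backend; metapost.flush below only calls startfigure, flushfigure, textfigure and stopfigure on whatever flusher it is given. A minimal replacement that just collects the literals per figure (all names here are hypothetical, and this is a sketch rather than a drop-in backend):

local collected = { }

local collectingflusher = {
    startfigure = function(n,llx,lly,urx,ury,message)
        collected[#collected+1] = { number = n, bbox = { llx, lly, urx, ury }, literals = { } }
    end,
    flushfigure = function(literals)
        local current = collected[#collected].literals
        for i=1,#literals do
            current[#current+1] = literals[i]
        end
    end,
    textfigure = function(font,size,text,width,height,depth)
        -- a real backend would typeset the text; here it is only recorded
        local current = collected[#collected].literals
        current[#current+1] = "% text: " .. tostring(text)
    end,
    stopfigure = function(message)
        -- nothing to finalize in this sketch
    end,
}

-- usage: metapost.flush(result,collectingflusher,"all")
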
-
-function metapost.flush(result,flusher,askedfig)
- if result then
- local figures = result.fig
- if figures then
- flusher = flusher or pdfflusher
- local resetplugins = metapost.resetplugins or ignore -- before figure
- local processplugins = metapost.processplugins or ignore -- each object
- local synchronizeplugins = metapost.synchronizeplugins or ignore
- local pluginactions = metapost.pluginactions or ignore -- before / after
- local startfigure = flusher.startfigure
- local stopfigure = flusher.stopfigure
- local flushfigure = flusher.flushfigure
- local textfigure = flusher.textfigure
- for f=1, #figures do
- local figure = figures[f]
- local objects = getobjects(result,figure,f)
- local fignum = figure:charcode() or 0
- if askedfig == "direct" or askedfig == "all" or askedfig == fignum then
- local t = { }
- local miterlimit, linecap, linejoin, dashed = -1, -1, -1, false
- local bbox = figure:boundingbox()
- local llx, lly, urx, ury = bbox[1], bbox[2], bbox[3], bbox[4]
- metapost.llx = llx
- metapost.lly = lly
- metapost.urx = urx
- metapost.ury = ury
- if urx < llx then
- -- invalid
- startfigure(fignum,0,0,0,0,"invalid",figure)
- stopfigure()
- else
- startfigure(fignum,llx,lly,urx,ury,"begin",figure)
- t[#t+1] = "q"
- if objects then
- resetplugins(t) -- we should move the colorinitializer here
- for o=1,#objects do
- local object = objects[o]
- local objecttype = object.type
- if objecttype == "start_bounds" or objecttype == "stop_bounds" or objecttype == "special" then
- -- skip
- elseif objecttype == "start_clip" then
- t[#t+1] = "q"
- flushnormalpath(object.path,t,false)
- t[#t+1] = "W n"
- elseif objecttype == "stop_clip" then
- t[#t+1] = "Q"
- miterlimit, linecap, linejoin, dashed = -1, -1, -1, false
- elseif objecttype == "text" then
- t[#t+1] = "q"
- local ot = object.transform -- 3,4,5,6,1,2
- t[#t+1] = formatters["%f %f %f %f %f %f cm"](ot[3],ot[4],ot[5],ot[6],ot[1],ot[2]) -- TH: formatters["%f %f m %f %f %f %f 0 0 cm"](unpack(ot))
- flushfigure(t) -- flush accumulated literals
- t = { }
- textfigure(object.font,object.dsize,object.text,object.width,object.height,object.depth)
- t[#t+1] = "Q"
- else
- -- we use an indirect table as we want to overload
- -- entries but this is not possible in userdata
- --
- -- can be optimized if no path
- --
- local original = object
- local object = { }
- setmetatable(object, {
- __index = original
- })
- -- first we analyze
- local before, after = processplugins(object)
- local objecttype = object.type -- can have changed
- if before then
- t = pluginactions(before,t,flushfigure)
- end
- local ml = object.miterlimit
- if ml and ml ~= miterlimit then
- miterlimit = ml
- t[#t+1] = formatters["%f M"](ml)
- end
- local lj = object.linejoin
- if lj and lj ~= linejoin then
- linejoin = lj
- t[#t+1] = formatters["%i j"](lj)
- end
- local lc = object.linecap
- if lc and lc ~= linecap then
- linecap = lc
- t[#t+1] = formatters["%i J"](lc)
- end
- local dl = object.dash
- if dl then
- local d = formatters["[%s] %f d"](concat(dl.dashes or {}," "),dl.offset)
- if d ~= dashed then
- dashed = d
- t[#t+1] = dashed
- end
- elseif dashed then
- t[#t+1] = "[] 0 d"
- dashed = false
- end
- local path = object.path -- newpath
- local transformed, penwidth = false, 1
- local open = path and path[1].left_type and path[#path].right_type -- at this moment only "end_point"
- local pen = object.pen
- if pen then
- if pen.type == 'elliptical' then
- transformed, penwidth = pen_characteristics(original) -- boolean, value
- t[#t+1] = formatters["%f w"](penwidth) -- todo: only if changed
- if objecttype == 'fill' then
- objecttype = 'both'
- end
- else -- calculated by mplib itself
- objecttype = 'fill'
- end
- end
- if transformed then
- t[#t+1] = "q"
- end
- if path then
- if transformed then
- flushconcatpath(path,t,open)
- else
- flushnormalpath(path,t,open)
- end
- if objecttype == "fill" then
- t[#t+1] = "h f"
- elseif objecttype == "outline" then
- t[#t+1] = (open and "S") or "h S"
- elseif objecttype == "both" then
- t[#t+1] = "h B"
- end
- end
- if transformed then
- t[#t+1] = "Q"
- end
- local path = object.htap
- if path then
- if transformed then
- t[#t+1] = "q"
- end
- if transformed then
- flushconcatpath(path,t,open)
- else
- flushnormalpath(path,t,open)
- end
- if objecttype == "fill" then
- t[#t+1] = "h f"
- elseif objecttype == "outline" then
- t[#t+1] = (open and "S") or "h S"
- elseif objecttype == "both" then
- t[#t+1] = "h B"
- end
- if transformed then
- t[#t+1] = "Q"
- end
- end
- if after then
- t = pluginactions(after,t,flushfigure)
- end
- if object.grouped then
- -- can be qQ'd so changes can end up in groups
- miterlimit, linecap, linejoin, dashed = -1, -1, -1, false
- end
- end
- end
- end
- t[#t+1] = "Q"
- flushfigure(t)
- stopfigure("end")
- end
- if askedfig ~= "all" then
- break
- end
- end
- end
- end
- end
-end
-
-function metapost.parse(result,askedfig)
- if result then
- local figures = result.fig
- if figures then
- local analyzeplugins = metapost.analyzeplugins -- each object
- for f=1,#figures do
- local figure = figures[f]
- local fignum = figure:charcode() or 0
- if askedfig == "direct" or askedfig == "all" or askedfig == fignum then
- local bbox = figure:boundingbox()
- metapost.llx = bbox[1]
- metapost.lly = bbox[2]
- metapost.urx = bbox[3]
- metapost.ury = bbox[4]
- local objects = getobjects(result,figure,f)
- if objects then
- for o=1,#objects do
- analyzeplugins(objects[o])
- end
- end
- if askedfig ~= "all" then
- break
- end
- end
- end
- end
- end
-end
-
--- tracing:
-
-local t = { }
-
-local flusher = {
- startfigure = function()
- t = { }
- context.startnointerference()
- end,
- flushfigure = function(literals)
- local n = #t
- for i=1, #literals do
- n = n + 1
- t[n] = literals[i]
- end
- end,
- stopfigure = function()
- context.stopnointerference()
- end
-}
-
-function metapost.pdfliterals(result)
- metapost.flush(result,flusher)
- return t
-end
-
--- so far
-
-function metapost.totable(result)
- local figure = result and result.fig and result.fig[1]
- if figure then
- local t = { }
- local objects = figure:objects()
- for o=1,#objects do
- local object = objects[o]
- local tt = { }
- local fields = mplib.fields(object)
- for f=1,#fields do
- local field = fields[f]
- tt[field] = object[field]
- end
- t[o] = tt
- end
- local b = figure:boundingbox()
- return {
- boundingbox = { llx = b[1], lly = b[2], urx = b[3], ury = b[4] },
- objects = t
- }
- else
- return nil
- end
-end
+if not modules then modules = { } end modules ['mlib-pdf'] = {
+ version = 1.001,
+ comment = "companion to mlib-ctx.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- maybe %s is better than %f
+
+local format, concat, gsub = string.format, table.concat, string.gsub
+local abs, sqrt, round = math.abs, math.sqrt, math.round
+local setmetatable = setmetatable
+local Cf, C, Cg, Ct, P, S, lpegmatch = lpeg.Cf, lpeg.C, lpeg.Cg, lpeg.Ct, lpeg.P, lpeg.S, lpeg.match
+local formatters = string.formatters
+
+local report_metapost = logs.reporter("metapost")
+
+local mplib, context = mplib, context
+
+local allocate = utilities.storage.allocate
+
+local copy_node = node.copy
+local write_node = node.write
+
+metapost = metapost or { }
+local metapost = metapost
+
+metapost.flushers = metapost.flushers or { }
+local pdfflusher = { }
+metapost.flushers.pdf = pdfflusher
+
+metapost.multipass = false
+metapost.n = 0
+metapost.optimize = true -- false
+
+local experiment = true -- uses context(node) that already does delayed nodes
+
+local savedliterals = nil -- needs checking
+local mpsliteral = nodes.pool.register(node.new("whatsit",nodes.whatsitcodes.pdfliteral)) -- pdfliteral.mode = 1
+
+local pdfliteral = function(s)
+ local literal = copy_node(mpsliteral)
+ literal.data = s
+ return literal
+end
+
+-- Because in MKiV we always have two passes, we save the objects. When an extra
+-- mp run is done (for instance due to texts identified in the parse pass), we
+-- get a new result table and the stored objects are forgotten. Otherwise they
+-- are reused.
+
+local function getobjects(result,figure,f)
+ if metapost.optimize then
+ local objects = result.objects
+ if not objects then
+ result.objects = { }
+ end
+ objects = result.objects[f]
+ if not objects then
+ objects = figure:objects()
+ result.objects[f] = objects
+ end
+ return objects
+ else
+ return figure:objects()
+ end
+end
+
+function metapost.convert(result, trialrun, flusher, multipass, askedfig)
+ if trialrun then
+ metapost.multipass = false
+ metapost.parse(result, askedfig)
+ if multipass and not metapost.multipass and metapost.optimize then
+ metapost.flush(result, flusher, askedfig) -- saves a run
+ else
+ return false
+ end
+ else
+ metapost.flush(result, flusher, askedfig)
+ end
+ return true -- done
+end
+
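+-- A minimal usage sketch (illustration only, the real driver lives elsewhere):
+-- a trial run only flushes directly when no extra pass turns out to be needed,
+-- otherwise a final run with the same (cached) result table has to follow.
+--
+-- local done = metapost.convert(result,true,pdfflusher,multipass,askedfig)
+-- if not done then
+--     metapost.convert(result,false,pdfflusher,multipass,askedfig)
+-- end
+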
+function metapost.flushliteral(d)
+ if savedliterals then
+ local literal = copy_node(mpsliteral)
+ literal.data = savedliterals[d]
+ write_node(literal)
+ else
+ report_metapost("problem flushing literal %a",d)
+ end
+end
+
+function metapost.flushreset() -- will become obsolete and internal
+ savedliterals = nil
+end
+
+function pdfflusher.comment(message)
+ if message then
+ message = formatters["%% mps graphic %s: %s"](metapost.n,message)
+ if experiment then
+ context(pdfliteral(message))
+ else
+ if savedliterals then
+ local last = #savedliterals + 1
+ savedliterals[last] = message
+ context.MPLIBtoPDF(last)
+ else
+ savedliterals = { message }
+ context.MPLIBtoPDF(1)
+ end
+ end
+ end
+end
+
+function pdfflusher.startfigure(n,llx,lly,urx,ury,message)
+ savedliterals = nil
+ metapost.n = metapost.n + 1
+ context.startMPLIBtoPDF(llx,lly,urx,ury)
+ if message then pdfflusher.comment(message) end
+end
+
+function pdfflusher.stopfigure(message)
+ if message then pdfflusher.comment(message) end
+ context.stopMPLIBtoPDF()
+ context.MPLIBflushreset() -- maybe just at the beginning
+end
+
+function pdfflusher.flushfigure(pdfliterals) -- table
+ if #pdfliterals > 0 then
+ pdfliterals = concat(pdfliterals,"\n")
+ if experiment then
+ context(pdfliteral(pdfliterals))
+ else
+ if savedliterals then
+ local last = #savedliterals + 1
+ savedliterals[last] = pdfliterals
+ context.MPLIBtoPDF(last)
+ else
+ savedliterals = { pdfliterals }
+ context.MPLIBtoPDF(1)
+ end
+ end
+ end
+end
+
+function pdfflusher.textfigure(font,size,text,width,height,depth) -- we could save the factor
+ text = gsub(text,".","\\hbox{%1}") -- kerning happens in metapost (i have to check if this is true for mplib)
+ context.MPtextext(font,size,text,0,-number.dimenfactors.bp*depth)
+end
+
+local bend_tolerance = 131/65536
+
+local rx, sx, sy, ry, tx, ty, divider = 1, 0, 0, 1, 0, 0, 1
+
+local pen_info = mplib.pen_info
+
+local function pen_characteristics(object)
+ local t = pen_info(object)
+ rx, ry, sx, sy, tx, ty = t.rx, t.ry, t.sx, t.sy, t.tx, t.ty
+ divider = sx*sy - rx*ry
+ return not (sx==1 and rx==0 and ry==0 and sy==1 and tx==0 and ty==0), t.width
+end
+
+local function mpconcat(px, py) -- no tx, ty here / we can move this one inline if needed
+ return (sy*px-ry*py)/divider,(sx*py-rx*px)/divider
+end
+
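+-- Sanity check, illustration only (not used below): mpconcat applies the
+-- inverse of the pen transform without its translation part, so pushing its
+-- result through "sx*x + ry*y, rx*x + sy*y" returns the original point; the
+-- divider is the determinant sx*sy - rx*ry of that transform.
+--
+-- local px, py = 12, 34
+-- local qx, qy = mpconcat(px,py)
+-- assert(abs(sx*qx + ry*qy - px) < 0.0001)
+-- assert(abs(rx*qx + sy*qy - py) < 0.0001)
+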
+local function curved(ith,pth)
+ local d = pth.left_x - ith.right_x
+ if abs(ith.right_x - ith.x_coord - d) <= bend_tolerance and abs(pth.x_coord - pth.left_x - d) <= bend_tolerance then
+ d = pth.left_y - ith.right_y
+ if abs(ith.right_y - ith.y_coord - d) <= bend_tolerance and abs(pth.y_coord - pth.left_y - d) <= bend_tolerance then
+ return false
+ end
+ end
+ return true
+end
+
+local function flushnormalpath(path, t, open)
+ local pth, ith, nt
+ if t then
+ nt = #t
+ else
+ t = { }
+ nt = 0
+ end
+ for i=1,#path do
+ nt = nt + 1
+ pth = path[i]
+ if not ith then
+ t[nt] = formatters["%f %f m"](pth.x_coord,pth.y_coord)
+ elseif curved(ith,pth) then
+ t[nt] = formatters["%f %f %f %f %f %f c"](ith.right_x,ith.right_y,pth.left_x,pth.left_y,pth.x_coord,pth.y_coord)
+ else
+ t[nt] = formatters["%f %f l"](pth.x_coord,pth.y_coord)
+ end
+ ith = pth
+ end
+ if not open then
+ nt = nt + 1
+ local one = path[1]
+ if curved(pth,one) then
+ t[nt] = formatters["%f %f %f %f %f %f c"](pth.right_x,pth.right_y,one.left_x,one.left_y,one.x_coord,one.y_coord )
+ else
+ t[nt] = formatters["%f %f l"](one.x_coord,one.y_coord)
+ end
+ elseif #path == 1 then
+ -- special case .. draw point
+ local one = path[1]
+ nt = nt + 1
+ t[nt] = formatters["%f %f l"](one.x_coord,one.y_coord)
+ end
+ return t
+end
+
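+-- Illustration only (a hedged sketch, not part of the flow): path points are
+-- plain tables with x_coord/y_coord plus control point fields and the result
+-- is a table of pdf literals; the caller appends the paint operator (S, h f,
+-- h B). A one point open "path" for instance becomes a moveto plus a
+-- degenerate lineto:
+--
+-- local t = flushnormalpath({ { x_coord = 10, y_coord = 20 } }, nil, true)
+-- -- t : { "10.000000 20.000000 m", "10.000000 20.000000 l" }
+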
+local function flushconcatpath(path, t, open)
+ local pth, ith, nt
+ if t then
+ nt = #t
+ else
+ t = { }
+ nt = 0
+ end
+ nt = nt + 1
+ t[nt] = formatters["%f %f %f %f %f %f cm"](sx,rx,ry,sy,tx,ty)
+ for i=1,#path do
+ nt = nt + 1
+ pth = path[i]
+ if not ith then
+ t[nt] = formatters["%f %f m"](mpconcat(pth.x_coord,pth.y_coord))
+ elseif curved(ith,pth) then
+ local a, b = mpconcat(ith.right_x,ith.right_y)
+ local c, d = mpconcat(pth.left_x,pth.left_y)
+ t[nt] = formatters["%f %f %f %f %f %f c"](a,b,c,d,mpconcat(pth.x_coord,pth.y_coord))
+ else
+ t[nt] = formatters["%f %f l"](mpconcat(pth.x_coord, pth.y_coord))
+ end
+ ith = pth
+ end
+ if not open then
+ nt = nt + 1
+ local one = path[1]
+ if curved(pth,one) then
+ local a, b = mpconcat(pth.right_x,pth.right_y)
+ local c, d = mpconcat(one.left_x,one.left_y)
+ t[nt] = formatters["%f %f %f %f %f %f c"](a,b,c,d,mpconcat(one.x_coord, one.y_coord))
+ else
+ t[nt] = formatters["%f %f l"](mpconcat(one.x_coord,one.y_coord))
+ end
+ elseif #path == 1 then
+ -- special case .. draw point
+ nt = nt + 1
+ local one = path[1]
+ t[nt] = formatters["%f %f l"](mpconcat(one.x_coord,one.y_coord))
+ end
+ return t
+end
+
+metapost.flushnormalpath = flushnormalpath
+
+-- The flusher is pdf based; if another backend is used, we need to overload the
+-- flusher. This is beta code and the organization will change (already upgraded
+-- in sync with mplib).
+--
+-- We can avoid the before table but I like symmetry. There is of course a small
+-- performance penalty, but so is passing extra arguments (result, flusher, after)
+-- and returning stuff.
+
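+-- A minimal sketch of such an overloading flusher (illustration only; when text
+-- objects show up a textfigure entry is needed as well); the tracing flusher
+-- further down is a real example:
+--
+-- local dumpflusher = {
+--     startfigure = function(n,llx,lly,urx,ury,message)
+--         report_metapost("start figure %s (%s)",n,message or "")
+--     end,
+--     flushfigure = function(literals)
+--         report_metapost("flushing %s literals",#literals)
+--     end,
+--     stopfigure = function(message)
+--         report_metapost("stop figure (%s)",message or "")
+--     end,
+-- }
+--
+-- metapost.flush(result,dumpflusher,"all")
+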
+local function ignore() end
+
+function metapost.flush(result,flusher,askedfig)
+ if result then
+ local figures = result.fig
+ if figures then
+ flusher = flusher or pdfflusher
+ local resetplugins = metapost.resetplugins or ignore -- before figure
+ local processplugins = metapost.processplugins or ignore -- each object
+ local synchronizeplugins = metapost.synchronizeplugins or ignore
+ local pluginactions = metapost.pluginactions or ignore -- before / after
+ local startfigure = flusher.startfigure
+ local stopfigure = flusher.stopfigure
+ local flushfigure = flusher.flushfigure
+ local textfigure = flusher.textfigure
+ for f=1, #figures do
+ local figure = figures[f]
+ local objects = getobjects(result,figure,f)
+ local fignum = figure:charcode() or 0
+ if askedfig == "direct" or askedfig == "all" or askedfig == fignum then
+ local t = { }
+ local miterlimit, linecap, linejoin, dashed = -1, -1, -1, false
+ local bbox = figure:boundingbox()
+ local llx, lly, urx, ury = bbox[1], bbox[2], bbox[3], bbox[4]
+ metapost.llx = llx
+ metapost.lly = lly
+ metapost.urx = urx
+ metapost.ury = ury
+ if urx < llx then
+ -- invalid
+ startfigure(fignum,0,0,0,0,"invalid",figure)
+ stopfigure()
+ else
+ startfigure(fignum,llx,lly,urx,ury,"begin",figure)
+ t[#t+1] = "q"
+ if objects then
+ resetplugins(t) -- we should move the colorinitializer here
+ for o=1,#objects do
+ local object = objects[o]
+ local objecttype = object.type
+ if objecttype == "start_bounds" or objecttype == "stop_bounds" or objecttype == "special" then
+ -- skip
+ elseif objecttype == "start_clip" then
+ t[#t+1] = "q"
+ flushnormalpath(object.path,t,false)
+ t[#t+1] = "W n"
+ elseif objecttype == "stop_clip" then
+ t[#t+1] = "Q"
+ miterlimit, linecap, linejoin, dashed = -1, -1, -1, false
+ elseif objecttype == "text" then
+ t[#t+1] = "q"
+ local ot = object.transform -- 3,4,5,6,1,2
+ t[#t+1] = formatters["%f %f %f %f %f %f cm"](ot[3],ot[4],ot[5],ot[6],ot[1],ot[2]) -- TH: formatters["%f %f m %f %f %f %f 0 0 cm"](unpack(ot))
+ flushfigure(t) -- flush accumulated literals
+ t = { }
+ textfigure(object.font,object.dsize,object.text,object.width,object.height,object.depth)
+ t[#t+1] = "Q"
+ else
+ -- we use an indirect table as we want to overload
+ -- entries but this is not possible in userdata
+ --
+ -- can be optimized if no path
+ --
+ local original = object
+ local object = { }
+ setmetatable(object, {
+ __index = original
+ })
+ -- first we analyze
+ local before, after = processplugins(object)
+ local objecttype = object.type -- can have changed
+ if before then
+ t = pluginactions(before,t,flushfigure)
+ end
+ local ml = object.miterlimit
+ if ml and ml ~= miterlimit then
+ miterlimit = ml
+ t[#t+1] = formatters["%f M"](ml)
+ end
+ local lj = object.linejoin
+ if lj and lj ~= linejoin then
+ linejoin = lj
+ t[#t+1] = formatters["%i j"](lj)
+ end
+ local lc = object.linecap
+ if lc and lc ~= linecap then
+ linecap = lc
+ t[#t+1] = formatters["%i J"](lc)
+ end
+ local dl = object.dash
+ if dl then
+ local d = formatters["[%s] %f d"](concat(dl.dashes or {}," "),dl.offset)
+ if d ~= dashed then
+ dashed = d
+ t[#t+1] = dashed
+ end
+ elseif dashed then
+ t[#t+1] = "[] 0 d"
+ dashed = false
+ end
+ local path = object.path -- newpath
+ local transformed, penwidth = false, 1
+ local open = path and path[1].left_type and path[#path].right_type -- at this moment only "end_point"
+ local pen = object.pen
+ if pen then
+ if pen.type == 'elliptical' then
+ transformed, penwidth = pen_characteristics(original) -- boolean, value
+ t[#t+1] = formatters["%f w"](penwidth) -- todo: only if changed
+ if objecttype == 'fill' then
+ objecttype = 'both'
+ end
+ else -- calculated by mplib itself
+ objecttype = 'fill'
+ end
+ end
+ if transformed then
+ t[#t+1] = "q"
+ end
+ if path then
+ if transformed then
+ flushconcatpath(path,t,open)
+ else
+ flushnormalpath(path,t,open)
+ end
+ if objecttype == "fill" then
+ t[#t+1] = "h f"
+ elseif objecttype == "outline" then
+ t[#t+1] = (open and "S") or "h S"
+ elseif objecttype == "both" then
+ t[#t+1] = "h B"
+ end
+ end
+ if transformed then
+ t[#t+1] = "Q"
+ end
+ local path = object.htap
+ if path then
+ if transformed then
+ t[#t+1] = "q"
+ end
+ if transformed then
+ flushconcatpath(path,t,open)
+ else
+ flushnormalpath(path,t,open)
+ end
+ if objecttype == "fill" then
+ t[#t+1] = "h f"
+ elseif objecttype == "outline" then
+ t[#t+1] = (open and "S") or "h S"
+ elseif objecttype == "both" then
+ t[#t+1] = "h B"
+ end
+ if transformed then
+ t[#t+1] = "Q"
+ end
+ end
+ if after then
+ t = pluginactions(after,t,flushfigure)
+ end
+ if object.grouped then
+ -- can be qQ'd so changes can end up in groups
+ miterlimit, linecap, linejoin, dashed = -1, -1, -1, false
+ end
+ end
+ end
+ end
+ t[#t+1] = "Q"
+ flushfigure(t)
+ stopfigure("end")
+ end
+ if askedfig ~= "all" then
+ break
+ end
+ end
+ end
+ end
+ end
+end
+
+function metapost.parse(result,askedfig)
+ if result then
+ local figures = result.fig
+ if figures then
+ local analyzeplugins = metapost.analyzeplugins -- each object
+ for f=1,#figures do
+ local figure = figures[f]
+ local fignum = figure:charcode() or 0
+ if askedfig == "direct" or askedfig == "all" or askedfig == fignum then
+ local bbox = figure:boundingbox()
+ metapost.llx = bbox[1]
+ metapost.lly = bbox[2]
+ metapost.urx = bbox[3]
+ metapost.ury = bbox[4]
+ local objects = getobjects(result,figure,f)
+ if objects then
+ for o=1,#objects do
+ analyzeplugins(objects[o])
+ end
+ end
+ if askedfig ~= "all" then
+ break
+ end
+ end
+ end
+ end
+ end
+end
+
+-- tracing:
+
+local t = { }
+
+local flusher = {
+ startfigure = function()
+ t = { }
+ context.startnointerference()
+ end,
+ flushfigure = function(literals)
+ local n = #t
+ for i=1, #literals do
+ n = n + 1
+ t[n] = literals[i]
+ end
+ end,
+ stopfigure = function()
+ context.stopnointerference()
+ end
+}
+
+function metapost.pdfliterals(result)
+ metapost.flush(result,flusher)
+ return t
+end
+
+-- so far
+
+function metapost.totable(result)
+ local figure = result and result.fig and result.fig[1]
+ if figure then
+ local t = { }
+ local objects = figure:objects()
+ for o=1,#objects do
+ local object = objects[o]
+ local tt = { }
+ local fields = mplib.fields(object)
+ for f=1,#fields do
+ local field = fields[f]
+ tt[field] = object[field]
+ end
+ t[o] = tt
+ end
+ local b = figure:boundingbox()
+ return {
+ boundingbox = { llx = b[1], lly = b[2], urx = b[3], ury = b[4] },
+ objects = t
+ }
+ else
+ return nil
+ end
+end
diff --git a/tex/context/base/mlib-pps.lua b/tex/context/base/mlib-pps.lua
index 93bddc2dd..217625bcb 100644
--- a/tex/context/base/mlib-pps.lua
+++ b/tex/context/base/mlib-pps.lua
@@ -1,1216 +1,1216 @@
-if not modules then modules = { } end modules ['mlib-pps'] = {
- version = 1.001,
- comment = "companion to mlib-ctx.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- todo: make a hashed textext variant where we only process the text once (normally
--- we cannot assume that no macros are involved which influence a next textext
-
-local format, gmatch, match, split = string.format, string.gmatch, string.match, string.split
-local tonumber, type = tonumber, type
-local round = math.round
-local insert, concat = table.insert, table.concat
-local Cs, Cf, C, Cg, Ct, P, S, V, Carg = lpeg.Cs, lpeg.Cf, lpeg.C, lpeg.Cg, lpeg.Ct, lpeg.P, lpeg.S, lpeg.V, lpeg.Carg
-local lpegmatch = lpeg.match
-local formatters = string.formatters
-
-local mplib, metapost, lpdf, context = mplib, metapost, lpdf, context
-
-local texbox = tex.box
-local copy_list = node.copy_list
-local free_list = node.flush_list
-local setmetatableindex = table.setmetatableindex
-local sortedhash = table.sortedhash
-
-local starttiming = statistics.starttiming
-local stoptiming = statistics.stoptiming
-
-local trace_runs = false trackers.register("metapost.runs", function(v) trace_runs = v end)
-local trace_textexts = false trackers.register("metapost.textexts", function(v) trace_textexts = v end)
-local trace_scripts = false trackers.register("metapost.scripts", function(v) trace_scripts = v end)
-
-local report_metapost = logs.reporter("metapost")
-local report_textexts = logs.reporter("metapost","textexts")
-local report_scripts = logs.reporter("metapost","scripts")
-
-local colors = attributes.colors
-
-local rgbtocmyk = colors.rgbtocmyk or function() return 0,0,0,1 end
-local cmyktorgb = colors.cmyktorgb or function() return 0,0,0 end
-local rgbtogray = colors.rgbtogray or function() return 0 end
-local cmyktogray = colors.cmyktogray or function() return 0 end
-
-metapost.makempy = metapost.makempy or { nofconverted = 0 }
-local makempy = metapost.makempy
-
-local nooutercolor = "0 g 0 G"
-local nooutertransparency = "/Tr0 gs" -- only when set
-local outercolormode = 0
-local outercolor = nooutercolor
-local outertransparency = nooutertransparency
-local innercolor = nooutercolor
-local innertransparency = nooutertransparency
-
-local pdfcolor = lpdf.color
-local pdftransparency = lpdf.transparency
-local registercolor = colors.register
-local registerspotcolor = colors.registerspotcolor
-
-local transparencies = attributes.transparencies
-local registertransparency = transparencies.register
-
-function metapost.setoutercolor(mode,colormodel,colorattribute,transparencyattribute)
- -- has always to be called before conversion
- -- todo: transparency (not in the mood now)
- outercolormode = mode
- if mode == 1 or mode == 3 then
- -- inherit from outer (registered color)
- outercolor = pdfcolor(colormodel,colorattribute) or nooutercolor
- outertransparency = pdftransparency(transparencyattribute) or nooutertransparency
- elseif mode == 2 then
- -- stand alone (see m-punk.tex)
- outercolor = ""
- outertransparency = ""
- else -- 0
- outercolor = nooutercolor
- outertransparency = nooutertransparency
- end
- innercolor = outercolor
- innertransparency = outertransparency -- not yet used
-end
-
-local f_gray = formatters["%.3f g %.3f G"]
-local f_rgb = formatters["%.3f %.3f %.3f rg %.3f %.3f %.3f RG"]
-local f_cmyk = formatters["%.3f %.3f %.3f %.3f k %.3f %.3f %.3f %.3f K"]
-local f_cm = formatters["q %f %f %f %f %f %f cm"]
-local f_shade = formatters["MpSh%s"]
-
-local function checked_color_pair(color,...)
- if not color then
- return innercolor, outercolor
- end
- if outercolormode == 3 then
- innercolor = color(...)
- return innercolor, innercolor
- else
- return color(...), outercolor
- end
-end
-
-function metapost.colorinitializer()
- innercolor = outercolor
- innertransparency = outertransparency
- return outercolor, outertransparency
-end
-
---~
-
-local specificationsplitter = lpeg.tsplitat(" ")
-local colorsplitter = lpeg.tsplitter(":",tonumber) -- no need for :
-local domainsplitter = lpeg.tsplitter(" ",tonumber)
-local centersplitter = domainsplitter
-local coordinatesplitter = domainsplitter
-
--- thanks to taco's reading of the postscript manual:
---
--- x' = sx * x + ry * y + tx
--- y' = rx * x + sy * y + ty
-
-local nofshades = 0 -- todo: hash resources, start at 1000 in order not to clash with older
-
-local function normalize(ca,cb)
- if #cb == 1 then
- if #ca == 4 then
- cb[1], cb[2], cb[3], cb[4] = 0, 0, 0, 1-cb[1]
- else
- cb[1], cb[2], cb[3] = cb[1], cb[1], cb[1]
- end
- elseif #cb == 3 then
- if #ca == 4 then
- cb[1], cb[2], cb[3], cb[4] = rgbtocmyk(cb[1],cb[2],cb[3])
- else
- cb[1], cb[2], cb[3] = cmyktorgb(cb[1],cb[2],cb[3],cb[4])
- end
- end
-end
-
--- todo: check for the same colorspace (actually a backend issue), now we can
--- have several similar resources
---
--- normalize(ca,cb) fails for spotcolors
-
-local function spotcolorconverter(parent, n, d, p)
- registerspotcolor(parent)
- return pdfcolor(colors.model,registercolor(nil,'spot',parent,n,d,p)), outercolor
-end
-
-local commasplitter = lpeg.tsplitat(",")
-
-local function checkandconvertspot(n_a,f_a,c_a,v_a,n_b,f_b,c_b,v_b)
- -- must be the same but we don't check
- local name = f_shade(nofshades)
- local ca = lpegmatch(commasplitter,v_a)
- local cb = lpegmatch(commasplitter,v_b)
- if #ca == 0 or #cb == 0 then
- return { 0 }, { 1 }, "DeviceGray", name
- else
- for i=1,#ca do ca[i] = tonumber(ca[i]) or 0 end
- for i=1,#cb do cb[i] = tonumber(cb[i]) or 1 end
- --~ spotcolorconverter(n_a,f_a,c_a,v_a) -- not really needed
- return ca, cb, n_a or n_b, name
- end
-end
-
-local function checkandconvert(ca,cb)
- local name = f_shade(nofshades)
- if not ca or not cb or type(ca) == "string" then
- return { 0 }, { 1 }, "DeviceGray", name
- else
- if #ca > #cb then
- normalize(ca,cb)
- elseif #ca < #cb then
- normalize(cb,ca)
- end
- local model = colors.model
- if model == "all" then
- model= (#ca == 4 and "cmyk") or (#ca == 3 and "rgb") or "gray"
- end
- if model == "rgb" then
- if #ca == 4 then
- ca = { cmyktorgb(ca[1],ca[2],ca[3],ca[4]) }
- cb = { cmyktorgb(cb[1],cb[2],cb[3],cb[4]) }
- elseif #ca == 1 then
- local a, b = 1-ca[1], 1-cb[1]
- ca = { a, a, a }
- cb = { b, b, b }
- end
- return ca, cb, "DeviceRGB", name
- elseif model == "cmyk" then
- if #ca == 3 then
- ca = { rgbtocmyk(ca[1],ca[2],ca[3]) }
- cb = { rgbtocmyk(cb[1],cb[2],cb[3]) }
- elseif #ca == 1 then
- ca = { 0, 0, 0, ca[1] }
- cb = { 0, 0, 0, ca[1] }
- end
- return ca, cb, "DeviceCMYK", name
- else
- if #ca == 4 then
- ca = { cmyktogray(ca[1],ca[2],ca[3],ca[4]) }
- cb = { cmyktogray(cb[1],cb[2],cb[3],cb[4]) }
- elseif #ca == 3 then
- ca = { rgbtogray(ca[1],ca[2],ca[3]) }
- cb = { rgbtogray(cb[1],cb[2],cb[3]) }
- end
- -- backend specific (will be renamed)
- return ca, cb, "DeviceGray", name
- end
- end
-end
-
-local current_format, current_graphic, current_initializations
-
-metapost.multipass = false
-
-local textexts = { } -- all boxes, optionally with a different color
-local texslots = { } -- references to textexts in order or usage
-local texorder = { } -- references to textexts by mp index
-local textrial = 0
-local texfinal = 0
-local scratchbox = 0
-
-local function freeboxes()
- for n, box in next, textexts do
- local tn = textexts[n]
- if tn then
- free_list(tn)
- -- texbox[scratchbox] = tn
- -- texbox[scratchbox] = nil -- this frees too
- if trace_textexts then
- report_textexts("freeing box %s",n)
- end
- end
- end
- textexts = { }
- texslots = { }
- texorder = { }
- textrial = 0
- texfinal = 0
-end
-
-metapost.resettextexts = freeboxes
-
-function metapost.settext(box,slot)
- textexts[slot] = copy_list(texbox[box])
- texbox[box] = nil
- -- this will become
- -- textexts[slot] = texbox[box]
- -- unsetbox(box)
-end
-
-function metapost.gettext(box,slot)
- texbox[box] = copy_list(textexts[slot])
- if trace_textexts then
- report_textexts("putting text %s in box %s",slot,box)
- end
- -- textexts[slot] = nil -- no, pictures can be placed several times
-end
-
--- rather generic pdf, so use this elsewhere too it no longer pays
--- off to distinguish between outline and fill (we now have both
--- too, e.g. in arrows)
-
-metapost.reducetogray = true
-
-local models = { }
-
-function models.all(cr)
- local n = #cr
- if n == 0 then
- return checked_color_pair()
- elseif metapost.reducetogray then
- if n == 1 then
- local s = cr[1]
- return checked_color_pair(f_gray,s,s)
- elseif n == 3 then
- local r, g, b = cr[1], cr[2], cr[3]
- if r == g and g == b then
- return checked_color_pair(f_gray,r,r)
- else
- return checked_color_pair(f_rgb,r,g,b,r,g,b)
- end
- else
- local c, m, y, k = cr[1], cr[2], cr[3], cr[4]
- if c == m and m == y and y == 0 then
- k = 1 - k
- return checked_color_pair(f_gray,k,k)
- else
- return checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k)
- end
- end
- elseif n == 1 then
- local s = cr[1]
- return checked_color_pair(f_gray,s,s)
- elseif n == 3 then
- local r, g, b = cr[1], cr[2], cr[3]
- return checked_color_pair(f_rgb,r,g,b,r,g,b)
- else
- local c, m, y, k = cr[1], cr[2], cr[3], cr[4]
- return checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k)
- end
-end
-
-function models.rgb(cr)
- local n = #cr
- if n == 0 then
- return checked_color_pair()
- elseif metapost.reducetogray then
- if n == 1 then
- local s = cr[1]
- checked_color_pair(f_gray,s,s)
- elseif n == 3 then
- local r, g, b = cr[1], cr[2], cr[3]
- if r == g and g == b then
- return checked_color_pair(f_gray,r,r)
- else
- return checked_color_pair(f_rgb,r,g,b,r,g,b)
- end
- else
- local c, m, y, k = cr[1], cr[2], cr[3], cr[4]
- if c == m and m == y and y == 0 then
- k = 1 - k
- return checked_color_pair(f_gray,k,k)
- else
- local r, g, b = cmyktorgb(c,m,y,k)
- return checked_color_pair(f_rgb,r,g,b,r,g,b)
- end
- end
- elseif n == 1 then
- local s = cr[1]
- return checked_color_pair(f_gray,s,s)
- else
- local r, g, b
- if n == 3 then
- r, g, b = cmyktorgb(cr[1],cr[2],cr[3],cr[4])
- else
- r, g, b = cr[1], cr[2], cr[3]
- end
- return checked_color_pair(f_rgb,r,g,b,r,g,b)
- end
-end
-
-function models.cmyk(cr)
- local n = #cr
- if n == 0 then
- return checked_color_pair()
- elseif metapost.reducetogray then
- if n == 1 then
- local s = cr[1]
- return checked_color_pair(f_gray,s,s)
- elseif n == 3 then
- local r, g, b = cr[1], cr[2], cr[3]
- if r == g and g == b then
- return checked_color_pair(f_gray,r,r)
- else
- local c, m, y, k = rgbtocmyk(r,g,b)
- return checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k)
- end
- else
- local c, m, y, k = cr[1], cr[2], cr[3], cr[4]
- if c == m and m == y and y == 0 then
- k = k - 1
- return checked_color_pair(f_gray,k,k)
- else
- return checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k)
- end
- end
- elseif n == 1 then
- local s = cr[1]
- return checked_color_pair(f_gray,s,s)
- else
- local c, m, y, k
- if n == 3 then
- c, m, y, k = rgbtocmyk(cr[1],cr[2],cr[3])
- else
- c, m, y, k = cr[1], cr[2], cr[3], cr[4]
- end
- return checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k)
- end
-end
-
-function models.gray(cr)
- local n, s = #cr, 0
- if n == 0 then
- return checked_color_pair()
- elseif n == 4 then
- s = cmyktogray(cr[1],cr[2],cr[3],cr[4])
- elseif n == 3 then
- s = rgbtogray(cr[1],cr[2],cr[3])
- else
- s = cr[1]
- end
- return checked_color_pair(f_gray,s,s)
-end
-
-setmetatableindex(models, function(t,k)
- local v = models.gray
- t[k] = v
- return v
-end)
-
-local function colorconverter(cs)
- return models[colors.model](cs)
-end
-
-local btex = P("btex")
-local etex = P(" etex")
-local vtex = P("verbatimtex")
-local ttex = P("textext")
-local gtex = P("graphictext")
-local multipass = P("forcemultipass")
-local spacing = S(" \n\r\t\v")^0
-local dquote = P('"')
-
-local found, forced = false, false
-
-local function convert(str)
- found = true
- return "rawtextext(\"" .. str .. "\")" -- centered
-end
-local function ditto(str)
- return "\" & ditto & \""
-end
-local function register()
- found = true
-end
-local function force()
- forced = true
-end
-
-local texmess = (dquote/ditto + (1 - etex))^0
-
-local function ignore(s)
- report_metapost("ignoring verbatim tex: %s",s)
- return ""
-end
-
--- local parser = P {
--- [1] = Cs((V(2)/register + V(4)/ignore + V(3)/convert + V(5)/force + 1)^0),
--- [2] = ttex + gtex,
--- [3] = btex * spacing * Cs(texmess) * etex,
--- [4] = vtex * spacing * Cs(texmess) * etex,
--- [5] = multipass, -- experimental, only for testing
--- }
-
--- currently a a one-liner produces less code
-
--- textext.*(".*") can have "'s but tricky parsing as we can have concatenated strings
--- so this is something for a boring plain or train trip and we might assume proper mp
--- input anyway
-
-local parser = Cs((
- (ttex + gtex)/register
- + (btex * spacing * Cs(texmess) * etex)/convert
- + (vtex * spacing * Cs(texmess) * etex)/ignore
- + 1
-)^0)
-
-local function checktexts(str)
- found, forced = false, false
- return lpegmatch(parser,str), found, forced
-end
-
-metapost.checktexts = checktexts
-
-local factor = 65536*(7227/7200)
-
-function metapost.edefsxsy(wd,ht,dp) -- helper for figure
- local hd = ht + dp
- context.setvalue("sx",wd ~= 0 and factor/wd or 0)
- context.setvalue("sy",hd ~= 0 and factor/hd or 0)
-end
-
-local function sxsy(wd,ht,dp) -- helper for text
- local hd = ht + dp
- return (wd ~= 0 and factor/wd) or 0, (hd ~= 0 and factor/hd) or 0
-end
-
-local no_first_run = "mfun_first_run := false ;"
-local do_first_run = "mfun_first_run := true ;"
-local no_trial_run = "mfun_trial_run := false ;"
-local do_trial_run = "mfun_trial_run := true ;"
-local do_begin_fig = "; beginfig(1) ; "
-local do_end_fig = "; endfig ;"
-local do_safeguard = ";"
-
-local f_text_data = formatters["mfun_tt_w[%i] := %f ; mfun_tt_h[%i] := %f ; mfun_tt_d[%i] := %f ;"]
-
-function metapost.textextsdata()
- local t, nt, n = { }, 0, 0
- for n=1,#texorder do
- local box = textexts[texorder[n]]
- if box then
- local wd, ht, dp = box.width/factor, box.height/factor, box.depth/factor
- if trace_textexts then
- report_textexts("passed data item %s: (%p,%p,%p)",n,wd,ht,dp)
- end
- nt = nt + 1
- t[nt] = f_text_data(n,wd,n,ht,n,dp)
- else
- break
- end
- end
--- inspect(t)
- return t
-end
-
-metapost.intermediate = metapost.intermediate or {}
-metapost.intermediate.actions = metapost.intermediate.actions or {}
-metapost.intermediate.needed = false
-
-metapost.method = 1 -- 1:dumb 2:clever
-
--- maybe we can latelua the texts some day
-
-local nofruns = 0 -- askedfig: "all", "first", number
-
-local function checkaskedfig(askedfig) -- return askedfig, wrappit
- if not askedfig then
- return "direct", true
- elseif askedfig == "all" then
- return "all", false
- elseif askedfig == "direct" then
- return "all", true
- else
- askedfig = tonumber(askedfig)
- if askedfig then
- return askedfig, false
- else
- return "direct", true
- end
- end
-end
-
-function metapost.graphic_base_pass(specification)
- local mpx = specification.mpx -- mandate
- local data = specification.data or ""
- local definitions = specification.definitions or ""
--- local extensions = metapost.getextensions(specification.instance,specification.useextensions)
- local extensions = specification.extensions or ""
- local inclusions = specification.inclusions or ""
- local initializations = specification.initializations or ""
- local askedfig = specification.figure -- no default else no wrapper
- --
- nofruns = nofruns + 1
- local askedfig, wrappit = checkaskedfig(askedfig)
- local done_1, done_2, done_3, forced_1, forced_2, forced_3
- data, done_1, forced_1 = checktexts(data)
- -- we had preamble = extensions + inclusions
- if extensions == "" then
- extensions, done_2, forced_2 = "", false, false
- else
- extensions, done_2, forced_2 = checktexts(extensions)
- end
- if inclusions == "" then
- inclusions, done_3, forced_3 = "", false, false
- else
- inclusions, done_3, forced_3 = checktexts(inclusions)
- end
- metapost.intermediate.needed = false
- metapost.multipass = false -- no needed here
- current_format = mpx
- current_graphic = data
- current_initializations = initializations
- local method = metapost.method
- if trace_runs then
- if method == 1 then
- report_metapost("forcing two runs due to library configuration")
- elseif method ~= 2 then
- report_metapost("ignoring run due to library configuration")
- elseif not (done_1 or done_2 or done_3) then
- report_metapost("forcing one run only due to analysis")
- elseif done_1 then
- report_metapost("forcing at max two runs due to main code")
- elseif done_2 then
- report_metapost("forcing at max two runs due to extensions")
- else
- report_metapost("forcing at max two runs due to inclusions")
- end
- end
- if method == 1 or (method == 2 and (done_1 or done_2 or done_3)) then
- if trace_runs then
- report_metapost("first run of job %s, asked figure %a",nofruns,askedfig)
- end
- -- first true means: trialrun, second true means: avoid extra run if no multipass
- local flushed = metapost.process(mpx, {
- definitions,
- extensions,
- inclusions,
- wrappit and do_begin_fig or "",
- do_first_run,
- do_trial_run,
- current_initializations,
- do_safeguard,
- current_graphic,
- wrappit and do_end_fig or "",
- }, true, nil, not (forced_1 or forced_2 or forced_3), false, askedfig)
- if metapost.intermediate.needed then
- for _, action in next, metapost.intermediate.actions do
- action()
- end
- end
- if not flushed or not metapost.optimize then
- -- tricky, we can only ask once for objects and therefore
- -- we really need a second run when not optimized
- context.MPLIBextrapass(askedfig)
- end
- else
- if trace_runs then
- report_metapost("running job %s, asked figure %a",nofruns,askedfig)
- end
- metapost.process(mpx, {
- preamble,
- wrappit and do_begin_fig or "",
- do_first_run,
- no_trial_run,
- current_initializations,
- do_safeguard,
- current_graphic,
- wrappit and do_end_fig or "",
- }, false, nil, false, false, askedfig)
- end
-end
-
-function metapost.graphic_extra_pass(askedfig)
- if trace_runs then
- report_metapost("second run of job %s, asked figure %a",nofruns,askedfig)
- end
- local askedfig, wrappit = checkaskedfig(askedfig)
- metapost.process(current_format, {
- wrappit and do_begin_fig or "",
- no_trial_run,
- concat(metapost.textextsdata()," ;\n"),
- current_initializations,
- do_safeguard,
- current_graphic,
- wrappit and do_end_fig or "",
- }, false, nil, false, true, askedfig)
- context.MPLIBresettexts() -- must happen afterwards
-end
-
-local start = [[\starttext]]
-local preamble = [[\long\def\MPLIBgraphictext#1{\startTEXpage[scale=10000]#1\stopTEXpage}]]
-local stop = [[\stoptext]]
-
-function makempy.processgraphics(graphics)
- if #graphics > 0 then
- makempy.nofconverted = makempy.nofconverted + 1
- starttiming(makempy)
- local mpofile = tex.jobname .. "-mpgraph"
- local mpyfile = file.replacesuffix(mpofile,"mpy")
- local pdffile = file.replacesuffix(mpofile,"pdf")
- local texfile = file.replacesuffix(mpofile,"tex")
- io.savedata(texfile, { start, preamble, metapost.tex.get(), concat(graphics,"\n"), stop }, "\n")
- local command = format("context --once %s %s", (tex.interactionmode == 0 and "--batchmode") or "", texfile)
- os.execute(command)
- if io.exists(pdffile) then
- command = format("pstoedit -ssp -dt -f mpost %s %s", pdffile, mpyfile)
- os.execute(command)
- local result, r = { }, 0
- if io.exists(mpyfile) then
- local data = io.loaddata(mpyfile)
- for figure in gmatch(data,"beginfig(.-)endfig") do
- r = r + 1
- result[r] = formatters["begingraphictextfig%sendgraphictextfig ;\n"](figure)
- end
- io.savedata(mpyfile,concat(result,""))
- end
- end
- stoptiming(makempy)
- end
-end
-
--- -- the new plugin handler -- --
-
-local sequencers = utilities.sequencers
-local appendgroup = sequencers.appendgroup
-local appendaction = sequencers.appendaction
-
-local resetter = nil
-local analyzer = nil
-local processor = nil
-
-local resetteractions = sequencers.new { arguments = "t" }
-local analyzeractions = sequencers.new { arguments = "object,prescript" }
-local processoractions = sequencers.new { arguments = "object,prescript,before,after" }
-
-appendgroup(resetteractions, "system")
-appendgroup(analyzeractions, "system")
-appendgroup(processoractions, "system")
-
--- later entries come first
-
---~ local scriptsplitter = Cf(Ct("") * (
---~ Cg(C((1-S("= "))^1) * S("= ")^1 * C((1-S("\n\r"))^0) * S("\n\r")^0)
---~ )^0, rawset)
-
-local scriptsplitter = Ct ( Ct (
- C((1-S("= "))^1) * S("= ")^1 * C((1-S("\n\r"))^0) * S("\n\r")^0
-)^0 )
-
-local function splitprescript(script)
- local hash = lpegmatch(scriptsplitter,script)
- for i=#hash,1,-1 do
- local h = hash[i]
- hash[h[1]] = h[2]
- end
- if trace_scripts then
- report_scripts(table.serialize(hash,"prescript"))
- end
- return hash
-end
-
--- -- not used:
---
--- local function splitpostscript(script)
--- local hash = lpegmatch(scriptsplitter,script)
--- for i=1,#hash do
--- local h = hash[i]
--- hash[h[1]] = h[2]
--- end
--- if trace_scripts then
--- report_scripts(table.serialize(hash,"postscript"))
--- end
--- return hash
--- end
-
-function metapost.pluginactions(what,t,flushfigure) -- before/after object, depending on what
- for i=1,#what do
- local wi = what[i]
- if type(wi) == "function" then
- -- assume injection
- flushfigure(t) -- to be checked: too many 0 g 0 G
- t = { }
- wi()
- else
- t[#t+1] = wi
- end
- end
- return t
-end
-
-function metapost.resetplugins(t) -- intialize plugins, before figure
- -- plugins can have been added
- resetter = resetteractions .runner
- analyzer = analyzeractions .runner
- processor = processoractions .runner
- -- let's apply one runner
- resetter(t)
-end
-
-function metapost.analyzeplugins(object) -- each object (first pass)
- local prescript = object.prescript -- specifications
- if prescript and #prescript > 0 then
- return analyzer(object,splitprescript(prescript))
- end
-end
-
-function metapost.processplugins(object) -- each object (second pass)
- local prescript = object.prescript -- specifications
- if prescript and #prescript > 0 then
- local before = { }
- local after = { }
- processor(object,splitprescript(prescript),before,after)
- return #before > 0 and before, #after > 0 and after
- else
- local c = object.color
- if c and #c > 0 then
- local b, a = colorconverter(c)
- return { b }, { a }
- end
- end
-end
-
--- helpers
-
-local basepoints = number.dimenfactors["bp"]
-
-local function cm(object)
- local op = object.path
- if op then
- local first, second, fourth = op[1], op[2], op[4]
- local tx, ty = first.x_coord , first.y_coord
- local sx, sy = second.x_coord - tx, fourth.y_coord - ty
- local rx, ry = second.y_coord - ty, fourth.x_coord - tx
- if sx == 0 then sx = 0.00001 end
- if sy == 0 then sy = 0.00001 end
- return sx, rx, ry, sy, tx, ty
- else
- return 1, 0, 0, 1, 0, 0 -- weird case
- end
-end
-
--- color
-
-local function cl_reset(t)
- t[#t+1] = metapost.colorinitializer() -- only color
-end
-
-local tx_hash = { }
-local tx_last = 0
-
-local function tx_reset()
- tx_hash = { }
- tx_last = 0
-end
-
-local fmt = formatters["%s %s %s % t"]
-
-local function tx_analyze(object,prescript) -- todo: hash content and reuse them
- local tx_stage = prescript.tx_stage
- if tx_stage == "trial" then
- textrial = textrial + 1
- local tx_number = tonumber(prescript.tx_number)
- local s = object.postscript or ""
- local c = object.color -- only simple ones, no transparency
- local a = prescript.tr_alternative
- local t = prescript.tr_transparency
- local h = fmt(tx_number,a or "?",t or "?",c)
- local n = tx_hash[h] -- todo: hashed variant with s (nicer for similar labels)
- if not n then
- tx_last = tx_last + 1
- if not c then
- -- no color
- elseif #c == 1 then
- if a and t then
- s = formatters["\\directcolored[s=%f,a=%f,t=%f]%s"](c[1],a,t,s)
- else
- s = formatters["\\directcolored[s=%f]%s"](c[1],s)
- end
- elseif #c == 3 then
- if a and t then
- s = formatters["\\directcolored[r=%f,g=%f,b=%f,a=%f,t=%f]%s"](c[1],c[2],c[3],a,t,s)
- else
- s = formatters["\\directcolored[r=%f,g=%f,b=%f]%s"](c[1],c[2],c[3],s)
- end
- elseif #c == 4 then
- if a and t then
- s = formatters["\\directcolored[c=%f,m=%f,y=%f,k=%f,a=%f,t=%f]%s"](c[1],c[2],c[3],c[4],a,t,s)
- else
- s = formatters["\\directcolored[c=%f,m=%f,y=%f,k=%f]%s"](c[1],c[2],c[3],c[4],s)
- end
- end
- context.MPLIBsettext(tx_last,s)
- metapost.multipass = true
- tx_hash[h] = tx_last
- texslots[textrial] = tx_last
- texorder[tx_number] = tx_last
- if trace_textexts then
- report_textexts("stage %a, usage %a, number %a, new %a, hash %a",tx_stage,textrial,tx_number,tx_last,h)
- end
- else
- texslots[textrial] = n
- if trace_textexts then
- report_textexts("stage %a, usage %a, number %a, new %a, hash %a",tx_stage,textrial,tx_number,n,h)
- end
- end
- elseif tx_stage == "extra" then
- textrial = textrial + 1
- local tx_number = tonumber(prescript.tx_number)
- if not texorder[tx_number] then
- local s = object.postscript or ""
- tx_last = tx_last + 1
- context.MPLIBsettext(tx_last,s)
- metapost.multipass = true
- texslots[textrial] = tx_last
- texorder[tx_number] = tx_last
- if trace_textexts then
- report_textexts("stage %a, usage %a, number %a, extra %a",tx_stage,textrial,tx_number,tx_last)
- end
- end
- end
-end
-
-local function tx_process(object,prescript,before,after)
- local tx_number = prescript.tx_number
- if tx_number then
- tx_number = tonumber(tx_number)
- local tx_stage = prescript.tx_stage
- if tx_stage == "final" then
- texfinal = texfinal + 1
- local n = texslots[texfinal]
- if trace_textexts then
- report_textexts("stage %a, usage %a, number %a, use %a",tx_stage,texfinal,tx_number,n)
- end
- local sx, rx, ry, sy, tx, ty = cm(object) -- needs to be frozen outside the function
- local box = textexts[n]
- if box then
- before[#before+1] = function()
- -- flush always happens, we can have a special flush function injected before
- context.MPLIBgettextscaledcm(n,
- format("%f",sx), -- bah ... %s no longer checks
- format("%f",rx), -- bah ... %s no longer checks
- format("%f",ry), -- bah ... %s no longer checks
- format("%f",sy), -- bah ... %s no longer checks
- format("%f",tx), -- bah ... %s no longer checks
- format("%f",ty), -- bah ... %s no longer checks
- sxsy(box.width,box.height,box.depth))
- end
- else
- before[#before+1] = function()
- report_textexts("unknown %s",tx_number)
- end
- end
- if not trace_textexts then
- object.path = false -- else: keep it
- end
- object.color = false
- object.grouped = true
- end
- end
-end
-
--- graphics
-
-local graphics = { }
-
-function metapost.intermediate.actions.makempy()
- if #graphics > 0 then
- makempy.processgraphics(graphics)
- graphics = { } -- ?
- end
-end
-
-local function gt_analyze(object,prescript)
- local gt_stage = prescript.gt_stage
- if gt_stage == "trial" then
- graphics[#graphics+1] = formatters["\\MPLIBgraphictext{%s}"](object.postscript or "")
- metapost.intermediate.needed = true
- metapost.multipass = true
- end
-end
-
--- local function gt_process(object,prescript,before,after)
--- local gt_stage = prescript.gt_stage
--- if gt_stage == "final" then
--- end
--- end
-
--- shades
-
-local function sh_process(object,prescript,before,after)
- local sh_type = prescript.sh_type
- if sh_type then
- nofshades = nofshades + 1
- local domain = lpegmatch(domainsplitter,prescript.sh_domain)
- local centera = lpegmatch(centersplitter,prescript.sh_center_a)
- local centerb = lpegmatch(centersplitter,prescript.sh_center_b)
- --
- local sh_color_a = prescript.sh_color_a or "1"
- local sh_color_b = prescript.sh_color_b or "1"
- local ca, cb, colorspace, name, separation
- if prescript.sh_color == "into" and prescript.sp_name then
- -- some spotcolor
- local value_a, components_a, fractions_a, name_a
- local value_b, components_b, fractions_b, name_b
- for i=1,#prescript do
- -- { "sh_color_a", "1" },
- -- { "sh_color", "into" },
- -- { "sh_radius_b", "0" },
- -- { "sh_radius_a", "141.73225" },
- -- { "sh_center_b", "425.19676 141.73225" },
- -- { "sh_center_a", "425.19676 0" },
- -- { "sh_factor", "1" },
- local tag = prescript[i][1]
- if not name_a and tag == "sh_color_a" then
- value_a = prescript[i-5][2]
- components_a = prescript[i-4][2]
- fractions_a = prescript[i-3][2]
- name_a = prescript[i-2][2]
- elseif not name_b and tag == "sh_color_b" then
- value_b = prescript[i-5][2]
- components_b = prescript[i-4][2]
- fractions_b = prescript[i-3][2]
- name_b = prescript[i-2][2]
- end
- if name_a and name_b then
- break
- end
- end
- ca, cb, separation, name = checkandconvertspot(
- name_a,fractions_a,components_a,value_a,
- name_b,fractions_b,components_b,value_b
- )
- else
- local colora = lpegmatch(colorsplitter,sh_color_a)
- local colorb = lpegmatch(colorsplitter,sh_color_b)
- ca, cb, colorspace, name = checkandconvert(colora,colorb)
- end
- if not ca or not cb then
- ca, cb, colorspace, name = checkandconvert()
- end
- if sh_type == "linear" then
- local coordinates = { centera[1], centera[2], centerb[1], centerb[2] }
- lpdf.linearshade(name,domain,ca,cb,1,colorspace,coordinates,separation) -- backend specific (will be renamed)
- elseif sh_type == "circular" then
- local radiusa = tonumber(prescript.sh_radius_a)
- local radiusb = tonumber(prescript.sh_radius_b)
- local coordinates = { centera[1], centera[2], radiusa, centerb[1], centerb[2], radiusb }
- lpdf.circularshade(name,domain,ca,cb,1,colorspace,coordinates,separation) -- backend specific (will be renamed)
- else
- -- fatal error
- end
- before[#before+1], after[#after+1] = "q /Pattern cs", formatters["W n /%s sh Q"](name)
- -- false, not nil, else mt triggered
- object.colored = false -- hm, not object.color ?
- object.type = false
- object.grouped = true
- end
-end
-
--- bitmaps
-
-local function bm_process(object,prescript,before,after)
- local bm_xresolution = prescript.bm_xresolution
- if bm_xresolution then
- before[#before+1] = f_cm(cm(object))
- before[#before+1] = function()
- figures.bitmapimage {
- xresolution = tonumber(bm_xresolution),
- yresolution = tonumber(prescript.bm_yresolution),
- width = 1/basepoints,
- height = 1/basepoints,
- data = object.postscript
- }
- end
- before[#before+1] = "Q"
- object.path = false
- object.color = false
- object.grouped = true
- end
-end
-
--- positions
-
-local function ps_process(object,prescript,before,after)
- local ps_label = prescript.ps_label
- if ps_label then
- local op = object.path
- local first, third = op[1], op[3]
- local x, y = first.x_coord, first.y_coord
- local w, h = third.x_coord - x, third.y_coord - y
- x = x - metapost.llx
- y = metapost.ury - y
- before[#before+1] = function()
- context.MPLIBpositionwhd(ps_label,x,y,w,h)
- end
- object.path = false
- end
-end
-
--- figures
-
-local function fg_process(object,prescript,before,after)
- local fg_name = prescript.fg_name
- if fg_name then
- before[#before+1] = f_cm(cm(object)) -- beware: does not use the cm stack
- before[#before+1] = function()
- context.MPLIBfigure(fg_name,prescript.fg_mask or "")
- end
- before[#before+1] = "Q"
- object.path = false
- object.grouped = true
- end
-end
-
--- color and transparency
-
-local value = Cs ( (
- (Carg(1) * C((1-P(","))^1)) / function(a,b) return format("%0.3f",a * tonumber(b)) end
- + P(","))^1
-)
-
--- should be codeinjections
-
-local t_list = attributes.list[attributes.private('transparency')]
-local c_list = attributes.list[attributes.private('color')]
-
-local function tr_process(object,prescript,before,after)
- -- before can be shortcut to t
- local tr_alternative = prescript.tr_alternative
- if tr_alternative then
- tr_alternative = tonumber(tr_alternative)
- local tr_transparency = tonumber(prescript.tr_transparency)
- before[#before+1] = formatters["/Tr%s gs"](registertransparency(nil,tr_alternative,tr_transparency,true))
- after[#after+1] = "/Tr0 gs" -- outertransparency
- end
- local cs = object.color
- if cs and #cs > 0 then
- local c_b, c_a
- local sp_type = prescript.sp_type
- if not sp_type then
- c_b, c_a = colorconverter(cs)
- elseif sp_type == "spot" or sp_type == "multitone" then
- local sp_name = prescript.sp_name or "black"
- local sp_fractions = prescript.sp_fractions or 1
- local sp_components = prescript.sp_components or ""
- local sp_value = prescript.sp_value or "1"
- local cf = cs[1]
- if cf ~= 1 then
- -- beware, we do scale the spotcolors but not the alternative representation
- sp_value = lpegmatch(value,sp_value,1,cf) or sp_value
- end
- c_b, c_a = spotcolorconverter(sp_name,sp_fractions,sp_components,sp_value)
- elseif sp_type == "named" then
- -- we might move this to another namespace .. also, named can be a spotcolor
- -- so we need to check for that too ... also we need to resolve indirect
- -- colors so we might need the second pass for this (draw dots with \MPcolor)
- local sp_name = prescript.sp_name or "black"
- if not tr_alternative then
- -- todo: sp_name is not yet registered at this time
- local t = t_list[sp_name] -- string or attribute
- local v = t and attributes.transparencies.value(t)
- if v then
- before[#before+1] = formatters["/Tr%s gs"](registertransparency(nil,v[1],v[2],true))
- after[#after+1] = "/Tr0 gs" -- outertransparency
- end
- end
- local c = c_list[sp_name] -- string or attribute
- local v = c and attributes.colors.value(c)
- if v then
- -- all=1 gray=2 rgb=3 cmyk=4
- local colorspace = v[1]
- local f = cs[1]
- if colorspace == 2 then
- local s = f*v[2]
- c_b, c_a = checked_color_pair(f_gray,s,s)
- elseif colorspace == 3 then
- local r, g, b = f*v[3], f*v[4], f*v[5]
- c_b, c_a = checked_color_pair(f_rgb,r,g,b,r,g,b)
- elseif colorspace == 4 or colorspace == 1 then
- local c, m, y, k = f*v[6], f*v[7], f*v[8], f*v[9]
- c_b, c_a = checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k)
- else
- local s = f*v[2]
- c_b, c_a = checked_color_pair(f_gray,s,s)
- end
- end
- --
- end
- if c_a and c_b then
- before[#before+1] = c_b
- after[#after+1] = c_a
- end
- end
-end
-
--- layers (nasty: we need to keep the 'grouping' right
-
-local function la_process(object,prescript,before,after)
- local la_name = prescript.la_name
- if la_name then
- before[#before+1] = backends.codeinjections.startlayer(la_name)
- insert(after,1,backends.codeinjections.stoplayer())
- end
-end
-
--- groups
-
-local types = {
- isolated
-}
-
-local function gr_process(object,prescript,before,after)
- local gr_state = prescript.gr_state
- if gr_state then
- if gr_state == "start" then
- local gr_type = utilities.parsers.settings_to_hash(prescript.gr_type)
- before[#before+1] = function()
- context.MPLIBstartgroup(
- gr_type.isolated and 1 or 0,
- gr_type.knockout and 1 or 0,
- prescript.gr_llx,
- prescript.gr_lly,
- prescript.gr_urx,
- prescript.gr_ury
- )
- end
- elseif gr_state == "stop" then
- after[#after+1] = function()
- context.MPLIBstopgroup()
- end
- end
- object.path = false
- object.color = false
- object.grouped = true
- end
-end
-
--- definitions
-
-appendaction(resetteractions, "system",cl_reset)
-appendaction(resetteractions, "system",tx_reset)
-
-appendaction(processoractions,"system",gr_process)
-
-appendaction(analyzeractions, "system",tx_analyze)
-appendaction(analyzeractions, "system",gt_analyze)
-
-appendaction(processoractions,"system",sh_process)
--- (processoractions,"system",gt_process)
-appendaction(processoractions,"system",bm_process)
-appendaction(processoractions,"system",tx_process)
-appendaction(processoractions,"system",ps_process)
-appendaction(processoractions,"system",fg_process)
-appendaction(processoractions,"system",tr_process) -- last, as color can be reset
-
-appendaction(processoractions,"system",la_process)
-
--- we're nice and set them already
-
-resetter = resetteractions .runner
-analyzer = analyzeractions .runner
-processor = processoractions.runner
+if not modules then modules = { } end modules ['mlib-pps'] = {
+ version = 1.001,
+ comment = "companion to mlib-ctx.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- todo: make a hashed textext variant where we only process the text once (normally
+-- we cannot assume that no macros are involved which influence a next textext)
+
+local format, gmatch, match, split = string.format, string.gmatch, string.match, string.split
+local tonumber, type = tonumber, type
+local round = math.round
+local insert, concat = table.insert, table.concat
+local Cs, Cf, C, Cg, Ct, P, S, V, Carg = lpeg.Cs, lpeg.Cf, lpeg.C, lpeg.Cg, lpeg.Ct, lpeg.P, lpeg.S, lpeg.V, lpeg.Carg
+local lpegmatch = lpeg.match
+local formatters = string.formatters
+
+local mplib, metapost, lpdf, context = mplib, metapost, lpdf, context
+
+local texbox = tex.box
+local copy_list = node.copy_list
+local free_list = node.flush_list
+local setmetatableindex = table.setmetatableindex
+local sortedhash = table.sortedhash
+
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+
+local trace_runs = false trackers.register("metapost.runs", function(v) trace_runs = v end)
+local trace_textexts = false trackers.register("metapost.textexts", function(v) trace_textexts = v end)
+local trace_scripts = false trackers.register("metapost.scripts", function(v) trace_scripts = v end)
+
+local report_metapost = logs.reporter("metapost")
+local report_textexts = logs.reporter("metapost","textexts")
+local report_scripts = logs.reporter("metapost","scripts")
+
+local colors = attributes.colors
+
+local rgbtocmyk = colors.rgbtocmyk or function() return 0,0,0,1 end
+local cmyktorgb = colors.cmyktorgb or function() return 0,0,0 end
+local rgbtogray = colors.rgbtogray or function() return 0 end
+local cmyktogray = colors.cmyktogray or function() return 0 end
+
+metapost.makempy = metapost.makempy or { nofconverted = 0 }
+local makempy = metapost.makempy
+
+local nooutercolor = "0 g 0 G"
+local nooutertransparency = "/Tr0 gs" -- only when set
+local outercolormode = 0
+local outercolor = nooutercolor
+local outertransparency = nooutertransparency
+local innercolor = nooutercolor
+local innertransparency = nooutertransparency
+
+local pdfcolor = lpdf.color
+local pdftransparency = lpdf.transparency
+local registercolor = colors.register
+local registerspotcolor = colors.registerspotcolor
+
+local transparencies = attributes.transparencies
+local registertransparency = transparencies.register
+
+function metapost.setoutercolor(mode,colormodel,colorattribute,transparencyattribute)
+ -- has always to be called before conversion
+ -- todo: transparency (not in the mood now)
+ outercolormode = mode
+ if mode == 1 or mode == 3 then
+ -- inherit from outer (registered color)
+ outercolor = pdfcolor(colormodel,colorattribute) or nooutercolor
+ outertransparency = pdftransparency(transparencyattribute) or nooutertransparency
+ elseif mode == 2 then
+ -- stand alone (see m-punk.tex)
+ outercolor = ""
+ outertransparency = ""
+ else -- 0
+ outercolor = nooutercolor
+ outertransparency = nooutertransparency
+ end
+ innercolor = outercolor
+ innertransparency = outertransparency -- not yet used
+end
+
+local f_gray = formatters["%.3f g %.3f G"]
+local f_rgb = formatters["%.3f %.3f %.3f rg %.3f %.3f %.3f RG"]
+local f_cmyk = formatters["%.3f %.3f %.3f %.3f k %.3f %.3f %.3f %.3f K"]
+local f_cm = formatters["q %f %f %f %f %f %f cm"]
+local f_shade = formatters["MpSh%s"]
+
+local function checked_color_pair(color,...)
+ if not color then
+ return innercolor, outercolor
+ end
+ if outercolormode == 3 then
+ innercolor = color(...)
+ return innercolor, innercolor
+ else
+ return color(...), outercolor
+ end
+end
+
+function metapost.colorinitializer()
+ innercolor = outercolor
+ innertransparency = outertransparency
+ return outercolor, outertransparency
+end
+
+--~
+
+local specificationsplitter = lpeg.tsplitat(" ")
+local colorsplitter = lpeg.tsplitter(":",tonumber) -- no need for :
+local domainsplitter = lpeg.tsplitter(" ",tonumber)
+local centersplitter = domainsplitter
+local coordinatesplitter = domainsplitter
+
+-- thanks to taco's reading of the postscript manual:
+--
+-- x' = sx * x + ry * y + tx
+-- y' = rx * x + sy * y + ty
+
+local nofshades = 0 -- todo: hash resources, start at 1000 in order not to clash with older
+
+local function normalize(ca,cb)
+ if #cb == 1 then
+ if #ca == 4 then
+ cb[1], cb[2], cb[3], cb[4] = 0, 0, 0, 1-cb[1]
+ else
+ cb[1], cb[2], cb[3] = cb[1], cb[1], cb[1]
+ end
+ elseif #cb == 3 then
+ if #ca == 4 then
+ cb[1], cb[2], cb[3], cb[4] = rgbtocmyk(cb[1],cb[2],cb[3])
+ else
+ cb[1], cb[2], cb[3] = cmyktorgb(cb[1],cb[2],cb[3],cb[4])
+ end
+ end
+end
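+
+-- a minimal sketch of what normalize does, assuming ca is a cmyk endpoint and
+-- cb a gray one (hypothetical values, just to show the promotion):
+--
+--   local ca, cb = { 0, 0, 0, 1 }, { 0.25 }
+--   normalize(ca,cb) -- cb becomes { 0, 0, 0, 0.75 }, i.e. gray promoted to cmyk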
+
+-- todo: check for the same colorspace (actually a backend issue), now we can
+-- have several similar resources
+--
+-- normalize(ca,cb) fails for spotcolors
+
+local function spotcolorconverter(parent, n, d, p)
+ registerspotcolor(parent)
+ return pdfcolor(colors.model,registercolor(nil,'spot',parent,n,d,p)), outercolor
+end
+
+local commasplitter = lpeg.tsplitat(",")
+
+local function checkandconvertspot(n_a,f_a,c_a,v_a,n_b,f_b,c_b,v_b)
+ -- must be the same but we don't check
+ local name = f_shade(nofshades)
+ local ca = lpegmatch(commasplitter,v_a)
+ local cb = lpegmatch(commasplitter,v_b)
+ if #ca == 0 or #cb == 0 then
+ return { 0 }, { 1 }, "DeviceGray", name
+ else
+ for i=1,#ca do ca[i] = tonumber(ca[i]) or 0 end
+ for i=1,#cb do cb[i] = tonumber(cb[i]) or 1 end
+ --~ spotcolorconverter(n_a,f_a,c_a,v_a) -- not really needed
+ return ca, cb, n_a or n_b, name
+ end
+end
+
+local function checkandconvert(ca,cb)
+ local name = f_shade(nofshades)
+ if not ca or not cb or type(ca) == "string" then
+ return { 0 }, { 1 }, "DeviceGray", name
+ else
+ if #ca > #cb then
+ normalize(ca,cb)
+ elseif #ca < #cb then
+ normalize(cb,ca)
+ end
+ local model = colors.model
+ if model == "all" then
+            model = (#ca == 4 and "cmyk") or (#ca == 3 and "rgb") or "gray"
+ end
+ if model == "rgb" then
+ if #ca == 4 then
+ ca = { cmyktorgb(ca[1],ca[2],ca[3],ca[4]) }
+ cb = { cmyktorgb(cb[1],cb[2],cb[3],cb[4]) }
+ elseif #ca == 1 then
+ local a, b = 1-ca[1], 1-cb[1]
+ ca = { a, a, a }
+ cb = { b, b, b }
+ end
+ return ca, cb, "DeviceRGB", name
+ elseif model == "cmyk" then
+ if #ca == 3 then
+ ca = { rgbtocmyk(ca[1],ca[2],ca[3]) }
+ cb = { rgbtocmyk(cb[1],cb[2],cb[3]) }
+ elseif #ca == 1 then
+ ca = { 0, 0, 0, ca[1] }
+                cb = { 0, 0, 0, cb[1] }
+ end
+ return ca, cb, "DeviceCMYK", name
+ else
+ if #ca == 4 then
+ ca = { cmyktogray(ca[1],ca[2],ca[3],ca[4]) }
+ cb = { cmyktogray(cb[1],cb[2],cb[3],cb[4]) }
+ elseif #ca == 3 then
+ ca = { rgbtogray(ca[1],ca[2],ca[3]) }
+ cb = { rgbtogray(cb[1],cb[2],cb[3]) }
+ end
+ -- backend specific (will be renamed)
+ return ca, cb, "DeviceGray", name
+ end
+ end
+end
+
+local current_format, current_graphic, current_initializations
+
+metapost.multipass = false
+
+local textexts = { } -- all boxes, optionally with a different color
+local texslots = { } -- references to textexts in order or usage
+local texorder = { } -- references to textexts by mp index
+local textrial = 0
+local texfinal = 0
+local scratchbox = 0
+
+local function freeboxes()
+ for n, box in next, textexts do
+ local tn = textexts[n]
+ if tn then
+ free_list(tn)
+ -- texbox[scratchbox] = tn
+ -- texbox[scratchbox] = nil -- this frees too
+ if trace_textexts then
+ report_textexts("freeing box %s",n)
+ end
+ end
+ end
+ textexts = { }
+ texslots = { }
+ texorder = { }
+ textrial = 0
+ texfinal = 0
+end
+
+metapost.resettextexts = freeboxes
+
+function metapost.settext(box,slot)
+ textexts[slot] = copy_list(texbox[box])
+ texbox[box] = nil
+ -- this will become
+ -- textexts[slot] = texbox[box]
+ -- unsetbox(box)
+end
+
+function metapost.gettext(box,slot)
+ texbox[box] = copy_list(textexts[slot])
+ if trace_textexts then
+ report_textexts("putting text %s in box %s",slot,box)
+ end
+ -- textexts[slot] = nil -- no, pictures can be placed several times
+end
+
+-- rather generic pdf, so this can be used elsewhere too; it no longer pays
+-- off to distinguish between outline and fill (we now have both
+-- too, e.g. in arrows)
+
+metapost.reducetogray = true
+
+local models = { }
+
+function models.all(cr)
+ local n = #cr
+ if n == 0 then
+ return checked_color_pair()
+ elseif metapost.reducetogray then
+ if n == 1 then
+ local s = cr[1]
+ return checked_color_pair(f_gray,s,s)
+ elseif n == 3 then
+ local r, g, b = cr[1], cr[2], cr[3]
+ if r == g and g == b then
+ return checked_color_pair(f_gray,r,r)
+ else
+ return checked_color_pair(f_rgb,r,g,b,r,g,b)
+ end
+ else
+ local c, m, y, k = cr[1], cr[2], cr[3], cr[4]
+ if c == m and m == y and y == 0 then
+ k = 1 - k
+ return checked_color_pair(f_gray,k,k)
+ else
+ return checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k)
+ end
+ end
+ elseif n == 1 then
+ local s = cr[1]
+ return checked_color_pair(f_gray,s,s)
+ elseif n == 3 then
+ local r, g, b = cr[1], cr[2], cr[3]
+ return checked_color_pair(f_rgb,r,g,b,r,g,b)
+ else
+ local c, m, y, k = cr[1], cr[2], cr[3], cr[4]
+ return checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k)
+ end
+end
+
+function models.rgb(cr)
+ local n = #cr
+ if n == 0 then
+ return checked_color_pair()
+ elseif metapost.reducetogray then
+ if n == 1 then
+ local s = cr[1]
+            return checked_color_pair(f_gray,s,s)
+ elseif n == 3 then
+ local r, g, b = cr[1], cr[2], cr[3]
+ if r == g and g == b then
+ return checked_color_pair(f_gray,r,r)
+ else
+ return checked_color_pair(f_rgb,r,g,b,r,g,b)
+ end
+ else
+ local c, m, y, k = cr[1], cr[2], cr[3], cr[4]
+ if c == m and m == y and y == 0 then
+ k = 1 - k
+ return checked_color_pair(f_gray,k,k)
+ else
+ local r, g, b = cmyktorgb(c,m,y,k)
+ return checked_color_pair(f_rgb,r,g,b,r,g,b)
+ end
+ end
+ elseif n == 1 then
+ local s = cr[1]
+ return checked_color_pair(f_gray,s,s)
+ else
+ local r, g, b
+        if n == 4 then
+ r, g, b = cmyktorgb(cr[1],cr[2],cr[3],cr[4])
+ else
+ r, g, b = cr[1], cr[2], cr[3]
+ end
+ return checked_color_pair(f_rgb,r,g,b,r,g,b)
+ end
+end
+
+function models.cmyk(cr)
+ local n = #cr
+ if n == 0 then
+ return checked_color_pair()
+ elseif metapost.reducetogray then
+ if n == 1 then
+ local s = cr[1]
+ return checked_color_pair(f_gray,s,s)
+ elseif n == 3 then
+ local r, g, b = cr[1], cr[2], cr[3]
+ if r == g and g == b then
+ return checked_color_pair(f_gray,r,r)
+ else
+ local c, m, y, k = rgbtocmyk(r,g,b)
+ return checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k)
+ end
+ else
+ local c, m, y, k = cr[1], cr[2], cr[3], cr[4]
+ if c == m and m == y and y == 0 then
+                k = 1 - k
+ return checked_color_pair(f_gray,k,k)
+ else
+ return checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k)
+ end
+ end
+ elseif n == 1 then
+ local s = cr[1]
+ return checked_color_pair(f_gray,s,s)
+ else
+ local c, m, y, k
+ if n == 3 then
+ c, m, y, k = rgbtocmyk(cr[1],cr[2],cr[3])
+ else
+ c, m, y, k = cr[1], cr[2], cr[3], cr[4]
+ end
+ return checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k)
+ end
+end
+
+function models.gray(cr)
+ local n, s = #cr, 0
+ if n == 0 then
+ return checked_color_pair()
+ elseif n == 4 then
+ s = cmyktogray(cr[1],cr[2],cr[3],cr[4])
+ elseif n == 3 then
+ s = rgbtogray(cr[1],cr[2],cr[3])
+ else
+ s = cr[1]
+ end
+ return checked_color_pair(f_gray,s,s)
+end
+
+setmetatableindex(models, function(t,k)
+ local v = models.gray
+ t[k] = v
+ return v
+end)
+
+local function colorconverter(cs)
+ return models[colors.model](cs)
+end
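+
+-- a minimal sketch of what the converter hands back (the exact strings depend
+-- on colors.model, reducetogray and the current outer color):
+--
+--   local fill, outer = colorconverter { 1, 0, 0 }
+--   -- fill  : "1.000 0.000 0.000 rg 1.000 0.000 0.000 RG" (rgb model)
+--   -- outer : the registered outer color, e.g. "0 g 0 G"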
+
+local btex = P("btex")
+local etex = P(" etex")
+local vtex = P("verbatimtex")
+local ttex = P("textext")
+local gtex = P("graphictext")
+local multipass = P("forcemultipass")
+local spacing = S(" \n\r\t\v")^0
+local dquote = P('"')
+
+local found, forced = false, false
+
+local function convert(str)
+ found = true
+ return "rawtextext(\"" .. str .. "\")" -- centered
+end
+local function ditto(str)
+ return "\" & ditto & \""
+end
+local function register()
+ found = true
+end
+local function force()
+ forced = true
+end
+
+local texmess = (dquote/ditto + (1 - etex))^0
+
+local function ignore(s)
+ report_metapost("ignoring verbatim tex: %s",s)
+ return ""
+end
+
+-- local parser = P {
+-- [1] = Cs((V(2)/register + V(4)/ignore + V(3)/convert + V(5)/force + 1)^0),
+-- [2] = ttex + gtex,
+-- [3] = btex * spacing * Cs(texmess) * etex,
+-- [4] = vtex * spacing * Cs(texmess) * etex,
+-- [5] = multipass, -- experimental, only for testing
+-- }
+
+-- currently a one-liner produces less code
+
+-- textext.*(".*") can have "'s in it but parsing that is tricky as we can have
+-- concatenated strings; so this is something for a boring plane or train trip
+-- and we might as well assume proper mp input anyway
+
+local parser = Cs((
+ (ttex + gtex)/register
+ + (btex * spacing * Cs(texmess) * etex)/convert
+ + (vtex * spacing * Cs(texmess) * etex)/ignore
+ + 1
+)^0)
+
+local function checktexts(str)
+ found, forced = false, false
+ return lpegmatch(parser,str), found, forced
+end
+
+metapost.checktexts = checktexts
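+
+-- a minimal sketch of the rewrite, assuming plain metapost input:
+--
+--   local str, found, forced = metapost.checktexts([[draw btex hello etex ;]])
+--   -- str    : 'draw rawtextext("hello") ;'
+--   -- found  : true (so the graphic may need an extra pass)
+--   -- forced : false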
+
+local factor = 65536*(7227/7200)
+
+function metapost.edefsxsy(wd,ht,dp) -- helper for figure
+ local hd = ht + dp
+ context.setvalue("sx",wd ~= 0 and factor/wd or 0)
+ context.setvalue("sy",hd ~= 0 and factor/hd or 0)
+end
+
+local function sxsy(wd,ht,dp) -- helper for text
+ local hd = ht + dp
+ return (wd ~= 0 and factor/wd) or 0, (hd ~= 0 and factor/hd) or 0
+end
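+
+-- the factor is the number of scaled points in a big (postscript) point, so
+-- sxsy maps box dimensions (in sp) onto reciprocal bp values; a quick check:
+--
+--   sxsy(657817.6,328908.8,0) -- a 10bp by 5bp box, returns 0.1, 0.2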
+
+local no_first_run = "mfun_first_run := false ;"
+local do_first_run = "mfun_first_run := true ;"
+local no_trial_run = "mfun_trial_run := false ;"
+local do_trial_run = "mfun_trial_run := true ;"
+local do_begin_fig = "; beginfig(1) ; "
+local do_end_fig = "; endfig ;"
+local do_safeguard = ";"
+
+local f_text_data = formatters["mfun_tt_w[%i] := %f ; mfun_tt_h[%i] := %f ; mfun_tt_d[%i] := %f ;"]
+
+function metapost.textextsdata()
+ local t, nt, n = { }, 0, 0
+ for n=1,#texorder do
+ local box = textexts[texorder[n]]
+ if box then
+ local wd, ht, dp = box.width/factor, box.height/factor, box.depth/factor
+ if trace_textexts then
+ report_textexts("passed data item %s: (%p,%p,%p)",n,wd,ht,dp)
+ end
+ nt = nt + 1
+ t[nt] = f_text_data(n,wd,n,ht,n,dp)
+ else
+ break
+ end
+ end
+-- inspect(t)
+ return t
+end
+
+metapost.intermediate = metapost.intermediate or {}
+metapost.intermediate.actions = metapost.intermediate.actions or {}
+metapost.intermediate.needed = false
+
+metapost.method = 1 -- 1:dumb 2:clever
+
+-- maybe we can latelua the texts some day
+
+local nofruns = 0 -- askedfig: "all", "first", number
+
+local function checkaskedfig(askedfig) -- return askedfig, wrappit
+ if not askedfig then
+ return "direct", true
+ elseif askedfig == "all" then
+ return "all", false
+ elseif askedfig == "direct" then
+ return "all", true
+ else
+ askedfig = tonumber(askedfig)
+ if askedfig then
+ return askedfig, false
+ else
+ return "direct", true
+ end
+ end
+end
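+
+-- a few sample mappings (figure, wrappit):
+--
+--   checkaskedfig()         -- "direct", true
+--   checkaskedfig("all")    -- "all",    false
+--   checkaskedfig("direct") -- "all",    true
+--   checkaskedfig("123")    -- 123,      false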
+
+function metapost.graphic_base_pass(specification)
+ local mpx = specification.mpx -- mandate
+ local data = specification.data or ""
+ local definitions = specification.definitions or ""
+-- local extensions = metapost.getextensions(specification.instance,specification.useextensions)
+ local extensions = specification.extensions or ""
+ local inclusions = specification.inclusions or ""
+ local initializations = specification.initializations or ""
+ local askedfig = specification.figure -- no default else no wrapper
+ --
+ nofruns = nofruns + 1
+ local askedfig, wrappit = checkaskedfig(askedfig)
+ local done_1, done_2, done_3, forced_1, forced_2, forced_3
+ data, done_1, forced_1 = checktexts(data)
+ -- we had preamble = extensions + inclusions
+ if extensions == "" then
+ extensions, done_2, forced_2 = "", false, false
+ else
+ extensions, done_2, forced_2 = checktexts(extensions)
+ end
+ if inclusions == "" then
+ inclusions, done_3, forced_3 = "", false, false
+ else
+ inclusions, done_3, forced_3 = checktexts(inclusions)
+ end
+ metapost.intermediate.needed = false
+    metapost.multipass = false -- not needed here
+ current_format = mpx
+ current_graphic = data
+ current_initializations = initializations
+ local method = metapost.method
+ if trace_runs then
+ if method == 1 then
+ report_metapost("forcing two runs due to library configuration")
+ elseif method ~= 2 then
+ report_metapost("ignoring run due to library configuration")
+ elseif not (done_1 or done_2 or done_3) then
+ report_metapost("forcing one run only due to analysis")
+ elseif done_1 then
+ report_metapost("forcing at max two runs due to main code")
+ elseif done_2 then
+ report_metapost("forcing at max two runs due to extensions")
+ else
+ report_metapost("forcing at max two runs due to inclusions")
+ end
+ end
+ if method == 1 or (method == 2 and (done_1 or done_2 or done_3)) then
+ if trace_runs then
+ report_metapost("first run of job %s, asked figure %a",nofruns,askedfig)
+ end
+ -- first true means: trialrun, second true means: avoid extra run if no multipass
+ local flushed = metapost.process(mpx, {
+ definitions,
+ extensions,
+ inclusions,
+ wrappit and do_begin_fig or "",
+ do_first_run,
+ do_trial_run,
+ current_initializations,
+ do_safeguard,
+ current_graphic,
+ wrappit and do_end_fig or "",
+ }, true, nil, not (forced_1 or forced_2 or forced_3), false, askedfig)
+ if metapost.intermediate.needed then
+ for _, action in next, metapost.intermediate.actions do
+ action()
+ end
+ end
+ if not flushed or not metapost.optimize then
+ -- tricky, we can only ask once for objects and therefore
+ -- we really need a second run when not optimized
+ context.MPLIBextrapass(askedfig)
+ end
+ else
+ if trace_runs then
+ report_metapost("running job %s, asked figure %a",nofruns,askedfig)
+ end
+ metapost.process(mpx, {
+            definitions,
+            extensions,
+            inclusions,
+ wrappit and do_begin_fig or "",
+ do_first_run,
+ no_trial_run,
+ current_initializations,
+ do_safeguard,
+ current_graphic,
+ wrappit and do_end_fig or "",
+ }, false, nil, false, false, askedfig)
+ end
+end
+
+function metapost.graphic_extra_pass(askedfig)
+ if trace_runs then
+ report_metapost("second run of job %s, asked figure %a",nofruns,askedfig)
+ end
+ local askedfig, wrappit = checkaskedfig(askedfig)
+ metapost.process(current_format, {
+ wrappit and do_begin_fig or "",
+ no_trial_run,
+ concat(metapost.textextsdata()," ;\n"),
+ current_initializations,
+ do_safeguard,
+ current_graphic,
+ wrappit and do_end_fig or "",
+ }, false, nil, false, true, askedfig)
+ context.MPLIBresettexts() -- must happen afterwards
+end
+
+local start = [[\starttext]]
+local preamble = [[\long\def\MPLIBgraphictext#1{\startTEXpage[scale=10000]#1\stopTEXpage}]]
+local stop = [[\stoptext]]
+
+function makempy.processgraphics(graphics)
+ if #graphics > 0 then
+ makempy.nofconverted = makempy.nofconverted + 1
+ starttiming(makempy)
+ local mpofile = tex.jobname .. "-mpgraph"
+ local mpyfile = file.replacesuffix(mpofile,"mpy")
+ local pdffile = file.replacesuffix(mpofile,"pdf")
+ local texfile = file.replacesuffix(mpofile,"tex")
+ io.savedata(texfile, { start, preamble, metapost.tex.get(), concat(graphics,"\n"), stop }, "\n")
+ local command = format("context --once %s %s", (tex.interactionmode == 0 and "--batchmode") or "", texfile)
+ os.execute(command)
+ if io.exists(pdffile) then
+ command = format("pstoedit -ssp -dt -f mpost %s %s", pdffile, mpyfile)
+ os.execute(command)
+ local result, r = { }, 0
+ if io.exists(mpyfile) then
+ local data = io.loaddata(mpyfile)
+ for figure in gmatch(data,"beginfig(.-)endfig") do
+ r = r + 1
+ result[r] = formatters["begingraphictextfig%sendgraphictextfig ;\n"](figure)
+ end
+ io.savedata(mpyfile,concat(result,""))
+ end
+ end
+ stoptiming(makempy)
+ end
+end
+
+-- -- the new plugin handler -- --
+
+local sequencers = utilities.sequencers
+local appendgroup = sequencers.appendgroup
+local appendaction = sequencers.appendaction
+
+local resetter = nil
+local analyzer = nil
+local processor = nil
+
+local resetteractions = sequencers.new { arguments = "t" }
+local analyzeractions = sequencers.new { arguments = "object,prescript" }
+local processoractions = sequencers.new { arguments = "object,prescript,before,after" }
+
+appendgroup(resetteractions, "system")
+appendgroup(analyzeractions, "system")
+appendgroup(processoractions, "system")
+
+-- later entries come first
+
+--~ local scriptsplitter = Cf(Ct("") * (
+--~ Cg(C((1-S("= "))^1) * S("= ")^1 * C((1-S("\n\r"))^0) * S("\n\r")^0)
+--~ )^0, rawset)
+
+local scriptsplitter = Ct ( Ct (
+ C((1-S("= "))^1) * S("= ")^1 * C((1-S("\n\r"))^0) * S("\n\r")^0
+)^0 )
+
+local function splitprescript(script)
+ local hash = lpegmatch(scriptsplitter,script)
+ for i=#hash,1,-1 do
+ local h = hash[i]
+ hash[h[1]] = h[2]
+ end
+ if trace_scripts then
+ report_scripts(table.serialize(hash,"prescript"))
+ end
+ return hash
+end
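+
+-- a minimal sketch, assuming a prescript with newline separated key=value pairs:
+--
+--   local hash = splitprescript("tx_stage=trial\ntx_number=1")
+--   -- hash[1]       : { "tx_stage", "trial" }
+--   -- hash[2]       : { "tx_number", "1" }
+--   -- hash.tx_stage : "trial"    hash.tx_number : "1"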
+
+-- -- not used:
+--
+-- local function splitpostscript(script)
+-- local hash = lpegmatch(scriptsplitter,script)
+-- for i=1,#hash do
+-- local h = hash[i]
+-- hash[h[1]] = h[2]
+-- end
+-- if trace_scripts then
+-- report_scripts(table.serialize(hash,"postscript"))
+-- end
+-- return hash
+-- end
+
+function metapost.pluginactions(what,t,flushfigure) -- before/after object, depending on what
+ for i=1,#what do
+ local wi = what[i]
+ if type(wi) == "function" then
+ -- assume injection
+ flushfigure(t) -- to be checked: too many 0 g 0 G
+ t = { }
+ wi()
+ else
+ t[#t+1] = wi
+ end
+ end
+ return t
+end
+
+function metapost.resetplugins(t) -- initialize plugins, before figure
+ -- plugins can have been added
+ resetter = resetteractions .runner
+ analyzer = analyzeractions .runner
+ processor = processoractions .runner
+ -- let's apply one runner
+ resetter(t)
+end
+
+function metapost.analyzeplugins(object) -- each object (first pass)
+ local prescript = object.prescript -- specifications
+ if prescript and #prescript > 0 then
+ return analyzer(object,splitprescript(prescript))
+ end
+end
+
+function metapost.processplugins(object) -- each object (second pass)
+ local prescript = object.prescript -- specifications
+ if prescript and #prescript > 0 then
+ local before = { }
+ local after = { }
+ processor(object,splitprescript(prescript),before,after)
+ return #before > 0 and before, #after > 0 and after
+ else
+ local c = object.color
+ if c and #c > 0 then
+ local b, a = colorconverter(c)
+ return { b }, { a }
+ end
+ end
+end
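+
+-- a minimal sketch of the second pass result for a plain rgb colored object
+-- (no prescript), assuming an rgb color model and the default outer color:
+--
+--   local before, after = metapost.processplugins(object)
+--   -- before : { "1.000 0.000 0.000 rg 1.000 0.000 0.000 RG" }
+--   -- after  : { "0 g 0 G" }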
+
+-- helpers
+
+local basepoints = number.dimenfactors["bp"]
+
+local function cm(object)
+ local op = object.path
+ if op then
+ local first, second, fourth = op[1], op[2], op[4]
+ local tx, ty = first.x_coord , first.y_coord
+ local sx, sy = second.x_coord - tx, fourth.y_coord - ty
+ local rx, ry = second.y_coord - ty, fourth.x_coord - tx
+ if sx == 0 then sx = 0.00001 end
+ if sy == 0 then sy = 0.00001 end
+ return sx, rx, ry, sy, tx, ty
+ else
+ return 1, 0, 0, 1, 0, 0 -- weird case
+ end
+end
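+
+-- a minimal sketch, assuming the object's path is a transformed unit square
+-- (as the bitmap and figure handlers expect): for the corner points (10,20)
+-- (12,20) (12,22) (10,22) cm(object) returns 2, 0, 0, 2, 10, 20, that is a
+-- scale of 2 plus a shift over (10,20)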
+
+-- color
+
+local function cl_reset(t)
+ t[#t+1] = metapost.colorinitializer() -- only color
+end
+
+local tx_hash = { }
+local tx_last = 0
+
+local function tx_reset()
+ tx_hash = { }
+ tx_last = 0
+end
+
+local fmt = formatters["%s %s %s % t"]
+
+local function tx_analyze(object,prescript) -- todo: hash content and reuse them
+ local tx_stage = prescript.tx_stage
+ if tx_stage == "trial" then
+ textrial = textrial + 1
+ local tx_number = tonumber(prescript.tx_number)
+ local s = object.postscript or ""
+ local c = object.color -- only simple ones, no transparency
+ local a = prescript.tr_alternative
+ local t = prescript.tr_transparency
+ local h = fmt(tx_number,a or "?",t or "?",c)
+ local n = tx_hash[h] -- todo: hashed variant with s (nicer for similar labels)
+ if not n then
+ tx_last = tx_last + 1
+ if not c then
+ -- no color
+ elseif #c == 1 then
+ if a and t then
+ s = formatters["\\directcolored[s=%f,a=%f,t=%f]%s"](c[1],a,t,s)
+ else
+ s = formatters["\\directcolored[s=%f]%s"](c[1],s)
+ end
+ elseif #c == 3 then
+ if a and t then
+ s = formatters["\\directcolored[r=%f,g=%f,b=%f,a=%f,t=%f]%s"](c[1],c[2],c[3],a,t,s)
+ else
+ s = formatters["\\directcolored[r=%f,g=%f,b=%f]%s"](c[1],c[2],c[3],s)
+ end
+ elseif #c == 4 then
+ if a and t then
+ s = formatters["\\directcolored[c=%f,m=%f,y=%f,k=%f,a=%f,t=%f]%s"](c[1],c[2],c[3],c[4],a,t,s)
+ else
+ s = formatters["\\directcolored[c=%f,m=%f,y=%f,k=%f]%s"](c[1],c[2],c[3],c[4],s)
+ end
+ end
+ context.MPLIBsettext(tx_last,s)
+ metapost.multipass = true
+ tx_hash[h] = tx_last
+ texslots[textrial] = tx_last
+ texorder[tx_number] = tx_last
+ if trace_textexts then
+ report_textexts("stage %a, usage %a, number %a, new %a, hash %a",tx_stage,textrial,tx_number,tx_last,h)
+ end
+ else
+ texslots[textrial] = n
+ if trace_textexts then
+ report_textexts("stage %a, usage %a, number %a, new %a, hash %a",tx_stage,textrial,tx_number,n,h)
+ end
+ end
+ elseif tx_stage == "extra" then
+ textrial = textrial + 1
+ local tx_number = tonumber(prescript.tx_number)
+ if not texorder[tx_number] then
+ local s = object.postscript or ""
+ tx_last = tx_last + 1
+ context.MPLIBsettext(tx_last,s)
+ metapost.multipass = true
+ texslots[textrial] = tx_last
+ texorder[tx_number] = tx_last
+ if trace_textexts then
+ report_textexts("stage %a, usage %a, number %a, extra %a",tx_stage,textrial,tx_number,tx_last)
+ end
+ end
+ end
+end
+
+local function tx_process(object,prescript,before,after)
+ local tx_number = prescript.tx_number
+ if tx_number then
+ tx_number = tonumber(tx_number)
+ local tx_stage = prescript.tx_stage
+ if tx_stage == "final" then
+ texfinal = texfinal + 1
+ local n = texslots[texfinal]
+ if trace_textexts then
+ report_textexts("stage %a, usage %a, number %a, use %a",tx_stage,texfinal,tx_number,n)
+ end
+ local sx, rx, ry, sy, tx, ty = cm(object) -- needs to be frozen outside the function
+ local box = textexts[n]
+ if box then
+ before[#before+1] = function()
+ -- flush always happens, we can have a special flush function injected before
+ context.MPLIBgettextscaledcm(n,
+ format("%f",sx), -- bah ... %s no longer checks
+ format("%f",rx), -- bah ... %s no longer checks
+ format("%f",ry), -- bah ... %s no longer checks
+ format("%f",sy), -- bah ... %s no longer checks
+ format("%f",tx), -- bah ... %s no longer checks
+ format("%f",ty), -- bah ... %s no longer checks
+ sxsy(box.width,box.height,box.depth))
+ end
+ else
+ before[#before+1] = function()
+ report_textexts("unknown %s",tx_number)
+ end
+ end
+ if not trace_textexts then
+ object.path = false -- else: keep it
+ end
+ object.color = false
+ object.grouped = true
+ end
+ end
+end
+
+-- graphics
+
+local graphics = { }
+
+function metapost.intermediate.actions.makempy()
+ if #graphics > 0 then
+ makempy.processgraphics(graphics)
+ graphics = { } -- ?
+ end
+end
+
+local function gt_analyze(object,prescript)
+ local gt_stage = prescript.gt_stage
+ if gt_stage == "trial" then
+ graphics[#graphics+1] = formatters["\\MPLIBgraphictext{%s}"](object.postscript or "")
+ metapost.intermediate.needed = true
+ metapost.multipass = true
+ end
+end
+
+-- local function gt_process(object,prescript,before,after)
+-- local gt_stage = prescript.gt_stage
+-- if gt_stage == "final" then
+-- end
+-- end
+
+-- shades
+
+local function sh_process(object,prescript,before,after)
+ local sh_type = prescript.sh_type
+ if sh_type then
+ nofshades = nofshades + 1
+ local domain = lpegmatch(domainsplitter,prescript.sh_domain)
+ local centera = lpegmatch(centersplitter,prescript.sh_center_a)
+ local centerb = lpegmatch(centersplitter,prescript.sh_center_b)
+ --
+ local sh_color_a = prescript.sh_color_a or "1"
+ local sh_color_b = prescript.sh_color_b or "1"
+ local ca, cb, colorspace, name, separation
+ if prescript.sh_color == "into" and prescript.sp_name then
+ -- some spotcolor
+ local value_a, components_a, fractions_a, name_a
+ local value_b, components_b, fractions_b, name_b
+ for i=1,#prescript do
+ -- { "sh_color_a", "1" },
+ -- { "sh_color", "into" },
+ -- { "sh_radius_b", "0" },
+ -- { "sh_radius_a", "141.73225" },
+ -- { "sh_center_b", "425.19676 141.73225" },
+ -- { "sh_center_a", "425.19676 0" },
+ -- { "sh_factor", "1" },
+ local tag = prescript[i][1]
+ if not name_a and tag == "sh_color_a" then
+ value_a = prescript[i-5][2]
+ components_a = prescript[i-4][2]
+ fractions_a = prescript[i-3][2]
+ name_a = prescript[i-2][2]
+ elseif not name_b and tag == "sh_color_b" then
+ value_b = prescript[i-5][2]
+ components_b = prescript[i-4][2]
+ fractions_b = prescript[i-3][2]
+ name_b = prescript[i-2][2]
+ end
+ if name_a and name_b then
+ break
+ end
+ end
+ ca, cb, separation, name = checkandconvertspot(
+ name_a,fractions_a,components_a,value_a,
+ name_b,fractions_b,components_b,value_b
+ )
+ else
+ local colora = lpegmatch(colorsplitter,sh_color_a)
+ local colorb = lpegmatch(colorsplitter,sh_color_b)
+ ca, cb, colorspace, name = checkandconvert(colora,colorb)
+ end
+ if not ca or not cb then
+ ca, cb, colorspace, name = checkandconvert()
+ end
+ if sh_type == "linear" then
+ local coordinates = { centera[1], centera[2], centerb[1], centerb[2] }
+ lpdf.linearshade(name,domain,ca,cb,1,colorspace,coordinates,separation) -- backend specific (will be renamed)
+ elseif sh_type == "circular" then
+ local radiusa = tonumber(prescript.sh_radius_a)
+ local radiusb = tonumber(prescript.sh_radius_b)
+ local coordinates = { centera[1], centera[2], radiusa, centerb[1], centerb[2], radiusb }
+ lpdf.circularshade(name,domain,ca,cb,1,colorspace,coordinates,separation) -- backend specific (will be renamed)
+ else
+ -- fatal error
+ end
+ before[#before+1], after[#after+1] = "q /Pattern cs", formatters["W n /%s sh Q"](name)
+ -- false, not nil, else mt triggered
+ object.colored = false -- hm, not object.color ?
+ object.type = false
+ object.grouped = true
+ end
+end
+
+-- bitmaps
+
+local function bm_process(object,prescript,before,after)
+ local bm_xresolution = prescript.bm_xresolution
+ if bm_xresolution then
+ before[#before+1] = f_cm(cm(object))
+ before[#before+1] = function()
+ figures.bitmapimage {
+ xresolution = tonumber(bm_xresolution),
+ yresolution = tonumber(prescript.bm_yresolution),
+ width = 1/basepoints,
+ height = 1/basepoints,
+ data = object.postscript
+ }
+ end
+ before[#before+1] = "Q"
+ object.path = false
+ object.color = false
+ object.grouped = true
+ end
+end
+
+-- positions
+
+local function ps_process(object,prescript,before,after)
+ local ps_label = prescript.ps_label
+ if ps_label then
+ local op = object.path
+ local first, third = op[1], op[3]
+ local x, y = first.x_coord, first.y_coord
+ local w, h = third.x_coord - x, third.y_coord - y
+ x = x - metapost.llx
+ y = metapost.ury - y
+ before[#before+1] = function()
+ context.MPLIBpositionwhd(ps_label,x,y,w,h)
+ end
+ object.path = false
+ end
+end
+
+-- figures
+
+local function fg_process(object,prescript,before,after)
+ local fg_name = prescript.fg_name
+ if fg_name then
+ before[#before+1] = f_cm(cm(object)) -- beware: does not use the cm stack
+ before[#before+1] = function()
+ context.MPLIBfigure(fg_name,prescript.fg_mask or "")
+ end
+ before[#before+1] = "Q"
+ object.path = false
+ object.grouped = true
+ end
+end
+
+-- color and transparency
+
+local value = Cs ( (
+ (Carg(1) * C((1-P(","))^1)) / function(a,b) return format("%0.3f",a * tonumber(b)) end
+ + P(","))^1
+)
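+
+-- a minimal sketch: the extra lpeg argument is the scale factor applied to
+-- each fraction in the comma separated string
+--
+--   lpegmatch(value,"0.5,1",1,0.8) -- "0.400,0.800"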
+
+-- should be codeinjections
+
+local t_list = attributes.list[attributes.private('transparency')]
+local c_list = attributes.list[attributes.private('color')]
+
+local function tr_process(object,prescript,before,after)
+ -- before can be shortcut to t
+ local tr_alternative = prescript.tr_alternative
+ if tr_alternative then
+ tr_alternative = tonumber(tr_alternative)
+ local tr_transparency = tonumber(prescript.tr_transparency)
+ before[#before+1] = formatters["/Tr%s gs"](registertransparency(nil,tr_alternative,tr_transparency,true))
+ after[#after+1] = "/Tr0 gs" -- outertransparency
+ end
+ local cs = object.color
+ if cs and #cs > 0 then
+ local c_b, c_a
+ local sp_type = prescript.sp_type
+ if not sp_type then
+ c_b, c_a = colorconverter(cs)
+ elseif sp_type == "spot" or sp_type == "multitone" then
+ local sp_name = prescript.sp_name or "black"
+ local sp_fractions = prescript.sp_fractions or 1
+ local sp_components = prescript.sp_components or ""
+ local sp_value = prescript.sp_value or "1"
+ local cf = cs[1]
+ if cf ~= 1 then
+ -- beware, we do scale the spotcolors but not the alternative representation
+ sp_value = lpegmatch(value,sp_value,1,cf) or sp_value
+ end
+ c_b, c_a = spotcolorconverter(sp_name,sp_fractions,sp_components,sp_value)
+ elseif sp_type == "named" then
+ -- we might move this to another namespace .. also, named can be a spotcolor
+ -- so we need to check for that too ... also we need to resolve indirect
+ -- colors so we might need the second pass for this (draw dots with \MPcolor)
+ local sp_name = prescript.sp_name or "black"
+ if not tr_alternative then
+ -- todo: sp_name is not yet registered at this time
+ local t = t_list[sp_name] -- string or attribute
+ local v = t and attributes.transparencies.value(t)
+ if v then
+ before[#before+1] = formatters["/Tr%s gs"](registertransparency(nil,v[1],v[2],true))
+ after[#after+1] = "/Tr0 gs" -- outertransparency
+ end
+ end
+ local c = c_list[sp_name] -- string or attribute
+ local v = c and attributes.colors.value(c)
+ if v then
+ -- all=1 gray=2 rgb=3 cmyk=4
+ local colorspace = v[1]
+ local f = cs[1]
+ if colorspace == 2 then
+ local s = f*v[2]
+ c_b, c_a = checked_color_pair(f_gray,s,s)
+ elseif colorspace == 3 then
+ local r, g, b = f*v[3], f*v[4], f*v[5]
+ c_b, c_a = checked_color_pair(f_rgb,r,g,b,r,g,b)
+ elseif colorspace == 4 or colorspace == 1 then
+ local c, m, y, k = f*v[6], f*v[7], f*v[8], f*v[9]
+ c_b, c_a = checked_color_pair(f_cmyk,c,m,y,k,c,m,y,k)
+ else
+ local s = f*v[2]
+ c_b, c_a = checked_color_pair(f_gray,s,s)
+ end
+ end
+ --
+ end
+ if c_a and c_b then
+ before[#before+1] = c_b
+ after[#after+1] = c_a
+ end
+ end
+end
+
+-- layers (nasty: we need to keep the 'grouping' right)
+
+local function la_process(object,prescript,before,after)
+ local la_name = prescript.la_name
+ if la_name then
+ before[#before+1] = backends.codeinjections.startlayer(la_name)
+ insert(after,1,backends.codeinjections.stoplayer())
+ end
+end
+
+-- groups
+
+local types = {
+    "isolated", -- not used (yet); gr_type is parsed from the prescript instead
+}
+
+local function gr_process(object,prescript,before,after)
+ local gr_state = prescript.gr_state
+ if gr_state then
+ if gr_state == "start" then
+ local gr_type = utilities.parsers.settings_to_hash(prescript.gr_type)
+ before[#before+1] = function()
+ context.MPLIBstartgroup(
+ gr_type.isolated and 1 or 0,
+ gr_type.knockout and 1 or 0,
+ prescript.gr_llx,
+ prescript.gr_lly,
+ prescript.gr_urx,
+ prescript.gr_ury
+ )
+ end
+ elseif gr_state == "stop" then
+ after[#after+1] = function()
+ context.MPLIBstopgroup()
+ end
+ end
+ object.path = false
+ object.color = false
+ object.grouped = true
+ end
+end
+
+-- definitions
+
+appendaction(resetteractions, "system",cl_reset)
+appendaction(resetteractions, "system",tx_reset)
+
+appendaction(processoractions,"system",gr_process)
+
+appendaction(analyzeractions, "system",tx_analyze)
+appendaction(analyzeractions, "system",gt_analyze)
+
+appendaction(processoractions,"system",sh_process)
+-- (processoractions,"system",gt_process)
+appendaction(processoractions,"system",bm_process)
+appendaction(processoractions,"system",tx_process)
+appendaction(processoractions,"system",ps_process)
+appendaction(processoractions,"system",fg_process)
+appendaction(processoractions,"system",tr_process) -- last, as color can be reset
+
+appendaction(processoractions,"system",la_process)
+
+-- we're nice and set them already
+
+resetter = resetteractions .runner
+analyzer = analyzeractions .runner
+processor = processoractions.runner
diff --git a/tex/context/base/mlib-run.lua b/tex/context/base/mlib-run.lua
index 1fc36dd80..3915196b0 100644
--- a/tex/context/base/mlib-run.lua
+++ b/tex/context/base/mlib-run.lua
@@ -1,591 +1,591 @@
-if not modules then modules = { } end modules ['mlib-run'] = {
- version = 1.001,
- comment = "companion to mlib-ctx.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
---~ cmyk -> done, native
---~ spot -> done, but needs reworking (simpler)
---~ multitone ->
---~ shade -> partly done, todo: cm
---~ figure -> done
---~ hyperlink -> low priority, easy
-
--- new * run
--- or
--- new * execute^1 * finish
-
--- a*[b,c] == b + a * (c-b)
-
---[[ldx--
-
-The directional helpers and pen analysis are more or less translated from the
-MetaPost C code. It really helps that Taco knows that source so well. Taco and
-I spent quite some time on speeding up the Lua and C code. There is not
-much to gain, especially if one keeps in mind that when integrated in TeX
-only a part of the time is spent in MetaPost. Of course an integrated
-approach is way faster than an external MetaPost run and processing time
-nears zero.
---ldx]]--
-
-local type, tostring, tonumber = type, tostring, tonumber
-local format, gsub, match, find = string.format, string.gsub, string.match, string.find
-local concat = table.concat
-local emptystring = string.is_empty
-local lpegmatch, P = lpeg.match, lpeg.P
-
-local trace_graphics = false trackers.register("metapost.graphics", function(v) trace_graphics = v end)
-local trace_tracingall = false trackers.register("metapost.tracingall", function(v) trace_tracingall = v end)
-
-local report_metapost = logs.reporter("metapost")
-local texerrormessage = logs.texerrormessage
-
-local starttiming = statistics.starttiming
-local stoptiming = statistics.stoptiming
-
-local mplib = mplib
-metapost = metapost or { }
-local metapost = metapost
-
-local mplibone = tonumber(mplib.version()) <= 1.50
-
-metapost.showlog = false
-metapost.lastlog = ""
-metapost.collapse = true -- currently mplib cannot deal with begingroup/endgroup mismatch in stepwise processing
-metapost.texerrors = false
-metapost.exectime = metapost.exectime or { } -- hack
-
--- metapost.collapse = false
-
-directives.register("mplib.texerrors", function(v) metapost.texerrors = v end)
-trackers.register ("metapost.showlog", function(v) metapost.showlog = v end)
-
-function metapost.resetlastlog()
- metapost.lastlog = ""
-end
-
------ mpbasepath = lpeg.instringchecker(lpeg.append { "/metapost/context/", "/metapost/base/" })
-local mpbasepath = lpeg.instringchecker(P("/metapost/") * (P("context") + P("base")) * P("/"))
-
--- local function i_finder(askedname,mode,ftype) -- fake message for mpost.map and metafun.mpvi
--- local foundname = file.is_qualified_path(askedname) and askedname or resolvers.findfile(askedname,ftype)
--- if not mpbasepath(foundname) then
--- -- we could use the via file but we don't have a complete io interface yet
--- local data, found, forced = metapost.checktexts(io.loaddata(foundname) or "")
--- if found then
--- local tempname = luatex.registertempfile(foundname,true)
--- io.savedata(tempname,data)
--- foundname = tempname
--- end
--- end
--- return foundname
--- end
-
--- mplib has no real io interface so we have a different mechanism than
--- tex (as soon as we have more control, we will use the normal code)
-
-local finders = { }
-mplib.finders = finders
-
--- for some reason mp sometimes calls this function twice which is inefficient
--- but we cannot catch this
-
-local function preprocessed(name)
- if not mpbasepath(name) then
- -- we could use the via file but we don't have a complete io interface yet
- local data, found, forced = metapost.checktexts(io.loaddata(name) or "")
- if found then
- local temp = luatex.registertempfile(name,true)
- io.savedata(temp,data)
- return temp
- end
- end
- return name
-end
-
-mplib.preprocessed = preprocessed -- helper
-
-finders.file = function(specification,name,mode,ftype)
- return preprocessed(resolvers.findfile(name,ftype))
-end
-
-local function i_finder(name,mode,ftype) -- fake message for mpost.map and metafun.mpvi
- local specification = url.hashed(name)
- local finder = finders[specification.scheme] or finders.file
- return finder(specification,name,mode,ftype)
-end
-
-local function o_finder(name,mode,ftype)
- -- report_metapost("output file %a, mode %a, ftype %a",name,mode,ftype)
- return name
-end
-
-local function finder(name, mode, ftype)
- if mode == "w" then
- return o_finder(name,mode,ftype)
- else
- return i_finder(name,mode,ftype)
- end
-end
-
-local i_limited = false
-local o_limited = false
-
-directives.register("system.inputmode", function(v)
- if not i_limited then
- local i_limiter = io.i_limiter(v)
- if i_limiter then
- i_finder = i_limiter.protect(i_finder)
- i_limited = true
- end
- end
-end)
-
-directives.register("system.outputmode", function(v)
- if not o_limited then
- local o_limiter = io.o_limiter(v)
- if o_limiter then
- o_finder = o_limiter.protect(o_finder)
- o_limited = true
- end
- end
-end)
-
--- -- --
-
-metapost.finder = finder
-
-function metapost.reporterror(result)
- if not result then
- report_metapost("error: no result object returned")
- elseif result.status > 0 then
- local t, e, l = result.term, result.error, result.log
- if t and t ~= "" then
- (metapost.texerrors and texerrormessage or report_metapost)("terminal: %s",t)
- end
- if e == "" or e == "no-error" then
- e = nil
- end
- if e then
- (metapost.texerrors and texerrormessage or report_metapost)("error: %s",e)
- end
- if not t and not e and l then
- metapost.lastlog = metapost.lastlog .. "\n" .. l
- report_metapost("log: %s",l)
- else
- report_metapost("error: unknown, no error, terminal or log messages")
- end
- else
- return false
- end
- return true
-end
-
-if mplibone then
-
- report_metapost("fatal error: mplib is too old")
-
- os.exit()
-
- -- local preamble = [[
- -- boolean mplib ; mplib := true ;
- -- string mp_parent_version ; mp_parent_version := "%s" ;
- -- input "%s" ; dump ;
- -- ]]
- --
- -- metapost.parameters = {
- -- hash_size = 100000,
- -- main_memory = 4000000,
- -- max_in_open = 50,
- -- param_size = 100000,
- -- }
- --
- -- function metapost.make(name, target, version)
- -- starttiming(mplib)
- -- target = file.replacesuffix(target or name, "mem") -- redundant
- -- local mpx = mplib.new ( table.merged (
- -- metapost.parameters,
- -- {
- -- ini_version = true,
- -- find_file = finder,
- -- job_name = file.removesuffix(target),
- -- }
- -- ) )
- -- if mpx then
- -- starttiming(metapost.exectime)
- -- local result = mpx:execute(format(preamble,version or "unknown",name))
- -- stoptiming(metapost.exectime)
- -- mpx:finish()
- -- end
- -- stoptiming(mplib)
- -- end
- --
- -- function metapost.load(name)
- -- starttiming(mplib)
- -- local mpx = mplib.new ( table.merged (
- -- metapost.parameters,
- -- {
- -- ini_version = false,
- -- mem_name = file.replacesuffix(name,"mem"),
- -- find_file = finder,
- -- -- job_name = "mplib",
- -- }
- -- ) )
- -- local result
- -- if not mpx then
- -- result = { status = 99, error = "out of memory"}
- -- end
- -- stoptiming(mplib)
- -- return mpx, result
- -- end
- --
- -- function metapost.checkformat(mpsinput)
- -- local mpsversion = environment.version or "unset version"
- -- local mpsinput = file.addsuffix(mpsinput or "metafun", "mp")
- -- local mpsformat = file.removesuffix(file.basename(texconfig.formatname or (tex and tex.formatname) or mpsinput))
- -- local mpsbase = file.removesuffix(file.basename(mpsinput))
- -- if mpsbase ~= mpsformat then
- -- mpsformat = mpsformat .. "-" .. mpsbase
- -- end
- -- mpsformat = file.addsuffix(mpsformat, "mem")
- -- local mpsformatfullname = caches.getfirstreadablefile(mpsformat,"formats","metapost") or ""
- -- if mpsformatfullname ~= "" then
- -- report_metapost("loading %a from %a", mpsinput, mpsformatfullname)
- -- local mpx, result = metapost.load(mpsformatfullname)
- -- if mpx then
- -- local result = mpx:execute("show mp_parent_version ;")
- -- if not result.log then
- -- metapost.reporterror(result)
- -- else
- -- local version = match(result.log,">> *(.-)[\n\r]") or "unknown"
- -- version = gsub(version,"[\'\"]","")
- -- if version ~= mpsversion then
- -- report_metapost("version mismatch: %s <> %s", version or "unknown", mpsversion)
- -- else
- -- return mpx
- -- end
- -- end
- -- else
- -- report_metapost("error in loading %a from %a", mpsinput, mpsformatfullname)
- -- metapost.reporterror(result)
- -- end
- -- end
- -- local mpsformatfullname = caches.setfirstwritablefile(mpsformat,"formats")
- -- report_metapost("making %a into %a", mpsinput, mpsformatfullname)
- -- metapost.make(mpsinput,mpsformatfullname,mpsversion) -- somehow return ... fails here
- -- if lfs.isfile(mpsformatfullname) then
- -- report_metapost("loading %a from %a", mpsinput, mpsformatfullname)
- -- return metapost.load(mpsformatfullname)
- -- else
- -- report_metapost("problems with %a from %a", mpsinput, mpsformatfullname)
- -- end
- -- end
-
-else
-
- local preamble = [[
- boolean mplib ; mplib := true ;
- let dump = endinput ;
- input "%s" ;
- ]]
-
- local methods = {
- double = "double",
- scaled = "scaled",
- default = "scaled",
- decimal = false, -- for the moment
- }
-
- function metapost.load(name,method)
- starttiming(mplib)
- method = method and methods[method] or "scaled"
- local mpx = mplib.new {
- ini_version = true,
- find_file = finder,
- math_mode = method,
- }
- report_metapost("initializing number mode %a",method)
- local result
- if not mpx then
- result = { status = 99, error = "out of memory"}
- else
- result = mpx:execute(format(preamble, file.addsuffix(name,"mp"))) -- addsuffix is redundant
- end
- stoptiming(mplib)
- metapost.reporterror(result)
- return mpx, result
- end
-
- function metapost.checkformat(mpsinput,method)
- local mpsversion = environment.version or "unset version"
- local mpsinput = mpsinput or "metafun"
- local foundfile = ""
- if file.suffix(mpsinput) ~= "" then
- foundfile = finder(mpsinput) or ""
- end
- if foundfile == "" then
- foundfile = finder(file.replacesuffix(mpsinput,"mpvi")) or ""
- end
- if foundfile == "" then
- foundfile = finder(file.replacesuffix(mpsinput,"mpiv")) or ""
- end
- if foundfile == "" then
- foundfile = finder(file.replacesuffix(mpsinput,"mp")) or ""
- end
- if foundfile == "" then
- report_metapost("loading %a fails, format not found",mpsinput)
- else
- report_metapost("loading %a as %a using method %a",mpsinput,foundfile,method or "default")
- local mpx, result = metapost.load(foundfile,method)
- if mpx then
- return mpx
- else
- report_metapost("error in loading %a",mpsinput)
- metapost.reporterror(result)
- end
- end
- end
-
-end
-
-function metapost.unload(mpx)
- starttiming(mplib)
- if mpx then
- mpx:finish()
- end
- stoptiming(mplib)
-end
-
-local mpxformats = { }
-
-function metapost.format(instance,name,method)
- if not instance or instance == "" then
- instance = "metafun" -- brrr
- end
- name = name or instance
- local mpx = mpxformats[instance]
- if not mpx then
- report_metapost("initializing instance %a using format %a",instance,name)
- mpx = metapost.checkformat(name,method)
- mpxformats[instance] = mpx
- end
- return mpx
-end
-
-function metapost.instance(instance)
- return mpxformats[instance]
-end
-
-function metapost.reset(mpx)
- if not mpx then
- -- nothing
- elseif type(mpx) == "string" then
- if mpxformats[mpx] then
- mpxformats[mpx]:finish()
- mpxformats[mpx] = nil
- end
- else
- for name, instance in next, mpxformats do
- if instance == mpx then
- mpx:finish()
- mpxformats[name] = nil
- break
- end
- end
- end
-end
-
-local mp_inp, mp_log, mp_tag = { }, { }, 0
-
--- key/values
-
-function metapost.process(mpx, data, trialrun, flusher, multipass, isextrapass, askedfig)
- local converted, result = false, { }
- if type(mpx) == "string" then
- mpx = metapost.format(mpx) -- goody
- end
- if mpx and data then
- starttiming(metapost)
- if trace_graphics then
- if not mp_inp[mpx] then
- mp_tag = mp_tag + 1
- local jobname = tex.jobname
- mp_inp[mpx] = io.open(format("%s-mplib-run-%03i.mp", jobname,mp_tag),"w")
- mp_log[mpx] = io.open(format("%s-mplib-run-%03i.log",jobname,mp_tag),"w")
- end
- local banner = format("%% begin graphic: n=%s, trialrun=%s, multipass=%s, isextrapass=%s\n\n", metapost.n, tostring(trialrun), tostring(multipass), tostring(isextrapass))
- mp_inp[mpx]:write(banner)
- mp_log[mpx]:write(banner)
- end
- if type(data) == "table" then
- -- this hack is needed because the library currently barks on \n\n
-            -- eventually we can test for "" in the next loop
- local n = 0
- local nofsnippets = #data
- for i=1,nofsnippets do
- local d = data[i]
- if d ~= "" then
- n = n + 1
- data[n] = d
- end
- end
- for i=nofsnippets,n+1,-1 do
- data[i] = nil
- end
- -- and this one because mp cannot handle snippets due to grouping issues
- if metapost.collapse then
- if #data > 1 then
- data = concat(data,"\n")
- else
- data = data[1]
- end
- end
- -- end of hacks
- end
- if type(data) == "table" then
- if trace_tracingall then
- mpx:execute("tracingall;")
- end
- -- table.insert(data,2,"")
- for i=1,#data do
- local d = data[i]
- -- d = string.gsub(d,"\r","")
- if d then
- if trace_graphics then
- mp_inp[mpx]:write(format("\n%% begin snippet %s\n",i))
- mp_inp[mpx]:write(d)
- mp_inp[mpx]:write(format("\n%% end snippet %s\n",i))
- end
- starttiming(metapost.exectime)
- result = mpx:execute(d)
- stoptiming(metapost.exectime)
- if trace_graphics and result then
- local str = result.log or result.error
- if str and str ~= "" then
- mp_log[mpx]:write(str)
- end
- end
- if not metapost.reporterror(result) then
- if metapost.showlog then
- local str = result.term ~= "" and result.term or "no terminal output"
- if not emptystring(str) then
- metapost.lastlog = metapost.lastlog .. "\n" .. str
- report_metapost("log: %s",str)
- end
- end
- if result.fig then
- converted = metapost.convert(result, trialrun, flusher, multipass, askedfig)
- end
- end
- else
- report_metapost("error: invalid graphic component %s",i)
- end
- end
- else
- if trace_tracingall then
- data = "tracingall;" .. data
- end
- if trace_graphics then
- mp_inp[mpx]:write(data)
- end
- starttiming(metapost.exectime)
- result = mpx:execute(data)
- stoptiming(metapost.exectime)
- if trace_graphics and result then
- local str = result.log or result.error
- if str and str ~= "" then
- mp_log[mpx]:write(str)
- end
- end
- -- todo: error message
- if not result then
- report_metapost("error: no result object returned")
- elseif result.status > 0 then
- report_metapost("error: %s",(result.term or "no-term") .. "\n" .. (result.error or "no-error"))
- else
- if metapost.showlog then
- metapost.lastlog = metapost.lastlog .. "\n" .. result.term
- report_metapost("info: %s",result.term or "no-term")
- end
- if result.fig then
- converted = metapost.convert(result, trialrun, flusher, multipass, askedfig)
- end
- end
- end
- if trace_graphics then
- local banner = "\n% end graphic\n\n"
- mp_inp[mpx]:write(banner)
- mp_log[mpx]:write(banner)
- end
- stoptiming(metapost)
- end
- return converted, result
-end
-
-function metapost.convert()
- report_metapost('warning: no converter set')
-end
-
--- handy
-
-function metapost.directrun(formatname,filename,outputformat,astable,mpdata)
- local fullname = file.addsuffix(filename,"mp")
- local data = mpdata or io.loaddata(fullname)
- if outputformat ~= "svg" then
- outputformat = "mps"
- end
- if not data then
- report_metapost("unknown file %a",filename)
- else
- local mpx = metapost.checkformat(formatname)
- if not mpx then
- report_metapost("unknown format %a",formatname)
- else
- report_metapost("processing %a",(mpdata and (filename or "data")) or fullname)
- local result = mpx:execute(data)
- if not result then
- report_metapost("error: no result object returned")
- elseif result.status > 0 then
- report_metapost("error: %s",(result.term or "no-term") .. "\n" .. (result.error or "no-error"))
- else
- if metapost.showlog then
- metapost.lastlog = metapost.lastlog .. "\n" .. result.term
- report_metapost("info: %s",result.term or "no-term")
- end
- local figures = result.fig
- if figures then
- local sorted = table.sortedkeys(figures)
- if astable then
- local result = { }
- report_metapost("storing %s figures in table",#sorted)
- for k=1,#sorted do
- local v = sorted[k]
- if outputformat == "mps" then
- result[v] = figures[v]:postscript()
- else
- result[v] = figures[v]:svg() -- (3) for prologues
- end
- end
- return result
- else
- local basename = file.removesuffix(file.basename(filename))
- for k=1,#sorted do
- local v = sorted[k]
- local output
- if outputformat == "mps" then
- output = figures[v]:postscript()
- else
- output = figures[v]:svg() -- (3) for prologues
- end
- local outname = format("%s-%s.%s",basename,v,outputformat)
- report_metapost("saving %s bytes in %a",#output,outname)
- io.savedata(outname,output)
- end
- return #sorted
- end
- end
- end
- end
- end
-end
+if not modules then modules = { } end modules ['mlib-run'] = {
+ version = 1.001,
+ comment = "companion to mlib-ctx.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+--~ cmyk -> done, native
+--~ spot -> done, but needs reworking (simpler)
+--~ multitone ->
+--~ shade -> partly done, todo: cm
+--~ figure -> done
+--~ hyperlink -> low priority, easy
+
+-- new * run
+-- or
+-- new * execute^1 * finish
+
+-- a*[b,c] == b + a * (c-b)
+
+--[[ldx--
+
+The directional helpers and pen analysis are more or less translated from the
+MetaPost C code. It really helps that Taco knows that source so well. Taco and
+I spent quite some time on speeding up the Lua and C code. There is not
+much to gain, especially if one keeps in mind that when integrated in TeX
+only a part of the time is spent in MetaPost. Of course an integrated
+approach is way faster than an external MetaPost run and processing time
+nears zero.
+--ldx]]--
+
+local type, tostring, tonumber = type, tostring, tonumber
+local format, gsub, match, find = string.format, string.gsub, string.match, string.find
+local concat = table.concat
+local emptystring = string.is_empty
+local lpegmatch, P = lpeg.match, lpeg.P
+
+local trace_graphics = false trackers.register("metapost.graphics", function(v) trace_graphics = v end)
+local trace_tracingall = false trackers.register("metapost.tracingall", function(v) trace_tracingall = v end)
+
+local report_metapost = logs.reporter("metapost")
+local texerrormessage = logs.texerrormessage
+
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+
+local mplib = mplib
+metapost = metapost or { }
+local metapost = metapost
+
+local mplibone = tonumber(mplib.version()) <= 1.50
+
+metapost.showlog = false
+metapost.lastlog = ""
+metapost.collapse = true -- currently mplib cannot deal with begingroup/endgroup mismatch in stepwise processing
+metapost.texerrors = false
+metapost.exectime = metapost.exectime or { } -- hack
+
+-- metapost.collapse = false
+
+directives.register("mplib.texerrors", function(v) metapost.texerrors = v end)
+trackers.register ("metapost.showlog", function(v) metapost.showlog = v end)
+
+function metapost.resetlastlog()
+ metapost.lastlog = ""
+end
+
+----- mpbasepath = lpeg.instringchecker(lpeg.append { "/metapost/context/", "/metapost/base/" })
+local mpbasepath = lpeg.instringchecker(P("/metapost/") * (P("context") + P("base")) * P("/"))
+
+-- local function i_finder(askedname,mode,ftype) -- fake message for mpost.map and metafun.mpvi
+-- local foundname = file.is_qualified_path(askedname) and askedname or resolvers.findfile(askedname,ftype)
+-- if not mpbasepath(foundname) then
+-- -- we could use the via file but we don't have a complete io interface yet
+-- local data, found, forced = metapost.checktexts(io.loaddata(foundname) or "")
+-- if found then
+-- local tempname = luatex.registertempfile(foundname,true)
+-- io.savedata(tempname,data)
+-- foundname = tempname
+-- end
+-- end
+-- return foundname
+-- end
+
+-- mplib has no real io interface so we have a different mechanism than
+-- tex (as soon as we have more control, we will use the normal code)
+
+local finders = { }
+mplib.finders = finders
+
+-- for some reason mp sometimes calls this function twice which is inefficient
+-- but we cannot catch this
+
+local function preprocessed(name)
+ if not mpbasepath(name) then
+ -- we could use the via file but we don't have a complete io interface yet
+ local data, found, forced = metapost.checktexts(io.loaddata(name) or "")
+ if found then
+ local temp = luatex.registertempfile(name,true)
+ io.savedata(temp,data)
+ return temp
+ end
+ end
+ return name
+end
+
+mplib.preprocessed = preprocessed -- helper
+
+finders.file = function(specification,name,mode,ftype)
+ return preprocessed(resolvers.findfile(name,ftype))
+end
+
+local function i_finder(name,mode,ftype) -- fake message for mpost.map and metafun.mpvi
+ local specification = url.hashed(name)
+ local finder = finders[specification.scheme] or finders.file
+ return finder(specification,name,mode,ftype)
+end
+
+local function o_finder(name,mode,ftype)
+ -- report_metapost("output file %a, mode %a, ftype %a",name,mode,ftype)
+ return name
+end
+
+local function finder(name, mode, ftype)
+ if mode == "w" then
+ return o_finder(name,mode,ftype)
+ else
+ return i_finder(name,mode,ftype)
+ end
+end
+
+local i_limited = false
+local o_limited = false
+
+directives.register("system.inputmode", function(v)
+ if not i_limited then
+ local i_limiter = io.i_limiter(v)
+ if i_limiter then
+ i_finder = i_limiter.protect(i_finder)
+ i_limited = true
+ end
+ end
+end)
+
+directives.register("system.outputmode", function(v)
+ if not o_limited then
+ local o_limiter = io.o_limiter(v)
+ if o_limiter then
+ o_finder = o_limiter.protect(o_finder)
+ o_limited = true
+ end
+ end
+end)
+
+-- -- --
+
+metapost.finder = finder
+
+function metapost.reporterror(result)
+ if not result then
+ report_metapost("error: no result object returned")
+ elseif result.status > 0 then
+ local t, e, l = result.term, result.error, result.log
+ if t and t ~= "" then
+ (metapost.texerrors and texerrormessage or report_metapost)("terminal: %s",t)
+ end
+ if e == "" or e == "no-error" then
+ e = nil
+ end
+ if e then
+ (metapost.texerrors and texerrormessage or report_metapost)("error: %s",e)
+ end
+ if not t and not e and l then
+ metapost.lastlog = metapost.lastlog .. "\n" .. l
+ report_metapost("log: %s",l)
+ else
+ report_metapost("error: unknown, no error, terminal or log messages")
+ end
+ else
+ return false
+ end
+ return true
+end
+
+if mplibone then
+
+ report_metapost("fatal error: mplib is too old")
+
+ os.exit()
+
+ -- local preamble = [[
+ -- boolean mplib ; mplib := true ;
+ -- string mp_parent_version ; mp_parent_version := "%s" ;
+ -- input "%s" ; dump ;
+ -- ]]
+ --
+ -- metapost.parameters = {
+ -- hash_size = 100000,
+ -- main_memory = 4000000,
+ -- max_in_open = 50,
+ -- param_size = 100000,
+ -- }
+ --
+ -- function metapost.make(name, target, version)
+ -- starttiming(mplib)
+ -- target = file.replacesuffix(target or name, "mem") -- redundant
+ -- local mpx = mplib.new ( table.merged (
+ -- metapost.parameters,
+ -- {
+ -- ini_version = true,
+ -- find_file = finder,
+ -- job_name = file.removesuffix(target),
+ -- }
+ -- ) )
+ -- if mpx then
+ -- starttiming(metapost.exectime)
+ -- local result = mpx:execute(format(preamble,version or "unknown",name))
+ -- stoptiming(metapost.exectime)
+ -- mpx:finish()
+ -- end
+ -- stoptiming(mplib)
+ -- end
+ --
+ -- function metapost.load(name)
+ -- starttiming(mplib)
+ -- local mpx = mplib.new ( table.merged (
+ -- metapost.parameters,
+ -- {
+ -- ini_version = false,
+ -- mem_name = file.replacesuffix(name,"mem"),
+ -- find_file = finder,
+ -- -- job_name = "mplib",
+ -- }
+ -- ) )
+ -- local result
+ -- if not mpx then
+ -- result = { status = 99, error = "out of memory"}
+ -- end
+ -- stoptiming(mplib)
+ -- return mpx, result
+ -- end
+ --
+ -- function metapost.checkformat(mpsinput)
+ -- local mpsversion = environment.version or "unset version"
+ -- local mpsinput = file.addsuffix(mpsinput or "metafun", "mp")
+ -- local mpsformat = file.removesuffix(file.basename(texconfig.formatname or (tex and tex.formatname) or mpsinput))
+ -- local mpsbase = file.removesuffix(file.basename(mpsinput))
+ -- if mpsbase ~= mpsformat then
+ -- mpsformat = mpsformat .. "-" .. mpsbase
+ -- end
+ -- mpsformat = file.addsuffix(mpsformat, "mem")
+ -- local mpsformatfullname = caches.getfirstreadablefile(mpsformat,"formats","metapost") or ""
+ -- if mpsformatfullname ~= "" then
+ -- report_metapost("loading %a from %a", mpsinput, mpsformatfullname)
+ -- local mpx, result = metapost.load(mpsformatfullname)
+ -- if mpx then
+ -- local result = mpx:execute("show mp_parent_version ;")
+ -- if not result.log then
+ -- metapost.reporterror(result)
+ -- else
+ -- local version = match(result.log,">> *(.-)[\n\r]") or "unknown"
+ -- version = gsub(version,"[\'\"]","")
+ -- if version ~= mpsversion then
+ -- report_metapost("version mismatch: %s <> %s", version or "unknown", mpsversion)
+ -- else
+ -- return mpx
+ -- end
+ -- end
+ -- else
+ -- report_metapost("error in loading %a from %a", mpsinput, mpsformatfullname)
+ -- metapost.reporterror(result)
+ -- end
+ -- end
+ -- local mpsformatfullname = caches.setfirstwritablefile(mpsformat,"formats")
+ -- report_metapost("making %a into %a", mpsinput, mpsformatfullname)
+ -- metapost.make(mpsinput,mpsformatfullname,mpsversion) -- somehow return ... fails here
+ -- if lfs.isfile(mpsformatfullname) then
+ -- report_metapost("loading %a from %a", mpsinput, mpsformatfullname)
+ -- return metapost.load(mpsformatfullname)
+ -- else
+ -- report_metapost("problems with %a from %a", mpsinput, mpsformatfullname)
+ -- end
+ -- end
+
+else
+
+ local preamble = [[
+ boolean mplib ; mplib := true ;
+ let dump = endinput ;
+ input "%s" ;
+ ]]
+
+ local methods = {
+ double = "double",
+ scaled = "scaled",
+ default = "scaled",
+ decimal = false, -- for the moment
+ }
+
+ function metapost.load(name,method)
+ starttiming(mplib)
+ method = method and methods[method] or "scaled"
+ local mpx = mplib.new {
+ ini_version = true,
+ find_file = finder,
+ math_mode = method,
+ }
+ report_metapost("initializing number mode %a",method)
+ local result
+ if not mpx then
+ result = { status = 99, error = "out of memory"}
+ else
+ result = mpx:execute(format(preamble, file.addsuffix(name,"mp"))) -- addsuffix is redundant
+ end
+ stoptiming(mplib)
+ metapost.reporterror(result)
+ return mpx, result
+ end
+
+ function metapost.checkformat(mpsinput,method)
+ local mpsversion = environment.version or "unset version"
+ local mpsinput = mpsinput or "metafun"
+ local foundfile = ""
+ if file.suffix(mpsinput) ~= "" then
+ foundfile = finder(mpsinput) or ""
+ end
+ if foundfile == "" then
+ foundfile = finder(file.replacesuffix(mpsinput,"mpvi")) or ""
+ end
+ if foundfile == "" then
+ foundfile = finder(file.replacesuffix(mpsinput,"mpiv")) or ""
+ end
+ if foundfile == "" then
+ foundfile = finder(file.replacesuffix(mpsinput,"mp")) or ""
+ end
+ if foundfile == "" then
+ report_metapost("loading %a fails, format not found",mpsinput)
+ else
+ report_metapost("loading %a as %a using method %a",mpsinput,foundfile,method or "default")
+ local mpx, result = metapost.load(foundfile,method)
+ if mpx then
+ return mpx
+ else
+ report_metapost("error in loading %a",mpsinput)
+ metapost.reporterror(result)
+ end
+ end
+ end
+
+end
+
+function metapost.unload(mpx)
+ starttiming(mplib)
+ if mpx then
+ mpx:finish()
+ end
+ stoptiming(mplib)
+end
+
+local mpxformats = { }
+
+function metapost.format(instance,name,method)
+ if not instance or instance == "" then
+ instance = "metafun" -- brrr
+ end
+ name = name or instance
+ local mpx = mpxformats[instance]
+ if not mpx then
+ report_metapost("initializing instance %a using format %a",instance,name)
+ mpx = metapost.checkformat(name,method)
+ mpxformats[instance] = mpx
+ end
+ return mpx
+end
+
+function metapost.instance(instance)
+ return mpxformats[instance]
+end
+
+function metapost.reset(mpx)
+ if not mpx then
+ -- nothing
+ elseif type(mpx) == "string" then
+ if mpxformats[mpx] then
+ mpxformats[mpx]:finish()
+ mpxformats[mpx] = nil
+ end
+ else
+ for name, instance in next, mpxformats do
+ if instance == mpx then
+ mpx:finish()
+ mpxformats[name] = nil
+ break
+ end
+ end
+ end
+end
+
+local mp_inp, mp_log, mp_tag = { }, { }, 0
+
+-- key/values
+
+function metapost.process(mpx, data, trialrun, flusher, multipass, isextrapass, askedfig)
+ local converted, result = false, { }
+ if type(mpx) == "string" then
+ mpx = metapost.format(mpx) -- goody
+ end
+ if mpx and data then
+ starttiming(metapost)
+ if trace_graphics then
+ if not mp_inp[mpx] then
+ mp_tag = mp_tag + 1
+ local jobname = tex.jobname
+ mp_inp[mpx] = io.open(format("%s-mplib-run-%03i.mp", jobname,mp_tag),"w")
+ mp_log[mpx] = io.open(format("%s-mplib-run-%03i.log",jobname,mp_tag),"w")
+ end
+ local banner = format("%% begin graphic: n=%s, trialrun=%s, multipass=%s, isextrapass=%s\n\n", metapost.n, tostring(trialrun), tostring(multipass), tostring(isextrapass))
+ mp_inp[mpx]:write(banner)
+ mp_log[mpx]:write(banner)
+ end
+ if type(data) == "table" then
+ -- this hack is needed because the library currently barks on \n\n
+ -- eventually we can test for "" in the next loop
+ local n = 0
+ local nofsnippets = #data
+ for i=1,nofsnippets do
+ local d = data[i]
+ if d ~= "" then
+ n = n + 1
+ data[n] = d
+ end
+ end
+ for i=nofsnippets,n+1,-1 do
+ data[i] = nil
+ end
+ -- and this one because mp cannot handle snippets due to grouping issues
+ if metapost.collapse then
+ if #data > 1 then
+ data = concat(data,"\n")
+ else
+ data = data[1]
+ end
+ end
+ -- end of hacks
+ end
+ if type(data) == "table" then
+ if trace_tracingall then
+ mpx:execute("tracingall;")
+ end
+ -- table.insert(data,2,"")
+ for i=1,#data do
+ local d = data[i]
+ -- d = string.gsub(d,"\r","")
+ if d then
+ if trace_graphics then
+ mp_inp[mpx]:write(format("\n%% begin snippet %s\n",i))
+ mp_inp[mpx]:write(d)
+ mp_inp[mpx]:write(format("\n%% end snippet %s\n",i))
+ end
+ starttiming(metapost.exectime)
+ result = mpx:execute(d)
+ stoptiming(metapost.exectime)
+ if trace_graphics and result then
+ local str = result.log or result.error
+ if str and str ~= "" then
+ mp_log[mpx]:write(str)
+ end
+ end
+ if not metapost.reporterror(result) then
+ if metapost.showlog then
+ local str = result.term ~= "" and result.term or "no terminal output"
+ if not emptystring(str) then
+ metapost.lastlog = metapost.lastlog .. "\n" .. str
+ report_metapost("log: %s",str)
+ end
+ end
+ if result.fig then
+ converted = metapost.convert(result, trialrun, flusher, multipass, askedfig)
+ end
+ end
+ else
+ report_metapost("error: invalid graphic component %s",i)
+ end
+ end
+ else
+ if trace_tracingall then
+ data = "tracingall;" .. data
+ end
+ if trace_graphics then
+ mp_inp[mpx]:write(data)
+ end
+ starttiming(metapost.exectime)
+ result = mpx:execute(data)
+ stoptiming(metapost.exectime)
+ if trace_graphics and result then
+ local str = result.log or result.error
+ if str and str ~= "" then
+ mp_log[mpx]:write(str)
+ end
+ end
+ -- todo: error message
+ if not result then
+ report_metapost("error: no result object returned")
+ elseif result.status > 0 then
+ report_metapost("error: %s",(result.term or "no-term") .. "\n" .. (result.error or "no-error"))
+ else
+ if metapost.showlog then
+ metapost.lastlog = metapost.lastlog .. "\n" .. result.term
+ report_metapost("info: %s",result.term or "no-term")
+ end
+ if result.fig then
+ converted = metapost.convert(result, trialrun, flusher, multipass, askedfig)
+ end
+ end
+ end
+ if trace_graphics then
+ local banner = "\n% end graphic\n\n"
+ mp_inp[mpx]:write(banner)
+ mp_log[mpx]:write(banner)
+ end
+ stoptiming(metapost)
+ end
+ return converted, result
+end
+
+function metapost.convert()
+ report_metapost("warning: no converter set")
+end
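+
+-- A minimal usage sketch (not part of this patch; the instance name and the
+-- snippet are assumptions, and a real converter/flusher is expected to be
+-- installed elsewhere, otherwise only the warning above is issued):
+--
+-- local mpx = metapost.format("metafun")
+-- local converted, result = metapost.process(mpx,"beginfig(1); draw fullcircle scaled 1cm; endfig;")
+-- metapost.reset(mpx) -- finish the instance and forget about it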
+
+-- handy
+
+function metapost.directrun(formatname,filename,outputformat,astable,mpdata)
+ local fullname = file.addsuffix(filename,"mp")
+ local data = mpdata or io.loaddata(fullname)
+ if outputformat ~= "svg" then
+ outputformat = "mps"
+ end
+ if not data then
+ report_metapost("unknown file %a",filename)
+ else
+ local mpx = metapost.checkformat(formatname)
+ if not mpx then
+ report_metapost("unknown format %a",formatname)
+ else
+ report_metapost("processing %a",(mpdata and (filename or "data")) or fullname)
+ local result = mpx:execute(data)
+ if not result then
+ report_metapost("error: no result object returned")
+ elseif result.status > 0 then
+ report_metapost("error: %s",(result.term or "no-term") .. "\n" .. (result.error or "no-error"))
+ else
+ if metapost.showlog then
+ metapost.lastlog = metapost.lastlog .. "\n" .. result.term
+ report_metapost("info: %s",result.term or "no-term")
+ end
+ local figures = result.fig
+ if figures then
+ local sorted = table.sortedkeys(figures)
+ if astable then
+ local result = { }
+ report_metapost("storing %s figures in table",#sorted)
+ for k=1,#sorted do
+ local v = sorted[k]
+ if outputformat == "mps" then
+ result[v] = figures[v]:postscript()
+ else
+ result[v] = figures[v]:svg() -- (3) for prologues
+ end
+ end
+ return result
+ else
+ local basename = file.removesuffix(file.basename(filename))
+ for k=1,#sorted do
+ local v = sorted[k]
+ local output
+ if outputformat == "mps" then
+ output = figures[v]:postscript()
+ else
+ output = figures[v]:svg() -- (3) for prologues
+ end
+ local outname = format("%s-%s.%s",basename,v,outputformat)
+ report_metapost("saving %s bytes in %a",#output,outname)
+ io.savedata(outname,output)
+ end
+ return #sorted
+ end
+ end
+ end
+ end
+ end
+end
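+
+-- A hedged example of the standalone helper above (the file name is
+-- hypothetical): process "demo.mp" with the metafun format and either save
+-- each figure as demo-<n>.mps or collect the PostScript code in a table.
+--
+-- local nofsaved = metapost.directrun("metafun","demo","mps")
+-- local figures = metapost.directrun("metafun","demo","mps",true) -- figures[n] holds figure n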
diff --git a/tex/context/base/mult-aux.lua b/tex/context/base/mult-aux.lua
index 3c4cbcc0f..05dd112a8 100644
--- a/tex/context/base/mult-aux.lua
+++ b/tex/context/base/mult-aux.lua
@@ -1,154 +1,154 @@
-if not modules then modules = { } end modules ['mult-aux'] = {
- version = 1.001,
- comment = "companion to mult-aux.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local find = string.find
-
-interfaces.namespaces = interfaces.namespaces or { }
-local namespaces = interfaces.namespaces
-local variables = interfaces.variables
-
-local trace_namespaces = false trackers.register("interfaces.namespaces", function(v) trace_namespaces = v end)
-
-local report_namespaces = logs.reporter("interface","namespaces")
-
-local v_yes, v_list = variables.yes, variables.list
-
-local prefix = "????"
-local meaning = "@@@@"
-
-local data = { }
-
-function namespaces.define(namespace,settings)
- if trace_namespaces then
- report_namespaces("installing namespace %a with settings %a",namespace,settings)
- end
- if data[namespace] then
- report_namespaces("namespace %a is already taken",namespace)
- end
- if #namespace < 2 then
- report_namespaces("namespace %a should have more than 1 character",namespace)
- end
- local ns = { }
- data[namespace] = ns
- utilities.parsers.settings_to_hash(settings,ns)
- local name = ns.name
- if not name or name == "" then
- report_namespaces("provide a (command) name in namespace %a",namespace)
- end
- local self = "\\" .. prefix .. namespace
- context.unprotect()
- -- context.installnamespace(namespace)
- context("\\def\\%s%s{%s%s}",prefix,namespace,meaning,namespace) -- or context.setvalue
- if trace_namespaces then
- report_namespaces("using namespace %a for %a",namespace,name)
- end
- local parent = ns.parent or ""
- if parent ~= "" then
- if trace_namespaces then
- report_namespaces("namespace %a for %a uses parent %a",namespace,name,parent)
- end
- if not find(parent,"\\") then
- parent = "\\" .. prefix .. parent
- -- todo: check if defined
- end
- end
- context.installparameterhandler(self,name)
- if trace_namespaces then
- report_namespaces("installing parameter handler for %a",name)
- end
- context.installparameterhashhandler(self,name)
- if trace_namespaces then
- report_namespaces("installing parameterhash handler for %a",name)
- end
- local style = ns.style
- if style == v_yes then
- context.installstyleandcolorhandler(self,name)
- if trace_namespaces then
- report_namespaces("installing attribute handler for %a",name)
- end
- end
- local command = ns.command
- if command == v_yes then
- context.installdefinehandler(self,name,parent)
- if trace_namespaces then
- report_namespaces("installing definition command for %a (single)",name)
- end
- elseif command == v_list then
- context.installdefinehandler(self,name,parent)
- if trace_namespaces then
- report_namespaces("installing definition command for %a (multiple)",name)
- end
- end
- local setup = ns.setup
- if setup == v_yes then
- context.installsetuphandler(self,name)
- if trace_namespaces then
- report_namespaces("installing setup command for %a (%s)",name,"single")
- end
- elseif setup == v_list then
- context.installsetuphandler(self,name)
- if trace_namespaces then
- report_namespaces("installing setup command for %a (%s)",name,"multiple")
- end
- end
- local set = ns.set
- if set == v_yes then
- context.installparametersethandler(self,name)
- if trace_namespaces then
- report_namespaces("installing set/let/reset command for %a (%s)",name,"single")
- end
- elseif set == v_list then
- context.installparametersethandler(self,name)
- if trace_namespaces then
- report_namespaces("installing set/let/reset command for %a (%s)",name,"multiple")
- end
- end
- local frame = ns.frame
- if frame == v_yes then
- context.installinheritedframed(name)
- if trace_namespaces then
- report_namespaces("installing framed command for %a",name)
- end
- end
- context.protect()
-end
-
-function utilities.formatters.list(data,key,keys)
- if not keys then
- keys = { }
- for _, v in next, data do
- for k, _ in next, v do
- keys[k] = true
- end
- end
- keys = table.sortedkeys(keys)
- end
- context.starttabulate { "|"..string.rep("l|",#keys+1) }
- context.NC()
- context(key)
- for i=1,#keys do
- context.NC()
- context(keys[i])
- end context.NR()
- context.HL()
- for k, v in table.sortedhash(data) do
- context.NC()
- context(k)
- for i=1,#keys do
- context.NC()
- context(v[keys[i]])
- end context.NR()
- end
- context.stoptabulate()
-end
-
-function namespaces.list()
- -- utilities.formatters.list(data,"namespace")
- local keys = { "type", "name", "comment", "version", "parent", "definition", "setup", "style" }
- utilities.formatters.list(data,"namespace",keys)
-end
+if not modules then modules = { } end modules ['mult-aux'] = {
+ version = 1.001,
+ comment = "companion to mult-aux.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local find = string.find
+
+interfaces.namespaces = interfaces.namespaces or { }
+local namespaces = interfaces.namespaces
+local variables = interfaces.variables
+
+local trace_namespaces = false trackers.register("interfaces.namespaces", function(v) trace_namespaces = v end)
+
+local report_namespaces = logs.reporter("interface","namespaces")
+
+local v_yes, v_list = variables.yes, variables.list
+
+local prefix = "????"
+local meaning = "@@@@"
+
+local data = { }
+
+function namespaces.define(namespace,settings)
+ if trace_namespaces then
+ report_namespaces("installing namespace %a with settings %a",namespace,settings)
+ end
+ if data[namespace] then
+ report_namespaces("namespace %a is already taken",namespace)
+ end
+ if #namespace < 2 then
+ report_namespaces("namespace %a should have more than 1 character",namespace)
+ end
+ local ns = { }
+ data[namespace] = ns
+ utilities.parsers.settings_to_hash(settings,ns)
+ local name = ns.name
+ if not name or name == "" then
+ report_namespaces("provide a (command) name in namespace %a",namespace)
+ end
+ local self = "\\" .. prefix .. namespace
+ context.unprotect()
+ -- context.installnamespace(namespace)
+ context("\\def\\%s%s{%s%s}",prefix,namespace,meaning,namespace) -- or context.setvalue
+ if trace_namespaces then
+ report_namespaces("using namespace %a for %a",namespace,name)
+ end
+ local parent = ns.parent or ""
+ if parent ~= "" then
+ if trace_namespaces then
+ report_namespaces("namespace %a for %a uses parent %a",namespace,name,parent)
+ end
+ if not find(parent,"\\") then
+ parent = "\\" .. prefix .. parent
+ -- todo: check if defined
+ end
+ end
+ context.installparameterhandler(self,name)
+ if trace_namespaces then
+ report_namespaces("installing parameter handler for %a",name)
+ end
+ context.installparameterhashhandler(self,name)
+ if trace_namespaces then
+ report_namespaces("installing parameterhash handler for %a",name)
+ end
+ local style = ns.style
+ if style == v_yes then
+ context.installstyleandcolorhandler(self,name)
+ if trace_namespaces then
+ report_namespaces("installing attribute handler for %a",name)
+ end
+ end
+ local command = ns.command
+ if command == v_yes then
+ context.installdefinehandler(self,name,parent)
+ if trace_namespaces then
+ report_namespaces("installing definition command for %a (single)",name)
+ end
+ elseif command == v_list then
+ context.installdefinehandler(self,name,parent)
+ if trace_namespaces then
+ report_namespaces("installing definition command for %a (multiple)",name)
+ end
+ end
+ local setup = ns.setup
+ if setup == v_yes then
+ context.installsetuphandler(self,name)
+ if trace_namespaces then
+ report_namespaces("installing setup command for %a (%s)",name,"single")
+ end
+ elseif setup == v_list then
+ context.installsetuphandler(self,name)
+ if trace_namespaces then
+ report_namespaces("installing setup command for %a (%s)",name,"multiple")
+ end
+ end
+ local set = ns.set
+ if set == v_yes then
+ context.installparametersethandler(self,name)
+ if trace_namespaces then
+ report_namespaces("installing set/let/reset command for %a (%s)",name,"single")
+ end
+ elseif set == v_list then
+ context.installparametersethandler(self,name)
+ if trace_namespaces then
+ report_namespaces("installing set/let/reset command for %a (%s)",name,"multiple")
+ end
+ end
+ local frame = ns.frame
+ if frame == v_yes then
+ context.installinheritedframed(name)
+ if trace_namespaces then
+ report_namespaces("installing framed command for %a",name)
+ end
+ end
+ context.protect()
+end
+
+function utilities.formatters.list(data,key,keys)
+ if not keys then
+ keys = { }
+ for _, v in next, data do
+ for k, _ in next, v do
+ keys[k] = true
+ end
+ end
+ keys = table.sortedkeys(keys)
+ end
+ context.starttabulate { "|"..string.rep("l|",#keys+1) }
+ context.NC()
+ context(key)
+ for i=1,#keys do
+ context.NC()
+ context(keys[i])
+ end context.NR()
+ context.HL()
+ for k, v in table.sortedhash(data) do
+ context.NC()
+ context(k)
+ for i=1,#keys do
+ context.NC()
+ context(v[keys[i]])
+ end context.NR()
+ end
+ context.stoptabulate()
+end
+
+function namespaces.list()
+ -- utilities.formatters.list(data,"namespace")
+ local keys = { "type", "name", "comment", "version", "parent", "definition", "setup", "style" }
+ utilities.formatters.list(data,"namespace",keys)
+end
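+
+-- A minimal usage sketch (namespace and settings are made up); this only makes
+-- sense inside a ConTeXt run, as the context.* installers above are called:
+--
+-- namespaces.define("test","name=test,command=yes,setup=list,style=yes")
+-- namespaces.list() -- tabulates what has been defined so far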
diff --git a/tex/context/base/mult-chk.lua b/tex/context/base/mult-chk.lua
index 2a2dfcd4b..06e7aa8e6 100644
--- a/tex/context/base/mult-chk.lua
+++ b/tex/context/base/mult-chk.lua
@@ -1,76 +1,76 @@
-if not modules then modules = { } end modules ['mult-chk'] = {
- version = 1.001,
- comment = "companion to mult-chk.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format = string.format
-local lpegmatch = lpeg.match
-local type = type
-
-local make_settings_to_hash_pattern = utilities.parsers.make_settings_to_hash_pattern
-local settings_to_set = utilities.parsers.settings_to_set
-local allocate = utilities.storage.allocate
-
-local report_interface = logs.reporter("interface","checking")
-
-interfaces = interfaces or { }
-
-interfaces.syntax = allocate {
- test = { keys = table.tohash { "a","b","c","d","e","f","g" } }
-}
-
-function interfaces.invalidkey(category,key)
- report_interface("invalid key %a for %a in line %a",key,category,tex.inputlineno)
-end
-
-function interfaces.setvalidkeys(category,list)
- local s = interfaces.syntax[category]
- if not s then
- interfaces.syntax[category] = {
- keys = settings_to_set(list)
- }
- else
- s.keys = settings_to_set(list)
- end
-end
-
-function interfaces.addvalidkeys(category,list)
- local s = interfaces.syntax[category]
- if not s then
- interfaces.syntax[category] = {
- keys = settings_to_set(list)
- }
- else
- settings_to_set(list,s.keys)
- end
-end
-
--- weird code, looks incomplete ... probably an experiment
-
-local prefix, category, keys
-
-local setsomevalue = context.setsomevalue
-local invalidkey = interfaces.invalidkey
-
-local function set(key,value)
- if keys and not keys[key] then
- invalidkey(category,key)
- else
- setsomevalue(prefix,key,value)
- end
-end
-
-local pattern = make_settings_to_hash_pattern(set,"tolerant")
-
-function interfaces.getcheckedparameters(k,p,s)
- if s and s ~= "" then
- prefix, category = p, k
- keys = k and k ~= "" and interfaces.syntax[k].keys
- lpegmatch(pattern,s)
- end
-end
-
--- _igcp_ = interfaces.getcheckedparameters
+if not modules then modules = { } end modules ['mult-chk'] = {
+ version = 1.001,
+ comment = "companion to mult-chk.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format = string.format
+local lpegmatch = lpeg.match
+local type = type
+
+local make_settings_to_hash_pattern = utilities.parsers.make_settings_to_hash_pattern
+local settings_to_set = utilities.parsers.settings_to_set
+local allocate = utilities.storage.allocate
+
+local report_interface = logs.reporter("interface","checking")
+
+interfaces = interfaces or { }
+
+interfaces.syntax = allocate {
+ test = { keys = table.tohash { "a","b","c","d","e","f","g" } }
+}
+
+function interfaces.invalidkey(category,key)
+ report_interface("invalid key %a for %a in line %a",key,category,tex.inputlineno)
+end
+
+function interfaces.setvalidkeys(category,list)
+ local s = interfaces.syntax[category]
+ if not s then
+ interfaces.syntax[category] = {
+ keys = settings_to_set(list)
+ }
+ else
+ s.keys = settings_to_set(list)
+ end
+end
+
+function interfaces.addvalidkeys(category,list)
+ local s = interfaces.syntax[category]
+ if not s then
+ interfaces.syntax[category] = {
+ keys = settings_to_set(list)
+ }
+ else
+ settings_to_set(list,s.keys)
+ end
+end
+
+-- weird code, looks incomplete ... probably an experiment
+
+local prefix, category, keys
+
+local setsomevalue = context.setsomevalue
+local invalidkey = interfaces.invalidkey
+
+local function set(key,value)
+ if keys and not keys[key] then
+ invalidkey(category,key)
+ else
+ setsomevalue(prefix,key,value)
+ end
+end
+
+local pattern = make_settings_to_hash_pattern(set,"tolerant")
+
+function interfaces.getcheckedparameters(k,p,s)
+ if s and s ~= "" then
+ prefix, category = p, k
+ keys = k and k ~= "" and interfaces.syntax[k].keys
+ lpegmatch(pattern,s)
+ end
+end
+
+-- _igcp_ = interfaces.getcheckedparameters
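+
+-- A small usage sketch (category, prefix and keys are hypothetical): only keys
+-- registered for the category are assigned, anything else triggers an
+-- "invalid key" report.
+--
+-- interfaces.setvalidkeys("whatever","width,height,color")
+-- interfaces.getcheckedparameters("whatever","@@wh","width=3cm,depth=1cm")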
diff --git a/tex/context/base/mult-fun.lua b/tex/context/base/mult-fun.lua
index a661c53bb..e263c3559 100644
--- a/tex/context/base/mult-fun.lua
+++ b/tex/context/base/mult-fun.lua
@@ -1,101 +1,101 @@
-return {
- internals = {
- --
- "nocolormodel", "greycolormodel", "graycolormodel", "rgbcolormodel", "cmykcolormodel",
- "shadefactor",
- "textextoffset",
- "normaltransparent", "multiplytransparent", "screentransparent", "overlaytransparent", "softlighttransparent",
- "hardlighttransparent", "colordodgetransparent", "colorburntransparent", "darkentransparent", "lightentransparent",
- "differencetransparent", "exclusiontransparent", "huetransparent", "saturationtransparent", "colortransparent", "luminositytransparent",
--- "originlength", "tickstep ", "ticklength",
--- "autoarrows", "ahfactor",
--- "angleoffset", anglelength", anglemethod",
- "metapostversion",
- "maxdimensions",
- },
- commands = {
- --
- "sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian",
- "tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos",
- "invsin", "invcos", "acosh", "asinh", "sinh", "cosh",
- "paired", "tripled",
- "unitcircle", "fulldiamond", "unitdiamond", "fullsquare",
- -- "halfcircle", "quartercircle",
- "llcircle", "lrcircle", "urcircle", "ulcircle",
- "tcircle", "bcircle", "lcircle", "rcircle",
- "lltriangle", "lrtriangle", "urtriangle", "ultriangle",
- "smoothed", "cornered", "superellipsed", "randomized", "squeezed", "enlonged", "shortened",
- "punked", "curved", "unspiked", "simplified", "blownup", "stretched",
- "enlarged", "leftenlarged", "topenlarged", "rightenlarged", "bottomenlarged",
- "crossed", "laddered", "randomshifted", "interpolated", "paralleled", "cutends", "peepholed",
- "llenlarged", "lrenlarged", "urenlarged", "ulenlarged",
- "llmoved", "lrmoved", "urmoved", "ulmoved",
- "rightarrow", "leftarrow", "centerarrow",
- "boundingbox", "innerboundingbox", "outerboundingbox", "pushboundingbox", "popboundingbox",
- "bottomboundary", "leftboundary", "topboundary", "rightboundary",
- "xsized", "ysized", "xysized", "sized", "xyscaled",
- "intersection_point", "intersection_found", "penpoint",
- "bbwidth", "bbheight",
- "withshade", "withlinearshading", "withcircularshading", "withfromshadecolor", "withtoshadecolor", "withshading", "shadedinto",
- "withcircularshade", "withlinearshade",
- "cmyk", "spotcolor", "multitonecolor", "namedcolor",
- "drawfill", "undrawfill",
- "inverted", "uncolored", "softened", "grayed", "greyed",
- "onlayer",
- "along",
- "graphictext", "loadfigure", "externalfigure", "withmask", "figure", "register", "bitmapimage",
- "colordecimals", "ddecimal", "dddecimal", "ddddecimal",
- "textext", "thetextext", "rawtextext", "textextoffset", "verbatim", "thelabel", "label", "autoalign",
- "transparent", "withtransparency",
- "property", "properties", "withproperties",
- "asgroup",
- "infont", -- redefined usign textext
- -- "property", "withproperties", "properties", -- not yet
- "set_linear_vector", "linear_shade", "define_linear_shade", "define_circular_linear_shade", "define_sampled_linear_shade",
- "set_circular_vector", "circular_shade", "define_circular_shade", "define_circular_linear_shade", "define_sampled_circular_shade",
- "space", "CRLF",
- "grayscale", "greyscale", "withgray", "withgrey",
- "colorpart",
- "readfile",
- "clearxy", "unitvector", "center", -- redefined
- "epsed", "anchored",
- "originpath", "infinite",
- "break",
- "xstretched", "ystretched", "snapped",
- --
- "pathconnectors", "function", "constructedpath", "constructedpairs",
- "punkedfunction", "curvedfunction", "tightfunction",
- "punkedpath", "curvedpath", "tightpath",
- "punkedpairs", "curvedpairs", "tightpairs",
- --
- "evenly", "oddly",
- --
- "condition",
- --
- "pushcurrentpicture", "popcurrentpicture",
- --
- "arrowpath",
--- "colorlike", "dowithpath", "rangepath", "straightpath", "addbackground",
--- "cleanstring", "asciistring", "setunstringed", "getunstringed", "unstringed",
--- "showgrid",
--- "phantom",
--- "xshifted", "yshifted",
--- "drawarrowpath", "midarrowhead", "arrowheadonpath",
--- "drawxticks", "drawyticks", "drawticks",
--- "pointarrow",
--- "thefreelabel", "freelabel", "freedotlabel",
--- "anglebetween", "colorcircle",
--- "remapcolors", "normalcolors", "resetcolormap", "remapcolor", "remappedcolor",
--- "recolor", "refill", "redraw", "retext", "untext", "restroke", "reprocess", "repathed",
- "tensecircle", "roundedsquare",
- "colortype", "whitecolor", "blackcolor",
- --
--- "swappointlabels",
- "normalfill", "normaldraw", "visualizepaths", "naturalizepaths",
- "drawboundary", "drawwholepath", "visualizeddraw", "visualizedfill", "draworigin", "drawboundingbox",
- "drawpath", "drawpoint", "drawpoints", "drawcontrolpoints", "drawcontrollines", "drawpointlabels",
- "drawlineoptions", "drawpointoptions", "drawcontroloptions", "drawlabeloptions", "draworiginoptions", "drawboundoptions", "drawpathoptions", "resetdrawoptions",
- --
- "decorated", "redecorated", "undecorated",
- },
-}
+return {
+ internals = {
+ --
+ "nocolormodel", "greycolormodel", "graycolormodel", "rgbcolormodel", "cmykcolormodel",
+ "shadefactor",
+ "textextoffset",
+ "normaltransparent", "multiplytransparent", "screentransparent", "overlaytransparent", "softlighttransparent",
+ "hardlighttransparent", "colordodgetransparent", "colorburntransparent", "darkentransparent", "lightentransparent",
+ "differencetransparent", "exclusiontransparent", "huetransparent", "saturationtransparent", "colortransparent", "luminositytransparent",
+-- "originlength", "tickstep ", "ticklength",
+-- "autoarrows", "ahfactor",
+-- "angleoffset", anglelength", anglemethod",
+ "metapostversion",
+ "maxdimensions",
+ },
+ commands = {
+ --
+ "sqr", "log", "ln", "exp", "inv", "pow", "pi", "radian",
+ "tand", "cotd", "sin", "cos", "tan", "cot", "atan", "asin", "acos",
+ "invsin", "invcos", "acosh", "asinh", "sinh", "cosh",
+ "paired", "tripled",
+ "unitcircle", "fulldiamond", "unitdiamond", "fullsquare",
+ -- "halfcircle", "quartercircle",
+ "llcircle", "lrcircle", "urcircle", "ulcircle",
+ "tcircle", "bcircle", "lcircle", "rcircle",
+ "lltriangle", "lrtriangle", "urtriangle", "ultriangle",
+ "smoothed", "cornered", "superellipsed", "randomized", "squeezed", "enlonged", "shortened",
+ "punked", "curved", "unspiked", "simplified", "blownup", "stretched",
+ "enlarged", "leftenlarged", "topenlarged", "rightenlarged", "bottomenlarged",
+ "crossed", "laddered", "randomshifted", "interpolated", "paralleled", "cutends", "peepholed",
+ "llenlarged", "lrenlarged", "urenlarged", "ulenlarged",
+ "llmoved", "lrmoved", "urmoved", "ulmoved",
+ "rightarrow", "leftarrow", "centerarrow",
+ "boundingbox", "innerboundingbox", "outerboundingbox", "pushboundingbox", "popboundingbox",
+ "bottomboundary", "leftboundary", "topboundary", "rightboundary",
+ "xsized", "ysized", "xysized", "sized", "xyscaled",
+ "intersection_point", "intersection_found", "penpoint",
+ "bbwidth", "bbheight",
+ "withshade", "withlinearshading", "withcircularshading", "withfromshadecolor", "withtoshadecolor", "withshading", "shadedinto",
+ "withcircularshade", "withlinearshade",
+ "cmyk", "spotcolor", "multitonecolor", "namedcolor",
+ "drawfill", "undrawfill",
+ "inverted", "uncolored", "softened", "grayed", "greyed",
+ "onlayer",
+ "along",
+ "graphictext", "loadfigure", "externalfigure", "withmask", "figure", "register", "bitmapimage",
+ "colordecimals", "ddecimal", "dddecimal", "ddddecimal",
+ "textext", "thetextext", "rawtextext", "textextoffset", "verbatim", "thelabel", "label", "autoalign",
+ "transparent", "withtransparency",
+ "property", "properties", "withproperties",
+ "asgroup",
+ "infont", -- redefined usign textext
+ -- "property", "withproperties", "properties", -- not yet
+ "set_linear_vector", "linear_shade", "define_linear_shade", "define_circular_linear_shade", "define_sampled_linear_shade",
+ "set_circular_vector", "circular_shade", "define_circular_shade", "define_circular_linear_shade", "define_sampled_circular_shade",
+ "space", "CRLF",
+ "grayscale", "greyscale", "withgray", "withgrey",
+ "colorpart",
+ "readfile",
+ "clearxy", "unitvector", "center", -- redefined
+ "epsed", "anchored",
+ "originpath", "infinite",
+ "break",
+ "xstretched", "ystretched", "snapped",
+ --
+ "pathconnectors", "function", "constructedpath", "constructedpairs",
+ "punkedfunction", "curvedfunction", "tightfunction",
+ "punkedpath", "curvedpath", "tightpath",
+ "punkedpairs", "curvedpairs", "tightpairs",
+ --
+ "evenly", "oddly",
+ --
+ "condition",
+ --
+ "pushcurrentpicture", "popcurrentpicture",
+ --
+ "arrowpath",
+-- "colorlike", "dowithpath", "rangepath", "straightpath", "addbackground",
+-- "cleanstring", "asciistring", "setunstringed", "getunstringed", "unstringed",
+-- "showgrid",
+-- "phantom",
+-- "xshifted", "yshifted",
+-- "drawarrowpath", "midarrowhead", "arrowheadonpath",
+-- "drawxticks", "drawyticks", "drawticks",
+-- "pointarrow",
+-- "thefreelabel", "freelabel", "freedotlabel",
+-- "anglebetween", "colorcircle",
+-- "remapcolors", "normalcolors", "resetcolormap", "remapcolor", "remappedcolor",
+-- "recolor", "refill", "redraw", "retext", "untext", "restroke", "reprocess", "repathed",
+ "tensecircle", "roundedsquare",
+ "colortype", "whitecolor", "blackcolor",
+ --
+-- "swappointlabels",
+ "normalfill", "normaldraw", "visualizepaths", "naturalizepaths",
+ "drawboundary", "drawwholepath", "visualizeddraw", "visualizedfill", "draworigin", "drawboundingbox",
+ "drawpath", "drawpoint", "drawpoints", "drawcontrolpoints", "drawcontrollines", "drawpointlabels",
+ "drawlineoptions", "drawpointoptions", "drawcontroloptions", "drawlabeloptions", "draworiginoptions", "drawboundoptions", "drawpathoptions", "resetdrawoptions",
+ --
+ "decorated", "redecorated", "undecorated",
+ },
+}
diff --git a/tex/context/base/mult-ini.lua b/tex/context/base/mult-ini.lua
index 3b18738de..491557446 100644
--- a/tex/context/base/mult-ini.lua
+++ b/tex/context/base/mult-ini.lua
@@ -1,333 +1,333 @@
-if not modules then modules = { } end modules ['mult-ini'] = {
- version = 1.001,
- comment = "companion to mult-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format, gmatch, match = string.format, string.gmatch, string.match
-local lpegmatch = lpeg.match
-local serialize = table.serialize
-
-local allocate = utilities.storage.allocate
-local mark = utilities.storage.mark
-local prtcatcodes = catcodes.numbers.prtcatcodes
-local contextsprint = context.sprint
-local setmetatableindex = table.setmetatableindex
-local formatters = string.formatters
-
-local report_interface = logs.reporter("interface","initialization")
-
-interfaces = interfaces or { }
-interfaces.constants = mark(interfaces.constants or { })
-interfaces.variables = mark(interfaces.variables or { })
-interfaces.elements = mark(interfaces.elements or { })
-interfaces.formats = mark(interfaces.formats or { })
-interfaces.translations = mark(interfaces.translations or { })
-interfaces.corenamespaces = mark(interfaces.corenamespaces or { })
-
-local registerstorage = storage.register
-local sharedstorage = storage.shared
-
-local constants = interfaces.constants
-local variables = interfaces.variables
-local elements = interfaces.elements
-local formats = interfaces.formats
-local translations = interfaces.translations
-local corenamespaces = interfaces.corenamespaces
-local reporters = { } -- just an optimization
-
-registerstorage("interfaces/constants", constants, "interfaces.constants")
-registerstorage("interfaces/variables", variables, "interfaces.variables")
-registerstorage("interfaces/elements", elements, "interfaces.elements")
-registerstorage("interfaces/formats", formats, "interfaces.formats")
-registerstorage("interfaces/translations", translations, "interfaces.translations")
-registerstorage("interfaces/corenamespaces", corenamespaces, "interfaces.corenamespaces")
-
-interfaces.interfaces = {
- "cs", "de", "en", "fr", "it", "nl", "ro", "pe",
-}
-
-sharedstorage.currentinterface = sharedstorage.currentinterface or "en"
-sharedstorage.currentresponse = sharedstorage.currentresponse or "en"
-
-local currentinterface = sharedstorage.currentinterface
-local currentresponse = sharedstorage.currentresponse
-
-local complete = allocate()
-interfaces.complete = complete
-
-local function resolve(t,k) -- one access needed to get loaded (not stored!)
- report_interface("loading interface definitions from 'mult-def.lua'")
- complete = dofile(resolvers.findfile("mult-def.lua"))
- report_interface("loading interface messages from 'mult-mes.lua'")
- complete.messages = dofile(resolvers.findfile("mult-mes.lua"))
- interfaces.complete = complete
- return rawget(complete,k)
-end
-
-setmetatableindex(complete, resolve)
-
-local function valueiskey(t,k) -- will be helper
- t[k] = k
- return k
-end
-
-setmetatableindex(variables, valueiskey)
-setmetatableindex(constants, valueiskey)
-setmetatableindex(elements, valueiskey)
-setmetatableindex(formats, valueiskey)
-setmetatableindex(translations, valueiskey)
-
-function interfaces.registernamespace(n,namespace)
- corenamespaces[n] = namespace
-end
-
-local function resolve(t,k)
- local v = logs.reporter(k)
- t[k] = v
- return v
-end
-
-setmetatableindex(reporters,resolve)
-
-for category, _ in next, translations do
- -- We pre-create reporters for already defined messages
- -- because otherwise listing is incomplete and we want
- -- to use that for checking so delaying makes not much
- -- sense there.
- local r = reporters[category]
-end
-
--- adding messages
-
-local function add(target,tag,values)
- local t = target[tag]
- if not f then
- target[tag] = values
- else
- for k, v in next, values do
- if f[k] then
- -- error
- else
- f[k] = v
- end
- end
- end
-end
-
-function interfaces.settranslation(tag,values)
- add(translations,tag,values)
-end
-
-function interfaces.setformat(tag,values)
- add(formats,tag,values)
-end
-
--- the old method:
-
-local replacer = lpeg.replacer { { "--", "%%a" } }
-
-local function fulltag(category,tag)
- return formatters["%s:%s"](category,lpegmatch(replacer,tag))
-end
-
-function interfaces.setmessages(category,str)
- for tag, message in gmatch(str,"(%S+) *: *(.-) *[\n\r]") do
- if tag == "title" then
- translations[tag] = translations[tag] or tag
- else
- formats[fulltag(category,tag)] = lpegmatch(replacer,message)
- end
- end
-end
-
-function interfaces.setmessage(category,tag,message)
- formats[fulltag(category,tag)] = lpegmatch(replacer,message)
-end
-
-function interfaces.getmessage(category,tag,default)
- return formats[fulltag(category,tag)] or default or "unknown message"
-end
-
-function interfaces.doifelsemessage(category,tag)
- return formats[fulltag(category,tag)]
-end
-
-local splitter = lpeg.splitat(",")
-
-function interfaces.showmessage(category,tag,arguments)
- local r = reporters[category]
- local f = formats[fulltag(category,tag)]
- local t = type(arguments)
- if t == "string" and #arguments > 0 then
- r(f,lpegmatch(splitter,arguments))
- elseif t == "table" then
- r(f,unpack(arguments))
- elseif arguments then
- r(f,arguments)
- else
- r(f)
- end
-end
-
--- till here
-
-function interfaces.setvariable(variable,given)
- variables[given] = variable
-end
-
-function interfaces.setconstant(constant,given)
- constants[given] = constant
-end
-
-function interfaces.setelement(element,given)
- elements[given] = element
-end
-
--- the real thing:
-
-logs.setmessenger(context.verbatim.ctxreport)
-
--- initialization
-
-function interfaces.setuserinterface(interface,response)
- sharedstorage.currentinterface, currentinterface = interface, interface
- sharedstorage.currentresponse, currentresponse = response, response
- if environment.initex then
- local nofconstants = 0
- for given, constant in next, complete.constants do
- constant = constant[interface] or constant.en or given
- constants[constant] = given -- breedte -> width
- contextsprint(prtcatcodes,"\\ui_c{",given,"}{",constant,"}") -- user interface constant
- nofconstants = nofconstants + 1
- end
- local nofvariables = 0
- for given, variable in next, complete.variables do
- variable = variable[interface] or variable.en or given
- variables[given] = variable -- ja -> yes
- contextsprint(prtcatcodes,"\\ui_v{",given,"}{",variable,"}") -- user interface variable
- nofvariables = nofvariables + 1
- end
- local nofelements = 0
- for given, element in next, complete.elements do
- element = element[interface] or element.en or given
- elements[element] = given
- contextsprint(prtcatcodes,"\\ui_e{",given,"}{",element,"}") -- user interface element
- nofelements = nofelements + 1
- end
- local nofcommands = 0
- for given, command in next, complete.commands do
- command = command[interface] or command.en or given
- if command ~= given then
- contextsprint(prtcatcodes,"\\ui_m{",given,"}{",command,"}") -- user interface macro
- end
- nofcommands = nofcommands + 1
- end
- local nofformats = 0
- for given, format in next, complete.messages.formats do
- formats[given] = format[interface] or format.en or given
- nofformats = nofformats + 1
- end
- local noftranslations = 0
- for given, translation in next, complete.messages.translations do
- translations[given] = translation[interface] or translation.en or given
- noftranslations = noftranslations + 1
- end
- report_interface("definitions: %a constants, %a variables, %a elements, %a commands, %a formats, %a translations",
- nofconstants,nofvariables,nofelements,nofcommands,nofformats,noftranslations)
- end
-end
-
-interfaces.cachedsetups = interfaces.cachedsetups or { }
-interfaces.hashedsetups = interfaces.hashedsetups or { }
-
-local cachedsetups = interfaces.cachedsetups
-local hashedsetups = interfaces.hashedsetups
-
-storage.register("interfaces/cachedsetups", cachedsetups, "interfaces.cachedsetups")
-storage.register("interfaces/hashedsetups", hashedsetups, "interfaces.hashedsetups")
-
-function interfaces.cachesetup(t)
- local hash = serialize(t)
- local done = hashedsetups[hash]
- if done then
- return cachedsetups[done]
- else
- done = #cachedsetups + 1
- cachedsetups[done] = t
- hashedsetups[hash] = done
- return t
- end
-end
-
-function interfaces.is_command(str)
- return (str and str ~= "" and token.csname_name(token.create(str)) ~= "") or false -- there will be a proper function for this
-end
-
-function interfaces.interfacedcommand(name)
- local command = complete.commands[name]
- return command and command[currentinterface] or name
-end
-
--- interface
-
-function commands.writestatus(category,message,...)
- local r = reporters[category]
- if r then
- r(message,...)
- end
-end
-
-commands.registernamespace = interfaces.registernamespace
-commands.setinterfaceconstant = interfaces.setconstant
-commands.setinterfacevariable = interfaces.setvariable
-commands.setinterfaceelement = interfaces.setelement
-commands.setinterfacemessage = interfaces.setmessage
-commands.setinterfacemessages = interfaces.setmessages
-commands.showmessage = interfaces.showmessage
-
-function commands.doifelsemessage(category,tag)
- commands.doifelse(interfaces.doifelsemessage(category,tag))
-end
-
-function commands.getmessage(category,tag,default)
- context(interfaces.getmessage(category,tag,default))
-end
-
-function commands.showassignerror(namespace,key,value,line)
- local ns, instance = match(namespace,"^(%d+)[^%a]+(%a+)")
- if ns then
- namespace = corenamespaces[tonumber(ns)] or ns
- end
- if instance then
- context.writestatus("setup",formatters["error in line %a, namespace %a, instance %a, key %a"](line,namespace,instance,key))
- else
- context.writestatus("setup",formatters["error in line %a, namespace %a, key %a"](line,namespace,key))
- end
-end
-
--- a simple helper
-
-local settings_to_hash = utilities.parsers.settings_to_hash
-
-local makesparse = function(t)
- for k, v in next, t do
- if not v or v == "" then
- t[k] = nil
- end
- end
- return t
-end
-
-function interfaces.checkedspecification(specification)
- local kind = type(specification)
- if kind == "table" then
- return makesparse(specification)
- elseif kind == "string" and specification ~= "" then
- return makesparse(settings_to_hash(specification))
- else
- return { }
- end
-end
+if not modules then modules = { } end modules ['mult-ini'] = {
+ version = 1.001,
+ comment = "companion to mult-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format, gmatch, match = string.format, string.gmatch, string.match
+local lpegmatch = lpeg.match
+local serialize = table.serialize
+
+local allocate = utilities.storage.allocate
+local mark = utilities.storage.mark
+local prtcatcodes = catcodes.numbers.prtcatcodes
+local contextsprint = context.sprint
+local setmetatableindex = table.setmetatableindex
+local formatters = string.formatters
+
+local report_interface = logs.reporter("interface","initialization")
+
+interfaces = interfaces or { }
+interfaces.constants = mark(interfaces.constants or { })
+interfaces.variables = mark(interfaces.variables or { })
+interfaces.elements = mark(interfaces.elements or { })
+interfaces.formats = mark(interfaces.formats or { })
+interfaces.translations = mark(interfaces.translations or { })
+interfaces.corenamespaces = mark(interfaces.corenamespaces or { })
+
+local registerstorage = storage.register
+local sharedstorage = storage.shared
+
+local constants = interfaces.constants
+local variables = interfaces.variables
+local elements = interfaces.elements
+local formats = interfaces.formats
+local translations = interfaces.translations
+local corenamespaces = interfaces.corenamespaces
+local reporters = { } -- just an optimization
+
+registerstorage("interfaces/constants", constants, "interfaces.constants")
+registerstorage("interfaces/variables", variables, "interfaces.variables")
+registerstorage("interfaces/elements", elements, "interfaces.elements")
+registerstorage("interfaces/formats", formats, "interfaces.formats")
+registerstorage("interfaces/translations", translations, "interfaces.translations")
+registerstorage("interfaces/corenamespaces", corenamespaces, "interfaces.corenamespaces")
+
+interfaces.interfaces = {
+ "cs", "de", "en", "fr", "it", "nl", "ro", "pe",
+}
+
+sharedstorage.currentinterface = sharedstorage.currentinterface or "en"
+sharedstorage.currentresponse = sharedstorage.currentresponse or "en"
+
+local currentinterface = sharedstorage.currentinterface
+local currentresponse = sharedstorage.currentresponse
+
+local complete = allocate()
+interfaces.complete = complete
+
+local function resolve(t,k) -- one access needed to get loaded (not stored!)
+ report_interface("loading interface definitions from 'mult-def.lua'")
+ complete = dofile(resolvers.findfile("mult-def.lua"))
+ report_interface("loading interface messages from 'mult-mes.lua'")
+ complete.messages = dofile(resolvers.findfile("mult-mes.lua"))
+ interfaces.complete = complete
+ return rawget(complete,k)
+end
+
+setmetatableindex(complete, resolve)
+
+local function valueiskey(t,k) -- will be helper
+ t[k] = k
+ return k
+end
+
+setmetatableindex(variables, valueiskey)
+setmetatableindex(constants, valueiskey)
+setmetatableindex(elements, valueiskey)
+setmetatableindex(formats, valueiskey)
+setmetatableindex(translations, valueiskey)
+
+function interfaces.registernamespace(n,namespace)
+ corenamespaces[n] = namespace
+end
+
+local function resolve(t,k)
+ local v = logs.reporter(k)
+ t[k] = v
+ return v
+end
+
+setmetatableindex(reporters,resolve)
+
+for category, _ in next, translations do
+ -- We pre-create reporters for already defined messages because otherwise
+ -- the listing is incomplete, and we want to use that listing for checking,
+ -- so delaying makes little sense here.
+ local r = reporters[category]
+end
+
+-- adding messages
+
+local function add(target,tag,values)
+ local t = target[tag]
+ if not t then
+ target[tag] = values
+ else
+ for k, v in next, values do
+ if t[k] then
+ -- error
+ else
+ t[k] = v
+ end
+ end
+ end
+end
+
+function interfaces.settranslation(tag,values)
+ add(translations,tag,values)
+end
+
+function interfaces.setformat(tag,values)
+ add(formats,tag,values)
+end
+
+-- the old method:
+
+local replacer = lpeg.replacer { { "--", "%%a" } }
+
+local function fulltag(category,tag)
+ return formatters["%s:%s"](category,lpegmatch(replacer,tag))
+end
+
+function interfaces.setmessages(category,str)
+ for tag, message in gmatch(str,"(%S+) *: *(.-) *[\n\r]") do
+ if tag == "title" then
+ translations[tag] = translations[tag] or tag
+ else
+ formats[fulltag(category,tag)] = lpegmatch(replacer,message)
+ end
+ end
+end
+
+function interfaces.setmessage(category,tag,message)
+ formats[fulltag(category,tag)] = lpegmatch(replacer,message)
+end
+
+function interfaces.getmessage(category,tag,default)
+ return formats[fulltag(category,tag)] or default or "unknown message"
+end
+
+function interfaces.doifelsemessage(category,tag)
+ return formats[fulltag(category,tag)]
+end
+
+local splitter = lpeg.splitat(",")
+
+function interfaces.showmessage(category,tag,arguments)
+ local r = reporters[category]
+ local f = formats[fulltag(category,tag)]
+ local t = type(arguments)
+ if t == "string" and #arguments > 0 then
+ r(f,lpegmatch(splitter,arguments))
+ elseif t == "table" then
+ r(f,unpack(arguments))
+ elseif arguments then
+ r(f,arguments)
+ else
+ r(f)
+ end
+end
+
+-- till here
+
+function interfaces.setvariable(variable,given)
+ variables[given] = variable
+end
+
+function interfaces.setconstant(constant,given)
+ constants[given] = constant
+end
+
+function interfaces.setelement(element,given)
+ elements[given] = element
+end
+
+-- the real thing:
+
+logs.setmessenger(context.verbatim.ctxreport)
+
+-- initialization
+
+function interfaces.setuserinterface(interface,response)
+ sharedstorage.currentinterface, currentinterface = interface, interface
+ sharedstorage.currentresponse, currentresponse = response, response
+ if environment.initex then
+ local nofconstants = 0
+ for given, constant in next, complete.constants do
+ constant = constant[interface] or constant.en or given
+ constants[constant] = given -- breedte -> width
+ contextsprint(prtcatcodes,"\\ui_c{",given,"}{",constant,"}") -- user interface constant
+ nofconstants = nofconstants + 1
+ end
+ local nofvariables = 0
+ for given, variable in next, complete.variables do
+ variable = variable[interface] or variable.en or given
+ variables[given] = variable -- ja -> yes
+ contextsprint(prtcatcodes,"\\ui_v{",given,"}{",variable,"}") -- user interface variable
+ nofvariables = nofvariables + 1
+ end
+ local nofelements = 0
+ for given, element in next, complete.elements do
+ element = element[interface] or element.en or given
+ elements[element] = given
+ contextsprint(prtcatcodes,"\\ui_e{",given,"}{",element,"}") -- user interface element
+ nofelements = nofelements + 1
+ end
+ local nofcommands = 0
+ for given, command in next, complete.commands do
+ command = command[interface] or command.en or given
+ if command ~= given then
+ contextsprint(prtcatcodes,"\\ui_m{",given,"}{",command,"}") -- user interface macro
+ end
+ nofcommands = nofcommands + 1
+ end
+ local nofformats = 0
+ for given, format in next, complete.messages.formats do
+ formats[given] = format[interface] or format.en or given
+ nofformats = nofformats + 1
+ end
+ local noftranslations = 0
+ for given, translation in next, complete.messages.translations do
+ translations[given] = translation[interface] or translation.en or given
+ noftranslations = noftranslations + 1
+ end
+ report_interface("definitions: %a constants, %a variables, %a elements, %a commands, %a formats, %a translations",
+ nofconstants,nofvariables,nofelements,nofcommands,nofformats,noftranslations)
+ end
+end
+
+interfaces.cachedsetups = interfaces.cachedsetups or { }
+interfaces.hashedsetups = interfaces.hashedsetups or { }
+
+local cachedsetups = interfaces.cachedsetups
+local hashedsetups = interfaces.hashedsetups
+
+storage.register("interfaces/cachedsetups", cachedsetups, "interfaces.cachedsetups")
+storage.register("interfaces/hashedsetups", hashedsetups, "interfaces.hashedsetups")
+
+function interfaces.cachesetup(t)
+ local hash = serialize(t)
+ local done = hashedsetups[hash]
+ if done then
+ return cachedsetups[done]
+ else
+ done = #cachedsetups + 1
+ cachedsetups[done] = t
+ hashedsetups[hash] = done
+ return t
+ end
+end
+
+function interfaces.is_command(str)
+ return (str and str ~= "" and token.csname_name(token.create(str)) ~= "") or false -- there will be a proper function for this
+end
+
+function interfaces.interfacedcommand(name)
+ local command = complete.commands[name]
+ return command and command[currentinterface] or name
+end
+
+-- interface
+
+function commands.writestatus(category,message,...)
+ local r = reporters[category]
+ if r then
+ r(message,...)
+ end
+end
+
+commands.registernamespace = interfaces.registernamespace
+commands.setinterfaceconstant = interfaces.setconstant
+commands.setinterfacevariable = interfaces.setvariable
+commands.setinterfaceelement = interfaces.setelement
+commands.setinterfacemessage = interfaces.setmessage
+commands.setinterfacemessages = interfaces.setmessages
+commands.showmessage = interfaces.showmessage
+
+function commands.doifelsemessage(category,tag)
+ commands.doifelse(interfaces.doifelsemessage(category,tag))
+end
+
+function commands.getmessage(category,tag,default)
+ context(interfaces.getmessage(category,tag,default))
+end
+
+function commands.showassignerror(namespace,key,value,line)
+ local ns, instance = match(namespace,"^(%d+)[^%a]+(%a+)")
+ if ns then
+ namespace = corenamespaces[tonumber(ns)] or ns
+ end
+ if instance then
+ context.writestatus("setup",formatters["error in line %a, namespace %a, instance %a, key %a"](line,namespace,instance,key))
+ else
+ context.writestatus("setup",formatters["error in line %a, namespace %a, key %a"](line,namespace,key))
+ end
+end
+
+-- a simple helper
+
+local settings_to_hash = utilities.parsers.settings_to_hash
+
+local makesparse = function(t)
+ for k, v in next, t do
+ if not v or v == "" then
+ t[k] = nil
+ end
+ end
+ return t
+end
+
+function interfaces.checkedspecification(specification)
+ local kind = type(specification)
+ if kind == "table" then
+ return makesparse(specification)
+ elseif kind == "string" and specification ~= "" then
+ return makesparse(settings_to_hash(specification))
+ else
+ return { }
+ end
+end
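+
+-- A minimal sketch of the helper above: both calls should end up as
+-- { align = "middle" }, because makesparse drops empty values (the string
+-- form is first parsed with settings_to_hash).
+--
+-- local a = interfaces.checkedspecification { align = "middle", frame = "" }
+-- local b = interfaces.checkedspecification("align=middle,frame=")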
diff --git a/tex/context/base/mult-low.lua b/tex/context/base/mult-low.lua
index 47e31978b..46c2c24d6 100644
--- a/tex/context/base/mult-low.lua
+++ b/tex/context/base/mult-low.lua
@@ -1,347 +1,347 @@
-if not modules then modules = { } end modules ['mult-low'] = {
- version = 1.001,
- comment = "companion to mult-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- for syntax highlighters, only the ones that are for users (boring to collect them)
-
-return {
- ["constants"] = {
- --
- "zerocount", "minusone", "minustwo", "plusone", "plustwo", "plusthree", "plusfour", "plusfive",
- "plussix", "plusseven", "pluseight", "plusnine", "plusten", "plussixteen", "plushundred",
- "plusthousand", "plustenthousand", "plustwentythousand", "medcard", "maxcard",
- "zeropoint", "onepoint", "halfapoint", "onebasepoint", "maxdimen", "scaledpoint", "thousandpoint",
- "points", "halfpoint",
- "zeroskip",
- "zeromuskip", "onemuskip",
- "pluscxxvii", "pluscxxviii", "pluscclv", "pluscclvi",
- "normalpagebox",
- -- --
- "endoflinetoken", "outputnewlinechar",
- --
- "emptytoks", "empty", "undefined",
- --
- "voidbox", "emptybox", "emptyvbox", "emptyhbox",
- --
- "bigskipamount", "medskipamount", "smallskipamount",
- --
- "fmtname", "fmtversion", "texengine", "texenginename", "texengineversion",
- "luatexengine", "pdftexengine", "xetexengine", "unknownengine",
- "etexversion", "pdftexversion", "xetexversion", "xetexrevision",
- --
- "activecatcode",
- --
- "bgroup", "egroup",
- "endline",
- --
- "conditionaltrue", "conditionalfalse",
- --
- "attributeunsetvalue",
- --
- "uprotationangle", "rightrotationangle", "downrotationangle", "leftrotationangle",
- --
- "inicatcodes",
- "ctxcatcodes", "texcatcodes", "notcatcodes", "txtcatcodes", "vrbcatcodes",
- "prtcatcodes", "nilcatcodes", "luacatcodes", "tpacatcodes", "tpbcatcodes",
- "xmlcatcodes",
- --
- "escapecatcode", "begingroupcatcode", "endgroupcatcode", "mathshiftcatcode", "alignmentcatcode",
- "endoflinecatcode", "parametercatcode", "superscriptcatcode", "subscriptcatcode", "ignorecatcode",
- "spacecatcode", "lettercatcode", "othercatcode", "activecatcode", "commentcatcode", "invalidcatcode",
- --
- "tabasciicode", "newlineasciicode", "formfeedasciicode", "endoflineasciicode", "endoffileasciicode",
- "spaceasciicode", "hashasciicode", "dollarasciicode", "commentasciicode", "ampersandasciicode",
- "colonasciicode", "backslashasciicode", "circumflexasciicode", "underscoreasciicode",
- "leftbraceasciicode", "barasciicode", "rightbraceasciicode", "tildeasciicode", "delasciicode",
- "lessthanasciicode", "morethanasciicode", "doublecommentsignal",
- "atsignasciicode", "exclamationmarkasciicode", "questionmarkasciicode",
- "doublequoteasciicode", "singlequoteasciicode", "forwardslashasciicode",
- "primeasciicode",
- --
- "activemathcharcode",
- --
- "activetabtoken", "activeformfeedtoken", "activeendoflinetoken",
- --
- "batchmodecode", "nonstopmodecode", "scrollmodecode", "errorstopmodecode",
- --
- "bottomlevelgroupcode", "simplegroupcode", "hboxgroupcode", "adjustedhboxgroupcode", "vboxgroupcode",
- "vtopgroupcode", "aligngroupcode", "noaligngroupcode", "outputgroupcode", "mathgroupcode",
- "discretionarygroupcode", "insertgroupcode", "vcentergroupcode", "mathchoicegroupcode",
- "semisimplegroupcode", "mathshiftgroupcode", "mathleftgroupcode", "vadjustgroupcode",
- --
- "charnodecode", "hlistnodecode", "vlistnodecode", "rulenodecode", "insertnodecode", "marknodecode",
- "adjustnodecode", "ligaturenodecode", "discretionarynodecode", "whatsitnodecode", "mathnodecode",
- "gluenodecode", "kernnodecode", "penaltynodecode", "unsetnodecode", "mathsnodecode",
- --
- "charifcode", "catifcode", "numifcode", "dimifcode", "oddifcode", "vmodeifcode", "hmodeifcode",
- "mmodeifcode", "innerifcode", "voidifcode", "hboxifcode", "vboxifcode", "xifcode", "eofifcode",
- "trueifcode", "falseifcode", "caseifcode", "definedifcode", "csnameifcode", "fontcharifcode",
- --
- "fontslantperpoint", "fontinterwordspace", "fontinterwordstretch", "fontinterwordshrink",
- "fontexheight", "fontemwidth", "fontextraspace", "slantperpoint",
- "interwordspace", "interwordstretch", "interwordshrink", "exheight", "emwidth", "extraspace",
- "mathsupdisplay", "mathsupnormal", "mathsupcramped", "mathsubnormal", "mathsubcombined", "mathaxisheight",
- --
- -- maybe a different class
- --
- "startmode", "stopmode", "startnotmode", "stopnotmode", "startmodeset", "stopmodeset",
- "doifmode", "doifmodeelse", "doifnotmode",
- "startallmodes", "stopallmodes", "startnotallmodes", "stopnotallmodes", "doifallmodes", "doifallmodeselse", "doifnotallmodes",
- "startenvironment", "stopenvironment", "environment",
- "startcomponent", "stopcomponent", "component",
- "startproduct", "stopproduct", "product",
- "startproject", "stopproject", "project",
- "starttext", "stoptext", "startnotext", "stopnotext","startdocument", "stopdocument", "documentvariable", "setupdocument",
- "startmodule", "stopmodule", "usemodule", "usetexmodule", "useluamodule",
- --
- "startTEXpage", "stopTEXpage",
- -- "startMPpage", "stopMPpage", -- already catched by nested lexer
- --
- "enablemode", "disablemode", "preventmode",
- "globalenablemode", "globaldisablemode", "globalpreventmode",
- "pushmode", "popmode",
- --
- "typescriptone", "typescripttwo", "typescriptthree", "mathsizesuffix",
- --
- "mathordcode", "mathopcode", "mathbincode", "mathrelcode", "mathopencode", "mathclosecode",
- "mathpunctcode", "mathalphacode", "mathinnercode", "mathnothingcode", "mathlimopcode",
- "mathnolopcode", "mathboxcode", "mathchoicecode", "mathaccentcode", "mathradicalcode",
- --
- "constantnumber", "constantnumberargument", "constantdimen", "constantdimenargument", "constantemptyargument",
- --
- "continueifinputfile",
- --
- "luastringsep", "!!bs", "!!es",
- },
- ["helpers"] = {
- --
- "startsetups", "stopsetups",
- "startxmlsetups", "stopxmlsetups",
- "startluasetups", "stopluasetups",
- "starttexsetups", "stoptexsetups",
- "startrawsetups", "stoprawsetups",
- "startlocalsetups", "stoplocalsetups",
- "starttexdefinition", "stoptexdefinition",
- "starttexcode", "stoptexcode",
- "startcontextcode", "stopcontextcode",
- --
- "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup",
- "doifelsecommandhandler","doifnotcommandhandler","doifcommandhandler",
- --
- "newmode", "setmode", "resetmode",
- "newsystemmode", "setsystemmode", "resetsystemmode", "pushsystemmode", "popsystemmode",
- "booleanmodevalue",
- --
- "newcount", "newdimen", "newskip", "newmuskip", "newbox", "newtoks", "newread", "newwrite", "newmarks", "newinsert", "newattribute", "newif",
- "newlanguage", "newfamily", "newfam", "newhelp", -- not used
- --
- "then",
- "begcsname",
- --
- "strippedcsname",
- --
- "firstargumentfalse", "firstargumenttrue",
- "secondargumentfalse", "secondargumenttrue",
- "thirdargumentfalse", "thirdargumenttrue",
- "fourthargumentfalse", "fourthargumenttrue",
- "fifthargumentfalse", "fifthsargumenttrue",
- "sixthargumentfalse", "sixtsargumenttrue",
- --
- "doglobal", "dodoglobal", "redoglobal", "resetglobal",
- --
- "donothing", "dontcomplain", "forgetall",
- --
- "donetrue", "donefalse",
- --
- "htdp",
- "unvoidbox",
- "hfilll", "vfilll",
- --
- "mathbox", "mathlimop", "mathnolop", "mathnothing", "mathalpha",
- --
- "currentcatcodetable", "defaultcatcodetable", "catcodetablename",
- "newcatcodetable", "startcatcodetable", "stopcatcodetable", "startextendcatcodetable", "stopextendcatcodetable",
- "pushcatcodetable", "popcatcodetable", "restorecatcodes",
- "setcatcodetable", "letcatcodecommand", "defcatcodecommand", "uedcatcodecommand",
- --
- "hglue", "vglue", "hfillneg", "vfillneg", "hfilllneg", "vfilllneg",
- --
- "ruledhss", "ruledhfil", "ruledhfill", "ruledhfilneg", "ruledhfillneg", "normalhfillneg",
- "ruledvss", "ruledvfil", "ruledvfill", "ruledvfilneg", "ruledvfillneg", "normalvfillneg",
- "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter",
- "ruledhskip", "ruledvskip", "ruledkern", "ruledmskip", "ruledmkern",
- "ruledhglue", "ruledvglue", "normalhglue", "normalvglue",
- "ruledpenalty",
- --
- "filledhboxb", "filledhboxr", "filledhboxg", "filledhboxc", "filledhboxm", "filledhboxy", "filledhboxk",
- --
- "scratchcounter", "globalscratchcounter",
- "scratchdimen", "globalscratchdimen",
- "scratchskip", "globalscratchskip",
- "scratchmuskip", "globalscratchmuskip",
- "scratchtoks", "globalscratchtoks",
- "scratchbox", "globalscratchbox",
- --
- "availablehsize", "localhsize", "setlocalhsize",
- --
- "nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs",
- --
- "scratchwidth", "scratchheight", "scratchdepth", "scratchoffset", "scratchdistance",
- "scratchhsize", "scratchvsize",
- "scratchxoffset", "scratchyoffset", "scratchhoffset", "scratchvoffset",
- "scratchxposition", "scratchyposition",
- "scratchtopoffset", "scratchbottomoffset", "scratchleftoffset", "scratchrightoffset",
- --
- "scratchcounterone", "scratchcountertwo", "scratchcounterthree",
- "scratchdimenone", "scratchdimentwo", "scratchdimenthree",
- "scratchskipone", "scratchskiptwo", "scratchskipthree",
- "scratchmuskipone", "scratchmuskiptwo", "scratchmuskipthree",
- "scratchtoksone", "scratchtokstwo", "scratchtoksthree",
- "scratchboxone", "scratchboxtwo", "scratchboxthree",
- "scratchnx", "scratchny", "scratchmx", "scratchmy",
- "scratchunicode",
- --
- "scratchleftskip", "scratchrightskip", "scratchtopskip", "scratchbottomskip",
- --
- "doif", "doifnot", "doifelse",
- "doifinset", "doifnotinset", "doifinsetelse",
- "doifnextcharelse", "doifnextoptionalelse", "doifnextbgroupelse", "doifnextparenthesiselse", "doiffastoptionalcheckelse",
- "doifundefinedelse", "doifdefinedelse", "doifundefined", "doifdefined",
- "doifelsevalue", "doifvalue", "doifnotvalue",
- "doifnothing", "doifsomething", "doifelsenothing", "doifsomethingelse",
- "doifvaluenothing", "doifvaluesomething", "doifelsevaluenothing",
- "doifdimensionelse", "doifnumberelse", "doifnumber", "doifnotnumber",
- "doifcommonelse", "doifcommon", "doifnotcommon",
- "doifinstring", "doifnotinstring", "doifinstringelse",
- "doifassignmentelse", "docheckassignment",
- --
- "tracingall", "tracingnone", "loggingall",
- --
- "removetoks", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to",
- --
- "endgraf", "endpar", "everyendpar", "reseteverypar", "finishpar", "empty", "null", "space", "quad", "enspace",
- "obeyspaces", "obeylines", "obeyedspace", "obeyedline",
- "normalspace",
- --
- "executeifdefined",
- --
- "singleexpandafter", "doubleexpandafter", "tripleexpandafter",
- --
- "dontleavehmode", "removelastspace", "removeunwantedspaces", "keepunwantedspaces",
- --
- "wait", "writestatus", "define", "defineexpandable", "redefine",
- --
- "setmeasure", "setemeasure", "setgmeasure", "setxmeasure", "definemeasure", "freezemeasure", "measure", "measured",
- --
- "installcorenamespace",
- --
- "getvalue", "getuvalue", "setvalue", "setevalue", "setgvalue", "setxvalue", "letvalue", "letgvalue",
- "resetvalue", "undefinevalue", "ignorevalue",
- "setuvalue", "setuevalue", "setugvalue", "setuxvalue",
- --
- "globallet", "glet", "udef", "ugdef", "uedef", "uxdef", "checked", "unique",
- --
- "getparameters", "geteparameters", "getgparameters", "getxparameters", "forgetparameters", "copyparameters",
- --
- "getdummyparameters", "dummyparameter", "directdummyparameter", "setdummyparameter", "letdummyparameter",
- "usedummystyleandcolor", "usedummystyleparameter", "usedummycolorparameter",
- --
- "processcommalist", "processcommacommand", "quitcommalist", "quitprevcommalist",
- "processaction", "processallactions", "processfirstactioninset", "processallactionsinset",
- --
- "unexpanded", "expanded", "startexpanded", "stopexpanded", "protected", "protect", "unprotect",
- --
- "firstofoneargument",
- "firstoftwoarguments", "secondoftwoarguments",
- "firstofthreearguments", "secondofthreearguments", "thirdofthreearguments",
- "firstoffourarguments", "secondoffourarguments", "thirdoffourarguments", "fourthoffourarguments",
- "firstoffivearguments", "secondoffivearguments", "thirdoffivearguments", "fourthoffivearguments", "fifthoffivearguments",
- "firstofsixarguments", "secondofsixarguments", "thirdofsixarguments", "fourthofsixarguments", "fifthofsixarguments", "sixthofsixarguments",
- --
- "firstofoneunexpanded",
- --
- "gobbleoneargument", "gobbletwoarguments", "gobblethreearguments", "gobblefourarguments", "gobblefivearguments", "gobblesixarguments", "gobblesevenarguments", "gobbleeightarguments", "gobbleninearguments", "gobbletenarguments",
- "gobbleoneoptional", "gobbletwooptionals", "gobblethreeoptionals", "gobblefouroptionals", "gobblefiveoptionals",
- --
- "dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "dowith",
- --
- "newconstant", "setnewconstant", "newconditional", "settrue", "setfalse", "setconstant",
- "newmacro", "setnewmacro", "newfraction",
- "newsignal",
- --
- "dosingleempty", "dodoubleempty", "dotripleempty", "doquadrupleempty", "doquintupleempty", "dosixtupleempty", "doseventupleempty",
- "dosingleargument", "dodoubleargument", "dotripleargument", "doquadrupleargument", "doquintupleargument", "dosixtupleargument", "doseventupleargument",
- "dosinglegroupempty", "dodoublegroupempty", "dotriplegroupempty", "doquadruplegroupempty", "doquintuplegroupempty",
- "permitspacesbetweengroups", "dontpermitspacesbetweengroups",
- --
- "nopdfcompression", "maximumpdfcompression", "normalpdfcompression",
- --
- "modulonumber", "dividenumber",
- --
- "getfirstcharacter", "doiffirstcharelse",
- --
- "startnointerference", "stopnointerference",
- --
- "twodigits","threedigits",
- --
- "strut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight",
- --
- "ordordspacing", "ordopspacing", "ordbinspacing", "ordrelspacing",
- "ordopenspacing", "ordclosespacing", "ordpunctspacing", "ordinnerspacing",
- --
- "opordspacing", "opopspacing", "opbinspacing", "oprelspacing",
- "opopenspacing", "opclosespacing", "oppunctspacing", "opinnerspacing",
- --
- "binordspacing", "binopspacing", "binbinspacing", "binrelspacing",
- "binopenspacing", "binclosespacing", "binpunctspacing", "bininnerspacing",
- --
- "relordspacing", "relopspacing", "relbinspacing", "relrelspacing",
- "relopenspacing", "relclosespacing", "relpunctspacing", "relinnerspacing",
- --
- "openordspacing", "openopspacing", "openbinspacing", "openrelspacing",
- "openopenspacing", "openclosespacing", "openpunctspacing", "openinnerspacing",
- --
- "closeordspacing", "closeopspacing", "closebinspacing", "closerelspacing",
- "closeopenspacing", "closeclosespacing", "closepunctspacing", "closeinnerspacing",
- --
- "punctordspacing", "punctopspacing", "punctbinspacing", "punctrelspacing",
- "punctopenspacing", "punctclosespacing", "punctpunctspacing", "punctinnerspacing",
- --
- "innerordspacing", "inneropspacing", "innerbinspacing", "innerrelspacing",
- "inneropenspacing", "innerclosespacing", "innerpunctspacing", "innerinnerspacing",
- --
- "normalreqno",
- --
- "startimath", "stopimath", "normalstartimath", "normalstopimath",
- "startdmath", "stopdmath", "normalstartdmath", "normalstopdmath",
- --
- "uncramped", "cramped", "triggermathstyle", "mathstylefont", "mathsmallstylefont", "mathstyleface", "mathsmallstyleface", "mathstylecommand", "mathpalette",
- "mathstylehbox", "mathstylevbox", "mathstylevcenter", "mathstylevcenteredhbox", "mathstylevcenteredvbox",
- "mathtext", "setmathsmalltextbox", "setmathtextbox",
- --
- "triggerdisplaystyle", "triggertextstyle", "triggerscriptstyle", "triggerscriptscriptstyle",
- "triggeruncrampedstyle", "triggercrampedstyle",
- "triggersmallstyle", "triggeruncrampedsmallstyle", "triggercrampedsmallstyle",
- "triggerbigstyle", "triggeruncrampedbigstyle", "triggercrampedbigstyle",
- --
- "luaexpr", "expdoifelse", "expdoif", "expdoifnot", "expdoifcommonelse", "expdoifinsetelse",
- --
- "ctxdirectlua", "ctxlatelua", "ctxsprint", "ctxwrite", "ctxcommand", "ctxdirectcommand", "ctxlatecommand", "ctxreport",
- "ctxlua", "luacode", "lateluacode", "directluacode",
- "registerctxluafile", "ctxloadluafile",
- "luaversion", "luamajorversion", "luaminorversion",
- "ctxluacode", "luaconditional", "luaexpanded",
- "startluaparameterset", "stopluaparameterset", "luaparameterset",
- "definenamedlua",
- "obeylualines", "obeyluatokens",
- "startluacode", "stopluacode", "startlua", "stoplua",
- --
- "carryoverpar",
- --
- "Umathbotaccent",
- }
-}
+if not modules then modules = { } end modules ['mult-low'] = {
+ version = 1.001,
+ comment = "companion to mult-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- for syntax highlighters, only the ones that are for users (boring to collect them)
+
+return {
+ ["constants"] = {
+ --
+ "zerocount", "minusone", "minustwo", "plusone", "plustwo", "plusthree", "plusfour", "plusfive",
+ "plussix", "plusseven", "pluseight", "plusnine", "plusten", "plussixteen", "plushundred",
+ "plusthousand", "plustenthousand", "plustwentythousand", "medcard", "maxcard",
+ "zeropoint", "onepoint", "halfapoint", "onebasepoint", "maxdimen", "scaledpoint", "thousandpoint",
+ "points", "halfpoint",
+ "zeroskip",
+ "zeromuskip", "onemuskip",
+ "pluscxxvii", "pluscxxviii", "pluscclv", "pluscclvi",
+ "normalpagebox",
+ -- --
+ "endoflinetoken", "outputnewlinechar",
+ --
+ "emptytoks", "empty", "undefined",
+ --
+ "voidbox", "emptybox", "emptyvbox", "emptyhbox",
+ --
+ "bigskipamount", "medskipamount", "smallskipamount",
+ --
+ "fmtname", "fmtversion", "texengine", "texenginename", "texengineversion",
+ "luatexengine", "pdftexengine", "xetexengine", "unknownengine",
+ "etexversion", "pdftexversion", "xetexversion", "xetexrevision",
+ --
+ "activecatcode",
+ --
+ "bgroup", "egroup",
+ "endline",
+ --
+ "conditionaltrue", "conditionalfalse",
+ --
+ "attributeunsetvalue",
+ --
+ "uprotationangle", "rightrotationangle", "downrotationangle", "leftrotationangle",
+ --
+ "inicatcodes",
+ "ctxcatcodes", "texcatcodes", "notcatcodes", "txtcatcodes", "vrbcatcodes",
+ "prtcatcodes", "nilcatcodes", "luacatcodes", "tpacatcodes", "tpbcatcodes",
+ "xmlcatcodes",
+ --
+ "escapecatcode", "begingroupcatcode", "endgroupcatcode", "mathshiftcatcode", "alignmentcatcode",
+ "endoflinecatcode", "parametercatcode", "superscriptcatcode", "subscriptcatcode", "ignorecatcode",
+ "spacecatcode", "lettercatcode", "othercatcode", "activecatcode", "commentcatcode", "invalidcatcode",
+ --
+ "tabasciicode", "newlineasciicode", "formfeedasciicode", "endoflineasciicode", "endoffileasciicode",
+ "spaceasciicode", "hashasciicode", "dollarasciicode", "commentasciicode", "ampersandasciicode",
+ "colonasciicode", "backslashasciicode", "circumflexasciicode", "underscoreasciicode",
+ "leftbraceasciicode", "barasciicode", "rightbraceasciicode", "tildeasciicode", "delasciicode",
+ "lessthanasciicode", "morethanasciicode", "doublecommentsignal",
+ "atsignasciicode", "exclamationmarkasciicode", "questionmarkasciicode",
+ "doublequoteasciicode", "singlequoteasciicode", "forwardslashasciicode",
+ "primeasciicode",
+ --
+ "activemathcharcode",
+ --
+ "activetabtoken", "activeformfeedtoken", "activeendoflinetoken",
+ --
+ "batchmodecode", "nonstopmodecode", "scrollmodecode", "errorstopmodecode",
+ --
+ "bottomlevelgroupcode", "simplegroupcode", "hboxgroupcode", "adjustedhboxgroupcode", "vboxgroupcode",
+ "vtopgroupcode", "aligngroupcode", "noaligngroupcode", "outputgroupcode", "mathgroupcode",
+ "discretionarygroupcode", "insertgroupcode", "vcentergroupcode", "mathchoicegroupcode",
+ "semisimplegroupcode", "mathshiftgroupcode", "mathleftgroupcode", "vadjustgroupcode",
+ --
+ "charnodecode", "hlistnodecode", "vlistnodecode", "rulenodecode", "insertnodecode", "marknodecode",
+ "adjustnodecode", "ligaturenodecode", "discretionarynodecode", "whatsitnodecode", "mathnodecode",
+ "gluenodecode", "kernnodecode", "penaltynodecode", "unsetnodecode", "mathsnodecode",
+ --
+ "charifcode", "catifcode", "numifcode", "dimifcode", "oddifcode", "vmodeifcode", "hmodeifcode",
+ "mmodeifcode", "innerifcode", "voidifcode", "hboxifcode", "vboxifcode", "xifcode", "eofifcode",
+ "trueifcode", "falseifcode", "caseifcode", "definedifcode", "csnameifcode", "fontcharifcode",
+ --
+ "fontslantperpoint", "fontinterwordspace", "fontinterwordstretch", "fontinterwordshrink",
+ "fontexheight", "fontemwidth", "fontextraspace", "slantperpoint",
+ "interwordspace", "interwordstretch", "interwordshrink", "exheight", "emwidth", "extraspace",
+ "mathsupdisplay", "mathsupnormal", "mathsupcramped", "mathsubnormal", "mathsubcombined", "mathaxisheight",
+ --
+ -- maybe a different class
+ --
+ "startmode", "stopmode", "startnotmode", "stopnotmode", "startmodeset", "stopmodeset",
+ "doifmode", "doifmodeelse", "doifnotmode",
+ "startallmodes", "stopallmodes", "startnotallmodes", "stopnotallmodes", "doifallmodes", "doifallmodeselse", "doifnotallmodes",
+ "startenvironment", "stopenvironment", "environment",
+ "startcomponent", "stopcomponent", "component",
+ "startproduct", "stopproduct", "product",
+ "startproject", "stopproject", "project",
+ "starttext", "stoptext", "startnotext", "stopnotext","startdocument", "stopdocument", "documentvariable", "setupdocument",
+ "startmodule", "stopmodule", "usemodule", "usetexmodule", "useluamodule",
+ --
+ "startTEXpage", "stopTEXpage",
+ -- "startMPpage", "stopMPpage", -- already catched by nested lexer
+ --
+ "enablemode", "disablemode", "preventmode",
+ "globalenablemode", "globaldisablemode", "globalpreventmode",
+ "pushmode", "popmode",
+ --
+ "typescriptone", "typescripttwo", "typescriptthree", "mathsizesuffix",
+ --
+ "mathordcode", "mathopcode", "mathbincode", "mathrelcode", "mathopencode", "mathclosecode",
+ "mathpunctcode", "mathalphacode", "mathinnercode", "mathnothingcode", "mathlimopcode",
+ "mathnolopcode", "mathboxcode", "mathchoicecode", "mathaccentcode", "mathradicalcode",
+ --
+ "constantnumber", "constantnumberargument", "constantdimen", "constantdimenargument", "constantemptyargument",
+ --
+ "continueifinputfile",
+ --
+ "luastringsep", "!!bs", "!!es",
+ },
+ ["helpers"] = {
+ --
+ "startsetups", "stopsetups",
+ "startxmlsetups", "stopxmlsetups",
+ "startluasetups", "stopluasetups",
+ "starttexsetups", "stoptexsetups",
+ "startrawsetups", "stoprawsetups",
+ "startlocalsetups", "stoplocalsetups",
+ "starttexdefinition", "stoptexdefinition",
+ "starttexcode", "stoptexcode",
+ "startcontextcode", "stopcontextcode",
+ --
+ "doifsetupselse", "doifsetups", "doifnotsetups", "setup", "setups", "texsetup", "xmlsetup", "luasetup", "directsetup",
+ "doifelsecommandhandler","doifnotcommandhandler","doifcommandhandler",
+ --
+ "newmode", "setmode", "resetmode",
+ "newsystemmode", "setsystemmode", "resetsystemmode", "pushsystemmode", "popsystemmode",
+ "booleanmodevalue",
+ --
+ "newcount", "newdimen", "newskip", "newmuskip", "newbox", "newtoks", "newread", "newwrite", "newmarks", "newinsert", "newattribute", "newif",
+ "newlanguage", "newfamily", "newfam", "newhelp", -- not used
+ --
+ "then",
+ "begcsname",
+ --
+ "strippedcsname",
+ --
+ "firstargumentfalse", "firstargumenttrue",
+ "secondargumentfalse", "secondargumenttrue",
+ "thirdargumentfalse", "thirdargumenttrue",
+ "fourthargumentfalse", "fourthargumenttrue",
+ "fifthargumentfalse", "fifthsargumenttrue",
+ "sixthargumentfalse", "sixtsargumenttrue",
+ --
+ "doglobal", "dodoglobal", "redoglobal", "resetglobal",
+ --
+ "donothing", "dontcomplain", "forgetall",
+ --
+ "donetrue", "donefalse",
+ --
+ "htdp",
+ "unvoidbox",
+ "hfilll", "vfilll",
+ --
+ "mathbox", "mathlimop", "mathnolop", "mathnothing", "mathalpha",
+ --
+ "currentcatcodetable", "defaultcatcodetable", "catcodetablename",
+ "newcatcodetable", "startcatcodetable", "stopcatcodetable", "startextendcatcodetable", "stopextendcatcodetable",
+ "pushcatcodetable", "popcatcodetable", "restorecatcodes",
+ "setcatcodetable", "letcatcodecommand", "defcatcodecommand", "uedcatcodecommand",
+ --
+ "hglue", "vglue", "hfillneg", "vfillneg", "hfilllneg", "vfilllneg",
+ --
+ "ruledhss", "ruledhfil", "ruledhfill", "ruledhfilneg", "ruledhfillneg", "normalhfillneg",
+ "ruledvss", "ruledvfil", "ruledvfill", "ruledvfilneg", "ruledvfillneg", "normalvfillneg",
+ "ruledhbox", "ruledvbox", "ruledvtop", "ruledvcenter",
+ "ruledhskip", "ruledvskip", "ruledkern", "ruledmskip", "ruledmkern",
+ "ruledhglue", "ruledvglue", "normalhglue", "normalvglue",
+ "ruledpenalty",
+ --
+ "filledhboxb", "filledhboxr", "filledhboxg", "filledhboxc", "filledhboxm", "filledhboxy", "filledhboxk",
+ --
+ "scratchcounter", "globalscratchcounter",
+ "scratchdimen", "globalscratchdimen",
+ "scratchskip", "globalscratchskip",
+ "scratchmuskip", "globalscratchmuskip",
+ "scratchtoks", "globalscratchtoks",
+ "scratchbox", "globalscratchbox",
+ --
+ "availablehsize", "localhsize", "setlocalhsize",
+ --
+ "nextbox", "dowithnextbox", "dowithnextboxcs", "dowithnextboxcontent", "dowithnextboxcontentcs",
+ --
+ "scratchwidth", "scratchheight", "scratchdepth", "scratchoffset", "scratchdistance",
+ "scratchhsize", "scratchvsize",
+ "scratchxoffset", "scratchyoffset", "scratchhoffset", "scratchvoffset",
+ "scratchxposition", "scratchyposition",
+ "scratchtopoffset", "scratchbottomoffset", "scratchleftoffset", "scratchrightoffset",
+ --
+ "scratchcounterone", "scratchcountertwo", "scratchcounterthree",
+ "scratchdimenone", "scratchdimentwo", "scratchdimenthree",
+ "scratchskipone", "scratchskiptwo", "scratchskipthree",
+ "scratchmuskipone", "scratchmuskiptwo", "scratchmuskipthree",
+ "scratchtoksone", "scratchtokstwo", "scratchtoksthree",
+ "scratchboxone", "scratchboxtwo", "scratchboxthree",
+ "scratchnx", "scratchny", "scratchmx", "scratchmy",
+ "scratchunicode",
+ --
+ "scratchleftskip", "scratchrightskip", "scratchtopskip", "scratchbottomskip",
+ --
+ "doif", "doifnot", "doifelse",
+ "doifinset", "doifnotinset", "doifinsetelse",
+ "doifnextcharelse", "doifnextoptionalelse", "doifnextbgroupelse", "doifnextparenthesiselse", "doiffastoptionalcheckelse",
+ "doifundefinedelse", "doifdefinedelse", "doifundefined", "doifdefined",
+ "doifelsevalue", "doifvalue", "doifnotvalue",
+ "doifnothing", "doifsomething", "doifelsenothing", "doifsomethingelse",
+ "doifvaluenothing", "doifvaluesomething", "doifelsevaluenothing",
+ "doifdimensionelse", "doifnumberelse", "doifnumber", "doifnotnumber",
+ "doifcommonelse", "doifcommon", "doifnotcommon",
+ "doifinstring", "doifnotinstring", "doifinstringelse",
+ "doifassignmentelse", "docheckassignment",
+ --
+ "tracingall", "tracingnone", "loggingall",
+ --
+ "removetoks", "appendtoks", "prependtoks", "appendtotoks", "prependtotoks", "to",
+ --
+ "endgraf", "endpar", "everyendpar", "reseteverypar", "finishpar", "empty", "null", "space", "quad", "enspace",
+ "obeyspaces", "obeylines", "obeyedspace", "obeyedline",
+ "normalspace",
+ --
+ "executeifdefined",
+ --
+ "singleexpandafter", "doubleexpandafter", "tripleexpandafter",
+ --
+ "dontleavehmode", "removelastspace", "removeunwantedspaces", "keepunwantedspaces",
+ --
+ "wait", "writestatus", "define", "defineexpandable", "redefine",
+ --
+ "setmeasure", "setemeasure", "setgmeasure", "setxmeasure", "definemeasure", "freezemeasure", "measure", "measured",
+ --
+ "installcorenamespace",
+ --
+ "getvalue", "getuvalue", "setvalue", "setevalue", "setgvalue", "setxvalue", "letvalue", "letgvalue",
+ "resetvalue", "undefinevalue", "ignorevalue",
+ "setuvalue", "setuevalue", "setugvalue", "setuxvalue",
+ --
+ "globallet", "glet", "udef", "ugdef", "uedef", "uxdef", "checked", "unique",
+ --
+ "getparameters", "geteparameters", "getgparameters", "getxparameters", "forgetparameters", "copyparameters",
+ --
+ "getdummyparameters", "dummyparameter", "directdummyparameter", "setdummyparameter", "letdummyparameter",
+ "usedummystyleandcolor", "usedummystyleparameter", "usedummycolorparameter",
+ --
+ "processcommalist", "processcommacommand", "quitcommalist", "quitprevcommalist",
+ "processaction", "processallactions", "processfirstactioninset", "processallactionsinset",
+ --
+ "unexpanded", "expanded", "startexpanded", "stopexpanded", "protected", "protect", "unprotect",
+ --
+ "firstofoneargument",
+ "firstoftwoarguments", "secondoftwoarguments",
+ "firstofthreearguments", "secondofthreearguments", "thirdofthreearguments",
+ "firstoffourarguments", "secondoffourarguments", "thirdoffourarguments", "fourthoffourarguments",
+ "firstoffivearguments", "secondoffivearguments", "thirdoffivearguments", "fourthoffivearguments", "fifthoffivearguments",
+ "firstofsixarguments", "secondofsixarguments", "thirdofsixarguments", "fourthofsixarguments", "fifthofsixarguments", "sixthofsixarguments",
+ --
+ "firstofoneunexpanded",
+ --
+ "gobbleoneargument", "gobbletwoarguments", "gobblethreearguments", "gobblefourarguments", "gobblefivearguments", "gobblesixarguments", "gobblesevenarguments", "gobbleeightarguments", "gobbleninearguments", "gobbletenarguments",
+ "gobbleoneoptional", "gobbletwooptionals", "gobblethreeoptionals", "gobblefouroptionals", "gobblefiveoptionals",
+ --
+ "dorecurse", "doloop", "exitloop", "dostepwiserecurse", "recurselevel", "recursedepth", "dofastloopcs", "dowith",
+ --
+ "newconstant", "setnewconstant", "newconditional", "settrue", "setfalse", "setconstant",
+ "newmacro", "setnewmacro", "newfraction",
+ "newsignal",
+ --
+ "dosingleempty", "dodoubleempty", "dotripleempty", "doquadrupleempty", "doquintupleempty", "dosixtupleempty", "doseventupleempty",
+ "dosingleargument", "dodoubleargument", "dotripleargument", "doquadrupleargument", "doquintupleargument", "dosixtupleargument", "doseventupleargument",
+ "dosinglegroupempty", "dodoublegroupempty", "dotriplegroupempty", "doquadruplegroupempty", "doquintuplegroupempty",
+ "permitspacesbetweengroups", "dontpermitspacesbetweengroups",
+ --
+ "nopdfcompression", "maximumpdfcompression", "normalpdfcompression",
+ --
+ "modulonumber", "dividenumber",
+ --
+ "getfirstcharacter", "doiffirstcharelse",
+ --
+ "startnointerference", "stopnointerference",
+ --
+ "twodigits","threedigits",
+ --
+ "strut", "setstrut", "strutbox", "strutht", "strutdp", "strutwd", "struthtdp", "begstrut", "endstrut", "lineheight",
+ --
+ "ordordspacing", "ordopspacing", "ordbinspacing", "ordrelspacing",
+ "ordopenspacing", "ordclosespacing", "ordpunctspacing", "ordinnerspacing",
+ --
+ "opordspacing", "opopspacing", "opbinspacing", "oprelspacing",
+ "opopenspacing", "opclosespacing", "oppunctspacing", "opinnerspacing",
+ --
+ "binordspacing", "binopspacing", "binbinspacing", "binrelspacing",
+ "binopenspacing", "binclosespacing", "binpunctspacing", "bininnerspacing",
+ --
+ "relordspacing", "relopspacing", "relbinspacing", "relrelspacing",
+ "relopenspacing", "relclosespacing", "relpunctspacing", "relinnerspacing",
+ --
+ "openordspacing", "openopspacing", "openbinspacing", "openrelspacing",
+ "openopenspacing", "openclosespacing", "openpunctspacing", "openinnerspacing",
+ --
+ "closeordspacing", "closeopspacing", "closebinspacing", "closerelspacing",
+ "closeopenspacing", "closeclosespacing", "closepunctspacing", "closeinnerspacing",
+ --
+ "punctordspacing", "punctopspacing", "punctbinspacing", "punctrelspacing",
+ "punctopenspacing", "punctclosespacing", "punctpunctspacing", "punctinnerspacing",
+ --
+ "innerordspacing", "inneropspacing", "innerbinspacing", "innerrelspacing",
+ "inneropenspacing", "innerclosespacing", "innerpunctspacing", "innerinnerspacing",
+ --
+ "normalreqno",
+ --
+ "startimath", "stopimath", "normalstartimath", "normalstopimath",
+ "startdmath", "stopdmath", "normalstartdmath", "normalstopdmath",
+ --
+ "uncramped", "cramped", "triggermathstyle", "mathstylefont", "mathsmallstylefont", "mathstyleface", "mathsmallstyleface", "mathstylecommand", "mathpalette",
+ "mathstylehbox", "mathstylevbox", "mathstylevcenter", "mathstylevcenteredhbox", "mathstylevcenteredvbox",
+ "mathtext", "setmathsmalltextbox", "setmathtextbox",
+ --
+ "triggerdisplaystyle", "triggertextstyle", "triggerscriptstyle", "triggerscriptscriptstyle",
+ "triggeruncrampedstyle", "triggercrampedstyle",
+ "triggersmallstyle", "triggeruncrampedsmallstyle", "triggercrampedsmallstyle",
+ "triggerbigstyle", "triggeruncrampedbigstyle", "triggercrampedbigstyle",
+ --
+ "luaexpr", "expdoifelse", "expdoif", "expdoifnot", "expdoifcommonelse", "expdoifinsetelse",
+ --
+ "ctxdirectlua", "ctxlatelua", "ctxsprint", "ctxwrite", "ctxcommand", "ctxdirectcommand", "ctxlatecommand", "ctxreport",
+ "ctxlua", "luacode", "lateluacode", "directluacode",
+ "registerctxluafile", "ctxloadluafile",
+ "luaversion", "luamajorversion", "luaminorversion",
+ "ctxluacode", "luaconditional", "luaexpanded",
+ "startluaparameterset", "stopluaparameterset", "luaparameterset",
+ "definenamedlua",
+ "obeylualines", "obeyluatokens",
+ "startluacode", "stopluacode", "startlua", "stoplua",
+ --
+ "carryoverpar",
+ --
+ "Umathbotaccent",
+ }
+}
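+
+-- a minimal sketch of a possible consumer: a lexer could flatten these lists
+-- into one lookup set (the loading call is hypothetical, whatever mechanism
+-- the highlighter actually uses to get at this table):
+--
+-- local groups = dofile("mult-low.lua")
+-- local known  = { }
+-- for _, list in next, groups do
+--     for i=1,#list do
+--         known[list[i]] = true
+--     end
+-- end
+-- -- known["startluacode"] --> true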
diff --git a/tex/context/base/mult-mps.lua b/tex/context/base/mult-mps.lua
index 59411cd97..f599111e8 100644
--- a/tex/context/base/mult-mps.lua
+++ b/tex/context/base/mult-mps.lua
@@ -1,115 +1,115 @@
-return {
- tex = {
- "btex", "etex", "verbatimtex",
- },
- shortcuts = {
- "..", "...", "--", "---", "&",
- },
- primitives = { -- to be checked
- "charcode", "day", "linecap", "linejoin", "miterlimit", "month", "pausing",
- "prologues", "showstopping", "time", "tracingcapsules", "tracingchoices", "mpprocset",
- "tracingcommands", "tracingequations", "tracinglostchars",
- "tracingmacros", "tracingonline", "tracingoutput", "tracingrestores",
- "tracingspecs", "tracingstats", "tracingtitles", "truecorners",
- "warningcheck", "year",
- "false", "nullpicture", "pencircle", "true",
- "and", "angle", "arclength", "arctime", "ASCII", "boolean", "bot",
- "char", "color", "cosd", "cycle", "decimal", "directiontime", "floor", "fontsize",
- "hex", "infont", "intersectiontimes", "known", "length", "llcorner",
- "lrcorner", "makepath", "makepen", "mexp", "mlog", "normaldeviate", "not",
- "numeric", "oct", "odd", "or", "path", "pair", "pen", "penoffset", "picture", "point",
- "postcontrol", "precontrol", "reverse", "rotated", "scaled",
- "shifted", "sind", "slanted", "sqrt", "str", "string", "subpath", "substring",
- "transform", "transformed", "ulcorner", "uniformdeviate", "unknown",
- "urcorner", "xpart", "xscaled", "xxpart", "xypart", "ypart", "yscaled", "yxpart",
- "yypart", "zscaled",
- "addto", "clip", "input", "interim", "let", "newinternal", "save", "setbounds",
- "shipout", "show", "showdependencies", "showtoken", "showvariable",
- "special",
- "begingroup", "endgroup", "of", "curl", "tension", "and", "controls",
- "interpath", "on", "off",
- "def", "vardef", "enddef", "expr", "suffix", "text", "primary", "secondary",
- "tertiary", "primarydef", "secondarydef", "tertiarydef",
- "randomseed", "also", "contour", "doublepath",
- "withcolor", "withpen", "dashed", "if", "else", "elseif", "fi", "for", "endfor", "forever", "exitif", "within",
- "forsuffixes", "downto", "upto", "step", "until",
- "charlist", "extensible", "fontdimen", "headerbyte", "kern", "ligtable",
- "boundarychar", "chardp", "charext", "charht", "charic", "charwd", "designsize",
- "fontmaking", "charexists",
- "cullit", "currenttransform", "gfcorners", "grayfont", "hround",
- "imagerules", "lowres_fix", "nodisplays", "notransforms", "openit",
- "displaying", "currentwindow", "screen_rows", "screen_cols",
- "pixels_per_inch", "cull", "display", "openwindow", "numspecial",
- "totalweight", "autorounding", "fillin", "proofing", "tracingpens",
- "xoffset", "chardx", "granularity", "smoothing", "turningcheck", "yoffset",
- "chardy", "hppp", "tracingedges", "vppp",
- "extra_beginfig", "extra_endfig", "mpxbreak",
- "endinput",
- "message", "delimiters", "turningnumber", "errmessage",
- "readstring", "scantokens", "end", "outer", "inner", "write", "to", "readfrom",
- "withprescript", "withpostscript",
- "top", "bot", "lft", "rt", "ulft", "urt", "llft", "lrt",
- --
- "redpart", "greenpart", "bluepart", "cyanpart", "magentapart", "yellowpart", "blackpart", "greypart",
- "rgbcolor", "cmykcolor", "greycolor", "graycolor",
- "colormodel", "graypart",
- "dashpart", "penpart",
--- "colorpart",
- "stroked", "filled", "textual", "clipped", "bounded",
- "expandafter",
- },
- commands = {
- "beginfig", "endfig",
- "rotatedaround", "reflectedabout",
- "arrowhead",
- "currentpen", "currentpicture", "cuttings",
- "defaultfont", "extra_beginfig", "extra_endfig",
- "ditto", "EOF", "down",
- "evenly", "fullcircle", "halfcircle", "identity", "in", "left",
- "origin", "pensquare", "quartercircle", "right",
- "unitsquare", "up", "withdots",
- "abs", "bbox", "ceiling", "center", "cutafter", "cutbefore", "dir",
- "directionpoint", "div", "dotprod", "intersectionpoint", "inverse", "mod",
- "round", "unitvector", "whatever",
- "cutdraw", "draw", "drawarrow", "drawdblarrow", "fill", "filldraw", "drawdot",
- "loggingall", "interact", "tracingall", "tracingnone",
- "pickup",
- "undraw", "unfill", "unfilldraw",
- "buildcycle", "dashpattern", "decr", "dotlabel", "dotlabels", "drawoptions",
- "incr", "label", "labels", "max", "min", "thelabel", "z",
- "beginchar", "blacker", "capsule_end", "change_width",
- "define_blacker_pixels", "define_corrected_pixels",
- "define_good_x_pixels", "define_good_y_pixels",
- "define_horizontal_corrected_pixels", "define_pixels",
- "define_whole_blacker_pixels", "define_whole_pixels",
- "define_whole_vertical_blacker_pixels",
- "define_whole_vertical_pixels", "endchar", "extra_beginchar",
- "extra_endchar", "extra_setup", "font_coding_scheme",
- "clearxy", "clearit", "clearpen", "shipit",
- "font_extra_space",
- "exitunless",
- "relax", "hide", "gobble", "gobbled", "stop",
- "blankpicture",
- "counterclockwise", "tensepath", "takepower", "direction",
- "softjoin", -- "magstep",
- "makelabel", -- "laboff",
- "rotatedabout", "flex", "superellipse", "erase", "image",
- "nullpen", "savepen", "clearpen", "penpos", "penlabels", -- "clear_pen_memory",
- "range", "numtok", "thru",
- "z", "laboff",
- "bye",
- --
- "red", "green", "blue", "cyan", "magenta", "yellow", "black", "white", "background",
- "graypart", "graycolor",
- --
- "mm", "pt", "dd", "bp", "cm", "pc", "cc", "in",
- },
- internals = { -- we need to remove duplicates above
- --
- "mitered", "rounded", "beveled", "butt", "squared",
- "eps", "epsilon", "infinity",
- "bboxmargin", "ahlength", "ahangle", "labeloffset", "dotlabeldiam", "defaultpen", "defaultscale", "join_radius",
- --
- "pen_lft", "pen_rt", "pen_top", "pen_bot", -- "pen_count_",
- },
-}
+return {
+ tex = {
+ "btex", "etex", "verbatimtex",
+ },
+ shortcuts = {
+ "..", "...", "--", "---", "&",
+ },
+ primitives = { -- to be checked
+ "charcode", "day", "linecap", "linejoin", "miterlimit", "month", "pausing",
+ "prologues", "showstopping", "time", "tracingcapsules", "tracingchoices", "mpprocset",
+ "tracingcommands", "tracingequations", "tracinglostchars",
+ "tracingmacros", "tracingonline", "tracingoutput", "tracingrestores",
+ "tracingspecs", "tracingstats", "tracingtitles", "truecorners",
+ "warningcheck", "year",
+ "false", "nullpicture", "pencircle", "true",
+ "and", "angle", "arclength", "arctime", "ASCII", "boolean", "bot",
+ "char", "color", "cosd", "cycle", "decimal", "directiontime", "floor", "fontsize",
+ "hex", "infont", "intersectiontimes", "known", "length", "llcorner",
+ "lrcorner", "makepath", "makepen", "mexp", "mlog", "normaldeviate", "not",
+ "numeric", "oct", "odd", "or", "path", "pair", "pen", "penoffset", "picture", "point",
+ "postcontrol", "precontrol", "reverse", "rotated", "scaled",
+ "shifted", "sind", "slanted", "sqrt", "str", "string", "subpath", "substring",
+ "transform", "transformed", "ulcorner", "uniformdeviate", "unknown",
+ "urcorner", "xpart", "xscaled", "xxpart", "xypart", "ypart", "yscaled", "yxpart",
+ "yypart", "zscaled",
+ "addto", "clip", "input", "interim", "let", "newinternal", "save", "setbounds",
+ "shipout", "show", "showdependencies", "showtoken", "showvariable",
+ "special",
+ "begingroup", "endgroup", "of", "curl", "tension", "and", "controls",
+ "interpath", "on", "off",
+ "def", "vardef", "enddef", "expr", "suffix", "text", "primary", "secondary",
+ "tertiary", "primarydef", "secondarydef", "tertiarydef",
+ "randomseed", "also", "contour", "doublepath",
+ "withcolor", "withpen", "dashed", "if", "else", "elseif", "fi", "for", "endfor", "forever", "exitif", "within",
+ "forsuffixes", "downto", "upto", "step", "until",
+ "charlist", "extensible", "fontdimen", "headerbyte", "kern", "ligtable",
+ "boundarychar", "chardp", "charext", "charht", "charic", "charwd", "designsize",
+ "fontmaking", "charexists",
+ "cullit", "currenttransform", "gfcorners", "grayfont", "hround",
+ "imagerules", "lowres_fix", "nodisplays", "notransforms", "openit",
+ "displaying", "currentwindow", "screen_rows", "screen_cols",
+ "pixels_per_inch", "cull", "display", "openwindow", "numspecial",
+ "totalweight", "autorounding", "fillin", "proofing", "tracingpens",
+ "xoffset", "chardx", "granularity", "smoothing", "turningcheck", "yoffset",
+ "chardy", "hppp", "tracingedges", "vppp",
+ "extra_beginfig", "extra_endfig", "mpxbreak",
+ "endinput",
+ "message", "delimiters", "turningnumber", "errmessage",
+ "readstring", "scantokens", "end", "outer", "inner", "write", "to", "readfrom",
+ "withprescript", "withpostscript",
+ "top", "bot", "lft", "rt", "ulft", "urt", "llft", "lrt",
+ --
+ "redpart", "greenpart", "bluepart", "cyanpart", "magentapart", "yellowpart", "blackpart", "greypart",
+ "rgbcolor", "cmykcolor", "greycolor", "graycolor",
+ "colormodel", "graypart",
+ "dashpart", "penpart",
+-- "colorpart",
+ "stroked", "filled", "textual", "clipped", "bounded",
+ "expandafter",
+ },
+ commands = {
+ "beginfig", "endfig",
+ "rotatedaround", "reflectedabout",
+ "arrowhead",
+ "currentpen", "currentpicture", "cuttings",
+ "defaultfont", "extra_beginfig", "extra_endfig",
+ "ditto", "EOF", "down",
+ "evenly", "fullcircle", "halfcircle", "identity", "in", "left",
+ "origin", "pensquare", "quartercircle", "right",
+ "unitsquare", "up", "withdots",
+ "abs", "bbox", "ceiling", "center", "cutafter", "cutbefore", "dir",
+ "directionpoint", "div", "dotprod", "intersectionpoint", "inverse", "mod",
+ "round", "unitvector", "whatever",
+ "cutdraw", "draw", "drawarrow", "drawdblarrow", "fill", "filldraw", "drawdot",
+ "loggingall", "interact", "tracingall", "tracingnone",
+ "pickup",
+ "undraw", "unfill", "unfilldraw",
+ "buildcycle", "dashpattern", "decr", "dotlabel", "dotlabels", "drawoptions",
+ "incr", "label", "labels", "max", "min", "thelabel", "z",
+ "beginchar", "blacker", "capsule_end", "change_width",
+ "define_blacker_pixels", "define_corrected_pixels",
+ "define_good_x_pixels", "define_good_y_pixels",
+ "define_horizontal_corrected_pixels", "define_pixels",
+ "define_whole_blacker_pixels", "define_whole_pixels",
+ "define_whole_vertical_blacker_pixels",
+ "define_whole_vertical_pixels", "endchar", "extra_beginchar",
+ "extra_endchar", "extra_setup", "font_coding_scheme",
+ "clearxy", "clearit", "clearpen", "shipit",
+ "font_extra_space",
+ "exitunless",
+ "relax", "hide", "gobble", "gobbled", "stop",
+ "blankpicture",
+ "counterclockwise", "tensepath", "takepower", "direction",
+ "softjoin", -- "magstep",
+ "makelabel", -- "laboff",
+ "rotatedabout", "flex", "superellipse", "erase", "image",
+ "nullpen", "savepen", "clearpen", "penpos", "penlabels", -- "clear_pen_memory",
+ "range", "numtok", "thru",
+ "z", "laboff",
+ "bye",
+ --
+ "red", "green", "blue", "cyan", "magenta", "yellow", "black", "white", "background",
+ "graypart", "graycolor",
+ --
+ "mm", "pt", "dd", "bp", "cm", "pc", "cc", "in",
+ },
+ internals = { -- we need to remove duplicates above
+ --
+ "mitered", "rounded", "beveled", "butt", "squared",
+ "eps", "epsilon", "infinity",
+ "bboxmargin", "ahlength", "ahangle", "labeloffset", "dotlabeldiam", "defaultpen", "defaultscale", "join_radius",
+ --
+ "pen_lft", "pen_rt", "pen_top", "pen_bot", -- "pen_count_",
+ },
+}
diff --git a/tex/context/base/node-acc.lua b/tex/context/base/node-acc.lua
index 4380ec3a4..c2675b970 100644
--- a/tex/context/base/node-acc.lua
+++ b/tex/context/base/node-acc.lua
@@ -1,140 +1,140 @@
-if not modules then modules = { } end modules ['node-acc'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local nodes, node = nodes, node
-
-local nodecodes = nodes.nodecodes
-local tasks = nodes.tasks
-
-local traverse_nodes = node.traverse
-local traverse_id = node.traverse_id
-local copy_node = node.copy
-local free_nodelist = node.flush_list
-
-local glue_code = nodecodes.glue
-local kern_code = nodecodes.kern
-local glyph_code = nodecodes.glyph
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-
-local a_characters = attributes.private("characters")
-
-local threshold = 65536
-
--- todo: nbsp etc
--- todo: collapse kerns
-
-local function injectspaces(head)
- local p
- local n = head
- while n do
- local id = n.id
- if id == glue_code then -- todo: check for subtype related to spacing (13/14 but most seems to be 0)
---~ if n.spec.width > 0 then -- threshold
- if p and p.id == glyph_code then
- local g = copy_node(p)
- local c = g.components
- if c then -- it happens that we copied a ligature
- free_nodelist(c)
- g.components = nil
- g.subtype = 256
- end
- local a = n[a_characters]
- local s = copy_node(n.spec)
- g.char, n.spec = 32, s
- p.next, g.prev = g, p
- g.next, n.prev = n, g
- s.width = s.width - g.width
- if a then
- g[a_characters] = a
- end
- s[a_characters] = 0
- n[a_characters] = 0
- end
---~ end
- elseif id == hlist_code or id == vlist_code then
- injectspaces(n.list,attribute)
- -- elseif id == kern_code then -- the backend already collapses
- -- local first = n
- -- while true do
- -- local nn = n.next
- -- if nn and nn.id == kern_code then
- -- -- maybe we should delete kerns but who cares at this stage
- -- first.kern = first.kern + nn.kern
- -- nn.kern = 0
- -- n = nn
- -- else
- -- break
- -- end
- -- end
- end
- p = n
- n = n.next
- end
- return head, true
-end
-
-nodes.handlers.accessibility = injectspaces
-
--- todo:
-
---~ local a_hyphenated = attributes.private('hyphenated')
---~
---~ local hyphenated, codes = { }, { }
---~
---~ local function compact(n)
---~ local t = { }
---~ for n in traverse_id(glyph_code,n) do
---~ t[#t+1] = utfchar(n.char) -- check for unicode
---~ end
---~ return concat(t,"")
---~ end
---~
---~ local function injectspans(head)
---~ for n in traverse_nodes(head) do
---~ local id = n.id
---~ if id == disc then
---~ local r, p = n.replace, n.pre
---~ if r and p then
---~ local str = compact(r)
---~ local hsh = hyphenated[str]
---~ if not hsh then
---~ hsh = #codes + 1
---~ hyphenated[str] = hsh
---~ codes[hsh] = str
---~ end
---~ n[a_hyphenated] = hsh
---~ end
---~ elseif id == hlist_code or id == vlist_code then
---~ injectspans(n.list)
---~ end
---~ end
---~ return head, true
---~ end
---~
---~ nodes.injectspans = injectspans
---~
---~ tasks.appendaction("processors", "words", "nodes.injectspans")
---~
---~ local function injectspans(head)
---~ for n in traverse_nodes(head) do
---~ local id = n.id
---~ if id == disc then
---~ local a = n[a_hyphenated]
---~ if a then
---~ local str = codes[a]
---~ local b = new_pdfliteral(format("/Span << /ActualText %s >> BDC", lpdf.tosixteen(str)))
---~ local e = new_pdfliteral("EMC")
---~ node.insert_before(head,n,b)
---~ node.insert_after(head,n,e)
---~ end
---~ elseif id == hlist_code or id == vlist_code then
---~ injectspans(n.list)
---~ end
---~ end
---~ end
+if not modules then modules = { } end modules ['node-acc'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local nodes, node = nodes, node
+
+local nodecodes = nodes.nodecodes
+local tasks = nodes.tasks
+
+local traverse_nodes = node.traverse
+local traverse_id = node.traverse_id
+local copy_node = node.copy
+local free_nodelist = node.flush_list
+
+local glue_code = nodecodes.glue
+local kern_code = nodecodes.kern
+local glyph_code = nodecodes.glyph
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+
+local a_characters = attributes.private("characters")
+
+local threshold = 65536
+
+-- todo: nbsp etc
+-- todo: collapse kerns
+
+local function injectspaces(head)
+ local p
+ local n = head
+ while n do
+ local id = n.id
+ if id == glue_code then -- todo: check for subtype related to spacing (13/14 but most seem to be 0)
+--~ if n.spec.width > 0 then -- threshold
+ if p and p.id == glyph_code then
+ local g = copy_node(p)
+ local c = g.components
+ if c then -- it happens that we copied a ligature
+ free_nodelist(c)
+ g.components = nil
+ g.subtype = 256
+ end
+ local a = n[a_characters]
+ local s = copy_node(n.spec)
+ g.char, n.spec = 32, s
+ p.next, g.prev = g, p
+ g.next, n.prev = n, g
+ s.width = s.width - g.width
+ if a then
+ g[a_characters] = a
+ end
+ s[a_characters] = 0
+ n[a_characters] = 0
+ end
+--~ end
+ elseif id == hlist_code or id == vlist_code then
+ injectspaces(n.list)
+ -- elseif id == kern_code then -- the backend already collapses
+ -- local first = n
+ -- while true do
+ -- local nn = n.next
+ -- if nn and nn.id == kern_code then
+ -- -- maybe we should delete kerns but who cares at this stage
+ -- first.kern = first.kern + nn.kern
+ -- nn.kern = 0
+ -- n = nn
+ -- else
+ -- break
+ -- end
+ -- end
+ end
+ p = n
+ n = n.next
+ end
+ return head, true
+end
+
+nodes.handlers.accessibility = injectspaces
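+
+-- in short: for every glue that directly follows a glyph, the handler above
+-- clones that glyph, turns the clone into a real space character (char 32),
+-- links it between the glyph and the glue, and takes the clone's width out of
+-- the copied glue spec, presumably so that tagged output ends up with actual
+-- space characters instead of bare glue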
+
+-- todo:
+
+--~ local a_hyphenated = attributes.private('hyphenated')
+--~
+--~ local hyphenated, codes = { }, { }
+--~
+--~ local function compact(n)
+--~ local t = { }
+--~ for n in traverse_id(glyph_code,n) do
+--~ t[#t+1] = utfchar(n.char) -- check for unicode
+--~ end
+--~ return concat(t,"")
+--~ end
+--~
+--~ local function injectspans(head)
+--~ for n in traverse_nodes(head) do
+--~ local id = n.id
+--~ if id == disc then
+--~ local r, p = n.replace, n.pre
+--~ if r and p then
+--~ local str = compact(r)
+--~ local hsh = hyphenated[str]
+--~ if not hsh then
+--~ hsh = #codes + 1
+--~ hyphenated[str] = hsh
+--~ codes[hsh] = str
+--~ end
+--~ n[a_hyphenated] = hsh
+--~ end
+--~ elseif id == hlist_code or id == vlist_code then
+--~ injectspans(n.list)
+--~ end
+--~ end
+--~ return head, true
+--~ end
+--~
+--~ nodes.injectspans = injectspans
+--~
+--~ tasks.appendaction("processors", "words", "nodes.injectspans")
+--~
+--~ local function injectspans(head)
+--~ for n in traverse_nodes(head) do
+--~ local id = n.id
+--~ if id == disc then
+--~ local a = n[a_hyphenated]
+--~ if a then
+--~ local str = codes[a]
+--~ local b = new_pdfliteral(format("/Span << /ActualText %s >> BDC", lpdf.tosixteen(str)))
+--~ local e = new_pdfliteral("EMC")
+--~ node.insert_before(head,n,b)
+--~ node.insert_after(head,n,e)
+--~ end
+--~ elseif id == hlist_code or id == vlist_code then
+--~ injectspans(n.list)
+--~ end
+--~ end
+--~ end
diff --git a/tex/context/base/node-aux.lua b/tex/context/base/node-aux.lua
index e3fc7ad6f..21737a43b 100644
--- a/tex/context/base/node-aux.lua
+++ b/tex/context/base/node-aux.lua
@@ -1,389 +1,389 @@
-if not modules then modules = { } end modules ['node-aux'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- todo: n1 .. n2 : __concat metatable
-
-local type, tostring = type, tostring
-
-local nodes, node = nodes, node
-
-local utfvalues = utf.values
-
-local nodecodes = nodes.nodecodes
-
-local glyph_code = nodecodes.glyph
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local attributelist_code = nodecodes.attributelist -- temporary
-local math_code = nodecodes.math
-
-local nodepool = nodes.pool
-
-local new_glue = nodepool.glue
-local new_glyph = nodepool.glyph
-
-local traverse_nodes = node.traverse
-local traverse_id = node.traverse_id
-local free_node = node.free
-local hpack_nodes = node.hpack
-local unset_attribute = node.unset_attribute
-local first_glyph = node.first_glyph or node.first_character
-local copy_node = node.copy
-local copy_node_list = node.copy_list
-local slide_nodes = node.slide
-local insert_node_after = node.insert_after
-local isnode = node.is_node
-
-local unsetvalue = attributes.unsetvalue
-
-local current_font = font.current
-
-local texbox = tex.box
-
-local report_error = logs.reporter("node-aux:error")
-
-function nodes.repackhlist(list,...)
---~ nodes.showsimplelist(list)
- local temp, b = hpack_nodes(list,...)
- list = temp.list
- temp.list = nil
- free_node(temp)
- return list, b
-end
-
-local function set_attributes(head,attr,value)
- for n in traverse_nodes(head) do
- n[attr] = value
- local id = n.id
- if id == hlist_node or id == vlist_node then
- set_attributes(n.list,attr,value)
- end
- end
-end
-
-local function set_unset_attributes(head,attr,value)
- for n in traverse_nodes(head) do
- if not n[attr] then
- n[attr] = value
- end
- local id = n.id
- if id == hlist_code or id == vlist_code then
- set_unset_attributes(n.list,attr,value)
- end
- end
-end
-
-local function unset_attributes(head,attr)
- for n in traverse_nodes(head) do
- n[attr] = unsetvalue
- local id = n.id
- if id == hlist_code or id == vlist_code then
- unset_attributes(n.list,attr)
- end
- end
-end
-
-nodes.setattribute = node.set_attribute
-nodes.getattribute = node.has_attribute
-nodes.unsetattribute = node.unset_attribute
-nodes.has_attribute = node.has_attribute
-
-nodes.firstglyph = first_glyph
-nodes.setattributes = set_attributes
-nodes.setunsetattributes = set_unset_attributes
-nodes.unsetattributes = unset_attributes
-
--- function nodes.is_skipable(a,id) -- skipable nodes at the margins during character protrusion
--- return (
--- id ~= glyph_node
--- or id == ins_node
--- or id == mark_node
--- or id == adjust_node
--- or id == penalty_node
--- or (id == glue_node and a.spec.writable)
--- or (id == disc_node and a.pre == nil and a.post == nil and a.replace == nil)
--- or (id == math_node and a.surround == 0)
--- or (id == kern_node and (a.kern == 0 or a.subtype == NORMAL))
--- or (id == hlist_node and a.width == 0 and a.height == 0 and a.depth == 0 and a.list == nil)
--- or (id == whatsit_node and a.subtype ~= pdf_refximage_node and a.subtype ~= pdf_refxform_node)
--- )
--- end
-
--- history:
---
---
--- local function glyph_width(a)
--- local ch = chardata[a.font][a.char]
--- return (ch and ch.width) or 0
--- end
---
--- local function glyph_total(a)
--- local ch = chardata[a.font][a.char]
--- return (ch and (ch.height+ch.depth)) or 0
--- end
---
--- local function non_discardable(a) -- inline
--- return a.id < math_node -- brrrr
--- end
---
--- local function calculate_badness(t,s)
--- if t == 0 then
--- return 0
--- elseif s <= 0 then
--- return INF_BAD
--- else
--- local r
--- if t <= 7230584 then
--- r = t * 297 / s
--- elseif s >= 1663497 then
--- r = t / floor(s / 297)
--- else
--- r = t
--- end
--- r = floor(r)
--- if r > 1290 then
--- return INF_BAD
--- else
--- return floor((r * r * r + 0x20000) / 0x40000) -- 0400000 / 01000000
--- end
--- end
--- end
---
--- left-overs
---
--- local function round_xn_over_d(x, n, d)
--- local positive -- was x >= 0
--- if x >= 0 then
--- positive = true
--- else
--- x = -x
--- positive = false
--- end
--- local t = floor(x % 0x8000) * n -- 0100000
--- local f = floor(t / 0x8000) -- 0100000
--- local u = floor(x / 0x8000) * n + f -- 0100000
--- local v = floor(u % d) * 0x8000 + f -- 0100000
--- if floor(u / d) >= 0x8000 then -- 0100000
--- report_parbuilders('arith_error')
--- else
--- u = 0x8000 * floor(u / d) + floor(v / d) -- 0100000
--- end
--- v = floor(v % d)
--- if 2*v >= d then
--- u = u + 1
--- end
--- if positive then
--- return u
--- else
--- return -u
--- end
--- end
-
-function nodes.firstcharacter(n,untagged) -- tagged == subtype > 255
- if untagged then
- return first_glyph(n)
- else
- for g in traverse_id(glyph_code,n) do
- return g
- end
- end
-end
-
-function nodes.firstcharinbox(n)
- local l = texbox[n].list
- if l then
- for g in traverse_id(glyph_code,l) do
- return g.char
- end
- end
- return 0
-end
-
-if not node.end_of_math then
- function node.end_of_math(n)
- for n in traverse_id(math_code,n.next) do
- return n
- end
- end
-end
-
-nodes.endofmath = node.end_of_math
-
--- local function firstline(n)
--- while n do
--- local id = n.id
--- if id == hlist_code then
--- if n.subtype == line_code then
--- return n
--- else
--- return firstline(n.list)
--- end
--- elseif id == vlist_code then
--- return firstline(n.list)
--- end
--- n = n.next
--- end
--- end
-
--- nodes.firstline = firstline
-
--- this depends on fonts, so we have a funny dependency ... will be
--- sorted out .. we could make tonodes a plugin into this
-
-local function tonodes(str,fnt,attr) -- (str,template_glyph) -- moved from blob-ini
- if not str or str == "" then
- return
- end
- local head, tail, space, fnt, template = nil, nil, nil, nil, nil
- if not fnt then
- fnt = current_font()
- elseif type(fnt) ~= "number" and fnt.id == "glyph" then
- fnt, template = nil, fnt
- -- else
- -- already a number
- end
- for s in utfvalues(str) do
- local n
- if s == 32 then
- if space then
- n = copy_node(space)
- elseif fonts then -- depedency
- local parameters = fonts.hashes.identifiers[fnt].parameters
- space = new_glue(parameters.space,parameters.space_stretch,parameters.space_shrink)
- n = space
- end
- elseif template then
- n = copy_node(template)
- n.char = s
- else
- n = new_glyph(fnt,s)
- end
- if attr then -- normally false when template
- n.attr = copy_node_list(attr)
- end
- if head then
- insert_node_after(head,tail,n)
- else
- head = n
- end
- tail = n
- end
- return head, tail
-end
-
-nodes.tonodes = tonodes
-
-local function link(list,currentfont,currentattr,head,tail)
- for i=1,#list do
- local n = list[i]
- if n then
- local tn = isnode(n)
- if not tn then
- local tn = type(n)
- if tn == "number" then
- if not currentfont then
- currentfont = current_font()
- end
- local h, t = tonodes(tostring(n),currentfont,currentattr)
- if not h then
- -- skip
- elseif not head then
- head, tail = h, t
- else
- tail.next, h.prev, tail = h, t, t
- end
- elseif tn == "string" then
- if #tn > 0 then
- if not currentfont then
- currentfont = current_font()
- end
- local h, t = tonodes(n,currentfont,currentattr)
- if not h then
- -- skip
- elseif not head then
- head, tail = h, t
- else
- tail.next, h.prev, tail = h, t, t
- end
- end
- elseif tn == "table" then
- if #tn > 0 then
- if not currentfont then
- currentfont = current_font()
- end
- head, tail = link(n,currentfont,currentattr,head,tail)
- end
- end
- elseif not head then
- head = n
- if n.next then
- tail = slide_nodes(n)
- else
- tail = n
- end
- elseif n.id == attributelist_code then
- -- weird case
- report_error("weird node type in list at index %s:",i)
- for i=1,#list do
- local l = list[i]
- report_error("%3i: %s %S",i,l.id == attributelist_code and "!" or ">",l)
- end
- os.exit()
- else
- tail.next = n
- n.prev = tail
- if n.next then
- tail = slide_nodes(n)
- else
- tail = n
- end
- end
- else
- -- permitting nil is convenient
- end
- end
- return head, tail
-end
-
-nodes.link = link
-
-local function locate(start,wantedid,wantedsubtype)
- for n in traverse_nodes(start) do
- local id = n.id
- if id == wantedid then
- if not wantedsubtype or n.subtype == wantedsubtype then
- return n
- end
- elseif id == hlist_code or id == vlist_code then
- local found = locate(n.list,wantedid,wantedsubtype)
- if found then
- return found
- end
- end
- end
-end
-
-nodes.locate = locate
-
-function nodes.concat(list)
- local head, tail
- for i=1,#list do
- local li = list[i]
- if not li then
- -- skip
- elseif head then
- tail.next = li
- li.prev = tail
- tail = li.next and slide_nodes(li) or li
- else
- head = li
- tail = li.next and slide_nodes(li) or li
- end
- end
- return head, tail
-end
+if not modules then modules = { } end modules ['node-aux'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- todo: n1 .. n2 : __concat metatable
+
+local type, tostring = type, tostring
+
+local nodes, node = nodes, node
+
+local utfvalues = utf.values
+
+local nodecodes = nodes.nodecodes
+
+local glyph_code = nodecodes.glyph
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local attributelist_code = nodecodes.attributelist -- temporary
+local math_code = nodecodes.math
+
+local nodepool = nodes.pool
+
+local new_glue = nodepool.glue
+local new_glyph = nodepool.glyph
+
+local traverse_nodes = node.traverse
+local traverse_id = node.traverse_id
+local free_node = node.free
+local hpack_nodes = node.hpack
+local unset_attribute = node.unset_attribute
+local first_glyph = node.first_glyph or node.first_character
+local copy_node = node.copy
+local copy_node_list = node.copy_list
+local slide_nodes = node.slide
+local insert_node_after = node.insert_after
+local isnode = node.is_node
+
+local unsetvalue = attributes.unsetvalue
+
+local current_font = font.current
+
+local texbox = tex.box
+
+local report_error = logs.reporter("node-aux:error")
+
+function nodes.repackhlist(list,...)
+--~ nodes.showsimplelist(list)
+ local temp, b = hpack_nodes(list,...)
+ list = temp.list
+ temp.list = nil
+ free_node(temp)
+ return list, b
+end
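+
+-- A minimal usage sketch, kept as a comment and not part of the module: the
+-- point of repackhlist is to rerun hpack over a list and hand back the list
+-- together with the second value reported by node.hpack (the badness), while
+-- the temporary wrapper hlist is freed. The list variable below is hypothetical.
+--
+-- local list, badness = nodes.repackhlist(somelist,tex.hsize,"exactly")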
+
+local function set_attributes(head,attr,value)
+ for n in traverse_nodes(head) do
+ n[attr] = value
+ local id = n.id
+ if id == hlist_code or id == vlist_code then
+ set_attributes(n.list,attr,value)
+ end
+ end
+end
+
+local function set_unset_attributes(head,attr,value)
+ for n in traverse_nodes(head) do
+ if not n[attr] then
+ n[attr] = value
+ end
+ local id = n.id
+ if id == hlist_code or id == vlist_code then
+ set_unset_attributes(n.list,attr,value)
+ end
+ end
+end
+
+local function unset_attributes(head,attr)
+ for n in traverse_nodes(head) do
+ n[attr] = unsetvalue
+ local id = n.id
+ if id == hlist_code or id == vlist_code then
+ unset_attributes(n.list,attr)
+ end
+ end
+end
+
+nodes.setattribute = node.set_attribute
+nodes.getattribute = node.has_attribute
+nodes.unsetattribute = node.unset_attribute
+nodes.has_attribute = node.has_attribute
+
+nodes.firstglyph = first_glyph
+nodes.setattributes = set_attributes
+nodes.setunsetattributes = set_unset_attributes
+nodes.unsetattributes = unset_attributes
+
+-- function nodes.is_skipable(a,id) -- skipable nodes at the margins during character protrusion
+-- return (
+-- id ~= glyph_node
+-- or id == ins_node
+-- or id == mark_node
+-- or id == adjust_node
+-- or id == penalty_node
+-- or (id == glue_node and a.spec.writable)
+-- or (id == disc_node and a.pre == nil and a.post == nil and a.replace == nil)
+-- or (id == math_node and a.surround == 0)
+-- or (id == kern_node and (a.kern == 0 or a.subtype == NORMAL))
+-- or (id == hlist_node and a.width == 0 and a.height == 0 and a.depth == 0 and a.list == nil)
+-- or (id == whatsit_node and a.subtype ~= pdf_refximage_node and a.subtype ~= pdf_refxform_node)
+-- )
+-- end
+
+-- history:
+--
+--
+-- local function glyph_width(a)
+-- local ch = chardata[a.font][a.char]
+-- return (ch and ch.width) or 0
+-- end
+--
+-- local function glyph_total(a)
+-- local ch = chardata[a.font][a.char]
+-- return (ch and (ch.height+ch.depth)) or 0
+-- end
+--
+-- local function non_discardable(a) -- inline
+-- return a.id < math_node -- brrrr
+-- end
+--
+-- local function calculate_badness(t,s)
+-- if t == 0 then
+-- return 0
+-- elseif s <= 0 then
+-- return INF_BAD
+-- else
+-- local r
+-- if t <= 7230584 then
+-- r = t * 297 / s
+-- elseif s >= 1663497 then
+-- r = t / floor(s / 297)
+-- else
+-- r = t
+-- end
+-- r = floor(r)
+-- if r > 1290 then
+-- return INF_BAD
+-- else
+-- return floor((r * r * r + 0x20000) / 0x40000) -- 0400000 / 01000000
+-- end
+-- end
+-- end
+--
+-- left-overs
+--
+-- local function round_xn_over_d(x, n, d)
+-- local positive -- was x >= 0
+-- if x >= 0 then
+-- positive = true
+-- else
+-- x = -x
+-- positive = false
+-- end
+-- local t = floor(x % 0x8000) * n -- 0100000
+-- local f = floor(t / 0x8000) -- 0100000
+-- local u = floor(x / 0x8000) * n + f -- 0100000
+-- local v = floor(u % d) * 0x8000 + f -- 0100000
+-- if floor(u / d) >= 0x8000 then -- 0100000
+-- report_parbuilders('arith_error')
+-- else
+-- u = 0x8000 * floor(u / d) + floor(v / d) -- 0100000
+-- end
+-- v = floor(v % d)
+-- if 2*v >= d then
+-- u = u + 1
+-- end
+-- if positive then
+-- return u
+-- else
+-- return -u
+-- end
+-- end
+
+function nodes.firstcharacter(n,untagged) -- tagged == subtype > 255
+ if untagged then
+ return first_glyph(n)
+ else
+ for g in traverse_id(glyph_code,n) do
+ return g
+ end
+ end
+end
+
+function nodes.firstcharinbox(n)
+ local l = texbox[n].list
+ if l then
+ for g in traverse_id(glyph_code,l) do
+ return g.char
+ end
+ end
+ return 0
+end
+
+if not node.end_of_math then
+ function node.end_of_math(n)
+ for n in traverse_id(math_code,n.next) do
+ return n
+ end
+ end
+end
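+
+-- The fallback above is only installed when the engine predates the built-in
+-- node.end_of_math helper: it returns the first math node found after n, which
+-- for a plain (non nested) formula is the closing math node.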
+
+nodes.endofmath = node.end_of_math
+
+-- local function firstline(n)
+-- while n do
+-- local id = n.id
+-- if id == hlist_code then
+-- if n.subtype == line_code then
+-- return n
+-- else
+-- return firstline(n.list)
+-- end
+-- elseif id == vlist_code then
+-- return firstline(n.list)
+-- end
+-- n = n.next
+-- end
+-- end
+
+-- nodes.firstline = firstline
+
+-- this depends on fonts, so we have a funny dependency ... will be
+-- sorted out .. we could make tonodes a plugin into this
+
+local function tonodes(str,fnt,attr) -- (str,template_glyph) -- moved from blob-ini
+ if not str or str == "" then
+ return
+ end
+ local head, tail, space, fnt, template = nil, nil, nil, fnt, nil -- keep the passed fnt; it used to be wiped out by a nil here
+ if not fnt then
+ fnt = current_font()
+ elseif type(fnt) ~= "number" and fnt.id == "glyph" then
+ fnt, template = nil, fnt
+ -- else
+ -- already a number
+ end
+ for s in utfvalues(str) do
+ local n
+ if s == 32 then
+ if space then
+ n = copy_node(space)
+ elseif fonts then -- dependency
+ local parameters = fonts.hashes.identifiers[fnt].parameters
+ space = new_glue(parameters.space,parameters.space_stretch,parameters.space_shrink)
+ n = space
+ end
+ elseif template then
+ n = copy_node(template)
+ n.char = s
+ else
+ n = new_glyph(fnt,s)
+ end
+ if attr then -- normally false when template
+ n.attr = copy_node_list(attr)
+ end
+ if head then
+ insert_node_after(head,tail,n)
+ else
+ head = n
+ end
+ tail = n
+ end
+ return head, tail
+end
+
+nodes.tonodes = tonodes
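+
+-- Usage sketch (commented out, hypothetical values): tonodes converts a utf
+-- string into a list of glyph nodes with interword glue, using the given font
+-- id or, when none is given, the current font; it assumes that the fonts
+-- subsystem is loaded for the space parameters (the dependency mentioned above).
+--
+-- local head, tail = nodes.tonodes("some text",font.current())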
+
+local function link(list,currentfont,currentattr,head,tail)
+ for i=1,#list do
+ local n = list[i]
+ if n then
+ local tn = isnode(n)
+ if not tn then
+ local tn = type(n)
+ if tn == "number" then
+ if not currentfont then
+ currentfont = current_font()
+ end
+ local h, t = tonodes(tostring(n),currentfont,currentattr)
+ if not h then
+ -- skip
+ elseif not head then
+ head, tail = h, t
+ else
+ tail.next, h.prev, tail = h, t, t
+ end
+ elseif tn == "string" then
+ if #n > 0 then
+ if not currentfont then
+ currentfont = current_font()
+ end
+ local h, t = tonodes(n,currentfont,currentattr)
+ if not h then
+ -- skip
+ elseif not head then
+ head, tail = h, t
+ else
+ tail.next, h.prev, tail = h, t, t
+ end
+ end
+ elseif tn == "table" then
+ if #n > 0 then
+ if not currentfont then
+ currentfont = current_font()
+ end
+ head, tail = link(n,currentfont,currentattr,head,tail)
+ end
+ end
+ elseif not head then
+ head = n
+ if n.next then
+ tail = slide_nodes(n)
+ else
+ tail = n
+ end
+ elseif n.id == attributelist_code then
+ -- weird case
+ report_error("weird node type in list at index %s:",i)
+ for i=1,#list do
+ local l = list[i]
+ report_error("%3i: %s %S",i,l.id == attributelist_code and "!" or ">",l)
+ end
+ os.exit()
+ else
+ tail.next = n
+ n.prev = tail
+ if n.next then
+ tail = slide_nodes(n)
+ else
+ tail = n
+ end
+ end
+ else
+ -- permitting nil is convenient
+ end
+ end
+ return head, tail
+end
+
+nodes.link = link
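+
+-- Usage sketch (commented out, the node variables are hypothetical): link
+-- chains a mixed table of nodes, numbers, strings and nested tables into one
+-- list, converting non-nodes with tonodes and silently skipping nil entries.
+--
+-- local head, tail = nodes.link { somenode, " = ", 123, { othernode } }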
+
+local function locate(start,wantedid,wantedsubtype)
+ for n in traverse_nodes(start) do
+ local id = n.id
+ if id == wantedid then
+ if not wantedsubtype or n.subtype == wantedsubtype then
+ return n
+ end
+ elseif id == hlist_code or id == vlist_code then
+ local found = locate(n.list,wantedid,wantedsubtype)
+ if found then
+ return found
+ end
+ end
+ end
+end
+
+nodes.locate = locate
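+
+-- Usage sketch (commented out, box 0 is just an example): locate returns the
+-- first node with the wanted id (and optionally subtype), descending into
+-- hlists and vlists along the way.
+--
+-- local firstglyph = nodes.locate(tex.box[0].list,nodecodes.glyph)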
+
+function nodes.concat(list)
+ local head, tail
+ for i=1,#list do
+ local li = list[i]
+ if not li then
+ -- skip
+ elseif head then
+ tail.next = li
+ li.prev = tail
+ tail = li.next and slide_nodes(li) or li
+ else
+ head = li
+ tail = li.next and slide_nodes(li) or li
+ end
+ end
+ return head, tail
+end
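+
+-- Usage sketch (commented out, hypothetical lists): concat is the simpler
+-- sibling of link; it only accepts node lists (nil entries are skipped) and
+-- ties them together, returning the new head and tail.
+--
+-- local head, tail = nodes.concat { listone, listtwo, listthree }
+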
diff --git a/tex/context/base/node-bck.lua b/tex/context/base/node-bck.lua
index feaa2c684..44fed5e17 100644
--- a/tex/context/base/node-bck.lua
+++ b/tex/context/base/node-bck.lua
@@ -1,161 +1,161 @@
-if not modules then modules = { } end modules ['node-bck'] = {
- version = 1.001,
- comment = "companion to node-bck.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- beware, this one takes quite some runtime, so we need a status flag
--- maybe some page related state
-
-local attributes, nodes, node = attributes, nodes, node
-
-local nodecodes = nodes.nodecodes
-local listcodes = nodes.listcodes
-
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local glyph_code = nodecodes.glyph
-local cell_code = listcodes.cell
-
-local traverse = node.traverse
-local traverse_id = node.traverse_id
-
-local nodepool = nodes.pool
-local tasks = nodes.tasks
-
-local new_rule = nodepool.rule
-local new_glue = nodepool.glue
-
-local a_color = attributes.private('color')
-local a_transparency = attributes.private('transparency')
-local a_colorspace = attributes.private('colormodel')
-local a_background = attributes.private('background')
-local a_alignbackground = attributes.private('alignbackground')
-
-local function add_backgrounds(head) -- rather old code .. to be redone
- local current = head
- while current do
- local id = current.id
- if id == hlist_code or id == vlist_code then
- local list = current.list
- if list then
- local head = add_backgrounds(list)
- if head then
- current.list = head
- list = head
- end
- end
- local width = current.width
- if width > 0 then
- local background = current[a_background]
- if background then
- -- direct to hbox
- -- colorspace is already set so we can omit that and stick to color
- local mode = current[a_colorspace]
- if mode then
- local height = current.height
- local depth = current.depth
- local skip = id == hlist_code and width or (height + depth)
- local glue = new_glue(-skip)
- local rule = new_rule(width,height,depth)
- local color = current[a_color]
- local transparency = current[a_transparency]
- rule[a_colorspace] = mode
- if color then
- rule[a_color] = color
- end
- if transparency then
- rule[a_transparency] = transparency
- end
- rule.next = glue
- glue.prev = rule
- if list then
- glue.next = list
- list.prev = glue
- end
- current.list = rule
- end
- end
- end
- end
- current = current.next
- end
- return head, true
-end
-
-local function add_alignbackgrounds(head)
- local current = head
- while current do
- local id = current.id
- if id == hlist_code then
- local list = current.list
- if not list then
- -- no need to look
- elseif current.subtype == cell_code then
- local background = nil
- local found = nil
- -- for l in traverse(list) do
- -- background = l[a_alignbackground]
- -- if background then
- -- found = l
- -- break
- -- end
- -- end
- -- we know that it's a fake hlist (could be user node)
- -- but we cannot store tables in user nodes yet
- for l in traverse_id(hpack_code,list) do
- background = l[a_alignbackground]
- if background then
- found = l
- end
- break
- end
- --
- if background then
- -- current has subtype 5 (cell)
- local width = current.width
- if width > 0 then
- local mode = found[a_colorspace]
- if mode then
- local glue = new_glue(-width)
- local rule = new_rule(width,current.height,current.depth)
- local color = found[a_color]
- local transparency = found[a_transparency]
- rule[a_colorspace] = mode
- if color then
- rule[a_color] = color
- end
- if transparency then
- rule[a_transparency] = transparency
- end
- rule.next = glue
- glue.prev = rule
- if list then
- glue.next = list
- list.prev = glue
- end
- current.list = rule
- end
- end
- end
- else
- add_alignbackgrounds(list)
- end
- elseif id == vlist_code then
- local list = current.list
- if list then
- add_alignbackgrounds(list)
- end
- end
- current = current.next
- end
- return head, true
-end
-
-nodes.handlers.backgrounds = add_backgrounds
-nodes.handlers.alignbackgrounds = add_alignbackgrounds
-
-tasks.appendaction("shipouts","normalizers","nodes.handlers.backgrounds")
-tasks.appendaction("shipouts","normalizers","nodes.handlers.alignbackgrounds")
+if not modules then modules = { } end modules ['node-bck'] = {
+ version = 1.001,
+ comment = "companion to node-bck.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- beware, this one takes quite some runtime, so we need a status flag
+-- maybe some page related state
+
+local attributes, nodes, node = attributes, nodes, node
+
+local nodecodes = nodes.nodecodes
+local listcodes = nodes.listcodes
+
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local glyph_code = nodecodes.glyph
+local cell_code = listcodes.cell
+
+local traverse = node.traverse
+local traverse_id = node.traverse_id
+
+local nodepool = nodes.pool
+local tasks = nodes.tasks
+
+local new_rule = nodepool.rule
+local new_glue = nodepool.glue
+
+local a_color = attributes.private('color')
+local a_transparency = attributes.private('transparency')
+local a_colorspace = attributes.private('colormodel')
+local a_background = attributes.private('background')
+local a_alignbackground = attributes.private('alignbackground')
+
+local function add_backgrounds(head) -- rather old code .. to be redone
+ local current = head
+ while current do
+ local id = current.id
+ if id == hlist_code or id == vlist_code then
+ local list = current.list
+ if list then
+ local head = add_backgrounds(list)
+ if head then
+ current.list = head
+ list = head
+ end
+ end
+ local width = current.width
+ if width > 0 then
+ local background = current[a_background]
+ if background then
+ -- direct to hbox
+ -- colorspace is already set so we can omit that and stick to color
+ local mode = current[a_colorspace]
+ if mode then
+ local height = current.height
+ local depth = current.depth
+ local skip = id == hlist_code and width or (height + depth)
+ local glue = new_glue(-skip)
+ local rule = new_rule(width,height,depth)
+ local color = current[a_color]
+ local transparency = current[a_transparency]
+ rule[a_colorspace] = mode
+ if color then
+ rule[a_color] = color
+ end
+ if transparency then
+ rule[a_transparency] = transparency
+ end
+ rule.next = glue
+ glue.prev = rule
+ if list then
+ glue.next = list
+ list.prev = glue
+ end
+ current.list = rule
+ end
+ end
+ end
+ end
+ current = current.next
+ end
+ return head, true
+end
+
+local function add_alignbackgrounds(head)
+ local current = head
+ while current do
+ local id = current.id
+ if id == hlist_code then
+ local list = current.list
+ if not list then
+ -- no need to look
+ elseif current.subtype == cell_code then
+ local background = nil
+ local found = nil
+ -- for l in traverse(list) do
+ -- background = l[a_alignbackground]
+ -- if background then
+ -- found = l
+ -- break
+ -- end
+ -- end
+ -- we know that it's a fake hlist (could be user node)
+ -- but we cannot store tables in user nodes yet
+ for l in traverse_id(hlist_code,list) do -- was hpack_code, which is undefined here; the cell content is a (fake) hlist
+ background = l[a_alignbackground]
+ if background then
+ found = l
+ end
+ break
+ end
+ --
+ if background then
+ -- current has subtype 5 (cell)
+ local width = current.width
+ if width > 0 then
+ local mode = found[a_colorspace]
+ if mode then
+ local glue = new_glue(-width)
+ local rule = new_rule(width,current.height,current.depth)
+ local color = found[a_color]
+ local transparency = found[a_transparency]
+ rule[a_colorspace] = mode
+ if color then
+ rule[a_color] = color
+ end
+ if transparency then
+ rule[a_transparency] = transparency
+ end
+ rule.next = glue
+ glue.prev = rule
+ if list then
+ glue.next = list
+ list.prev = glue
+ end
+ current.list = rule
+ end
+ end
+ end
+ else
+ add_alignbackgrounds(list)
+ end
+ elseif id == vlist_code then
+ local list = current.list
+ if list then
+ add_alignbackgrounds(list)
+ end
+ end
+ current = current.next
+ end
+ return head, true
+end
+
+nodes.handlers.backgrounds = add_backgrounds
+nodes.handlers.alignbackgrounds = add_alignbackgrounds
+
+tasks.appendaction("shipouts","normalizers","nodes.handlers.backgrounds")
+tasks.appendaction("shipouts","normalizers","nodes.handlers.alignbackgrounds")
diff --git a/tex/context/base/node-dir.lua b/tex/context/base/node-dir.lua
index 6ee5cd4b8..9a1f4e30c 100644
--- a/tex/context/base/node-dir.lua
+++ b/tex/context/base/node-dir.lua
@@ -1,309 +1,309 @@
-if not modules then modules = { } end modules ['node-dir'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Taco Hoekwater and Hans Hagen",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-Serializing nodes can be handy for tracing. Also, saving and
-loading node lists can come in handy as soon we are going to
-use external applications to process node lists.
---ldx]]--
-
-function nodes.show(stack)
--- logs.writer(table.serialize(stack))
-end
-
-function nodes.save(stack,name) -- *.ltn : luatex node file
--- if name then
--- file.savedata(name,table.serialize(stack))
--- else
--- logs.writer(table.serialize(stack))
--- end
-end
-
-function nodes.load(name)
--- return file.loaddata(name)
--- -- todo
-end
+if not modules then modules = { } end modules ['node-ext'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[ldx--
+Serializing nodes can be handy for tracing. Also, saving and
+loading node lists can come in handy as soon as we are going to
+use external applications to process node lists.
+--ldx]]--
+
+function nodes.show(stack)
+-- logs.writer(table.serialize(stack))
+end
+
+function nodes.save(stack,name) -- *.ltn : luatex node file
+-- if name then
+-- file.savedata(name,table.serialize(stack))
+-- else
+-- logs.writer(table.serialize(stack))
+-- end
+end
+
+function nodes.load(name)
+-- return file.loaddata(name)
+-- -- todo
+end
diff --git a/tex/context/base/node-fin.lua b/tex/context/base/node-fin.lua
index 2e62ebcb5..e95725d29 100644
--- a/tex/context/base/node-fin.lua
+++ b/tex/context/base/node-fin.lua
@@ -1,1222 +1,1222 @@
-if not modules then modules = { } end modules ['node-fin'] = {
- version = 1.001,
- comment = "companion to node-fin.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- this module is being reconstructed
--- local functions, only slightly slower
-
-local next, type, format = next, type, string.format
-
-local attributes, nodes, node = attributes, nodes, node
-
-local copy_node = node.copy
-local find_tail = node.slide
-
-local nodecodes = nodes.nodecodes
-local whatcodes = nodes.whatcodes
-
-local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
-local glue_code = nodecodes.glue
-local rule_code = nodecodes.rule
-local whatsit_code = nodecodes.whatsit
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-
-local pdfliteral_code = whatcodes.pdfliteral
-
-local states = attributes.states
-local numbers = attributes.numbers
-local a_trigger = attributes.private('trigger')
-local triggering = false
-
-local starttiming = statistics.starttiming
-local stoptiming = statistics.stoptiming
-local loadstripped = utilities.lua.loadstripped
-local unsetvalue = attributes.unsetvalue
-
--- these two will be like trackers
-
-function states.enabletriggering()
- triggering = true
-end
-function states.disabletriggering()
- triggering = false
-end
-
--- the following code is no longer needed due to the new backend
--- but we keep it around for a while as an example
---
--- states.collected = states.collected or { }
---
--- storage.register("states/collected", states.collected, "states.collected")
---
--- local collected = states.collected
---
--- function states.collect(str)
--- collected[#collected+1] = str
--- end
---
--- function states.flush()
--- if #collected > 0 then
--- for i=1,#collected do
--- context(collected[i]) -- we're in context mode anyway
--- end
--- collected = { }
--- states.collected = collected
--- end
--- end
---
--- function states.check()
--- logs.report("states",concat(collected,"\n"))
--- end
-
--- we used to do the main processor loop here and call processor for each node
--- but eventually this was too much a slow down (1 sec on 23 for 120 pages mk)
--- so that we moved looping to the processor itself; this may lead to a bit of
--- duplicate code once that we have more state handlers
-
--- local function process_attribute(head,plugin) -- head,attribute,enabled,initializer,resolver,processor,finalizer
--- local namespace = plugin.namespace
--- if namespace.enabled ~= false then -- this test will go away
--- starttiming(attributes) -- in principle we could delegate this to the main caller
--- local done, used, ok = false, nil, false
--- local attribute = namespace.attribute or numbers[plugin.name] -- todo: plugin.attribute
--- local processor = plugin.processor
--- if processor then
--- local initializer = plugin.initializer
--- local resolver = plugin.resolver
--- local inheritance = (resolver and resolver()) or nil -- -0x7FFFFFFF -- we can best use nil and skip !
--- if initializer then
--- initializer(namespace,attribute,head)
--- end
--- head, ok = processor(namespace,attribute,head,inheritance)
--- if ok then
--- local finalizer = plugin.finalizer
--- if finalizer then
--- head, ok, used = finalizer(namespace,attribute,head)
--- if used then
--- local flusher = plugin.flusher
--- if flusher then
--- head = flusher(namespace,attribute,head,used)
--- end
--- end
--- end
--- done = true
--- end
--- end
--- stoptiming(attributes)
--- return head, done
--- else
--- return head, false
--- end
--- end
---
--- function nodes.installattributehandler(plugin) -- we need to avoid this nested function
--- return function(head)
--- return process_attribute(head,plugin)
--- end
--- end
-
--- An experiment: lean and mean functions. It is not really faster but
--- with upcoming functionality it might make a difference, e.g. features
--- like 'casing' and 'italics' can be called a lot so there it makes sense.
-
-nodes.plugindata = nil
-
-local template = [[
-local plugin = nodes.plugindata
-local starttiming = statistics.starttiming
-local stoptiming = statistics.stoptiming
-local namespace = plugin.namespace
-local attribute = namespace.attribute or attributes.numbers[plugin.name]
-local processor = plugin.processor
-local initializer = plugin.initializer
-local resolver = plugin.resolver
-local finalizer = plugin.finalizer
-local flusher = plugin.flusher
-if not processor then
- return function(head)
- return head, false
- end
-elseif initializer or finalizer or resolver then
- return function(head)
- starttiming(attributes)
- local done, used, ok = false, nil, false
- local inheritance = (resolver and resolver()) or nil -- -0x7FFFFFFF -- we can best use nil and skip !
- if initializer then
- initializer(namespace,attribute,head)
- end
- head, ok = processor(namespace,attribute,head,inheritance)
- if ok then
- if finalizer then
- head, ok, used = finalizer(namespace,attribute,head)
- if used and flusher then
- head = flusher(namespace,attribute,head,used)
- end
- end
- done = true
- end
- stoptiming(attributes)
- return head, done
- end
-else
- return function(head)
- starttiming(attributes)
- local head, done = processor(namespace,attribute,head)
- stoptiming(attributes)
- return head, done
- end
-end
-nodes.plugindata = nil
-]]
-
-function nodes.installattributehandler(plugin)
- nodes.plugindata = plugin
- return loadstripped(template)()
-end
-
--- the injectors
-
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-
-local nsdata, nsnone, nslistwise, nsforced, nsselector, nstrigger
-local current, current_selector, done = 0, 0, false -- nb, stack has a local current !
-local nsbegin, nsend
-
-function states.initialize(namespace,attribute,head)
- nsdata = namespace.data
- nsnone = namespace.none
- nsforced = namespace.forced
- nsselector = namespace.selector
- nslistwise = namespace.listwise
- nstrigger = triggering and namespace.triggering and a_trigger
- current = 0
- current_selector = 0
- done = false -- todo: done cleanup
- nsstep = namespace.resolve_step
- if nsstep then
- nsbegin = namespace.resolve_begin
- nsend = namespace.resolve_end
- nspush = namespace.push
- nspop = namespace.pop
- end
-end
-
-function states.finalize(namespace,attribute,head) -- is this one ok?
- if current > 0 and nsnone then
- local id = head.id
- if id == hlist_code or id == vlist_code then
- local list = head.list
- if list then
- head.list = insert_node_before(list,list,copy_node(nsnone))
- end
- else
- head = insert_node_before(head,head,copy_node(nsnone))
- end
- return head, true, true
- end
- return head, false, false
-end
-
--- disc nodes can be ignored
--- we need to deal with literals too (reset as well as oval)
--- if id == glyph_code or (id == whatsit_code and stack.subtype == pdfliteral_code) or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then
-
--- local function process(namespace,attribute,head,inheritance,default) -- one attribute
--- local stack, done = head, false
--- while stack do
--- local id = stack.id
--- if id == glyph_code or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code
--- local c = stack[attribute]
--- if c then
--- if default and c == inheritance then
--- if current ~= default then
--- head = insert_node_before(head,stack,copy_node(nsdata[default]))
--- current = default
--- done = true
--- end
--- elseif current ~= c then
--- head = insert_node_before(head,stack,copy_node(nsdata[c]))
--- current = c
--- done = true
--- end
--- -- here ? compare selective
--- if id == glue_code then --leader
--- -- same as *list
--- local content = stack.leader
--- if content then
--- local savedcurrent = current
--- local ci = content.id
--- if ci == hlist_code or ci == vlist_code then
--- -- else we reset inside a box unneeded, okay, the downside is
--- -- that we trigger color in each repeated box, so there is room
--- -- for improvement here
--- current = 0
--- end
--- local ok = false
--- if nstrigger and stack[nstrigger] then
--- local outer = stack[attribute]
--- if outer ~= inheritance then
--- stack.leader, ok = process(namespace,attribute,content,inheritance,outer)
--- else
--- stack.leader, ok = process(namespace,attribute,content,inheritance,default)
--- end
--- else
--- stack.leader, ok = process(namespace,attribute,content,inheritance,default)
--- end
--- current = savedcurrent
--- done = done or ok
--- end
--- end
--- elseif default and inheritance then
--- if current ~= default then
--- head = insert_node_before(head,stack,copy_node(nsdata[default]))
--- current = default
--- done = true
--- end
--- elseif current > 0 then
--- head = insert_node_before(head,stack,copy_node(nsnone))
--- current = 0
--- done = true
--- end
--- elseif id == hlist_code or id == vlist_code then
--- local content = stack.list
--- if content then
--- local ok = false
--- if nstrigger and stack[nstrigger] then
--- local outer = stack[attribute]
--- if outer ~= inheritance then
--- stack.list, ok = process(namespace,attribute,content,inheritance,outer)
--- else
--- stack.list, ok = process(namespace,attribute,content,inheritance,default)
--- end
--- else
--- stack.list, ok = process(namespace,attribute,content,inheritance,default)
--- end
--- done = done or ok
--- end
--- end
--- stack = stack.next
--- end
--- return head, done
--- end
-
--- local function process(namespace,attribute,head,inheritance,default) -- one attribute
--- local stack, done = head, false
-
--- local function check()
--- local c = stack[attribute]
--- if c then
--- if default and c == inheritance then
--- if current ~= default then
--- head = insert_node_before(head,stack,copy_node(nsdata[default]))
--- current = default
--- done = true
--- end
--- elseif current ~= c then
--- head = insert_node_before(head,stack,copy_node(nsdata[c]))
--- current = c
--- done = true
--- end
--- elseif default and inheritance then
--- if current ~= default then
--- head = insert_node_before(head,stack,copy_node(nsdata[default]))
--- current = default
--- done = true
--- end
--- elseif current > 0 then
--- head = insert_node_before(head,stack,copy_node(nsnone))
--- current = 0
--- done = true
--- end
--- return c
--- end
-
--- local function nested(content)
--- if nstrigger and stack[nstrigger] then
--- local outer = stack[attribute]
--- if outer ~= inheritance then
--- return process(namespace,attribute,content,inheritance,outer)
--- else
--- return process(namespace,attribute,content,inheritance,default)
--- end
--- else
--- return process(namespace,attribute,content,inheritance,default)
--- end
--- end
-
--- while stack do
--- local id = stack.id
--- if id == glyph_code then
--- check()
--- elseif id == glue_code then
--- local content = stack.leader
--- if content and check() then
--- local savedcurrent = current
--- local ci = content.id
--- if ci == hlist_code or ci == vlist_code then
--- -- else we reset inside a box unneeded, okay, the downside is
--- -- that we trigger color in each repeated box, so there is room
--- -- for improvement here
--- current = 0
--- end
-
--- local ok = false
--- stack.leader, ok = nested(content)
--- done = done or ok
-
--- current = savedcurrent
--- end
--- elseif id == hlist_code or id == vlist_code then
--- local content = stack.list
--- if content then
-
--- local ok = false
--- stack.list, ok = nested(content)
--- done = done or ok
-
--- end
--- elseif id == rule_code then
--- if stack.width ~= 0 then
--- check()
--- end
--- end
--- stack = stack.next
--- end
--- return head, done
--- end
-
--- local function process(namespace,attribute,head,inheritance,default) -- one attribute
--- local stack, done = head, false
--- while stack do
--- local id = stack.id
--- if id == glyph_code then
--- -- begin of check
--- local c = stack[attribute]
--- if c then
--- if default and c == inheritance then
--- if current ~= default then
--- head = insert_node_before(head,stack,copy_node(nsdata[default]))
--- current = default
--- done = true
--- end
--- elseif current ~= c then
--- head = insert_node_before(head,stack,copy_node(nsdata[c]))
--- current = c
--- done = true
--- end
--- elseif default and inheritance then
--- if current ~= default then
--- head = insert_node_before(head,stack,copy_node(nsdata[default]))
--- current = default
--- done = true
--- end
--- elseif current > 0 then
--- head = insert_node_before(head,stack,copy_node(nsnone))
--- current = 0
--- done = true
--- end
--- -- end of check
--- elseif id == glue_code then
--- local content = stack.leader
--- if content then
--- -- begin of check
--- local c = stack[attribute]
--- if c then
--- if default and c == inheritance then
--- if current ~= default then
--- head = insert_node_before(head,stack,copy_node(nsdata[default]))
--- current = default
--- done = true
--- end
--- elseif current ~= c then
--- head = insert_node_before(head,stack,copy_node(nsdata[c]))
--- current = c
--- done = true
--- end
--- -- begin special to this check
--- local savedcurrent = current
--- local ci = content.id
--- if ci == hlist_code or ci == vlist_code then
--- -- else we reset inside a box unneeded, okay, the downside is
--- -- that we trigger color in each repeated box, so there is room
--- -- for improvement here
--- current = 0
--- end
--- -- begin nested --
--- local ok = false
--- if nstrigger and stack[nstrigger] then
--- local outer = stack[attribute]
--- if outer ~= inheritance then
--- stack.leader, ok = process(namespace,attribute,content,inheritance,outer)
--- else
--- stack.leader, ok = process(namespace,attribute,content,inheritance,default)
--- end
--- else
--- stack.leader, ok = process(namespace,attribute,content,inheritance,default)
--- end
--- -- end nested --
--- done = done or ok
--- current = savedcurrent
--- -- end special to this check
--- elseif default and inheritance then
--- if current ~= default then
--- head = insert_node_before(head,stack,copy_node(nsdata[default]))
--- current = default
--- done = true
--- end
--- elseif current > 0 then
--- head = insert_node_before(head,stack,copy_node(nsnone))
--- current = 0
--- done = true
--- end
--- -- end of check
--- end
--- elseif id == hlist_code or id == vlist_code then
--- local content = stack.list
--- if content then
--- -- begin nested --
--- local ok
--- if nstrigger and stack[nstrigger] then
--- local outer = stack[attribute]
--- if outer ~= inheritance then
--- stack.list, ok = process(namespace,attribute,content,inheritance,outer)
--- else
--- stack.list, ok = process(namespace,attribute,content,inheritance,default)
--- end
--- else
--- stack.list, ok = process(namespace,attribute,content,inheritance,default)
--- end
--- -- end nested --
--- done = done or ok
--- end
--- elseif id == rule_code then
--- if stack.width ~= 0 then
--- -- begin of check
--- local c = stack[attribute]
--- if c then
--- if default and c == inheritance then
--- if current ~= default then
--- head = insert_node_before(head,stack,copy_node(nsdata[default]))
--- current = default
--- done = true
--- end
--- elseif current ~= c then
--- head = insert_node_before(head,stack,copy_node(nsdata[c]))
--- current = c
--- done = true
--- end
--- elseif default and inheritance then
--- if current ~= default then
--- head = insert_node_before(head,stack,copy_node(nsdata[default]))
--- current = default
--- done = true
--- end
--- elseif current > 0 then
--- head = insert_node_before(head,stack,copy_node(nsnone))
--- current = 0
--- done = true
--- end
--- -- end of check
--- end
--- end
--- stack = stack.next
--- end
--- return head, done
--- end
-
-local function process(namespace,attribute,head,inheritance,default) -- one attribute
- local stack = head
- local done = false
- local check = false
- local leader = nil
- while stack do
- local id = stack.id
- if id == glyph_code then
- check = true
- elseif id == glue_code then
- leader = stack.leader
- if leader then
- check = true
- end
- elseif id == hlist_code or id == vlist_code then
- local content = stack.list
- if content then
- -- begin nested --
- local ok
- if nstrigger and stack[nstrigger] then
- local outer = stack[attribute]
- if outer ~= inheritance then
- stack.list, ok = process(namespace,attribute,content,inheritance,outer)
- else
- stack.list, ok = process(namespace,attribute,content,inheritance,default)
- end
- else
- stack.list, ok = process(namespace,attribute,content,inheritance,default)
- end
- -- end nested --
- done = done or ok
- end
- elseif id == rule_code then
- check = stack.width ~= 0
- end
- -- much faster this way than using a check() and nested() function
- if check then
- local c = stack[attribute]
- if c then
- if default and c == inheritance then
- if current ~= default then
- head = insert_node_before(head,stack,copy_node(nsdata[default]))
- current = default
- done = true
- end
- elseif current ~= c then
- head = insert_node_before(head,stack,copy_node(nsdata[c]))
- current = c
- done = true
- end
- if leader then
- local savedcurrent = current
- local ci = leader.id
- if ci == hlist_code or ci == vlist_code then
- -- else we reset inside a box unneeded, okay, the downside is
- -- that we trigger color in each repeated box, so there is room
- -- for improvement here
- current = 0
- end
- -- begin nested --
- local ok = false
- if nstrigger and stack[nstrigger] then
- local outer = stack[attribute]
- if outer ~= inheritance then
- stack.leader, ok = process(namespace,attribute,leader,inheritance,outer)
- else
- stack.leader, ok = process(namespace,attribute,leader,inheritance,default)
- end
- else
- stack.leader, ok = process(namespace,attribute,leader,inheritance,default)
- end
- -- end nested --
- done = done or ok
- current = savedcurrent
- leader = false
- end
- elseif default and inheritance then
- if current ~= default then
- head = insert_node_before(head,stack,copy_node(nsdata[default]))
- current = default
- done = true
- end
- elseif current > 0 then
- head = insert_node_before(head,stack,copy_node(nsnone))
- current = 0
- done = true
- end
- check = false
- end
- stack = stack.next
- end
- return head, done
-end
-
-states.process = process
-
--- we can force a selector, e.g. document wide color spaces, saves a little
--- watch out, we need to check both the selector state (like colorspace) and
--- the main state (like color), otherwise we get into troubles when a selector
--- state changes while the main state stays the same (like two glyphs following
--- each other with the same color but different color spaces e.g. \showcolor)
-
--- local function selective(namespace,attribute,head,inheritance,default) -- two attributes
--- local stack, done = head, false
--- while stack do
--- local id = stack.id
--- -- we need to deal with literals too (reset as well as oval)
--- -- if id == glyph_code or (id == whatsit_code and stack.subtype == pdfliteral_code) or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code
--- if id == glyph_code -- or id == disc_code
--- or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code
--- local c = stack[attribute]
--- if c then
--- if default and c == inheritance then
--- if current ~= default then
--- local data = nsdata[default]
--- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
--- current = default
--- done = true
--- end
--- else
--- local s = stack[nsselector]
--- if current ~= c or current_selector ~= s then
--- local data = nsdata[c]
--- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
--- current = c
--- current_selector = s
--- done = true
--- end
--- end
--- elseif default and inheritance then
--- if current ~= default then
--- local data = nsdata[default]
--- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
--- current = default
--- done = true
--- end
--- elseif current > 0 then
--- head = insert_node_before(head,stack,copy_node(nsnone))
--- current, current_selector, done = 0, 0, true
--- end
--- if id == glue_code then -- leader
--- -- same as *list
--- local content = stack.leader
--- if content then
--- local savedcurrent = current
--- local ci = content.id
--- if ci == hlist_code or ci == vlist_code then
--- -- else we reset inside a box unneeded, okay, the downside is
--- -- that we trigger color in each repeated box, so there is room
--- -- for improvement here
--- current = 0
--- end
--- local ok = false
--- if nstrigger and stack[nstrigger] then
--- local outer = stack[attribute]
--- if outer ~= inheritance then
--- stack.leader, ok = selective(namespace,attribute,content,inheritance,outer)
--- else
--- stack.leader, ok = selective(namespace,attribute,content,inheritance,default)
--- end
--- else
--- stack.leader, ok = selective(namespace,attribute,content,inheritance,default)
--- end
--- current = savedcurrent
--- done = done or ok
--- end
--- end
--- elseif id == hlist_code or id == vlist_code then
--- local content = stack.list
--- if content then
--- local ok = false
--- if nstrigger and stack[nstrigger] then
--- local outer = stack[attribute]
--- if outer ~= inheritance then
--- stack.list, ok = selective(namespace,attribute,content,inheritance,outer)
--- else
--- stack.list, ok = selective(namespace,attribute,content,inheritance,default)
--- end
--- else
--- stack.list, ok = selective(namespace,attribute,content,inheritance,default)
--- end
--- done = done or ok
--- end
--- end
--- stack = stack.next
--- end
--- return head, done
--- end
-
--- local function selective(namespace,attribute,head,inheritance,default) -- two attributes
--- local stack, done = head, false
-
--- local function check()
--- local c = stack[attribute]
--- if c then
--- if default and c == inheritance then
--- if current ~= default then
--- local data = nsdata[default]
--- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
--- current = default
--- done = true
--- end
--- else
--- local s = stack[nsselector]
--- if current ~= c or current_selector ~= s then
--- local data = nsdata[c]
--- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
--- current = c
--- current_selector = s
--- done = true
--- end
--- end
--- elseif default and inheritance then
--- if current ~= default then
--- local data = nsdata[default]
--- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
--- current = default
--- done = true
--- end
--- elseif current > 0 then
--- head = insert_node_before(head,stack,copy_node(nsnone))
--- current, current_selector, done = 0, 0, true
--- end
--- return c
--- end
-
--- local function nested(content)
--- if nstrigger and stack[nstrigger] then
--- local outer = stack[attribute]
--- if outer ~= inheritance then
--- return selective(namespace,attribute,content,inheritance,outer)
--- else
--- return selective(namespace,attribute,content,inheritance,default)
--- end
--- else
--- return selective(namespace,attribute,content,inheritance,default)
--- end
--- end
-
--- while stack do
--- local id = stack.id
--- if id == glyph_code then
--- check()
--- elseif id == glue_code then
--- local content = stack.leader
--- if content and check() then
--- -- local savedcurrent = current
--- -- local ci = content.id
--- -- if ci == hlist_code or ci == vlist_code then
--- -- -- else we reset inside a box unneeded, okay, the downside is
--- -- -- that we trigger color in each repeated box, so there is room
--- -- -- for improvement here
--- -- current = 0
--- -- end
-
--- local ok = false
--- stack.leader, ok = nested(content)
--- done = done or ok
-
--- -- current = savedcurrent
--- end
--- elseif id == hlist_code or id == vlist_code then
--- local content = stack.list
--- if content then
-
--- local ok = false
--- stack.list, ok = nested(content)
--- done = done or ok
-
--- end
--- elseif id == rule_code then
--- if stack.width ~= 0 then
--- check()
--- end
--- end
--- stack = stack.next
--- end
--- return head, done
--- end
-
-local function selective(namespace,attribute,head,inheritance,default) -- two attributes
- local stack = head
- local done = false
- local check = false
- local leader = nil
- while stack do
- local id = stack.id
- if id == glyph_code then
- check = true
- elseif id == glue_code then
- leader = stack.leader
- if leader then
- check = true
- end
- elseif id == hlist_code or id == vlist_code then
- local content = stack.list
- if content then
- local ok = false
- -- begin nested
- if nstrigger and stack[nstrigger] then
- local outer = stack[attribute]
- if outer ~= inheritance then
- stack.list, ok = selective(namespace,attribute,content,inheritance,outer)
- else
- stack.list, ok = selective(namespace,attribute,content,inheritance,default)
- end
- else
- stack.list, ok = selective(namespace,attribute,content,inheritance,default)
- end
- -- end nested
- done = done or ok
- end
- elseif id == rule_code then
- check = stack.width ~= 0
- end
-
- if check then
- local c = stack[attribute]
- if c then
- if default and c == inheritance then
- if current ~= default then
- local data = nsdata[default]
- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
- current = default
- done = true
- end
- else
- local s = stack[nsselector]
- if current ~= c or current_selector ~= s then
- local data = nsdata[c]
- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
- current = c
- current_selector = s
- done = true
- end
- end
- if leader then
- local ok = false
- -- begin nested
- if nstrigger and stack[nstrigger] then
- local outer = stack[attribute]
- if outer ~= inheritance then
- stack.leader, ok = selective(namespace,attribute,leader,inheritance,outer)
- else
- stack.leader, ok = selective(namespace,attribute,leader,inheritance,default)
- end
- else
- stack.leader, ok = selective(namespace,attribute,leader,inheritance,default)
- end
- -- end nested
- done = done or ok
- leader = false
- end
- elseif default and inheritance then
- if current ~= default then
- local data = nsdata[default]
- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
- current = default
- done = true
- end
- elseif current > 0 then
- head = insert_node_before(head,stack,copy_node(nsnone))
- current, current_selector, done = 0, 0, true
- end
- check = false
- end
-
- stack = stack.next
- end
- return head, done
-end
-
-states.selective = selective
-
--- Ideally the next one should be merged with the previous but keeping it separate is
--- safer. We deal with two situations: efficient boxwise (layoutareas) and mixed layers
--- (as used in the stepper). In the stepper we cannot use the box branch as it involves
--- paragraph lines and then gets mixed up. A messy business (esp since we want to be
--- efficient).
---
--- Todo: make a better stacker. Keep track (in attribute) about nesting level. Not
--- entirely trivial and a generic solution is nicer (compares to the exporter).
-
--- local function stacked(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise
--- local stack, done = head, false
--- local current, depth = default or 0, 0
---
--- local function check()
--- local a = stack[attribute]
--- if a then
--- if current ~= a then
--- head = insert_node_before(head,stack,copy_node(nsdata[a]))
--- depth = depth + 1
--- current, done = a, true
--- end
--- elseif default > 0 then
--- --
--- elseif current > 0 then
--- head = insert_node_before(head,stack,copy_node(nsnone))
--- depth = depth - 1
--- current, done = 0, true
--- end
--- return a
--- end
---
--- while stack do
--- local id = stack.id
--- if id == glyph_code then
--- check()
--- elseif id == glue_code then
--- local content = stack.leader
--- if content and check() then
--- local ok = false
--- stack.leader, ok = stacked(namespace,attribute,content,current)
--- done = done or ok
--- end
--- elseif id == hlist_code or id == vlist_code then
--- local content = stack.list
--- if content then
--- -- the problem is that broken lines gets the attribute which can be a later one
--- if nslistwise then
--- local a = stack[attribute]
--- if a and current ~= a and nslistwise[a] then -- viewerlayer / needs checking, see below
--- local p = current
--- current, done = a, true
--- head = insert_node_before(head,stack,copy_node(nsdata[a]))
--- stack.list = stacked(namespace,attribute,content,current)
--- head, stack = insert_node_after(head,stack,copy_node(nsnone))
--- current = p
--- else
--- local ok = false
--- stack.list, ok = stacked(namespace,attribute,content,current)
--- done = done or ok
--- end
--- else
--- local ok = false
--- stack.list, ok = stacked(namespace,attribute,content,current)
--- done = done or ok
--- end
--- end
--- elseif id == rule_code then
--- if stack.width ~= 0 then
--- check()
--- end
--- end
--- stack = stack.next
--- end
--- while depth > 0 do
--- head = insert_node_after(head,stack,copy_node(nsnone))
--- depth = depth - 1
--- end
--- return head, done
--- end
-
-local function stacked(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise
- local stack = head
- local done = false
- local current = default or 0
- local depth = 0
- local check = false
- local leader = false
- while stack do
- local id = stack.id
- if id == glyph_code then
- check = true
- elseif id == glue_code then
- leader = stack.leader
- if leader then
- check = true
- end
- elseif id == hlist_code or id == vlist_code then
- local content = stack.list
- if content then
- -- the problem is that broken lines gets the attribute which can be a later one
- if nslistwise then
- local a = stack[attribute]
- if a and current ~= a and nslistwise[a] then -- viewerlayer / needs checking, see below
- local p = current
- current, done = a, true
- head = insert_node_before(head,stack,copy_node(nsdata[a]))
- stack.list = stacked(namespace,attribute,content,current)
- head, stack = insert_node_after(head,stack,copy_node(nsnone))
- current = p
- else
- local ok = false
- stack.list, ok = stacked(namespace,attribute,content,current)
- done = done or ok
- end
- else
- local ok = false
- stack.list, ok = stacked(namespace,attribute,content,current)
- done = done or ok
- end
- end
- elseif id == rule_code then
- check = stack.width ~= 0
- end
-
- if check then
- local a = stack[attribute]
- if a then
- if current ~= a then
- head = insert_node_before(head,stack,copy_node(nsdata[a]))
- depth = depth + 1
- current, done = a, true
- end
- if leader then
- local ok = false
- stack.leader, ok = stacked(namespace,attribute,content,current)
- done = done or ok
- leader = false
- end
- elseif default > 0 then
- --
- elseif current > 0 then
- head = insert_node_before(head,stack,copy_node(nsnone))
- depth = depth - 1
- current, done = 0, true
- end
- check = false
- end
-
- stack = stack.next
- end
- while depth > 0 do
- head = insert_node_after(head,stack,copy_node(nsnone))
- depth = depth - 1
- end
- return head, done
-end
-
-states.stacked = stacked
-
--- experimental
-
--- local function stacker(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise
--- nsbegin()
--- local current, previous, done, okay = head, head, false, false
--- local attrib = default or unsetvalue
---
--- local function check()
--- local a = current[attribute] or unsetvalue
--- if a ~= attrib then
--- local n = nsstep(a)
--- if n then
--- -- !!!! TEST CODE !!!!
--- -- head = insert_node_before(head,current,copy_node(nsdata[tonumber(n)])) -- a
--- head = insert_node_before(head,current,n) -- a
--- end
--- attrib, done, okay = a, true, true
--- end
--- return a
--- end
---
--- while current do
--- local id = current.id
--- if id == glyph_code then
--- check()
--- elseif id == glue_code then
--- local content = current.leader
--- if content and check() then
--- -- tricky as a leader has to be a list so we cannot inject before
--- local _, ok = stacker(namespace,attribute,content,attrib)
--- done = done or ok
--- end
--- elseif id == hlist_code or id == vlist_code then
--- local content = current.list
--- if not content then
--- -- skip
--- elseif nslistwise then
--- local a = current[attribute]
--- if a and attrib ~= a and nslistwise[a] then -- viewerlayer
--- done = true
--- head = insert_node_before(head,current,copy_node(nsdata[a]))
--- current.list = stacker(namespace,attribute,content,a)
--- head, current = insert_node_after(head,current,copy_node(nsnone))
--- else
--- local ok = false
--- current.list, ok = stacker(namespace,attribute,content,attrib)
--- done = done or ok
--- end
--- else
--- local ok = false
--- current.list, ok = stacker(namespace,attribute,content,default)
--- done = done or ok
--- end
--- elseif id == rule_code then
--- if current.width ~= 0 then
--- check()
--- end
--- end
--- previous = current
--- current = current.next
--- end
--- if okay then
--- local n = nsend()
--- if n then
--- -- !!!! TEST CODE !!!!
--- -- head = insert_node_after(head,previous,copy_node(nsdata[tostring(n)]))
--- head = insert_node_after(head,previous,n)
--- end
--- end
--- return head, done
--- end
-
-local function stacker(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise
- nsbegin()
- local current = head
- local previous = head
- local done = false
- local okay = false
- local attrib = default or unsetvalue
- local check = false
- local leader = false
- while current do
- local id = current.id
- if id == glyph_code then
- check = true
- elseif id == glue_code then
- leader = current.leader
- if leader then
- check = true
- end
- elseif id == hlist_code or id == vlist_code then
- local content = current.list
- if not content then
- -- skip
- elseif nslistwise then
- local a = current[attribute]
- if a and attrib ~= a and nslistwise[a] then -- viewerlayer
- done = true
- head = insert_node_before(head,current,copy_node(nsdata[a]))
- current.list = stacker(namespace,attribute,content,a)
- head, current = insert_node_after(head,current,copy_node(nsnone))
- else
- local ok = false
- current.list, ok = stacker(namespace,attribute,content,attrib)
- done = done or ok
- end
- else
- local ok = false
- current.list, ok = stacker(namespace,attribute,content,default)
- done = done or ok
- end
- elseif id == rule_code then
- check = current.width ~= 0
- end
-
- if check then
- local a = current[attribute] or unsetvalue
- if a ~= attrib then
- local n = nsstep(a)
- if n then
- -- !!!! TEST CODE !!!!
- -- head = insert_node_before(head,current,copy_node(nsdata[tonumber(n)])) -- a
- head = insert_node_before(head,current,n) -- a
- end
- attrib, done, okay = a, true, true
- if leader then
- -- tricky as a leader has to be a list so we cannot inject before
- local _, ok = stacker(namespace,attribute,leader,attrib)
- done = done or ok
- leader = false
- end
- end
- check = false
- end
-
- previous = current
- current = current.next
- end
- if okay then
- local n = nsend()
- if n then
- -- !!!! TEST CODE !!!!
- -- head = insert_node_after(head,previous,copy_node(nsdata[tostring(n)]))
- head = insert_node_after(head,previous,n)
- end
- end
- return head, done
-end
-
-states.stacker = stacker
-
--- -- --
-
-statistics.register("attribute processing time", function()
- return statistics.elapsedseconds(attributes,"front- and backend")
-end)
+if not modules then modules = { } end modules ['node-fin'] = {
+ version = 1.001,
+ comment = "companion to node-fin.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- this module is being reconstructed
+-- local functions, only slightly slower
+
+local next, type, format = next, type, string.format
+
+local attributes, nodes, node = attributes, nodes, node
+
+local copy_node = node.copy
+local find_tail = node.slide
+
+local nodecodes = nodes.nodecodes
+local whatcodes = nodes.whatcodes
+
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local glue_code = nodecodes.glue
+local rule_code = nodecodes.rule
+local whatsit_code = nodecodes.whatsit
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+
+local pdfliteral_code = whatcodes.pdfliteral
+
+local states = attributes.states
+local numbers = attributes.numbers
+local a_trigger = attributes.private('trigger')
+local triggering = false
+
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+local loadstripped = utilities.lua.loadstripped
+local unsetvalue = attributes.unsetvalue
+
+-- these two will be like trackers
+
+function states.enabletriggering()
+ triggering = true
+end
+function states.disabletriggering()
+ triggering = false
+end
+
+-- the following code is no longer needed due to the new backend
+-- but we keep it around for a while as an example
+--
+-- states.collected = states.collected or { }
+--
+-- storage.register("states/collected", states.collected, "states.collected")
+--
+-- local collected = states.collected
+--
+-- function states.collect(str)
+-- collected[#collected+1] = str
+-- end
+--
+-- function states.flush()
+-- if #collected > 0 then
+-- for i=1,#collected do
+-- context(collected[i]) -- we're in context mode anyway
+-- end
+-- collected = { }
+-- states.collected = collected
+-- end
+-- end
+--
+-- function states.check()
+-- logs.report("states",concat(collected,"\n"))
+-- end
+
+-- we used to do the main processor loop here and call the processor for each
+-- node, but eventually this was too much of a slowdown (1 sec on 23 for 120
+-- pages mk), so we moved the looping into the processor itself; this may lead
+-- to a bit of duplicate code once we have more state handlers
+
+-- local function process_attribute(head,plugin) -- head,attribute,enabled,initializer,resolver,processor,finalizer
+-- local namespace = plugin.namespace
+-- if namespace.enabled ~= false then -- this test will go away
+-- starttiming(attributes) -- in principle we could delegate this to the main caller
+-- local done, used, ok = false, nil, false
+-- local attribute = namespace.attribute or numbers[plugin.name] -- todo: plugin.attribute
+-- local processor = plugin.processor
+-- if processor then
+-- local initializer = plugin.initializer
+-- local resolver = plugin.resolver
+-- local inheritance = (resolver and resolver()) or nil -- -0x7FFFFFFF -- we can best use nil and skip !
+-- if initializer then
+-- initializer(namespace,attribute,head)
+-- end
+-- head, ok = processor(namespace,attribute,head,inheritance)
+-- if ok then
+-- local finalizer = plugin.finalizer
+-- if finalizer then
+-- head, ok, used = finalizer(namespace,attribute,head)
+-- if used then
+-- local flusher = plugin.flusher
+-- if flusher then
+-- head = flusher(namespace,attribute,head,used)
+-- end
+-- end
+-- end
+-- done = true
+-- end
+-- end
+-- stoptiming(attributes)
+-- return head, done
+-- else
+-- return head, false
+-- end
+-- end
+--
+-- function nodes.installattributehandler(plugin) -- we need to avoid this nested function
+-- return function(head)
+-- return process_attribute(head,plugin)
+-- end
+-- end
+
+-- An experiment: lean and mean functions. It is not really faster, but with
+-- upcoming functionality it might make a difference, e.g. features like
+-- 'casing' and 'italics' can be called a lot, so there it makes sense.
+
+nodes.plugindata = nil
+
+local template = [[
+local plugin = nodes.plugindata
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+local namespace = plugin.namespace
+local attribute = namespace.attribute or attributes.numbers[plugin.name]
+local processor = plugin.processor
+local initializer = plugin.initializer
+local resolver = plugin.resolver
+local finalizer = plugin.finalizer
+local flusher = plugin.flusher
+if not processor then
+ return function(head)
+ return head, false
+ end
+elseif initializer or finalizer or resolver then
+ return function(head)
+ starttiming(attributes)
+ local done, used, ok = false, nil, false
+ local inheritance = (resolver and resolver()) or nil -- -0x7FFFFFFF -- we can best use nil and skip !
+ if initializer then
+ initializer(namespace,attribute,head)
+ end
+ head, ok = processor(namespace,attribute,head,inheritance)
+ if ok then
+ if finalizer then
+ head, ok, used = finalizer(namespace,attribute,head)
+ if used and flusher then
+ head = flusher(namespace,attribute,head,used)
+ end
+ end
+ done = true
+ end
+ stoptiming(attributes)
+ return head, done
+ end
+else
+ return function(head)
+ starttiming(attributes)
+ local head, done = processor(namespace,attribute,head)
+ stoptiming(attributes)
+ return head, done
+ end
+end
+nodes.plugindata = nil
+]]
+
+function nodes.installattributehandler(plugin)
+ nodes.plugindata = plugin
+ return loadstripped(template)()
+end
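+
+-- a minimal usage sketch of the installer above; the field names match what the
+-- template expects, but the concrete plugin shown here (name and namespace
+-- content) is an assumption, not something defined in this file:
+--
+-- local handler = nodes.installattributehandler {
+--     name        = "mystate",                   -- assumed attribute name
+--     namespace   = {
+--         attribute = attributes.numbers["mystate"],
+--         data      = somenodes,                 -- value -> node to inject (assumed)
+--         none      = someresetnode,             -- assumed reset node
+--     },
+--     initializer = states.initialize,
+--     processor   = states.process,
+--     finalizer   = states.finalize,
+-- }
+--
+-- head, done = handler(head) -- e.g. from a node list callback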
+
+-- the injectors
+
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+
+local nsdata, nsnone, nslistwise, nsforced, nsselector, nstrigger
+local current, current_selector, done = 0, 0, false -- nb, stack has a local current !
+local nsbegin, nsend, nsstep, nspush, nspop
+
+function states.initialize(namespace,attribute,head)
+ nsdata = namespace.data
+ nsnone = namespace.none
+ nsforced = namespace.forced
+ nsselector = namespace.selector
+ nslistwise = namespace.listwise
+ nstrigger = triggering and namespace.triggering and a_trigger
+ current = 0
+ current_selector = 0
+ done = false -- todo: done cleanup
+ nsstep = namespace.resolve_step
+ if nsstep then
+ nsbegin = namespace.resolve_begin
+ nsend = namespace.resolve_end
+ nspush = namespace.push
+ nspop = namespace.pop
+ end
+end
+
+function states.finalize(namespace,attribute,head) -- is this one ok?
+ if current > 0 and nsnone then
+ local id = head.id
+ if id == hlist_code or id == vlist_code then
+ local list = head.list
+ if list then
+ head.list = insert_node_before(list,list,copy_node(nsnone))
+ end
+ else
+ head = insert_node_before(head,head,copy_node(nsnone))
+ end
+ return head, true, true
+ end
+ return head, false, false
+end
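+
+-- for reference, the namespace that initialize/process/selective expect looks
+-- roughly like this (a sketch; the actual nodes stored in data/none come from
+-- the caller, e.g. backend literals, and the names used here are assumptions):
+--
+-- local namespace = {
+--     attribute  = attributes.numbers["mystate"],
+--     data       = somenodes,  -- value -> node (process/stacked/stacker), or
+--                              -- value -> table indexed by selector (selective)
+--     none       = someresetnode, -- injected when we fall back to the unset state
+--     forced     = nil,        -- optional forced selector index into data[value]
+--     selector   = nil,        -- attribute number consulted as stack[nsselector]
+--     listwise   = nil,        -- set of values handled per list (stacked/stacker)
+--     triggering = true,       -- honor the private 'trigger' attribute
+-- }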
+
+-- disc nodes can be ignored
+-- we need to deal with literals too (reset as well as oval)
+-- if id == glyph_code or (id == whatsit_code and stack.subtype == pdfliteral_code) or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then
+
+-- local function process(namespace,attribute,head,inheritance,default) -- one attribute
+-- local stack, done = head, false
+-- while stack do
+-- local id = stack.id
+-- if id == glyph_code or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code
+-- local c = stack[attribute]
+-- if c then
+-- if default and c == inheritance then
+-- if current ~= default then
+-- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+-- current = default
+-- done = true
+-- end
+-- elseif current ~= c then
+-- head = insert_node_before(head,stack,copy_node(nsdata[c]))
+-- current = c
+-- done = true
+-- end
+-- -- here ? compare selective
+-- if id == glue_code then --leader
+-- -- same as *list
+-- local content = stack.leader
+-- if content then
+-- local savedcurrent = current
+-- local ci = content.id
+-- if ci == hlist_code or ci == vlist_code then
+-- -- else we reset inside a box unneeded, okay, the downside is
+-- -- that we trigger color in each repeated box, so there is room
+-- -- for improvement here
+-- current = 0
+-- end
+-- local ok = false
+-- if nstrigger and stack[nstrigger] then
+-- local outer = stack[attribute]
+-- if outer ~= inheritance then
+-- stack.leader, ok = process(namespace,attribute,content,inheritance,outer)
+-- else
+-- stack.leader, ok = process(namespace,attribute,content,inheritance,default)
+-- end
+-- else
+-- stack.leader, ok = process(namespace,attribute,content,inheritance,default)
+-- end
+-- current = savedcurrent
+-- done = done or ok
+-- end
+-- end
+-- elseif default and inheritance then
+-- if current ~= default then
+-- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+-- current = default
+-- done = true
+-- end
+-- elseif current > 0 then
+-- head = insert_node_before(head,stack,copy_node(nsnone))
+-- current = 0
+-- done = true
+-- end
+-- elseif id == hlist_code or id == vlist_code then
+-- local content = stack.list
+-- if content then
+-- local ok = false
+-- if nstrigger and stack[nstrigger] then
+-- local outer = stack[attribute]
+-- if outer ~= inheritance then
+-- stack.list, ok = process(namespace,attribute,content,inheritance,outer)
+-- else
+-- stack.list, ok = process(namespace,attribute,content,inheritance,default)
+-- end
+-- else
+-- stack.list, ok = process(namespace,attribute,content,inheritance,default)
+-- end
+-- done = done or ok
+-- end
+-- end
+-- stack = stack.next
+-- end
+-- return head, done
+-- end
+
+-- local function process(namespace,attribute,head,inheritance,default) -- one attribute
+-- local stack, done = head, false
+
+-- local function check()
+-- local c = stack[attribute]
+-- if c then
+-- if default and c == inheritance then
+-- if current ~= default then
+-- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+-- current = default
+-- done = true
+-- end
+-- elseif current ~= c then
+-- head = insert_node_before(head,stack,copy_node(nsdata[c]))
+-- current = c
+-- done = true
+-- end
+-- elseif default and inheritance then
+-- if current ~= default then
+-- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+-- current = default
+-- done = true
+-- end
+-- elseif current > 0 then
+-- head = insert_node_before(head,stack,copy_node(nsnone))
+-- current = 0
+-- done = true
+-- end
+-- return c
+-- end
+
+-- local function nested(content)
+-- if nstrigger and stack[nstrigger] then
+-- local outer = stack[attribute]
+-- if outer ~= inheritance then
+-- return process(namespace,attribute,content,inheritance,outer)
+-- else
+-- return process(namespace,attribute,content,inheritance,default)
+-- end
+-- else
+-- return process(namespace,attribute,content,inheritance,default)
+-- end
+-- end
+
+-- while stack do
+-- local id = stack.id
+-- if id == glyph_code then
+-- check()
+-- elseif id == glue_code then
+-- local content = stack.leader
+-- if content and check() then
+-- local savedcurrent = current
+-- local ci = content.id
+-- if ci == hlist_code or ci == vlist_code then
+-- -- else we reset inside a box unneeded, okay, the downside is
+-- -- that we trigger color in each repeated box, so there is room
+-- -- for improvement here
+-- current = 0
+-- end
+
+-- local ok = false
+-- stack.leader, ok = nested(content)
+-- done = done or ok
+
+-- current = savedcurrent
+-- end
+-- elseif id == hlist_code or id == vlist_code then
+-- local content = stack.list
+-- if content then
+
+-- local ok = false
+-- stack.list, ok = nested(content)
+-- done = done or ok
+
+-- end
+-- elseif id == rule_code then
+-- if stack.width ~= 0 then
+-- check()
+-- end
+-- end
+-- stack = stack.next
+-- end
+-- return head, done
+-- end
+
+-- local function process(namespace,attribute,head,inheritance,default) -- one attribute
+-- local stack, done = head, false
+-- while stack do
+-- local id = stack.id
+-- if id == glyph_code then
+-- -- begin of check
+-- local c = stack[attribute]
+-- if c then
+-- if default and c == inheritance then
+-- if current ~= default then
+-- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+-- current = default
+-- done = true
+-- end
+-- elseif current ~= c then
+-- head = insert_node_before(head,stack,copy_node(nsdata[c]))
+-- current = c
+-- done = true
+-- end
+-- elseif default and inheritance then
+-- if current ~= default then
+-- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+-- current = default
+-- done = true
+-- end
+-- elseif current > 0 then
+-- head = insert_node_before(head,stack,copy_node(nsnone))
+-- current = 0
+-- done = true
+-- end
+-- -- end of check
+-- elseif id == glue_code then
+-- local content = stack.leader
+-- if content then
+-- -- begin of check
+-- local c = stack[attribute]
+-- if c then
+-- if default and c == inheritance then
+-- if current ~= default then
+-- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+-- current = default
+-- done = true
+-- end
+-- elseif current ~= c then
+-- head = insert_node_before(head,stack,copy_node(nsdata[c]))
+-- current = c
+-- done = true
+-- end
+-- -- begin special to this check
+-- local savedcurrent = current
+-- local ci = content.id
+-- if ci == hlist_code or ci == vlist_code then
+-- -- else we reset inside a box unneeded, okay, the downside is
+-- -- that we trigger color in each repeated box, so there is room
+-- -- for improvement here
+-- current = 0
+-- end
+-- -- begin nested --
+-- local ok = false
+-- if nstrigger and stack[nstrigger] then
+-- local outer = stack[attribute]
+-- if outer ~= inheritance then
+-- stack.leader, ok = process(namespace,attribute,content,inheritance,outer)
+-- else
+-- stack.leader, ok = process(namespace,attribute,content,inheritance,default)
+-- end
+-- else
+-- stack.leader, ok = process(namespace,attribute,content,inheritance,default)
+-- end
+-- -- end nested --
+-- done = done or ok
+-- current = savedcurrent
+-- -- end special to this check
+-- elseif default and inheritance then
+-- if current ~= default then
+-- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+-- current = default
+-- done = true
+-- end
+-- elseif current > 0 then
+-- head = insert_node_before(head,stack,copy_node(nsnone))
+-- current = 0
+-- done = true
+-- end
+-- -- end of check
+-- end
+-- elseif id == hlist_code or id == vlist_code then
+-- local content = stack.list
+-- if content then
+-- -- begin nested --
+-- local ok
+-- if nstrigger and stack[nstrigger] then
+-- local outer = stack[attribute]
+-- if outer ~= inheritance then
+-- stack.list, ok = process(namespace,attribute,content,inheritance,outer)
+-- else
+-- stack.list, ok = process(namespace,attribute,content,inheritance,default)
+-- end
+-- else
+-- stack.list, ok = process(namespace,attribute,content,inheritance,default)
+-- end
+-- -- end nested --
+-- done = done or ok
+-- end
+-- elseif id == rule_code then
+-- if stack.width ~= 0 then
+-- -- begin of check
+-- local c = stack[attribute]
+-- if c then
+-- if default and c == inheritance then
+-- if current ~= default then
+-- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+-- current = default
+-- done = true
+-- end
+-- elseif current ~= c then
+-- head = insert_node_before(head,stack,copy_node(nsdata[c]))
+-- current = c
+-- done = true
+-- end
+-- elseif default and inheritance then
+-- if current ~= default then
+-- head = insert_node_before(head,stack,copy_node(nsdata[default]))
+-- current = default
+-- done = true
+-- end
+-- elseif current > 0 then
+-- head = insert_node_before(head,stack,copy_node(nsnone))
+-- current = 0
+-- done = true
+-- end
+-- -- end of check
+-- end
+-- end
+-- stack = stack.next
+-- end
+-- return head, done
+-- end
+
+local function process(namespace,attribute,head,inheritance,default) -- one attribute
+ local stack = head
+ local done = false
+ local check = false
+ local leader = nil
+ while stack do
+ local id = stack.id
+ if id == glyph_code then
+ check = true
+ elseif id == glue_code then
+ leader = stack.leader
+ if leader then
+ check = true
+ end
+ elseif id == hlist_code or id == vlist_code then
+ local content = stack.list
+ if content then
+ -- begin nested --
+ local ok
+ if nstrigger and stack[nstrigger] then
+ local outer = stack[attribute]
+ if outer ~= inheritance then
+ stack.list, ok = process(namespace,attribute,content,inheritance,outer)
+ else
+ stack.list, ok = process(namespace,attribute,content,inheritance,default)
+ end
+ else
+ stack.list, ok = process(namespace,attribute,content,inheritance,default)
+ end
+ -- end nested --
+ done = done or ok
+ end
+ elseif id == rule_code then
+ check = stack.width ~= 0
+ end
+ -- much faster this way than using a check() and nested() function
+ if check then
+ local c = stack[attribute]
+ if c then
+ if default and c == inheritance then
+ if current ~= default then
+ head = insert_node_before(head,stack,copy_node(nsdata[default]))
+ current = default
+ done = true
+ end
+ elseif current ~= c then
+ head = insert_node_before(head,stack,copy_node(nsdata[c]))
+ current = c
+ done = true
+ end
+ if leader then
+ local savedcurrent = current
+ local ci = leader.id
+ if ci == hlist_code or ci == vlist_code then
+ -- else we reset inside a box unneeded, okay, the downside is
+ -- that we trigger color in each repeated box, so there is room
+ -- for improvement here
+ current = 0
+ end
+ -- begin nested --
+ local ok = false
+ if nstrigger and stack[nstrigger] then
+ local outer = stack[attribute]
+ if outer ~= inheritance then
+ stack.leader, ok = process(namespace,attribute,leader,inheritance,outer)
+ else
+ stack.leader, ok = process(namespace,attribute,leader,inheritance,default)
+ end
+ else
+ stack.leader, ok = process(namespace,attribute,leader,inheritance,default)
+ end
+ -- end nested --
+ done = done or ok
+ current = savedcurrent
+ leader = false
+ end
+ elseif default and inheritance then
+ if current ~= default then
+ head = insert_node_before(head,stack,copy_node(nsdata[default]))
+ current = default
+ done = true
+ end
+ elseif current > 0 then
+ head = insert_node_before(head,stack,copy_node(nsnone))
+ current = 0
+ done = true
+ end
+ check = false
+ end
+ stack = stack.next
+ end
+ return head, done
+end
+
+states.process = process
+
+-- we can force a selector, e.g. document-wide color spaces, which saves a little.
+-- watch out: we need to check both the selector state (like colorspace) and the
+-- main state (like color), otherwise we get into trouble when a selector state
+-- changes while the main state stays the same (like two glyphs following each
+-- other with the same color but different color spaces, e.g. \showcolor)
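+--
+-- a small worked illustration (the attribute values are made up): say glyph 1
+-- carries color=5, colorspace=1 and glyph 2 carries color=5, colorspace=2; the
+-- test "current ~= c" alone would skip glyph 2, but the combined test used in
+-- selective below,
+--
+--     if current ~= c or current_selector ~= s then ...
+--
+-- still fires because current_selector changed, so glyph 2 gets its own injection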
+
+-- local function selective(namespace,attribute,head,inheritance,default) -- two attributes
+-- local stack, done = head, false
+-- while stack do
+-- local id = stack.id
+-- -- we need to deal with literals too (reset as well as oval)
+-- -- if id == glyph_code or (id == whatsit_code and stack.subtype == pdfliteral_code) or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code
+-- if id == glyph_code -- or id == disc_code
+-- or (id == rule_code and stack.width ~= 0) or (id == glue_code and stack.leader) then -- or disc_code
+-- local c = stack[attribute]
+-- if c then
+-- if default and c == inheritance then
+-- if current ~= default then
+-- local data = nsdata[default]
+-- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
+-- current = default
+-- done = true
+-- end
+-- else
+-- local s = stack[nsselector]
+-- if current ~= c or current_selector ~= s then
+-- local data = nsdata[c]
+-- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
+-- current = c
+-- current_selector = s
+-- done = true
+-- end
+-- end
+-- elseif default and inheritance then
+-- if current ~= default then
+-- local data = nsdata[default]
+-- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
+-- current = default
+-- done = true
+-- end
+-- elseif current > 0 then
+-- head = insert_node_before(head,stack,copy_node(nsnone))
+-- current, current_selector, done = 0, 0, true
+-- end
+-- if id == glue_code then -- leader
+-- -- same as *list
+-- local content = stack.leader
+-- if content then
+-- local savedcurrent = current
+-- local ci = content.id
+-- if ci == hlist_code or ci == vlist_code then
+-- -- else we reset inside a box unneeded, okay, the downside is
+-- -- that we trigger color in each repeated box, so there is room
+-- -- for improvement here
+-- current = 0
+-- end
+-- local ok = false
+-- if nstrigger and stack[nstrigger] then
+-- local outer = stack[attribute]
+-- if outer ~= inheritance then
+-- stack.leader, ok = selective(namespace,attribute,content,inheritance,outer)
+-- else
+-- stack.leader, ok = selective(namespace,attribute,content,inheritance,default)
+-- end
+-- else
+-- stack.leader, ok = selective(namespace,attribute,content,inheritance,default)
+-- end
+-- current = savedcurrent
+-- done = done or ok
+-- end
+-- end
+-- elseif id == hlist_code or id == vlist_code then
+-- local content = stack.list
+-- if content then
+-- local ok = false
+-- if nstrigger and stack[nstrigger] then
+-- local outer = stack[attribute]
+-- if outer ~= inheritance then
+-- stack.list, ok = selective(namespace,attribute,content,inheritance,outer)
+-- else
+-- stack.list, ok = selective(namespace,attribute,content,inheritance,default)
+-- end
+-- else
+-- stack.list, ok = selective(namespace,attribute,content,inheritance,default)
+-- end
+-- done = done or ok
+-- end
+-- end
+-- stack = stack.next
+-- end
+-- return head, done
+-- end
+
+-- local function selective(namespace,attribute,head,inheritance,default) -- two attributes
+-- local stack, done = head, false
+
+-- local function check()
+-- local c = stack[attribute]
+-- if c then
+-- if default and c == inheritance then
+-- if current ~= default then
+-- local data = nsdata[default]
+-- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
+-- current = default
+-- done = true
+-- end
+-- else
+-- local s = stack[nsselector]
+-- if current ~= c or current_selector ~= s then
+-- local data = nsdata[c]
+-- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
+-- current = c
+-- current_selector = s
+-- done = true
+-- end
+-- end
+-- elseif default and inheritance then
+-- if current ~= default then
+-- local data = nsdata[default]
+-- head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
+-- current = default
+-- done = true
+-- end
+-- elseif current > 0 then
+-- head = insert_node_before(head,stack,copy_node(nsnone))
+-- current, current_selector, done = 0, 0, true
+-- end
+-- return c
+-- end
+
+-- local function nested(content)
+-- if nstrigger and stack[nstrigger] then
+-- local outer = stack[attribute]
+-- if outer ~= inheritance then
+-- return selective(namespace,attribute,content,inheritance,outer)
+-- else
+-- return selective(namespace,attribute,content,inheritance,default)
+-- end
+-- else
+-- return selective(namespace,attribute,content,inheritance,default)
+-- end
+-- end
+
+-- while stack do
+-- local id = stack.id
+-- if id == glyph_code then
+-- check()
+-- elseif id == glue_code then
+-- local content = stack.leader
+-- if content and check() then
+-- -- local savedcurrent = current
+-- -- local ci = content.id
+-- -- if ci == hlist_code or ci == vlist_code then
+-- -- -- else we reset inside a box unneeded, okay, the downside is
+-- -- -- that we trigger color in each repeated box, so there is room
+-- -- -- for improvement here
+-- -- current = 0
+-- -- end
+
+-- local ok = false
+-- stack.leader, ok = nested(content)
+-- done = done or ok
+
+-- -- current = savedcurrent
+-- end
+-- elseif id == hlist_code or id == vlist_code then
+-- local content = stack.list
+-- if content then
+
+-- local ok = false
+-- stack.list, ok = nested(content)
+-- done = done or ok
+
+-- end
+-- elseif id == rule_code then
+-- if stack.width ~= 0 then
+-- check()
+-- end
+-- end
+-- stack = stack.next
+-- end
+-- return head, done
+-- end
+
+local function selective(namespace,attribute,head,inheritance,default) -- two attributes
+ local stack = head
+ local done = false
+ local check = false
+ local leader = nil
+ while stack do
+ local id = stack.id
+ if id == glyph_code then
+ check = true
+ elseif id == glue_code then
+ leader = stack.leader
+ if leader then
+ check = true
+ end
+ elseif id == hlist_code or id == vlist_code then
+ local content = stack.list
+ if content then
+ local ok = false
+ -- begin nested
+ if nstrigger and stack[nstrigger] then
+ local outer = stack[attribute]
+ if outer ~= inheritance then
+ stack.list, ok = selective(namespace,attribute,content,inheritance,outer)
+ else
+ stack.list, ok = selective(namespace,attribute,content,inheritance,default)
+ end
+ else
+ stack.list, ok = selective(namespace,attribute,content,inheritance,default)
+ end
+ -- end nested
+ done = done or ok
+ end
+ elseif id == rule_code then
+ check = stack.width ~= 0
+ end
+
+ if check then
+ local c = stack[attribute]
+ if c then
+ if default and c == inheritance then
+ if current ~= default then
+ local data = nsdata[default]
+ head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
+ current = default
+ done = true
+ end
+ else
+ local s = stack[nsselector]
+ if current ~= c or current_selector ~= s then
+ local data = nsdata[c]
+ head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
+ current = c
+ current_selector = s
+ done = true
+ end
+ end
+ if leader then
+ local ok = false
+ -- begin nested
+ if nstrigger and stack[nstrigger] then
+ local outer = stack[attribute]
+ if outer ~= inheritance then
+ stack.leader, ok = selective(namespace,attribute,leader,inheritance,outer)
+ else
+ stack.leader, ok = selective(namespace,attribute,leader,inheritance,default)
+ end
+ else
+ stack.leader, ok = selective(namespace,attribute,leader,inheritance,default)
+ end
+ -- end nested
+ done = done or ok
+ leader = false
+ end
+ elseif default and inheritance then
+ if current ~= default then
+ local data = nsdata[default]
+ head = insert_node_before(head,stack,copy_node(data[nsforced or stack[nsselector] or nsselector]))
+ current = default
+ done = true
+ end
+ elseif current > 0 then
+ head = insert_node_before(head,stack,copy_node(nsnone))
+ current, current_selector, done = 0, 0, true
+ end
+ check = false
+ end
+
+ stack = stack.next
+ end
+ return head, done
+end
+
+states.selective = selective
+
+-- Ideally the next one should be merged with the previous but keeping it separate is
+-- safer. We deal with two situations: efficient boxwise (layoutareas) and mixed layers
+-- (as used in the stepper). In the stepper we cannot use the box branch as it involves
+-- paragraph lines and then gets mixed up. A messy business (esp since we want to be
+-- efficient).
+--
+-- Todo: make a better stacker. Keep track (in attribute) about nesting level. Not
+-- entirely trivial and a generic solution is nicer (compares to the exporter).
+
+-- local function stacked(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise
+-- local stack, done = head, false
+-- local current, depth = default or 0, 0
+--
+-- local function check()
+-- local a = stack[attribute]
+-- if a then
+-- if current ~= a then
+-- head = insert_node_before(head,stack,copy_node(nsdata[a]))
+-- depth = depth + 1
+-- current, done = a, true
+-- end
+-- elseif default > 0 then
+-- --
+-- elseif current > 0 then
+-- head = insert_node_before(head,stack,copy_node(nsnone))
+-- depth = depth - 1
+-- current, done = 0, true
+-- end
+-- return a
+-- end
+--
+-- while stack do
+-- local id = stack.id
+-- if id == glyph_code then
+-- check()
+-- elseif id == glue_code then
+-- local content = stack.leader
+-- if content and check() then
+-- local ok = false
+-- stack.leader, ok = stacked(namespace,attribute,content,current)
+-- done = done or ok
+-- end
+-- elseif id == hlist_code or id == vlist_code then
+-- local content = stack.list
+-- if content then
+-- -- the problem is that broken lines get the attribute, which can be a later one
+-- if nslistwise then
+-- local a = stack[attribute]
+-- if a and current ~= a and nslistwise[a] then -- viewerlayer / needs checking, see below
+-- local p = current
+-- current, done = a, true
+-- head = insert_node_before(head,stack,copy_node(nsdata[a]))
+-- stack.list = stacked(namespace,attribute,content,current)
+-- head, stack = insert_node_after(head,stack,copy_node(nsnone))
+-- current = p
+-- else
+-- local ok = false
+-- stack.list, ok = stacked(namespace,attribute,content,current)
+-- done = done or ok
+-- end
+-- else
+-- local ok = false
+-- stack.list, ok = stacked(namespace,attribute,content,current)
+-- done = done or ok
+-- end
+-- end
+-- elseif id == rule_code then
+-- if stack.width ~= 0 then
+-- check()
+-- end
+-- end
+-- stack = stack.next
+-- end
+-- while depth > 0 do
+-- head = insert_node_after(head,stack,copy_node(nsnone))
+-- depth = depth - 1
+-- end
+-- return head, done
+-- end
+
+local function stacked(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise
+ local stack = head
+ local done = false
+ local current = default or 0
+ local depth = 0
+ local check = false
+ local leader = false
+ while stack do
+ local id = stack.id
+ if id == glyph_code then
+ check = true
+ elseif id == glue_code then
+ leader = stack.leader
+ if leader then
+ check = true
+ end
+ elseif id == hlist_code or id == vlist_code then
+ local content = stack.list
+ if content then
+ -- the problem is that broken lines get the attribute, which can be a later one
+ if nslistwise then
+ local a = stack[attribute]
+ if a and current ~= a and nslistwise[a] then -- viewerlayer / needs checking, see below
+ local p = current
+ current, done = a, true
+ head = insert_node_before(head,stack,copy_node(nsdata[a]))
+ stack.list = stacked(namespace,attribute,content,current)
+ head, stack = insert_node_after(head,stack,copy_node(nsnone))
+ current = p
+ else
+ local ok = false
+ stack.list, ok = stacked(namespace,attribute,content,current)
+ done = done or ok
+ end
+ else
+ local ok = false
+ stack.list, ok = stacked(namespace,attribute,content,current)
+ done = done or ok
+ end
+ end
+ elseif id == rule_code then
+ check = stack.width ~= 0
+ end
+
+ if check then
+ local a = stack[attribute]
+ if a then
+ if current ~= a then
+ head = insert_node_before(head,stack,copy_node(nsdata[a]))
+ depth = depth + 1
+ current, done = a, true
+ end
+ if leader then
+ local ok = false
+ stack.leader, ok = stacked(namespace,attribute,leader,current)
+ done = done or ok
+ leader = false
+ end
+ elseif default > 0 then
+ --
+ elseif current > 0 then
+ head = insert_node_before(head,stack,copy_node(nsnone))
+ depth = depth - 1
+ current, done = 0, true
+ end
+ check = false
+ end
+
+ stack = stack.next
+ end
+ while depth > 0 do
+ head = insert_node_after(head,stack,copy_node(nsnone))
+ depth = depth - 1
+ end
+ return head, done
+end
+
+states.stacked = stacked
+
+-- experimental
+
+-- local function stacker(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise
+-- nsbegin()
+-- local current, previous, done, okay = head, head, false, false
+-- local attrib = default or unsetvalue
+--
+-- local function check()
+-- local a = current[attribute] or unsetvalue
+-- if a ~= attrib then
+-- local n = nsstep(a)
+-- if n then
+-- -- !!!! TEST CODE !!!!
+-- -- head = insert_node_before(head,current,copy_node(nsdata[tonumber(n)])) -- a
+-- head = insert_node_before(head,current,n) -- a
+-- end
+-- attrib, done, okay = a, true, true
+-- end
+-- return a
+-- end
+--
+-- while current do
+-- local id = current.id
+-- if id == glyph_code then
+-- check()
+-- elseif id == glue_code then
+-- local content = current.leader
+-- if content and check() then
+-- -- tricky as a leader has to be a list so we cannot inject before
+-- local _, ok = stacker(namespace,attribute,content,attrib)
+-- done = done or ok
+-- end
+-- elseif id == hlist_code or id == vlist_code then
+-- local content = current.list
+-- if not content then
+-- -- skip
+-- elseif nslistwise then
+-- local a = current[attribute]
+-- if a and attrib ~= a and nslistwise[a] then -- viewerlayer
+-- done = true
+-- head = insert_node_before(head,current,copy_node(nsdata[a]))
+-- current.list = stacker(namespace,attribute,content,a)
+-- head, current = insert_node_after(head,current,copy_node(nsnone))
+-- else
+-- local ok = false
+-- current.list, ok = stacker(namespace,attribute,content,attrib)
+-- done = done or ok
+-- end
+-- else
+-- local ok = false
+-- current.list, ok = stacker(namespace,attribute,content,default)
+-- done = done or ok
+-- end
+-- elseif id == rule_code then
+-- if current.width ~= 0 then
+-- check()
+-- end
+-- end
+-- previous = current
+-- current = current.next
+-- end
+-- if okay then
+-- local n = nsend()
+-- if n then
+-- -- !!!! TEST CODE !!!!
+-- -- head = insert_node_after(head,previous,copy_node(nsdata[tostring(n)]))
+-- head = insert_node_after(head,previous,n)
+-- end
+-- end
+-- return head, done
+-- end
+
+local function stacker(namespace,attribute,head,default) -- no triggering, no inheritance, but list-wise
+ nsbegin()
+ local current = head
+ local previous = head
+ local done = false
+ local okay = false
+ local attrib = default or unsetvalue
+ local check = false
+ local leader = false
+ while current do
+ local id = current.id
+ if id == glyph_code then
+ check = true
+ elseif id == glue_code then
+ leader = current.leader
+ if leader then
+ check = true
+ end
+ elseif id == hlist_code or id == vlist_code then
+ local content = current.list
+ if not content then
+ -- skip
+ elseif nslistwise then
+ local a = current[attribute]
+ if a and attrib ~= a and nslistwise[a] then -- viewerlayer
+ done = true
+ head = insert_node_before(head,current,copy_node(nsdata[a]))
+ current.list = stacker(namespace,attribute,content,a)
+ head, current = insert_node_after(head,current,copy_node(nsnone))
+ else
+ local ok = false
+ current.list, ok = stacker(namespace,attribute,content,attrib)
+ done = done or ok
+ end
+ else
+ local ok = false
+ current.list, ok = stacker(namespace,attribute,content,default)
+ done = done or ok
+ end
+ elseif id == rule_code then
+ check = current.width ~= 0
+ end
+
+ if check then
+ local a = current[attribute] or unsetvalue
+ if a ~= attrib then
+ local n = nsstep(a)
+ if n then
+ -- !!!! TEST CODE !!!!
+ -- head = insert_node_before(head,current,copy_node(nsdata[tonumber(n)])) -- a
+ head = insert_node_before(head,current,n) -- a
+ end
+ attrib, done, okay = a, true, true
+ if leader then
+ -- tricky as a leader has to be a list so we cannot inject before
+ local _, ok = stacker(namespace,attribute,leader,attrib)
+ done = done or ok
+ leader = false
+ end
+ end
+ check = false
+ end
+
+ previous = current
+ current = current.next
+ end
+ if okay then
+ local n = nsend()
+ if n then
+ -- !!!! TEST CODE !!!!
+ -- head = insert_node_after(head,previous,copy_node(nsdata[tostring(n)]))
+ head = insert_node_after(head,previous,n)
+ end
+ end
+ return head, done
+end
+
+states.stacker = stacker
+
+-- -- --
+
+statistics.register("attribute processing time", function()
+ return statistics.elapsedseconds(attributes,"front- and backend")
+end)
diff --git a/tex/context/base/node-fnt.lua b/tex/context/base/node-fnt.lua
index 54359117e..edc1c990e 100644
--- a/tex/context/base/node-fnt.lua
+++ b/tex/context/base/node-fnt.lua
@@ -1,226 +1,226 @@
-if not modules then modules = { } end modules ['node-fnt'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
-if not context then os.exit() end -- generic function in node-dum
-
-local next, type = next, type
-local concat, keys = table.concat, table.keys
-
-local nodes, node, fonts = nodes, node, fonts
-
-local trace_characters = false trackers.register("nodes.characters", function(v) trace_characters = v end)
-local trace_fontrun = false trackers.register("nodes.fontrun", function(v) trace_fontrun = v end)
-
-local report_fonts = logs.reporter("fonts","processing")
-
-local fonthashes = fonts.hashes
-local fontdata = fonthashes.identifiers
-
-local otf = fonts.handlers.otf
-
-local traverse_id = node.traverse_id
-local starttiming = statistics.starttiming
-local stoptiming = statistics.stoptiming
-local nodecodes = nodes.nodecodes
-local handlers = nodes.handlers
-
-local glyph_code = nodecodes.glyph
-
-local setmetatableindex = table.setmetatableindex
-
--- some tests with using an array of dynamics[id] and processes[id] demonstrated
--- that there was nothing to gain (unless we also optimize other parts)
---
--- maybe getting rid of the intermediate shared can save some time
-
--- potential speedup: check for subtype < 256 so that we can remove that test
--- elsewhere, danger: injected nodes will not be dealt with but that does not
--- happen often; we could consider processing sublists but that might need more
--- checking later on; the current approach also permits variants
-
-local run = 0
-
-local setfontdynamics = { }
-local fontprocesses = { }
-
-setmetatableindex(setfontdynamics, function(t,font)
- local tfmdata = fontdata[font]
- local shared = tfmdata.shared
- local v = shared and shared.dynamics and otf.setdynamics or false
- t[font] = v
- return v
-end)
-
-setmetatableindex(fontprocesses, function(t,font)
- local tfmdata = fontdata[font]
- local shared = tfmdata.shared -- we need to check shared, only when same features
- local processes = shared and shared.processes
- if processes and #processes > 0 then
- t[font] = processes
- return processes
- else
- t[font] = false
- return false
- end
-end)
-
-fonts.hashes.setdynamics = setfontdynamics
-fonts.hashes.processes = fontprocesses
-
-function handlers.characters(head)
- -- either next or not, but definitely not an already processed list
- starttiming(nodes)
- local usedfonts, attrfonts, done = { }, { }, false
- local a, u, prevfont, prevattr = 0, 0, nil, 0
- if trace_fontrun then
- run = run + 1
- report_fonts()
- report_fonts("checking node list, run %s",run)
- report_fonts()
- local n = head
- while n do
- local id = n.id
- if id == glyph_code then
- local font = n.font
- local attr = n[0] or 0
- report_fonts("font %03i, dynamic %03i, glyph %s",font,attr,utf.char(n.char))
- else
- report_fonts("[%s]",nodecodes[n.id])
- end
- n = n.next
- end
- end
- for n in traverse_id(glyph_code,head) do
- -- if n.subtype<256 then -- all are 1
- local font = n.font
- local attr = n[0] or 0 -- zero attribute is reserved for fonts in context
- if font ~= prevfont or attr ~= prevattr then
- if attr > 0 then
- local used = attrfonts[font]
- if not used then
- used = { }
- attrfonts[font] = used
- end
- if not used[attr] then
- local sd = setfontdynamics[font]
- if sd then -- always true ?
- local d = sd(font,attr) -- can we cache this one?
- if d then
- used[attr] = d
- a = a + 1
- else
- -- can't happen ... otherwise best use nil/false distinction
- end
- end
- end
- else
- local used = usedfonts[font]
- if not used then
- local fp = fontprocesses[font]
- if fp then
- usedfonts[font] = fp
- u = u + 1
- else
- -- can't happen ... otherwise best use nil/false distinction
- end
- end
- end
- prevfont = font
- prevattr = attr
- end
- -- end
- end
- if trace_fontrun then
- report_fonts()
- report_fonts("statics : %s",(u > 0 and concat(keys(usedfonts)," ")) or "none")
- report_fonts("dynamics: %s",(a > 0 and concat(keys(attrfonts)," ")) or "none")
- report_fonts()
- end
- if u == 0 then
- -- skip
- elseif u == 1 then
- local font, processors = next(usedfonts)
- local n = #processors
- if n > 0 then
- local h, d = processors[1](head,font,0)
- head = h or head
- done = done or d
- if n > 1 then
- for i=2,n do
- local h, d = processors[i](head,font,0)
- head = h or head
- done = done or d
- end
- end
- end
- else
- for font, processors in next, usedfonts do
- local n = #processors
- local h, d = processors[1](head,font,0)
- head = h or head
- done = done or d
- if n > 1 then
- for i=2,n do
- local h, d = processors[i](head,font,0)
- head = h or head
- done = done or d
- end
- end
- end
- end
- if a == 0 then
- -- skip
- elseif a == 1 then
- local font, dynamics = next(attrfonts)
- for attribute, processors in next, dynamics do -- attr can switch in between
- local n = #processors
- if n == 0 then
- report_fonts("no processors associated with dynamic %s",attribute)
- else
- local h, d = processors[1](head,font,attribute)
- head = h or head
- done = done or d
- if n > 1 then
- for i=2,n do
- local h, d = processors[i](head,font,attribute)
- head = h or head
- done = done or d
- end
- end
- end
- end
- else
- for font, dynamics in next, attrfonts do
- for attribute, processors in next, dynamics do -- attr can switch in between
- local n = #processors
- if n == 0 then
- report_fonts("no processors associated with dynamic %s",attribute)
- else
- local h, d = processors[1](head,font,attribute)
- head = h or head
- done = done or d
- if n > 1 then
- for i=2,n do
- local h, d = processors[i](head,font,attribute)
- head = h or head
- done = done or d
- end
- end
- end
- end
- end
- end
- stoptiming(nodes)
- if trace_characters then
- nodes.report(head,done)
- end
- return head, true
-end
-
-handlers.protectglyphs = node.protect_glyphs
-handlers.unprotectglyphs = node.unprotect_glyphs
+if not modules then modules = { } end modules ['node-fnt'] = {
+ version = 1.001,
+ comment = "companion to font-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+if not context then os.exit() end -- generic function in node-dum
+
+local next, type = next, type
+local concat, keys = table.concat, table.keys
+
+local nodes, node, fonts = nodes, node, fonts
+
+local trace_characters = false trackers.register("nodes.characters", function(v) trace_characters = v end)
+local trace_fontrun = false trackers.register("nodes.fontrun", function(v) trace_fontrun = v end)
+
+local report_fonts = logs.reporter("fonts","processing")
+
+local fonthashes = fonts.hashes
+local fontdata = fonthashes.identifiers
+
+local otf = fonts.handlers.otf
+
+local traverse_id = node.traverse_id
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+local nodecodes = nodes.nodecodes
+local handlers = nodes.handlers
+
+local glyph_code = nodecodes.glyph
+
+local setmetatableindex = table.setmetatableindex
+
+-- some tests with using an array of dynamics[id] and processes[id] demonstrated
+-- that there was nothing to gain (unless we also optimize other parts)
+--
+-- maybe getting rid of the intermediate shared can save some time
+
+-- potential speedup: check for subtype < 256 so that we can remove that test
+-- elsewhere, danger: injected nodes will not be dealt with but that does not
+-- happen often; we could consider processing sublists but that might need more
+-- checking later on; the current approach also permits variants
+
+local run = 0
+
+local setfontdynamics = { }
+local fontprocesses = { }
+
+setmetatableindex(setfontdynamics, function(t,font)
+ local tfmdata = fontdata[font]
+ local shared = tfmdata.shared
+ local v = shared and shared.dynamics and otf.setdynamics or false
+ t[font] = v
+ return v
+end)
+
+setmetatableindex(fontprocesses, function(t,font)
+ local tfmdata = fontdata[font]
+ local shared = tfmdata.shared -- we need to check shared, only when same features
+ local processes = shared and shared.processes
+ if processes and #processes > 0 then
+ t[font] = processes
+ return processes
+ else
+ t[font] = false
+ return false
+ end
+end)
+
+fonts.hashes.setdynamics = setfontdynamics
+fonts.hashes.processes = fontprocesses
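+
+-- both tables above are lazily filled: the first lookup for a font id runs the
+-- __index handler, computes the value and stores it, so later lookups are plain
+-- table accesses; a quick sketch (the font id 42 is just an example):
+--
+-- local p = fontprocesses[42] -- first hit: derived from fontdata[42].shared and cached
+-- local q = fontprocesses[42] -- second hit: plain access, no metamethod involved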
+
+function handlers.characters(head)
+ -- either next or not, but definitely not an already processed list
+ starttiming(nodes)
+ local usedfonts, attrfonts, done = { }, { }, false
+ local a, u, prevfont, prevattr = 0, 0, nil, 0
+ if trace_fontrun then
+ run = run + 1
+ report_fonts()
+ report_fonts("checking node list, run %s",run)
+ report_fonts()
+ local n = head
+ while n do
+ local id = n.id
+ if id == glyph_code then
+ local font = n.font
+ local attr = n[0] or 0
+ report_fonts("font %03i, dynamic %03i, glyph %s",font,attr,utf.char(n.char))
+ else
+ report_fonts("[%s]",nodecodes[n.id])
+ end
+ n = n.next
+ end
+ end
+ for n in traverse_id(glyph_code,head) do
+ -- if n.subtype<256 then -- all are 1
+ local font = n.font
+ local attr = n[0] or 0 -- zero attribute is reserved for fonts in context
+ if font ~= prevfont or attr ~= prevattr then
+ if attr > 0 then
+ local used = attrfonts[font]
+ if not used then
+ used = { }
+ attrfonts[font] = used
+ end
+ if not used[attr] then
+ local sd = setfontdynamics[font]
+ if sd then -- always true ?
+ local d = sd(font,attr) -- can we cache this one?
+ if d then
+ used[attr] = d
+ a = a + 1
+ else
+ -- can't happen ... otherwise best use nil/false distinction
+ end
+ end
+ end
+ else
+ local used = usedfonts[font]
+ if not used then
+ local fp = fontprocesses[font]
+ if fp then
+ usedfonts[font] = fp
+ u = u + 1
+ else
+ -- can't happen ... otherwise best use nil/false distinction
+ end
+ end
+ end
+ prevfont = font
+ prevattr = attr
+ end
+ -- end
+ end
+ if trace_fontrun then
+ report_fonts()
+ report_fonts("statics : %s",(u > 0 and concat(keys(usedfonts)," ")) or "none")
+ report_fonts("dynamics: %s",(a > 0 and concat(keys(attrfonts)," ")) or "none")
+ report_fonts()
+ end
+ if u == 0 then
+ -- skip
+ elseif u == 1 then
+ local font, processors = next(usedfonts)
+ local n = #processors
+ if n > 0 then
+ local h, d = processors[1](head,font,0)
+ head = h or head
+ done = done or d
+ if n > 1 then
+ for i=2,n do
+ local h, d = processors[i](head,font,0)
+ head = h or head
+ done = done or d
+ end
+ end
+ end
+ else
+ for font, processors in next, usedfonts do
+ local n = #processors
+ local h, d = processors[1](head,font,0)
+ head = h or head
+ done = done or d
+ if n > 1 then
+ for i=2,n do
+ local h, d = processors[i](head,font,0)
+ head = h or head
+ done = done or d
+ end
+ end
+ end
+ end
+ if a == 0 then
+ -- skip
+ elseif a == 1 then
+ local font, dynamics = next(attrfonts)
+ for attribute, processors in next, dynamics do -- attr can switch in between
+ local n = #processors
+ if n == 0 then
+ report_fonts("no processors associated with dynamic %s",attribute)
+ else
+ local h, d = processors[1](head,font,attribute)
+ head = h or head
+ done = done or d
+ if n > 1 then
+ for i=2,n do
+ local h, d = processors[i](head,font,attribute)
+ head = h or head
+ done = done or d
+ end
+ end
+ end
+ end
+ else
+ for font, dynamics in next, attrfonts do
+ for attribute, processors in next, dynamics do -- attr can switch in between
+ local n = #processors
+ if n == 0 then
+ report_fonts("no processors associated with dynamic %s",attribute)
+ else
+ local h, d = processors[1](head,font,attribute)
+ head = h or head
+ done = done or d
+ if n > 1 then
+ for i=2,n do
+ local h, d = processors[i](head,font,attribute)
+ head = h or head
+ done = done or d
+ end
+ end
+ end
+ end
+ end
+ end
+ stoptiming(nodes)
+ if trace_characters then
+ nodes.report(head,done)
+ end
+ return head, true
+end
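+
+-- each entry in usedfonts/attrfonts above is a list of processors that follow the
+-- calling convention used in this function: processor(head,font,attr) returns a
+-- (possibly nil) head plus a done flag; a do-nothing sketch of such a processor:
+--
+-- local function dummyprocessor(head,font,attr)
+--     -- inspect or rewrite glyphs of this font here
+--     return head, false
+-- end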
+
+handlers.protectglyphs = node.protect_glyphs
+handlers.unprotectglyphs = node.unprotect_glyphs
diff --git a/tex/context/base/node-ini.lua b/tex/context/base/node-ini.lua
index 5a3986c3a..1de6fbddd 100644
--- a/tex/context/base/node-ini.lua
+++ b/tex/context/base/node-ini.lua
@@ -1,421 +1,421 @@
-if not modules then modules = { } end modules ['node-ini'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-Most of the code that had accumulated here is now separated in
-modules.
---ldx]]--
-
--- this module is being reconstructed
-
-local next, type = next, type
-local format, match, gsub = string.format, string.match, string.gsub
-local concat, remove = table.concat, table.remove
-local sortedhash, sortedkeys, swapped, tohash = table.sortedhash, table.sortedkeys, table.swapped, table.tohash
-local utfchar = utf.char
-local lpegmatch = lpeg.match
-local formatcolumns = utilities.formatters.formatcolumns
-
---[[ldx--
-Access to nodes is what gives LuaTeX its power. Here we
-implement a few helper functions. These functions are rather optimized.
---ldx]]--
-
---[[ldx--
-When manipulating node lists in ConTeXt, we will remove
-nodes and insert new ones. While node access was implemented, we did
-quite some experiments in order to find out if manipulating nodes
-in Lua was feasible from the perspective of performance.
-
-First of all, we noticed that the bottleneck is more with excessive
-callbacks (some get called very often) and the conversion from and to
-TeX's datastructures. However, at the end, we
-found that inserting and deleting nodes in a table could become a
-bottleneck.
-
-This resulted in two special situations in passing nodes back to
-TeX: a table entry with value false is ignored,
-and when instead of a table true is returned, the
-original table is used.
-
-Insertion is handled (at least in ConTeXt) as follows. When
-we need to insert a node at a certain position, we change the node at
-that position by a dummy node, tagged inline, which itself
-has_attribute the original node and one or more new nodes. Before we pass
-back the list we collapse the list. Of course collapsing could be built
-into the TeX engine, but this is a not so natural extension.
-
-When we collapse (something that we only do when really needed), we
-also ignore the empty nodes. [This is obsolete!]
---ldx]]--
-
-local traverse = node.traverse
-local traverse_id = node.traverse_id
-local free_node = node.free
-local remove_node = node.remove
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local node_fields = node.fields
-
-local allocate = utilities.storage.allocate
-
-nodes = nodes or { }
-local nodes = nodes
-
-nodes.handlers = nodes.handlers or { }
-
--- there will be more of this:
-
-local skipcodes = allocate {
- [ 0] = "userskip",
- [ 1] = "lineskip",
- [ 2] = "baselineskip",
- [ 3] = "parskip",
- [ 4] = "abovedisplayskip",
- [ 5] = "belowdisplayskip",
- [ 6] = "abovedisplayshortskip",
- [ 7] = "belowdisplayshortskip",
- [ 8] = "leftskip",
- [ 9] = "rightskip",
- [ 10] = "topskip",
- [ 11] = "splittopskip",
- [ 12] = "tabskip",
- [ 13] = "spaceskip",
- [ 14] = "xspaceskip",
- [ 15] = "parfillskip",
- [ 16] = "thinmuskip",
- [ 17] = "medmuskip",
- [ 18] = "thickmuskip",
- [100] = "leaders",
- [101] = "cleaders",
- [102] = "xleaders",
- [103] = "gleaders",
-}
-
-local penaltycodes = allocate { -- unfortunately not used
- [ 0] = "userpenalty",
-}
-
-table.setmetatableindex(penaltycodes,function(t,k) return "userpenalty" end) -- not used anyway
-
-local noadcodes = allocate {
- [ 0] = "ord",
- [ 1] = "opdisplaylimits",
- [ 2] = "oplimits",
- [ 3] = "opnolimits",
- [ 4] = "bin",
- [ 5] = "rel",
- [ 6] = "open",
- [ 7] = "close",
- [ 8] = "punct",
- [ 9] = "inner",
- [10] = "under",
- [11] = "over",
- [12] = "vcenter",
-}
-
-local listcodes = allocate {
- [ 0] = "unknown",
- [ 1] = "line",
- [ 2] = "box",
- [ 3] = "indent",
- [ 4] = "alignment", -- row or column
- [ 5] = "cell",
-}
-
-local glyphcodes = allocate {
- [0] = "character",
- [1] = "glyph",
- [2] = "ligature",
- [3] = "ghost",
- [4] = "left",
- [5] = "right",
-}
-
-local kerncodes = allocate {
- [0] = "fontkern",
- [1] = "userkern",
- [2] = "accentkern",
-}
-
-local mathcodes = allocate {
- [0] = "beginmath",
- [1] = "endmath",
-}
-
-local fillcodes = allocate {
- [0] = "stretch",
- [1] = "fi",
- [2] = "fil",
- [3] = "fill",
- [4] = "filll",
-}
-
-local margincodes = allocate {
- [0] = "left",
- [1] = "right",
-}
-
-local disccodes = allocate {
- [0] = "discretionary", -- \discretionary
- [1] = "explicit", -- \-
- [2] = "automatic", -- following a -
- [3] = "regular", -- simple
- [4] = "first", -- hard first item
- [5] = "second", -- hard second item
-}
-
-local function simplified(t)
- local r = { }
- for k, v in next, t do
- r[k] = gsub(v,"_","")
- end
- return r
-end
-
-local nodecodes = simplified(node.types())
-local whatcodes = simplified(node.whatsits())
-
-skipcodes = allocate(swapped(skipcodes,skipcodes))
-noadcodes = allocate(swapped(noadcodes,noadcodes))
-nodecodes = allocate(swapped(nodecodes,nodecodes))
-whatcodes = allocate(swapped(whatcodes,whatcodes))
-listcodes = allocate(swapped(listcodes,listcodes))
-glyphcodes = allocate(swapped(glyphcodes,glyphcodes))
-kerncodes = allocate(swapped(kerncodes,kerncodes))
-penaltycodes = allocate(swapped(penaltycodes,penaltycodes))
-mathcodes = allocate(swapped(mathcodes,mathcodes))
-fillcodes = allocate(swapped(fillcodes,fillcodes))
-margincodes = allocate(swapped(margincodes,margincodes))
-disccodes = allocate(swapped(disccodes,disccodes))
-
-nodes.skipcodes = skipcodes nodes.gluecodes = skipcodes -- more official
-nodes.noadcodes = noadcodes
-nodes.nodecodes = nodecodes
-nodes.whatcodes = whatcodes nodes.whatsitcodes = whatcodes -- more official
-nodes.listcodes = listcodes
-nodes.glyphcodes = glyphcodes
-nodes.kerncodes = kerncodes
-nodes.penaltycodes = penaltycodes
-nodes.mathcodes = mathcodes
-nodes.fillcodes = fillcodes
-nodes.margincodes = margincodes
-nodes.disccodes = disccodes nodes.discretionarycodes = disccodes
-
-listcodes.row = listcodes.alignment
-listcodes.column = listcodes.alignment
-
-kerncodes.italiccorrection = kerncodes.userkern
-kerncodes.kerning = kerncodes.fontkern
-
-nodes.codes = allocate { -- mostly for listing
- glue = skipcodes,
- noad = noadcodes,
- node = nodecodes,
- hlist = listcodes,
- vlist = listcodes,
- glyph = glyphcodes,
- kern = kerncodes,
- penalty = penaltycodes,
- math = mathcodes,
- fill = fillcodes,
- margin = margincodes,
- disc = disccodes,
- whatsit = whatcodes,
-}
-
-local report_codes = logs.reporter("nodes","codes")
-
-function nodes.showcodes()
- local t = { }
- for name, codes in sortedhash(nodes.codes) do
- local sorted = sortedkeys(codes)
- for i=1,#sorted do
- local s = sorted[i]
- if type(s) ~= "number" then
- t[#t+1] = { name, s, codes[s] }
- end
- end
- end
- formatcolumns(t)
- for k=1,#t do
- report_codes (t[k])
- end
-end
-
-local whatsit_node = nodecodes.whatsit
-
-local messyhack = tohash { -- temporary solution
- nodecodes.attributelist,
- nodecodes.attribute,
- nodecodes.gluespec,
- nodecodes.action,
-}
-
-function nodes.fields(n)
- local id = n.id
- if id == whatsit_node then
- return node_fields(id,n.subtype)
- else
- local t = node_fields(id)
- if messyhack[id] then
- for i=1,#t do
- if t[i] == "subtype" then
- remove(t,i)
- break
- end
- end
- end
- return t
- end
-end
-
-trackers.register("system.showcodes", nodes.showcodes)
-
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local glue_code = nodecodes.glue
-
--- if t.id == glue_code then
--- local s = t.spec
--- print(t)
--- print(s,s and s.writable)
--- if s and s.writable then
--- free_node(s)
--- end
--- t.spec = nil
--- end
-
-local function remove(head, current, free_too)
- local t = current
- head, current = remove_node(head,current)
- if t then
- if free_too then
- free_node(t)
- t = nil
- else
- t.next = nil
- t.prev = nil
- end
- end
- return head, current, t
-end
-
-nodes.remove = remove
-
-function nodes.delete(head,current)
- return remove(head,current,true)
-end
-
-nodes.before = insert_node_before
-nodes.after = insert_node_after
-
--- we need to test this, as it might be fixed now
-
-function nodes.before(h,c,n)
- if c then
- if c == h then
- n.next = h
- n.prev = nil
- h.prev = n
- else
- local cp = c.prev
- n.next = c
- n.prev = cp
- if cp then
- cp.next = n
- end
- c.prev = n
- return h, n
- end
- end
- return n, n
-end
-
-function nodes.after(h,c,n)
- if c then
- local cn = c.next
- if cn then
- n.next = cn
- cn.prev = n
- else
- n.next = nil
- end
- c.next = n
- n.prev = c
- return h, n
- end
- return n, n
-end
-
--- local h, c = nodes.replace(head,current,new)
--- local c = nodes.replace(false,current,new)
--- local c = nodes.replace(current,new)
-
-function nodes.replace(head,current,new) -- no head returned if false
- if not new then
- head, current, new = false, head, current
- end
- local prev, next = current.prev, current.next
- if next then
- new.next = next
- next.prev = new
- end
- if prev then
- new.prev = prev
- prev.next = new
- end
- if head then
- if head == current then
- head = new
- end
- free_node(current)
- return head, new
- else
- free_node(current)
- return new
- end
-end
-
--- will move
-
-local function count(stack,flat)
- local n = 0
- while stack do
- local id = stack.id
- if not flat and (id == hlist_code or id == vlist_code) then
- local list = stack.list
- if list then
- n = n + 1 + count(list) -- self counts too
- else
- n = n + 1
- end
- else
- n = n + 1
- end
- stack = stack.next
- end
- return n
-end
-
-nodes.count = count
-
-local left, space = lpeg.P("<"), lpeg.P(" ")
-
-local reference = left * (1-left)^0 * left * space^0 * lpeg.C((1-space)^0)
-
-function nodes.reference(n)
- return lpegmatch(reference,tostring(n))
-end
-
-if not node.next then
-
- function node.next(n) return n and n.next end
- function node.prev(n) return n and n.prev end
-
-end
+if not modules then modules = { } end modules ['node-ini'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[ldx--
+Most of the code that had accumulated here is now separated in
+modules.
+--ldx]]--
+
+-- this module is being reconstructed
+
+local next, type = next, type
+local format, match, gsub = string.format, string.match, string.gsub
+local concat, remove = table.concat, table.remove
+local sortedhash, sortedkeys, swapped, tohash = table.sortedhash, table.sortedkeys, table.swapped, table.tohash
+local utfchar = utf.char
+local lpegmatch = lpeg.match
+local formatcolumns = utilities.formatters.formatcolumns
+
+--[[ldx--
+Access to nodes is what gives LuaTeX its power. Here we implement a
+few helper functions. These functions are rather optimized.
+--ldx]]--
+
+--[[ldx--
+When manipulating node lists in ConTeXt, we will remove nodes and
+insert new ones. While node access was implemented, we did quite some
+experiments in order to find out if manipulating nodes in Lua was
+feasible from the perspective of performance.
+
+First of all, we noticed that the bottleneck is more with excessive
+callbacks (some get called very often) and the conversion from and to
+TeX's datastructures. However, at the end, we found that inserting and
+deleting nodes in a table could become a bottleneck.
+
+This resulted in two special situations in passing nodes back to TeX:
+a table entry with value false is ignored, and when instead of a table
+true is returned, the original table is used.
+
+Insertion is handled (at least in ConTeXt) as follows. When we need to
+insert a node at a certain position, we replace the node at that
+position by a dummy node, tagged inline, which itself carries (as an
+attribute) the original node and one or more new nodes. Before we pass
+back the list we collapse the list. Of course collapsing could be built
+into the TeX engine, but this is not so natural an extension.
+
+When we collapse (something that we only do when really needed), we
+also ignore the empty nodes. [This is obsolete!]
+--ldx]]--
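+
+-- A minimal sketch of a list filter that follows the convention described
+-- above (the function name is hypothetical, not part of this module):
+--
+--   local function myfilter(head,groupcode)
+--       local done = false
+--       -- ... inspect or rebuild the list ...
+--       return done and head or true -- true: keep the original list
+--   end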
+
+local traverse = node.traverse
+local traverse_id = node.traverse_id
+local free_node = node.free
+local remove_node = node.remove
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local node_fields = node.fields
+
+local allocate = utilities.storage.allocate
+
+nodes = nodes or { }
+local nodes = nodes
+
+nodes.handlers = nodes.handlers or { }
+
+-- there will be more of this:
+
+local skipcodes = allocate {
+ [ 0] = "userskip",
+ [ 1] = "lineskip",
+ [ 2] = "baselineskip",
+ [ 3] = "parskip",
+ [ 4] = "abovedisplayskip",
+ [ 5] = "belowdisplayskip",
+ [ 6] = "abovedisplayshortskip",
+ [ 7] = "belowdisplayshortskip",
+ [ 8] = "leftskip",
+ [ 9] = "rightskip",
+ [ 10] = "topskip",
+ [ 11] = "splittopskip",
+ [ 12] = "tabskip",
+ [ 13] = "spaceskip",
+ [ 14] = "xspaceskip",
+ [ 15] = "parfillskip",
+ [ 16] = "thinmuskip",
+ [ 17] = "medmuskip",
+ [ 18] = "thickmuskip",
+ [100] = "leaders",
+ [101] = "cleaders",
+ [102] = "xleaders",
+ [103] = "gleaders",
+}
+
+local penaltycodes = allocate { -- unfortunately not used
+ [ 0] = "userpenalty",
+}
+
+table.setmetatableindex(penaltycodes,function(t,k) return "userpenalty" end) -- not used anyway
+
+local noadcodes = allocate {
+ [ 0] = "ord",
+ [ 1] = "opdisplaylimits",
+ [ 2] = "oplimits",
+ [ 3] = "opnolimits",
+ [ 4] = "bin",
+ [ 5] = "rel",
+ [ 6] = "open",
+ [ 7] = "close",
+ [ 8] = "punct",
+ [ 9] = "inner",
+ [10] = "under",
+ [11] = "over",
+ [12] = "vcenter",
+}
+
+local listcodes = allocate {
+ [ 0] = "unknown",
+ [ 1] = "line",
+ [ 2] = "box",
+ [ 3] = "indent",
+ [ 4] = "alignment", -- row or column
+ [ 5] = "cell",
+}
+
+local glyphcodes = allocate {
+ [0] = "character",
+ [1] = "glyph",
+ [2] = "ligature",
+ [3] = "ghost",
+ [4] = "left",
+ [5] = "right",
+}
+
+local kerncodes = allocate {
+ [0] = "fontkern",
+ [1] = "userkern",
+ [2] = "accentkern",
+}
+
+local mathcodes = allocate {
+ [0] = "beginmath",
+ [1] = "endmath",
+}
+
+local fillcodes = allocate {
+ [0] = "stretch",
+ [1] = "fi",
+ [2] = "fil",
+ [3] = "fill",
+ [4] = "filll",
+}
+
+local margincodes = allocate {
+ [0] = "left",
+ [1] = "right",
+}
+
+local disccodes = allocate {
+ [0] = "discretionary", -- \discretionary
+ [1] = "explicit", -- \-
+ [2] = "automatic", -- following a -
+ [3] = "regular", -- simple
+ [4] = "first", -- hard first item
+ [5] = "second", -- hard second item
+}
+
+local function simplified(t)
+ local r = { }
+ for k, v in next, t do
+ r[k] = gsub(v,"_","")
+ end
+ return r
+end
+
+local nodecodes = simplified(node.types())
+local whatcodes = simplified(node.whatsits())
+
+skipcodes = allocate(swapped(skipcodes,skipcodes))
+noadcodes = allocate(swapped(noadcodes,noadcodes))
+nodecodes = allocate(swapped(nodecodes,nodecodes))
+whatcodes = allocate(swapped(whatcodes,whatcodes))
+listcodes = allocate(swapped(listcodes,listcodes))
+glyphcodes = allocate(swapped(glyphcodes,glyphcodes))
+kerncodes = allocate(swapped(kerncodes,kerncodes))
+penaltycodes = allocate(swapped(penaltycodes,penaltycodes))
+mathcodes = allocate(swapped(mathcodes,mathcodes))
+fillcodes = allocate(swapped(fillcodes,fillcodes))
+margincodes = allocate(swapped(margincodes,margincodes))
+disccodes = allocate(swapped(disccodes,disccodes))
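+
+-- After this swap the tables map both ways; a small usage sketch (the actual
+-- glyph id number depends on the engine):
+--
+--   local glyph_code = nodecodes.glyph       -- a number
+--   print(nodecodes[glyph_code])             -- "glyph"
+--   print(skipcodes[8], skipcodes.leftskip)  -- leftskip 8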
+
+nodes.skipcodes = skipcodes nodes.gluecodes = skipcodes -- more official
+nodes.noadcodes = noadcodes
+nodes.nodecodes = nodecodes
+nodes.whatcodes = whatcodes nodes.whatsitcodes = whatcodes -- more official
+nodes.listcodes = listcodes
+nodes.glyphcodes = glyphcodes
+nodes.kerncodes = kerncodes
+nodes.penaltycodes = penaltycodes
+nodes.mathcodes = mathcodes
+nodes.fillcodes = fillcodes
+nodes.margincodes = margincodes
+nodes.disccodes = disccodes nodes.discretionarycodes = disccodes
+
+listcodes.row = listcodes.alignment
+listcodes.column = listcodes.alignment
+
+kerncodes.italiccorrection = kerncodes.userkern
+kerncodes.kerning = kerncodes.fontkern
+
+nodes.codes = allocate { -- mostly for listing
+ glue = skipcodes,
+ noad = noadcodes,
+ node = nodecodes,
+ hlist = listcodes,
+ vlist = listcodes,
+ glyph = glyphcodes,
+ kern = kerncodes,
+ penalty = penaltycodes,
+ math = mathcodes,
+ fill = fillcodes,
+ margin = margincodes,
+ disc = disccodes,
+ whatsit = whatcodes,
+}
+
+local report_codes = logs.reporter("nodes","codes")
+
+function nodes.showcodes()
+ local t = { }
+ for name, codes in sortedhash(nodes.codes) do
+ local sorted = sortedkeys(codes)
+ for i=1,#sorted do
+ local s = sorted[i]
+ if type(s) ~= "number" then
+ t[#t+1] = { name, s, codes[s] }
+ end
+ end
+ end
+ formatcolumns(t)
+ for k=1,#t do
+ report_codes (t[k])
+ end
+end
+
+local whatsit_node = nodecodes.whatsit
+
+local messyhack = tohash { -- temporary solution
+ nodecodes.attributelist,
+ nodecodes.attribute,
+ nodecodes.gluespec,
+ nodecodes.action,
+}
+
+function nodes.fields(n)
+ local id = n.id
+ if id == whatsit_node then
+ return node_fields(id,n.subtype)
+ else
+ local t = node_fields(id)
+ if messyhack[id] then
+ for i=1,#t do
+ if t[i] == "subtype" then
+ remove(t,i)
+ break
+ end
+ end
+ end
+ return t
+ end
+end
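+
+-- Usage sketch ('g' is a hypothetical glyph node):
+--
+--   local f = nodes.fields(g)
+--   -- f lists the field names valid for that node, for a glyph things
+--   -- like "char", "font", "xoffset" and "yoffset"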
+
+trackers.register("system.showcodes", nodes.showcodes)
+
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local glue_code = nodecodes.glue
+
+-- if t.id == glue_code then
+-- local s = t.spec
+-- print(t)
+-- print(s,s and s.writable)
+-- if s and s.writable then
+-- free_node(s)
+-- end
+-- t.spec = nil
+-- end
+
+local function remove(head, current, free_too)
+ local t = current
+ head, current = remove_node(head,current)
+ if t then
+ if free_too then
+ free_node(t)
+ t = nil
+ else
+ t.next = nil
+ t.prev = nil
+ end
+ end
+ return head, current, t
+end
+
+nodes.remove = remove
+
+function nodes.delete(head,current)
+ return remove(head,current,true)
+end
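+
+-- Usage sketch ('head' is a node list, 'n' and 'm' are nodes in it, all
+-- hypothetical):
+--
+--   head, current, n = nodes.remove(head,n) -- unlink 'n' but keep it around
+--   head, current    = nodes.delete(head,m) -- unlink 'm' and free it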
+
+nodes.before = insert_node_before
+nodes.after = insert_node_after
+
+-- we need to test this, as it might be fixed now
+
+function nodes.before(h,c,n)
+ if c then
+ if c == h then
+ n.next = h
+ n.prev = nil
+ h.prev = n
+ else
+ local cp = c.prev
+ n.next = c
+ n.prev = cp
+ if cp then
+ cp.next = n
+ end
+ c.prev = n
+ return h, n
+ end
+ end
+ return n, n
+end
+
+function nodes.after(h,c,n)
+ if c then
+ local cn = c.next
+ if cn then
+ n.next = cn
+ cn.prev = n
+ else
+ n.next = nil
+ end
+ c.next = n
+ n.prev = c
+ return h, n
+ end
+ return n, n
+end
+
+-- local h, c = nodes.replace(head,current,new)
+-- local c = nodes.replace(false,current,new)
+-- local c = nodes.replace(current,new)
+
+function nodes.replace(head,current,new) -- no head returned if false
+ if not new then
+ head, current, new = false, head, current
+ end
+ local prev, next = current.prev, current.next
+ if next then
+ new.next = next
+ next.prev = new
+ end
+ if prev then
+ new.prev = prev
+ prev.next = new
+ end
+ if head then
+ if head == current then
+ head = new
+ end
+ free_node(current)
+ return head, new
+ else
+ free_node(current)
+ return new
+ end
+end
+
+-- will move
+
+local function count(stack,flat)
+ local n = 0
+ while stack do
+ local id = stack.id
+ if not flat and (id == hlist_code or id == vlist_code) then
+ local list = stack.list
+ if list then
+ n = n + 1 + count(list) -- self counts too
+ else
+ n = n + 1
+ end
+ else
+ n = n + 1
+ end
+ stack = stack.next
+ end
+ return n
+end
+
+nodes.count = count
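+
+-- Usage sketch ('list' is a hypothetical node list):
+--
+--   local all = nodes.count(list)      -- descends into nested h/vlists
+--   local top = nodes.count(list,true) -- flat: no descending into lists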
+
+local left, space = lpeg.P("<"), lpeg.P(" ")
+
+local reference = left * (1-left)^0 * left * space^0 * lpeg.C((1-space)^0)
+
+function nodes.reference(n)
+ return lpegmatch(reference,tostring(n))
+end
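+
+-- This picks the node's number out of its tostring() representation; a
+-- usage sketch ('n' is a hypothetical node):
+--
+--   local r = nodes.reference(n) -- a string like "172"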
+
+if not node.next then
+
+ function node.next(n) return n and n.next end
+ function node.prev(n) return n and n.prev end
+
+end
diff --git a/tex/context/base/node-inj.lua b/tex/context/base/node-inj.lua
index 697370cfb..d6a851cfb 100644
--- a/tex/context/base/node-inj.lua
+++ b/tex/context/base/node-inj.lua
@@ -1,519 +1,519 @@
-if not modules then modules = { } end modules ['node-inj'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- This is very experimental (this will change when we have luatex > .50 and
--- a few pending thingies are available. Also, Idris needs to make a few more
--- test fonts. Btw, future versions of luatex will have extended glyph properties
--- that can be of help. Some optimizations can go away when we have faster machines.
-
-local next = next
-local utfchar = utf.char
-
-local trace_injections = false trackers.register("nodes.injections", function(v) trace_injections = v end)
-
-local report_injections = logs.reporter("nodes","injections")
-
-local attributes, nodes, node = attributes, nodes, node
-
-fonts = fonts
-local fontdata = fonts.hashes.identifiers
-
-nodes.injections = nodes.injections or { }
-local injections = nodes.injections
-
-local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
-local kern_code = nodecodes.kern
-local nodepool = nodes.pool
-local newkern = nodepool.kern
-
-local traverse_id = node.traverse_id
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-
-local a_kernpair = attributes.private('kernpair')
-local a_ligacomp = attributes.private('ligacomp')
-local a_markbase = attributes.private('markbase')
-local a_markmark = attributes.private('markmark')
-local a_markdone = attributes.private('markdone')
-local a_cursbase = attributes.private('cursbase')
-local a_curscurs = attributes.private('curscurs')
-local a_cursdone = attributes.private('cursdone')
-
--- This injector has been tested by Idris Samawi Hamid (several arabic fonts as well as
--- the rather demanding Husayni font), Khaled Hosny (latin and arabic) and Kaj Eigner
--- (arabic, hebrew and thai) and myself (whatever font I come across). I'm pretty sure
--- that this code is not 100% okay but examples are needed to figure things out.
-
-function injections.installnewkern(nk)
- newkern = nk or newkern
-end
-
-local cursives = { }
-local marks = { }
-local kerns = { }
-
--- Currently we do gpos/kern in a bit inofficial way but when we have the extra fields in
--- glyphnodes to manipulate ht/dp/wd explicitly I will provide an alternative; also, we
--- can share tables.
-
--- For the moment we pass the r2l key ... volt/arabtype tests .. idris: this needs
--- checking with husayni (volt and fontforge).
-
-function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
- local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2])
- local ws, wn = tfmstart.width, tfmnext.width
- local bound = #cursives + 1
- start[a_cursbase] = bound
- nxt[a_curscurs] = bound
- cursives[bound] = { rlmode, dx, dy, ws, wn }
- return dx, dy, bound
-end
-
-function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
- local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
- -- dy = y - h
- if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then
- local bound = current[a_kernpair]
- if bound then
- local kb = kerns[bound]
- -- inefficient but singles have less, but weird anyway, needs checking
- kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h
- else
- bound = #kerns + 1
- current[a_kernpair] = bound
- kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width }
- end
- return x, y, w, h, bound
- end
- return x, y, w, h -- no bound
-end
-
-function injections.setkern(current,factor,rlmode,x,tfmchr)
- local dx = factor*x
- if dx ~= 0 then
- local bound = #kerns + 1
- current[a_kernpair] = bound
- kerns[bound] = { rlmode, dx }
- return dx, bound
- else
- return 0, 0
- end
-end
-
-function injections.setmark(start,base,factor,rlmode,ba,ma,index) -- ba=baseanchor, ma=markanchor
- local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2]) -- the index argument is no longer used but when this
- local bound = base[a_markbase] -- fails again we should pass it
- local index = 1
- if bound then
- local mb = marks[bound]
- if mb then
- -- if not index then index = #mb + 1 end
- index = #mb + 1
- mb[index] = { dx, dy, rlmode }
- start[a_markmark] = bound
- start[a_markdone] = index
- return dx, dy, bound
- else
- report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound)
- end
- end
--- index = index or 1
- index = index or 1
- bound = #marks + 1
- base[a_markbase] = bound
- start[a_markmark] = bound
- start[a_markdone] = index
- marks[bound] = { [index] = { dx, dy, rlmode } }
- return dx, dy, bound
-end
-
-local function dir(n)
- return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
-end
-
-local function trace(head)
- report_injections("begin run")
- for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- local kp = n[a_kernpair]
- local mb = n[a_markbase]
- local mm = n[a_markmark]
- local md = n[a_markdone]
- local cb = n[a_cursbase]
- local cc = n[a_curscurs]
- local char = n.char
- report_injections("font %s, char %U, glyph %c",n.font,char,char)
- if kp then
- local k = kerns[kp]
- if k[3] then
- report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5])
- else
- report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2])
- end
- end
- if mb then
- report_injections(" markbase: bound %a",mb)
- end
- if mm then
- local m = marks[mm]
- if mb then
- local m = m[mb]
- if m then
- report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2])
- else
- report_injections(" markmark: bound %a, missing index",mm)
- end
- else
- m = m[1]
- report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2])
- end
- end
- if cb then
- report_injections(" cursbase: bound %a",cb)
- end
- if cc then
- local c = cursives[cc]
- report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3])
- end
- end
- end
- report_injections("end run")
-end
-
--- todo: reuse tables (i.e. no collection), but will be extra fields anyway
--- todo: check for attribute
-
--- We can have a fast test on a font being processed, so we can check faster for marks etc
--- but I'll make a context variant anyway.
-
-local function show_result(head)
- local current = head
- local skipping = false
- while current do
- local id = current.id
- if id == glyph_code then
- report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset)
- skipping = false
- elseif id == kern_code then
- report_injections("kern: %p",current.kern)
- skipping = false
- elseif not skipping then
- report_injections()
- skipping = true
- end
- current = current.next
- end
-end
-
-function injections.handler(head,where,keep)
- local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns)
- if has_marks or has_cursives then
- if trace_injections then
- trace(head)
- end
- -- in the future variant we will not copy items but refs to tables
- local done, ky, rl, valid, cx, wx, mk, nofvalid = false, { }, { }, { }, { }, { }, { }, 0
- if has_kerns then -- move outside loop
- local nf, tm = nil, nil
- for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts
- if n.subtype < 256 then
- nofvalid = nofvalid + 1
- valid[nofvalid] = n
- if n.font ~= nf then
- nf = n.font
- tm = fontdata[nf].resources.marks
- end
- if tm then
- mk[n] = tm[n.char]
- end
- local k = n[a_kernpair]
- if k then
- local kk = kerns[k]
- if kk then
- local x, y, w, h = kk[2] or 0, kk[3] or 0, kk[4] or 0, kk[5] or 0
- local dy = y - h
- if dy ~= 0 then
- ky[n] = dy
- end
- if w ~= 0 or x ~= 0 then
- wx[n] = kk
- end
- rl[n] = kk[1] -- could move in test
- end
- end
- end
- end
- else
- local nf, tm = nil, nil
- for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- nofvalid = nofvalid + 1
- valid[nofvalid] = n
- if n.font ~= nf then
- nf = n.font
- tm = fontdata[nf].resources.marks
- end
- if tm then
- mk[n] = tm[n.char]
- end
- end
- end
- end
- if nofvalid > 0 then
- -- we can assume done == true because we have cursives and marks
- local cx = { }
- if has_kerns and next(ky) then
- for n, k in next, ky do
- n.yoffset = k
- end
- end
- -- todo: reuse t and use maxt
- if has_cursives then
- local p_cursbase, p = nil, nil
- -- since we need valid[n+1] we can also use a "while true do"
- local t, d, maxt = { }, { }, 0
- for i=1,nofvalid do -- valid == glyphs
- local n = valid[i]
- if not mk[n] then
- local n_cursbase = n[a_cursbase]
- if p_cursbase then
- local n_curscurs = n[a_curscurs]
- if p_cursbase == n_curscurs then
- local c = cursives[n_curscurs]
- if c then
- local rlmode, dx, dy, ws, wn = c[1], c[2], c[3], c[4], c[5]
- if rlmode >= 0 then
- dx = dx - ws
- else
- dx = dx + wn
- end
- if dx ~= 0 then
- cx[n] = dx
- rl[n] = rlmode
- end
- -- if rlmode and rlmode < 0 then
- dy = -dy
- -- end
- maxt = maxt + 1
- t[maxt] = p
- d[maxt] = dy
- else
- maxt = 0
- end
- end
- elseif maxt > 0 then
- local ny = n.yoffset
- for i=maxt,1,-1 do
- ny = ny + d[i]
- local ti = t[i]
- ti.yoffset = ti.yoffset + ny
- end
- maxt = 0
- end
- if not n_cursbase and maxt > 0 then
- local ny = n.yoffset
- for i=maxt,1,-1 do
- ny = ny + d[i]
- local ti = t[i]
- ti.yoffset = ny
- end
- maxt = 0
- end
- p_cursbase, p = n_cursbase, n
- end
- end
- if maxt > 0 then
- local ny = n.yoffset
- for i=maxt,1,-1 do
- ny = ny + d[i]
- local ti = t[i]
- ti.yoffset = ny
- end
- maxt = 0
- end
- if not keep then
- cursives = { }
- end
- end
- if has_marks then
- for i=1,nofvalid do
- local p = valid[i]
- local p_markbase = p[a_markbase]
- if p_markbase then
- local mrks = marks[p_markbase]
- local nofmarks = #mrks
- for n in traverse_id(glyph_code,p.next) do
- local n_markmark = n[a_markmark]
- if p_markbase == n_markmark then
- local index = n[a_markdone] or 1
- local d = mrks[index]
- if d then
- local rlmode = d[3]
- --
- local k = wx[p]
- if k then
- local x = k[2]
- local w = k[4]
- if w then
- if rlmode and rlmode >= 0 then
- -- kern(x) glyph(p) kern(w-x) mark(n)
- n.xoffset = p.xoffset - p.width + d[1] - (w-x)
- else
- -- kern(w-x) glyph(p) kern(x) mark(n)
- n.xoffset = p.xoffset - d[1] - x
- end
- else
- if rlmode and rlmode >= 0 then
- -- okay for husayni
- n.xoffset = p.xoffset - p.width + d[1]
- else
- -- needs checking: is x ok here?
- n.xoffset = p.xoffset - d[1] - x
- end
- end
- else
- if rlmode and rlmode >= 0 then
- n.xoffset = p.xoffset - p.width + d[1]
- else
- n.xoffset = p.xoffset - d[1]
- end
- end
- -- --
- if mk[p] then
- n.yoffset = p.yoffset + d[2]
- else
- n.yoffset = n.yoffset + p.yoffset + d[2]
- end
- --
- if nofmarks == 1 then
- break
- else
- nofmarks = nofmarks - 1
- end
- end
- else
- -- KE: there can be sequences in ligatures
- end
- end
- end
- end
- if not keep then
- marks = { }
- end
- end
- -- todo : combine
- if next(wx) then
- for n, k in next, wx do
- -- only w can be nil (kernclasses), can be sped up when w == nil
- local x = k[2]
- local w = k[4]
- if w then
- local rl = k[1] -- r2l = k[6]
- local wx = w - x
- if rl < 0 then -- KE: don't use r2l here
- if wx ~= 0 then
- insert_node_before(head,n,newkern(wx)) -- type 0/2
- end
- if x ~= 0 then
- insert_node_after (head,n,newkern(x)) -- type 0/2
- end
- else
- if x ~= 0 then
- insert_node_before(head,n,newkern(x)) -- type 0/2
- end
- if wx ~= 0 then
- insert_node_after (head,n,newkern(wx)) -- type 0/2
- end
- end
- elseif x ~= 0 then
- -- this needs checking for rl < 0 but it is unlikely that a r2l script
- -- uses kernclasses between glyphs so we're probably safe (KE has a
- -- problematic font where marks interfere with rl < 0 in the previous
- -- case)
- insert_node_before(head,n,newkern(x)) -- a real font kern, type 0
- end
- end
- end
- if next(cx) then
- for n, k in next, cx do
- if k ~= 0 then
- local rln = rl[n]
- if rln and rln < 0 then
- insert_node_before(head,n,newkern(-k)) -- type 0/2
- else
- insert_node_before(head,n,newkern(k)) -- type 0/2
- end
- end
- end
- end
- if not keep then
- kerns = { }
- end
- -- if trace_injections then
- -- show_result(head)
- -- end
- return head, true
- elseif not keep then
- kerns, cursives, marks = { }, { }, { }
- end
- elseif has_kerns then
- if trace_injections then
- trace(head)
- end
- for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- local k = n[a_kernpair]
- if k then
- local kk = kerns[k]
- if kk then
- local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4]
- if y and y ~= 0 then
- n.yoffset = y -- todo: h ?
- end
- if w then
- -- copied from above
- -- local r2l = kk[6]
- local wx = w - x
- if rl < 0 then -- KE: don't use r2l here
- if wx ~= 0 then
- insert_node_before(head,n,newkern(wx))
- end
- if x ~= 0 then
- insert_node_after (head,n,newkern(x))
- end
- else
- if x ~= 0 then
- insert_node_before(head,n,newkern(x))
- end
- if wx ~= 0 then
- insert_node_after(head,n,newkern(wx))
- end
- end
- else
- -- simple (e.g. kernclass kerns)
- if x ~= 0 then
- insert_node_before(head,n,newkern(x))
- end
- end
- end
- end
- end
- end
- if not keep then
- kerns = { }
- end
- -- if trace_injections then
- -- show_result(head)
- -- end
- return head, true
- else
- -- no tracing needed
- end
- return head, false
-end
+if not modules then modules = { } end modules ['node-inj'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- This is very experimental (this will change when we have luatex > .50 and
+-- a few pending thingies are available). Also, Idris needs to make a few more
+-- test fonts. Btw, future versions of luatex will have extended glyph properties
+-- that can be of help. Some optimizations can go away when we have faster machines.
+
+local next = next
+local utfchar = utf.char
+
+local trace_injections = false trackers.register("nodes.injections", function(v) trace_injections = v end)
+
+local report_injections = logs.reporter("nodes","injections")
+
+local attributes, nodes, node = attributes, nodes, node
+
+fonts = fonts
+local fontdata = fonts.hashes.identifiers
+
+nodes.injections = nodes.injections or { }
+local injections = nodes.injections
+
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
+local kern_code = nodecodes.kern
+local nodepool = nodes.pool
+local newkern = nodepool.kern
+
+local traverse_id = node.traverse_id
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+
+local a_kernpair = attributes.private('kernpair')
+local a_ligacomp = attributes.private('ligacomp')
+local a_markbase = attributes.private('markbase')
+local a_markmark = attributes.private('markmark')
+local a_markdone = attributes.private('markdone')
+local a_cursbase = attributes.private('cursbase')
+local a_curscurs = attributes.private('curscurs')
+local a_cursdone = attributes.private('cursdone')
+
+-- This injector has been tested by Idris Samawi Hamid (several arabic fonts as well as
+-- the rather demanding Husayni font), Khaled Hosny (latin and arabic) and Kaj Eigner
+-- (arabic, hebrew and thai) and myself (whatever font I come across). I'm pretty sure
+-- that this code is not 100% okay but examples are needed to figure things out.
+
+function injections.installnewkern(nk)
+ newkern = nk or newkern
+end
+
+local cursives = { }
+local marks = { }
+local kerns = { }
+
+-- Currently we do gpos/kern in a somewhat unofficial way but when we have the extra fields in
+-- glyphnodes to manipulate ht/dp/wd explicitly I will provide an alternative; also, we
+-- can share tables.
+
+-- For the moment we pass the r2l key ... volt/arabtype tests .. idris: this needs
+-- checking with husayni (volt and fontforge).
+
+function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
+ local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2])
+ local ws, wn = tfmstart.width, tfmnext.width
+ local bound = #cursives + 1
+ start[a_cursbase] = bound
+ nxt[a_curscurs] = bound
+ cursives[bound] = { rlmode, dx, dy, ws, wn }
+ return dx, dy, bound
+end
+
+function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
+ local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
+ -- dy = y - h
+ if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then
+ local bound = current[a_kernpair]
+ if bound then
+ local kb = kerns[bound]
+ -- inefficient but singles have less, but weird anyway, needs checking
+ kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0) + w, (kb[5] or 0) + h
+ else
+ bound = #kerns + 1
+ current[a_kernpair] = bound
+ kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width }
+ end
+ return x, y, w, h, bound
+ end
+ return x, y, w, h -- no bound
+end
+
+function injections.setkern(current,factor,rlmode,x,tfmchr)
+ local dx = factor*x
+ if dx ~= 0 then
+ local bound = #kerns + 1
+ current[a_kernpair] = bound
+ kerns[bound] = { rlmode, dx }
+ return dx, bound
+ else
+ return 0, 0
+ end
+end
+
+function injections.setmark(start,base,factor,rlmode,ba,ma,index) -- ba=baseanchor, ma=markanchor
+ local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2]) -- the index argument is no longer used but when this
+ local bound = base[a_markbase] -- fails again we should pass it
+ local index = 1
+ if bound then
+ local mb = marks[bound]
+ if mb then
+ -- if not index then index = #mb + 1 end
+ index = #mb + 1
+ mb[index] = { dx, dy, rlmode }
+ start[a_markmark] = bound
+ start[a_markdone] = index
+ return dx, dy, bound
+ else
+ report_injections("possible problem, %U is base mark without data (id %a)",base.char,bound)
+ end
+ end
+-- index = index or 1
+ index = index or 1
+ bound = #marks + 1
+ base[a_markbase] = bound
+ start[a_markmark] = bound
+ start[a_markdone] = index
+ marks[bound] = { [index] = { dx, dy, rlmode } }
+ return dx, dy, bound
+end
+
+local function dir(n)
+ return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
+end
+
+local function trace(head)
+ report_injections("begin run")
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype < 256 then
+ local kp = n[a_kernpair]
+ local mb = n[a_markbase]
+ local mm = n[a_markmark]
+ local md = n[a_markdone]
+ local cb = n[a_cursbase]
+ local cc = n[a_curscurs]
+ local char = n.char
+ report_injections("font %s, char %U, glyph %c",n.font,char,char)
+ if kp then
+ local k = kerns[kp]
+ if k[3] then
+ report_injections(" pairkern: dir %a, x %p, y %p, w %p, h %p",dir(k[1]),k[2],k[3],k[4],k[5])
+ else
+ report_injections(" kern: dir %a, dx %p",dir(k[1]),k[2])
+ end
+ end
+ if mb then
+ report_injections(" markbase: bound %a",mb)
+ end
+ if mm then
+ local m = marks[mm]
+ if mb then
+ local m = m[mb]
+ if m then
+ report_injections(" markmark: bound %a, index %a, dx %p, dy %p",mm,md,m[1],m[2])
+ else
+ report_injections(" markmark: bound %a, missing index",mm)
+ end
+ else
+ m = m[1]
+ report_injections(" markmark: bound %a, dx %p, dy %p",mm,m and m[1],m and m[2])
+ end
+ end
+ if cb then
+ report_injections(" cursbase: bound %a",cb)
+ end
+ if cc then
+ local c = cursives[cc]
+ report_injections(" curscurs: bound %a, dir %a, dx %p, dy %p",cc,dir(c[1]),c[2],c[3])
+ end
+ end
+ end
+ report_injections("end run")
+end
+
+-- todo: reuse tables (i.e. no collection), but will be extra fields anyway
+-- todo: check for attribute
+
+-- We can have a fast test on a font being processed, so we can check faster for marks etc
+-- but I'll make a context variant anyway.
+
+local function show_result(head)
+ local current = head
+ local skipping = false
+ while current do
+ local id = current.id
+ if id == glyph_code then
+ report_injections("char: %C, width %p, xoffset %p, yoffset %p",current.char,current.width,current.xoffset,current.yoffset)
+ skipping = false
+ elseif id == kern_code then
+ report_injections("kern: %p",current.kern)
+ skipping = false
+ elseif not skipping then
+ report_injections()
+ skipping = true
+ end
+ current = current.next
+ end
+end
+
+function injections.handler(head,where,keep)
+ local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns)
+ if has_marks or has_cursives then
+ if trace_injections then
+ trace(head)
+ end
+ -- in the future variant we will not copy items but refs to tables
+ local done, ky, rl, valid, cx, wx, mk, nofvalid = false, { }, { }, { }, { }, { }, { }, 0
+ if has_kerns then -- move outside loop
+ local nf, tm = nil, nil
+ for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts
+ if n.subtype < 256 then
+ nofvalid = nofvalid + 1
+ valid[nofvalid] = n
+ if n.font ~= nf then
+ nf = n.font
+ tm = fontdata[nf].resources.marks
+ end
+ if tm then
+ mk[n] = tm[n.char]
+ end
+ local k = n[a_kernpair]
+ if k then
+ local kk = kerns[k]
+ if kk then
+ local x, y, w, h = kk[2] or 0, kk[3] or 0, kk[4] or 0, kk[5] or 0
+ local dy = y - h
+ if dy ~= 0 then
+ ky[n] = dy
+ end
+ if w ~= 0 or x ~= 0 then
+ wx[n] = kk
+ end
+ rl[n] = kk[1] -- could move in test
+ end
+ end
+ end
+ end
+ else
+ local nf, tm = nil, nil
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype < 256 then
+ nofvalid = nofvalid + 1
+ valid[nofvalid] = n
+ if n.font ~= nf then
+ nf = n.font
+ tm = fontdata[nf].resources.marks
+ end
+ if tm then
+ mk[n] = tm[n.char]
+ end
+ end
+ end
+ end
+ if nofvalid > 0 then
+ -- we can assume done == true because we have cursives and marks
+ local cx = { }
+ if has_kerns and next(ky) then
+ for n, k in next, ky do
+ n.yoffset = k
+ end
+ end
+ -- todo: reuse t and use maxt
+ if has_cursives then
+ local p_cursbase, p = nil, nil
+ -- since we need valid[n+1] we can also use a "while true do"
+ local t, d, maxt = { }, { }, 0
+ for i=1,nofvalid do -- valid == glyphs
+ local n = valid[i]
+ if not mk[n] then
+ local n_cursbase = n[a_cursbase]
+ if p_cursbase then
+ local n_curscurs = n[a_curscurs]
+ if p_cursbase == n_curscurs then
+ local c = cursives[n_curscurs]
+ if c then
+ local rlmode, dx, dy, ws, wn = c[1], c[2], c[3], c[4], c[5]
+ if rlmode >= 0 then
+ dx = dx - ws
+ else
+ dx = dx + wn
+ end
+ if dx ~= 0 then
+ cx[n] = dx
+ rl[n] = rlmode
+ end
+ -- if rlmode and rlmode < 0 then
+ dy = -dy
+ -- end
+ maxt = maxt + 1
+ t[maxt] = p
+ d[maxt] = dy
+ else
+ maxt = 0
+ end
+ end
+ elseif maxt > 0 then
+ local ny = n.yoffset
+ for i=maxt,1,-1 do
+ ny = ny + d[i]
+ local ti = t[i]
+ ti.yoffset = ti.yoffset + ny
+ end
+ maxt = 0
+ end
+ if not n_cursbase and maxt > 0 then
+ local ny = n.yoffset
+ for i=maxt,1,-1 do
+ ny = ny + d[i]
+ local ti = t[i]
+ ti.yoffset = ny
+ end
+ maxt = 0
+ end
+ p_cursbase, p = n_cursbase, n
+ end
+ end
+ if maxt > 0 then
+ local ny = n.yoffset
+ for i=maxt,1,-1 do
+ ny = ny + d[i]
+ local ti = t[i]
+ ti.yoffset = ny
+ end
+ maxt = 0
+ end
+ if not keep then
+ cursives = { }
+ end
+ end
+ if has_marks then
+ for i=1,nofvalid do
+ local p = valid[i]
+ local p_markbase = p[a_markbase]
+ if p_markbase then
+ local mrks = marks[p_markbase]
+ local nofmarks = #mrks
+ for n in traverse_id(glyph_code,p.next) do
+ local n_markmark = n[a_markmark]
+ if p_markbase == n_markmark then
+ local index = n[a_markdone] or 1
+ local d = mrks[index]
+ if d then
+ local rlmode = d[3]
+ --
+ local k = wx[p]
+ if k then
+ local x = k[2]
+ local w = k[4]
+ if w then
+ if rlmode and rlmode >= 0 then
+ -- kern(x) glyph(p) kern(w-x) mark(n)
+ n.xoffset = p.xoffset - p.width + d[1] - (w-x)
+ else
+ -- kern(w-x) glyph(p) kern(x) mark(n)
+ n.xoffset = p.xoffset - d[1] - x
+ end
+ else
+ if rlmode and rlmode >= 0 then
+ -- okay for husayni
+ n.xoffset = p.xoffset - p.width + d[1]
+ else
+ -- needs checking: is x ok here?
+ n.xoffset = p.xoffset - d[1] - x
+ end
+ end
+ else
+ if rlmode and rlmode >= 0 then
+ n.xoffset = p.xoffset - p.width + d[1]
+ else
+ n.xoffset = p.xoffset - d[1]
+ end
+ end
+ -- --
+ if mk[p] then
+ n.yoffset = p.yoffset + d[2]
+ else
+ n.yoffset = n.yoffset + p.yoffset + d[2]
+ end
+ --
+ if nofmarks == 1 then
+ break
+ else
+ nofmarks = nofmarks - 1
+ end
+ end
+ else
+ -- KE: there can be sequences in ligatures
+ end
+ end
+ end
+ end
+ if not keep then
+ marks = { }
+ end
+ end
+ -- todo : combine
+ if next(wx) then
+ for n, k in next, wx do
+ -- only w can be nil (kernclasses), can be sped up when w == nil
+ local x = k[2]
+ local w = k[4]
+ if w then
+ local rl = k[1] -- r2l = k[6]
+ local wx = w - x
+ if rl < 0 then -- KE: don't use r2l here
+ if wx ~= 0 then
+ insert_node_before(head,n,newkern(wx)) -- type 0/2
+ end
+ if x ~= 0 then
+ insert_node_after (head,n,newkern(x)) -- type 0/2
+ end
+ else
+ if x ~= 0 then
+ insert_node_before(head,n,newkern(x)) -- type 0/2
+ end
+ if wx ~= 0 then
+ insert_node_after (head,n,newkern(wx)) -- type 0/2
+ end
+ end
+ elseif x ~= 0 then
+ -- this needs checking for rl < 0 but it is unlikely that a r2l script
+ -- uses kernclasses between glyphs so we're probably safe (KE has a
+ -- problematic font where marks interfere with rl < 0 in the previous
+ -- case)
+ insert_node_before(head,n,newkern(x)) -- a real font kern, type 0
+ end
+ end
+ end
+ if next(cx) then
+ for n, k in next, cx do
+ if k ~= 0 then
+ local rln = rl[n]
+ if rln and rln < 0 then
+ insert_node_before(head,n,newkern(-k)) -- type 0/2
+ else
+ insert_node_before(head,n,newkern(k)) -- type 0/2
+ end
+ end
+ end
+ end
+ if not keep then
+ kerns = { }
+ end
+ -- if trace_injections then
+ -- show_result(head)
+ -- end
+ return head, true
+ elseif not keep then
+ kerns, cursives, marks = { }, { }, { }
+ end
+ elseif has_kerns then
+ if trace_injections then
+ trace(head)
+ end
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype < 256 then
+ local k = n[a_kernpair]
+ if k then
+ local kk = kerns[k]
+ if kk then
+ local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4]
+ if y and y ~= 0 then
+ n.yoffset = y -- todo: h ?
+ end
+ if w then
+ -- copied from above
+ -- local r2l = kk[6]
+ local wx = w - x
+ if rl < 0 then -- KE: don't use r2l here
+ if wx ~= 0 then
+ insert_node_before(head,n,newkern(wx))
+ end
+ if x ~= 0 then
+ insert_node_after (head,n,newkern(x))
+ end
+ else
+ if x ~= 0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ if wx ~= 0 then
+ insert_node_after(head,n,newkern(wx))
+ end
+ end
+ else
+ -- simple (e.g. kernclass kerns)
+ if x ~= 0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ end
+ end
+ end
+ end
+ end
+ if not keep then
+ kerns = { }
+ end
+ -- if trace_injections then
+ -- show_result(head)
+ -- end
+ return head, true
+ else
+ -- no tracing needed
+ end
+ return head, false
+end
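+
+-- A rough sketch of how this module is driven (all names and values below are
+-- hypothetical; in practice the otf node mode code calls these while walking
+-- a glyph list):
+--
+--   local dx, bound = injections.setkern(glyph,factor,rlmode,kernvalue,chardata)
+--   -- ... more glyphs, marks and cursives ...
+--   head, done = injections.handler(head,"somewhere",false)
+--
+-- The set* functions only record positions in the local tables; the handler
+-- turns them into kern nodes and x/y offsets afterwards.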
diff --git a/tex/context/base/node-mig.lua b/tex/context/base/node-mig.lua
index 9fc35a048..fd14fc43f 100644
--- a/tex/context/base/node-mig.lua
+++ b/tex/context/base/node-mig.lua
@@ -1,138 +1,138 @@
-if not modules then modules = { } end modules ['node-mig'] = {
- version = 1.001,
- comment = "companion to node-mig.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format = string.format
-
-local attributes, nodes, node = attributes, nodes, node
-
-local remove_nodes = nodes.remove
-
-local nodecodes = nodes.nodecodes
-local tasks = nodes.tasks
-
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local insert_code = nodecodes.ins
-local mark_code = nodecodes.mark
-
-local a_migrated = attributes.private("migrated")
-
-local trace_migrations = false trackers.register("nodes.migrations", function(v) trace_migrations = v end)
-
-local report_nodes = logs.reporter("nodes","migrations")
-
-local migrate_inserts, migrate_marks, inserts_too
-
-local t_inserts, t_marks, t_sweeps = 0, 0, 0
-
-local function locate(head,first,last,ni,nm)
- local current = head
- while current do
- local id = current.id
- if id == vlist_code or id == hlist_code then
- current.list, first, last, ni, nm = locate(current.list,first,last,ni,nm)
- current = current.next
- elseif migrate_inserts and id == insert_code then
- local insert
- head, current, insert = remove_nodes(head,current)
- insert.next = nil
- if first then
- insert.prev, last.next = last, insert
- else
- insert.prev, first = nil, insert
- end
- last, ni = insert, ni + 1
- elseif migrate_marks and id == mark_code then
- local mark
- head, current, mark = remove_nodes(head,current)
- mark.next = nil
- if first then
- mark.prev, last.next = last, mark
- else
- mark.prev, first = nil, mark
- end
- last, nm = mark, nm + 1
- else
- current= current.next
- end
- end
- return head, first, last, ni, nm
-end
-
-function nodes.handlers.migrate(head,where)
- local done = false
- if head then
- if trace_migrations then
- report_nodes("migration sweep %a",where)
- end
- local current = head
- while current do
- local id = current.id
- -- inserts_too is a temp hack, we should only do them when it concerns
- -- newly placed (flushed) inserts
- if id == vlist_code or id == hlist_code or (inserts_too and id == insert_code) and not current[a_migrated] then
- current[a_migrated] = 1
- t_sweeps = t_sweeps + 1
- local h = current.list
- local first, last, ni, nm
- while h do
- local id = h.id
- if id == vlist_code or id == hlist_code then
- h, first, last, ni, nm = locate(h,first,last,0,0)
- end
- h = h.next
- end
- if first then
- t_inserts, t_marks = t_inserts + ni, t_marks + nm
- if trace_migrations and (ni > 0 or nm > 0) then
- report_nodes("sweep %a, container %a, %s inserts and %s marks migrated outwards during %a",
- t_sweeps,nodecodes[id],ni,nm,where)
- end
- -- inserts after head
- local n = current.next
- if n then
- last.next, n.prev = n, last
- end
- current.next, first.prev = first, current
- done, current = true, last
- end
- end
- current = current.next
- end
- return head, done
- end
-end
-
--- for the moment this way, this will disappear
-
-experiments.register("marks.migrate", function(v)
- if v then
- tasks.enableaction("mvlbuilders", "nodes.handlers.migrate")
- end
- migrate_marks = v
-end)
-
-experiments.register("inserts.migrate", function(v)
- if v then
- tasks.enableaction("mvlbuilders", "nodes.handlers.migrate")
- end
- migrate_inserts = v
-end)
-
-experiments.register("inserts.migrate.nested", function(v)
- if v then
- tasks.enableaction("mvlbuilders", "nodes.handlers.migrate")
- end
- inserts_too = v
-end)
-
-statistics.register("node migrations", function()
- if trace_migrations and t_sweeps > 0 then
- return format("%s sweeps, %s inserts moved, %s marks moved",t_sweeps,t_inserts,t_marks)
- end
-end)
+if not modules then modules = { } end modules ['node-mig'] = {
+ version = 1.001,
+ comment = "companion to node-mig.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format = string.format
+
+local attributes, nodes, node = attributes, nodes, node
+
+local remove_nodes = nodes.remove
+
+local nodecodes = nodes.nodecodes
+local tasks = nodes.tasks
+
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local insert_code = nodecodes.ins
+local mark_code = nodecodes.mark
+
+local a_migrated = attributes.private("migrated")
+
+local trace_migrations = false trackers.register("nodes.migrations", function(v) trace_migrations = v end)
+
+local report_nodes = logs.reporter("nodes","migrations")
+
+local migrate_inserts, migrate_marks, inserts_too
+
+local t_inserts, t_marks, t_sweeps = 0, 0, 0
+
+local function locate(head,first,last,ni,nm)
+ local current = head
+ while current do
+ local id = current.id
+ if id == vlist_code or id == hlist_code then
+ current.list, first, last, ni, nm = locate(current.list,first,last,ni,nm)
+ current = current.next
+ elseif migrate_inserts and id == insert_code then
+ local insert
+ head, current, insert = remove_nodes(head,current)
+ insert.next = nil
+ if first then
+ insert.prev, last.next = last, insert
+ else
+ insert.prev, first = nil, insert
+ end
+ last, ni = insert, ni + 1
+ elseif migrate_marks and id == mark_code then
+ local mark
+ head, current, mark = remove_nodes(head,current)
+ mark.next = nil
+ if first then
+ mark.prev, last.next = last, mark
+ else
+ mark.prev, first = nil, mark
+ end
+ last, nm = mark, nm + 1
+ else
+ current = current.next
+ end
+ end
+ return head, first, last, ni, nm
+end
+
+function nodes.handlers.migrate(head,where)
+ local done = false
+ if head then
+ if trace_migrations then
+ report_nodes("migration sweep %a",where)
+ end
+ local current = head
+ while current do
+ local id = current.id
+ -- inserts_too is a temp hack, we should only do them when it concerns
+ -- newly placed (flushed) inserts
+ if (id == vlist_code or id == hlist_code or (inserts_too and id == insert_code)) and not current[a_migrated] then
+ current[a_migrated] = 1
+ t_sweeps = t_sweeps + 1
+ local h = current.list
+ local first, last, ni, nm
+ while h do
+ local id = h.id
+ if id == vlist_code or id == hlist_code then
+ h, first, last, ni, nm = locate(h,first,last,0,0)
+ end
+ h = h.next
+ end
+ if first then
+ t_inserts, t_marks = t_inserts + ni, t_marks + nm
+ if trace_migrations and (ni > 0 or nm > 0) then
+ report_nodes("sweep %a, container %a, %s inserts and %s marks migrated outwards during %a",
+ t_sweeps,nodecodes[id],ni,nm,where)
+ end
+ -- inserts after head
+ local n = current.next
+ if n then
+ last.next, n.prev = n, last
+ end
+ current.next, first.prev = first, current
+ done, current = true, last
+ end
+ end
+ current = current.next
+ end
+ return head, done
+ end
+end
+
+-- for the moment this way, this will disappear
+
+experiments.register("marks.migrate", function(v)
+ if v then
+ tasks.enableaction("mvlbuilders", "nodes.handlers.migrate")
+ end
+ migrate_marks = v
+end)
+
+experiments.register("inserts.migrate", function(v)
+ if v then
+ tasks.enableaction("mvlbuilders", "nodes.handlers.migrate")
+ end
+ migrate_inserts = v
+end)
+
+experiments.register("inserts.migrate.nested", function(v)
+ if v then
+ tasks.enableaction("mvlbuilders", "nodes.handlers.migrate")
+ end
+ inserts_too = v
+end)
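+
+-- A sketch of how these get switched on (the keys are the ones registered
+-- above; in ConTeXt this is normally driven from the TeX end via
+-- \enableexperiments):
+--
+--   experiments.enable("marks.migrate")
+--   experiments.enable("inserts.migrate")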
+
+statistics.register("node migrations", function()
+ if trace_migrations and t_sweeps > 0 then
+ return format("%s sweeps, %s inserts moved, %s marks moved",t_sweeps,t_inserts,t_marks)
+ end
+end)
diff --git a/tex/context/base/node-pag.lua b/tex/context/base/node-pag.lua
index 9b8202042..47eba4eeb 100644
--- a/tex/context/base/node-pag.lua
+++ b/tex/context/base/node-pag.lua
@@ -1,30 +1,30 @@
-if not modules then modules = { } end modules ['node-pag'] = {
- version = 1.001,
- comment = "companion to node-pag.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- this callback might disappear and come back in the same way
--- as par builders
-
-pagebuilders = pagebuilders or { }
-
-local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming
-
-local actions = nodes.tasks.actions("pagebuilders")
-
-local function processor(head,groupcode,size,packtype,maxdepth,direction)
- starttiming(pagebuilders)
- local _, done = actions(head,groupcode,size,packtype,maxdepth,direction)
- stoptiming(pagebuilders)
- return (done and head) or true
--- return vpack(head)
-end
-
---~ callbacks.register('pre_output_filter', processor, "preparing output box")
-
---~ statistics.register("output preparation time", function()
---~ return statistics.elapsedseconds(pagebuilders)
---~ end)
+if not modules then modules = { } end modules ['node-pag'] = {
+ version = 1.001,
+ comment = "companion to node-pag.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- this callback might disappear and come back in the same way
+-- as par builders
+
+pagebuilders = pagebuilders or { }
+
+local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming
+
+local actions = nodes.tasks.actions("pagebuilders")
+
+local function processor(head,groupcode,size,packtype,maxdepth,direction)
+ starttiming(pagebuilders)
+ local _, done = actions(head,groupcode,size,packtype,maxdepth,direction)
+ stoptiming(pagebuilders)
+ return (done and head) or true
+-- return vpack(head)
+end
+
+--~ callbacks.register('pre_output_filter', processor, "preparing output box")
+
+--~ statistics.register("output preparation time", function()
+--~ return statistics.elapsedseconds(pagebuilders)
+--~ end)
diff --git a/tex/context/base/node-pro.lua b/tex/context/base/node-pro.lua
index 60f2d8a72..6b0829e5e 100644
--- a/tex/context/base/node-pro.lua
+++ b/tex/context/base/node-pro.lua
@@ -1,165 +1,165 @@
-if not modules then modules = { } end modules ['node-pro'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local utfchar = utf.char
-local format, concat = string.format, table.concat
-
-local trace_callbacks = false trackers.register("nodes.callbacks", function(v) trace_callbacks = v end)
-
-local report_nodes = logs.reporter("nodes","processors")
-
-local nodes, node = nodes, node
-
-local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
-local tasks = nodes.tasks
-
-local free_node = node.free
-local first_glyph = node.first_glyph or node.first_character
-local has_attribute = node.has_attribute
-
-nodes.processors = nodes.processors or { }
-local processors = nodes.processors
-
--- vbox: grouptype: vbox vtop output split_off split_keep | box_type: exactly|aditional
--- hbox: grouptype: hbox adjusted_hbox(=hbox_in_vmode) | box_type: exactly|aditional
-
-local actions = tasks.actions("processors")
-
-local n = 0
-
-local function reconstruct(head) -- we probably have a better one
- local t, n, h = { }, 0, head
- while h do
- n = n + 1
- local id = h.id
- if id == glyph_code then -- todo: disc etc
- t[n] = utfchar(h.char)
- else
- t[n] = "[]"
- end
- h = h.next
- end
- return concat(t)
-end
-
-local function tracer(what,state,head,groupcode,before,after,show)
- if not groupcode then
- groupcode = "unknown"
- elseif groupcode == "" then
- groupcode = "mvl"
- end
- n = n + 1
- if show then
- report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s, stream: %s",what,n,state,groupcode,before,after,reconstruct(head))
- else
- report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s",what,n,state,groupcode,before,after)
- end
-end
-
-processors.tracer = tracer
-
-processors.enabled = true -- this will become a proper state (like trackers)
-
-function processors.pre_linebreak_filter(head,groupcode,size,packtype,direction)
- local first, found = first_glyph(head) -- they really need to be glyphs
- if found then
- if trace_callbacks then
- local before = nodes.count(head,true)
- local head, done = actions(head,groupcode,size,packtype,direction) -- todo : pass first
- local after = nodes.count(head,true)
- if done then
- tracer("pre_linebreak","changed",head,groupcode,before,after,true)
- else
- tracer("pre_linebreak","unchanged",head,groupcode,before,after,true)
- end
- return done and head or true
- else
- local head, done = actions(head,groupcode,size,packtype,direction) -- todo : pass first
- return done and head or true
- end
- elseif trace_callbacks then
- local n = nodes.count(head,false)
- tracer("pre_linebreak","no chars",head,groupcode,n,n)
- end
- return true
-end
-
-local enabled = true
-
-function processors.hpack_filter(head,groupcode,size,packtype,direction)
- if enabled then
- local first, found = first_glyph(head) -- they really need to be glyphs
- if found then
- if trace_callbacks then
- local before = nodes.count(head,true)
- local head, done = actions(head,groupcode,size,packtype,direction)
- local after = nodes.count(head,true)
- if done then
- tracer("hpack","changed",head,groupcode,before,after,true)
- else
- tracer("hpack","unchanged",head,groupcode,before,after,true)
- end
- return done and head or true
- else
- local head, done = actions(head,groupcode,size,packtype,direction)
- return done and head or true
- end
- elseif trace_callbacks then
- local n = nodes.count(head,false)
- tracer("hpack","no chars",head,groupcode,n,n)
- end
- end
- return true
-end
-
-local hpack = node.hpack
-
-function nodes.fasthpack(...) -- todo: pass explicit arguments
- enabled = false
- local hp, b = hpack(...)
- hp.prev = nil
- hp.next = nil
- enabled = true
- return hp, b
-end
-
-callbacks.register('pre_linebreak_filter', processors.pre_linebreak_filter, "all kind of horizontal manipulations (before par break)")
-callbacks.register('hpack_filter' , processors.hpack_filter, "all kind of horizontal manipulations (before hbox creation)")
-
-local actions = tasks.actions("finalizers") -- head, where
-
--- beware, these are packaged boxes so no first_glyph test
--- maybe some day a hash with valid groupcodes
---
--- beware, much can pass twice, for instance vadjust passes two times
---
--- something weird here .. group mvl when making a vbox
-
-function processors.post_linebreak_filter(head,groupcode)
- if trace_callbacks then
- local before = nodes.count(head,true)
- local head, done = actions(head,groupcode)
- local after = nodes.count(head,true)
- if done then
- tracer("post_linebreak","changed",head,groupcode,before,after,true)
- else
- tracer("post_linebreak","unchanged",head,groupcode,before,after,true)
- end
- return done and head or true
- else
- local head, done = actions(head,groupcode)
- return done and head or true
- end
-end
-
-callbacks.register('post_linebreak_filter', processors.post_linebreak_filter,"all kind of horizontal manipulations (after par break)")
-
-statistics.register("h-node processing time", function()
- return statistics.elapsedseconds(nodes,"including kernel") -- hm, ok here?
-end)
+if not modules then modules = { } end modules ['node-pro'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local utfchar = utf.char
+local format, concat = string.format, table.concat
+
+local trace_callbacks = false trackers.register("nodes.callbacks", function(v) trace_callbacks = v end)
+
+local report_nodes = logs.reporter("nodes","processors")
+
+local nodes, node = nodes, node
+
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
+local tasks = nodes.tasks
+
+local free_node = node.free
+local first_glyph = node.first_glyph or node.first_character
+local has_attribute = node.has_attribute
+
+nodes.processors = nodes.processors or { }
+local processors = nodes.processors
+
+-- vbox: grouptype: vbox vtop output split_off split_keep | box_type: exactly|additional
+-- hbox: grouptype: hbox adjusted_hbox(=hbox_in_vmode) | box_type: exactly|additional
+
+local actions = tasks.actions("processors")
+
+local n = 0
+
+local function reconstruct(head) -- we probably have a better one
+ local t, n, h = { }, 0, head
+ while h do
+ n = n + 1
+ local id = h.id
+ if id == glyph_code then -- todo: disc etc
+ t[n] = utfchar(h.char)
+ else
+ t[n] = "[]"
+ end
+ h = h.next
+ end
+ return concat(t)
+end
+
+local function tracer(what,state,head,groupcode,before,after,show)
+ if not groupcode then
+ groupcode = "unknown"
+ elseif groupcode == "" then
+ groupcode = "mvl"
+ end
+ n = n + 1
+ if show then
+ report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s, stream: %s",what,n,state,groupcode,before,after,reconstruct(head))
+ else
+ report_nodes("%s: location %a, state %a, group %a, # before %a, # after %s",what,n,state,groupcode,before,after)
+ end
+end
+
+processors.tracer = tracer
+
+processors.enabled = true -- this will become a proper state (like trackers)
+
+function processors.pre_linebreak_filter(head,groupcode,size,packtype,direction)
+ local first, found = first_glyph(head) -- they really need to be glyphs
+ if found then
+ if trace_callbacks then
+ local before = nodes.count(head,true)
+ local head, done = actions(head,groupcode,size,packtype,direction) -- todo : pass first
+ local after = nodes.count(head,true)
+ if done then
+ tracer("pre_linebreak","changed",head,groupcode,before,after,true)
+ else
+ tracer("pre_linebreak","unchanged",head,groupcode,before,after,true)
+ end
+ return done and head or true
+ else
+ local head, done = actions(head,groupcode,size,packtype,direction) -- todo : pass first
+ return done and head or true
+ end
+ elseif trace_callbacks then
+ local n = nodes.count(head,false)
+ tracer("pre_linebreak","no chars",head,groupcode,n,n)
+ end
+ return true
+end
+
+local enabled = true
+
+function processors.hpack_filter(head,groupcode,size,packtype,direction)
+ if enabled then
+ local first, found = first_glyph(head) -- they really need to be glyphs
+ if found then
+ if trace_callbacks then
+ local before = nodes.count(head,true)
+ local head, done = actions(head,groupcode,size,packtype,direction)
+ local after = nodes.count(head,true)
+ if done then
+ tracer("hpack","changed",head,groupcode,before,after,true)
+ else
+ tracer("hpack","unchanged",head,groupcode,before,after,true)
+ end
+ return done and head or true
+ else
+ local head, done = actions(head,groupcode,size,packtype,direction)
+ return done and head or true
+ end
+ elseif trace_callbacks then
+ local n = nodes.count(head,false)
+ tracer("hpack","no chars",head,groupcode,n,n)
+ end
+ end
+ return true
+end
+
+local hpack = node.hpack
+
+function nodes.fasthpack(...) -- todo: pass explicit arguments
+ enabled = false
+ local hp, b = hpack(...)
+ hp.prev = nil
+ hp.next = nil
+ enabled = true
+ return hp, b
+end
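+
+-- Usage sketch: pack a list without triggering the filters above ('list' is
+-- a hypothetical node list):
+--
+--   local box, badness = nodes.fasthpack(list,tex.hsize,"exactly")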
+
+callbacks.register('pre_linebreak_filter', processors.pre_linebreak_filter, "all kinds of horizontal manipulations (before par break)")
+callbacks.register('hpack_filter' , processors.hpack_filter, "all kinds of horizontal manipulations (before hbox creation)")
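+
+-- Tracing of these passes is controlled by the tracker registered at the top
+-- of this file:
+--
+--   trackers.enable("nodes.callbacks")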
+
+local actions = tasks.actions("finalizers") -- head, where
+
+-- beware, these are packaged boxes so no first_glyph test
+-- maybe some day a hash with valid groupcodes
+--
+-- beware, much can pass twice, for instance vadjust passes two times
+--
+-- something weird here .. group mvl when making a vbox
+
+function processors.post_linebreak_filter(head,groupcode)
+ if trace_callbacks then
+ local before = nodes.count(head,true)
+ local head, done = actions(head,groupcode)
+ local after = nodes.count(head,true)
+ if done then
+ tracer("post_linebreak","changed",head,groupcode,before,after,true)
+ else
+ tracer("post_linebreak","unchanged",head,groupcode,before,after,true)
+ end
+ return done and head or true
+ else
+ local head, done = actions(head,groupcode)
+ return done and head or true
+ end
+end
+
+callbacks.register('post_linebreak_filter', processors.post_linebreak_filter,"all kinds of horizontal manipulations (after par break)")
+
+statistics.register("h-node processing time", function()
+ return statistics.elapsedseconds(nodes,"including kernel") -- hm, ok here?
+end)
diff --git a/tex/context/base/node-ref.lua b/tex/context/base/node-ref.lua
index 09e066434..cd46cd2dd 100644
--- a/tex/context/base/node-ref.lua
+++ b/tex/context/base/node-ref.lua
@@ -1,585 +1,585 @@
-if not modules then modules = { } end modules ['node-ref'] = {
- version = 1.001,
- comment = "companion to node-ref.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- We supported pdf right from the start and in mkii this has resulted in
--- extensive control over the links. Nowadays pdftex provides a lot more
--- control over margins but as mkii supports multiple backends we stuck to
--- our own mechanisms. In mkiv again we implement our own handling. Eventually
--- we will even disable the pdf primitives.
-
--- helper, will end up in luatex
-
--- is grouplevel still used?
-
-local format = string.format
-
-local allocate, mark = utilities.storage.allocate, utilities.storage.mark
-
-local cleanupreferences, cleanupdestinations = false, true
-
-local attributes, nodes, node = attributes, nodes, node
-
-local nodeinjections = backends.nodeinjections
-local codeinjections = backends.codeinjections
-
-local transparencies = attributes.transparencies
-local colors = attributes.colors
-local references = structures.references
-local tasks = nodes.tasks
-
-local hpack_list = node.hpack
-local list_dimensions = node.dimensions
-
--- current.glue_set current.glue_sign
-
-local trace_backend = false trackers.register("nodes.backend", function(v) trace_backend = v end)
-local trace_references = false trackers.register("nodes.references", function(v) trace_references = v end)
-local trace_destinations = false trackers.register("nodes.destinations", function(v) trace_destinations = v end)
-
-local report_reference = logs.reporter("backend","references")
-local report_destination = logs.reporter("backend","destinations")
-local report_area = logs.reporter("backend","areas")
-
-local nodecodes = nodes.nodecodes
-local skipcodes = nodes.skipcodes
-local whatcodes = nodes.whatcodes
-local listcodes = nodes.listcodes
-
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local glue_code = nodecodes.glue
-local whatsit_code = nodecodes.whatsit
-
-local leftskip_code = skipcodes.leftskip
-local rightskip_code = skipcodes.rightskip
-local parfillskip_code = skipcodes.parfillskip
-
-local localpar_code = whatcodes.localpar
-local dir_code = whatcodes.dir
-
-local line_code = listcodes.line
-
-local nodepool = nodes.pool
-
-local new_kern = nodepool.kern
-
-local traverse = node.traverse
-local find_node_tail = node.tail or node.slide
-local tosequence = nodes.tosequence
-
--- local function dimensions(parent,start,stop)
--- stop = stop and stop.next
--- if parent then
--- if stop then
--- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start,stop)
--- else
--- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start)
--- end
--- else
--- if stop then
--- return list_dimensions(start,stop)
--- else
--- return list_dimensions(start)
--- end
--- end
--- end
---
--- -- more compact
-
-local function dimensions(parent,start,stop)
- if parent then
- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start,stop and stop.next)
- else
- return list_dimensions(start,stop and stop.next)
- end
-end
-
--- is pardir important at all?
-
-local function inject_range(head,first,last,reference,make,stack,parent,pardir,txtdir)
- local width, height, depth = dimensions(parent,first,last)
- if txtdir == "+TRT" or (txtdir == "===" and pardir == "TRT") then -- KH: textdir == "===" test added
- width = - width
- end
- local result, resolved = make(width,height,depth,reference)
- if result and resolved then
- if head == first then
- if trace_backend then
- report_area("head: %04i %s %s %s => w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved)
- end
- result.next = first
- first.prev = result
- return result, last
- else
- if trace_backend then
- report_area("middle: %04i %s %s => w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved)
- end
- local prev = first.prev
- if prev then
- result.next = first
- result.prev = prev
- prev.next = result
- first.prev = result
- else
- result.next = first
- first.prev = result
- end
- if first == head.next then
- head.next = result -- hm, weird
- end
- return head, last
- end
- else
- return head, last
- end
-end
-
-local function inject_list(id,current,reference,make,stack,pardir,txtdir)
- local width, height, depth, correction = current.width, current.height, current.depth, 0
- local moveright = false
- local first = current.list
- if id == hlist_code then -- box_code line_code
- -- can be either an explicit hbox or a line and there is no way
- -- to recognize this; anyway only if ht/dp (then inline)
- local sr = stack[reference]
- if first then
- if sr and sr[2] then
- local last = find_node_tail(first)
- if last.id == glue_code and last.subtype == rightskip_code then
- local prev = last.prev
- moveright = first.id == glue_code and first.subtype == leftskip_code
- if prev and prev.id == glue_code and prev.subtype == parfillskip_code then
- width = dimensions(current,first,prev.prev) -- maybe not current as we already take care of it
- else
- if moveright and first.writable then
- width = width - first.spec.stretch*current.glue_set * current.glue_sign
- end
- if last.writable then
- width = width - last.spec.stretch*current.glue_set * current.glue_sign
- end
- end
- end
- else
- -- also weird
- end
- else
- -- ok
- end
- correction = width
- else
- correction = height + depth
- height, depth = depth, height -- ugly hack, needed because pdftex backend does something funny
- end
- if pardir == "TRT" then
- width = - width
- end
- local result, resolved = make(width,height,depth,reference)
- -- todo: only when width is ok
- if result and resolved then
- if trace_backend then
- report_area("box: %04i %s %s: w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",width,height,depth,resolved)
- end
- if not first then
- current.list = result
- elseif moveright then -- brr no prevs done
- -- result after first
- local n = first.next
- result.next = n
- first.next = result
- result.prev = first
- if n then n.prev = result end
- else
- -- first after result
- result.next = first
- first.prev = result
- current.list = result
- end
- end
-end
-
--- skip is somewhat messy
-
-local function inject_areas(head,attribute,make,stack,done,skip,parent,pardir,txtdir) -- main
- if head then
- local current, first, last, firstdir, reference = head, nil, nil, nil, nil
- pardir = pardir or "==="
- txtdir = txtdir or "==="
- while current do
- local id = current.id
- if id == hlist_code or id == vlist_code then
- local r = current[attribute]
- -- somehow reference is true so the following fails (second one not done) in
- -- test \goto{test}[page(2)] test \gotobox{test}[page(2)]
- -- so let's wait till this fails again
- -- if not reference and r and (not skip or r > skip) then -- > or ~=
- if r and (not skip or r > skip) then -- > or ~=
- inject_list(id,current,r,make,stack,pardir,txtdir)
- end
- if r then
- done[r] = (done[r] or 0) + 1
- end
- local list = current.list
- if list then
- local _
- current.list, _, pardir, txtdir = inject_areas(list,attribute,make,stack,done,r or skip or 0,current,pardir,txtdir)
- end
- if r then
- done[r] = done[r] - 1
- end
- elseif id == whatsit_code then
- local subtype = current.subtype
- if subtype == localpar_code then
- pardir = current.dir
- elseif subtype == dir_code then
- txtdir = current.dir
- end
- elseif id == glue_code and current.subtype == leftskip_code then -- any glue at the left?
- --
- else
- local r = current[attribute]
- if not r then
- -- just go on, can be kerns
- elseif not reference then
- reference, first, last, firstdir = r, current, current, txtdir
- elseif r == reference then
- last = current
- elseif (done[reference] or 0) == 0 then -- or id == glue_code and current.subtype == right_skip_code
- if not skip or r > skip then -- maybe no > test
- head, current = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
- reference, first, last, firstdir = nil, nil, nil, nil
- end
- else
- reference, first, last, firstdir = r, current, current, txtdir
- end
- end
- current = current.next
- end
- if reference and (done[reference] or 0) == 0 then
- head = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
- end
- end
- return head, true, pardir, txtdir
-end
-
-local function inject_area(head,attribute,make,stack,done,parent,pardir,txtdir) -- singular !
- if head then
- pardir = pardir or "==="
- txtdir = txtdir or "==="
- local current = head
- while current do
- local id = current.id
- if id == hlist_code or id == vlist_code then
- local r = current[attribute]
- if r and not done[r] then
- done[r] = true
- inject_list(id,current,r,make,stack,pardir,txtdir)
- end
- local list = current.list
- if list then
- current.list = inject_area(list,attribute,make,stack,done,current,pardir,txtdir)
- end
- elseif id == whatsit_code then
- local subtype = current.subtype
- if subtype == localpar_code then
- pardir = current.dir
- elseif subtype == dir_code then
- txtdir = current.dir
- end
- else
- local r = current[attribute]
- if r and not done[r] then
- done[r] = true
- head, current = inject_range(head,current,current,r,make,stack,parent,pardir,txtdir)
- end
- end
- current = current.next
- end
- end
- return head, true
-end
-
--- tracing
-
-local nodepool = nodes.pool
-
-local new_rule = nodepool.rule
-local new_kern = nodepool.kern
-
-local set_attribute = node.set_attribute
-local register_color = colors.register
-
-local a_color = attributes.private('color')
-local a_colormodel = attributes.private('colormodel')
-local a_transparency = attributes.private('transparency')
-local u_transparency = nil
-local u_colors = { }
-local force_gray = true
-
-local function colorize(width,height,depth,n,reference,what)
- if force_gray then n = 0 end
- u_transparency = u_transparency or transparencies.register(nil,2,.65)
- local u_color = u_colors[n]
- if not u_color then
- if n == 1 then
- u_color = register_color(nil,'rgb',.75,0,0)
- elseif n == 2 then
- u_color = register_color(nil,'rgb',0,.75,0)
- elseif n == 3 then
- u_color = register_color(nil,'rgb',0,0,.75)
- else
- n = 0
- u_color = register_color(nil,'gray',.5)
- end
- u_colors[n] = u_color
- end
- if width == 0 then
- -- probably a strut as placeholder
- report_area("%s %s has no %s dimensions, width %p, height %p, depth %p",what,reference,"horizontal",width,height,depth)
- width = 65536
- end
- if height + depth <= 0 then
- report_area("%s %s has no %s dimensions, width %p, height %p, depth %p",what,reference,"vertical",width,height,depth)
- height = 65536/2
- depth = height
- end
- local rule = new_rule(width,height,depth)
- rule[a_colormodel] = 1 -- gray color model
- rule[a_color] = u_color
- rule[a_transparency] = u_transparency
- if width < 0 then
- local kern = new_kern(width)
- rule.width = -width
- kern.next = rule
- rule.prev = kern
- return kern
- else
- return rule
- end
-end
-
-local nodepool = nodes.pool
-
-local new_kern = nodepool.kern
-
-local texattribute = tex.attribute
-local texcount = tex.count
-
--- references:
-
-local stack = { }
-local done = { }
-local attribute = attributes.private('reference')
-local nofreferences = 0
-local topofstack = 0
-
-nodes.references = {
- attribute = attribute,
- stack = stack,
- done = done,
-}
-
--- todo: get rid of n (n is just a number, can be used for tracing, obsolete)
-
-local function setreference(h,d,r)
- topofstack = topofstack + 1
- -- the preroll permits us to determine samepage (but delayed also has some advantages)
- -- so some part of the backend work is already done here
- stack[topofstack] = { r, h, d, codeinjections.prerollreference(r) }
- -- texattribute[attribute] = topofstack -- todo -> at tex end
- texcount.lastreferenceattribute = topofstack
-end
-
-function references.get(n) -- not public so functionality can change
- local sn = stack[n]
- return sn and sn[1]
-end
-
-local function makereference(width,height,depth,reference)
- local sr = stack[reference]
- if sr then
- if trace_references then
- report_reference("resolving attribute %a",reference)
- end
- local resolved, ht, dp, set, n = sr[1], sr[2], sr[3], sr[4], sr[5]
- if ht then
- if height < ht then height = ht end
- if depth < dp then depth = dp end
- end
- local annot = nodeinjections.reference(width,height,depth,set)
- if annot then
- nofreferences = nofreferences + 1
- local result, current
- if trace_references then
- local step = 65536
- result = hpack_list(colorize(width,height-step,depth-step,2,reference,"reference")) -- step subtracted so that we can see separate links
- result.width = 0
- current = result
- end
- if current then
- current.next = annot
- else
- result = annot
- end
- references.registerpage(n)
- result = hpack_list(result,0)
- result.width, result.height, result.depth = 0, 0, 0
- if cleanupreferences then stack[reference] = nil end
- return result, resolved
- elseif trace_references then
- report_reference("unable to resolve annotation %a",reference)
- end
- elseif trace_references then
- report_reference("unable to resolve attribute %a",reference)
- end
-end
-
-function nodes.references.handler(head)
- if topofstack > 0 then
- return inject_areas(head,attribute,makereference,stack,done)
- else
- return head, false
- end
-end
-
--- destinations (we can clean up once set, unless tagging!)
-
-local stack = { }
-local done = { }
-local attribute = attributes.private('destination')
-local nofdestinations = 0
-local topofstack = 0
-
-nodes.destinations = {
- attribute = attribute,
- stack = stack,
- done = done,
-}
-
-local function setdestination(n,h,d,name,view) -- n = grouplevel, name == table
- topofstack = topofstack + 1
- stack[topofstack] = { n, h, d, name, view }
- return topofstack
-end
-
-local function makedestination(width,height,depth,reference)
- local sr = stack[reference]
- if sr then
- if trace_destinations then
- report_destination("resolving attribute %a",reference)
- end
- local resolved, ht, dp, name, view = sr[1], sr[2], sr[3], sr[4], sr[5]
- if ht then
- if height < ht then height = ht end
- if depth < dp then depth = dp end
- end
- local result, current
- if trace_destinations then
- local step = 0
- if width == 0 then
- step = 4*65536
- width, height, depth = 5*step, 5*step, 0
- end
- for n=1,#name do
- local rule = hpack_list(colorize(width,height,depth,3,reference,"destination"))
- rule.width = 0
- if not result then
- result, current = rule, rule
- else
- current.next = rule
- rule.prev = current
- current = rule
- end
- width, height = width - step, height - step
- end
- end
- nofdestinations = nofdestinations + 1
- for n=1,#name do
- local annot = nodeinjections.destination(width,height,depth,name[n],view)
- if not result then
- result = annot
- else
- current.next = annot
- annot.prev = current
- end
- current = find_node_tail(annot)
- end
- if result then
- -- some internal error
- result = hpack_list(result,0)
- result.width, result.height, result.depth = 0, 0, 0
- end
- if cleanupdestinations then stack[reference] = nil end
- return result, resolved
- elseif trace_destinations then
- report_destination("unable to resolve attribute %a",reference)
- end
-end
-
-function nodes.destinations.handler(head)
- if topofstack > 0 then
- return inject_area(head,attribute,makedestination,stack,done) -- singular
- else
- return head, false
- end
-end
-
--- will move
-
-function references.mark(reference,h,d,view)
- return setdestination(tex.currentgrouplevel,h,d,reference,view)
-end
-
-function references.inject(prefix,reference,h,d,highlight,newwindow,layer) -- todo: use currentreference if possible
- local set, bug = references.identify(prefix,reference)
- if bug or #set == 0 then
- -- unknown ref, just don't set it and issue an error
- else
- -- check
- set.highlight, set.newwindow,set.layer = highlight, newwindow, layer
- setreference(h,d,set) -- sets attribute / todo: for set[*].error
- end
-end
-
-function references.injectcurrentset(h,d) -- used inside doifelse
- local currentset = references.currentset
- if currentset then
- setreference(h,d,currentset) -- sets attribute / todo: for set[*].error
- end
-end
-
-commands.injectreference = references.inject
-commands.injectcurrentreference = references.injectcurrentset
-
---
-
-local function checkboth(open,close)
- if open and open ~= "" then
- local set, bug = references.identify("",open)
- open = not bug and #set > 0 and set
- end
- if close and close ~= "" then
- local set, bug = references.identify("",close)
- close = not bug and #set > 0 and set
- end
- return open, close
-end
-
--- end temp hack
-
-statistics.register("interactive elements", function()
- if nofreferences > 0 or nofdestinations > 0 then
- return format("%s references, %s destinations",nofreferences,nofdestinations)
- else
- return nil
- end
-end)
-
-function references.enableinteraction()
- tasks.enableaction("shipouts","nodes.references.handler")
- tasks.enableaction("shipouts","nodes.destinations.handler")
-end
+if not modules then modules = { } end modules ['node-ref'] = {
+ version = 1.001,
+ comment = "companion to node-ref.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- We supported pdf right from the start and in mkii this has resulted in
+-- extensive control over the links. Nowadays pdftex provides a lot more
+-- control over margins but as mkii supports multiple backends we stuck to
+-- our own mechanisms. In mkiv again we implement our own handling. Eventually
+-- we will even disable the pdf primitives.
+
+-- helper, will end up in luatex
+
+-- is grouplevel still used?
+
+local format = string.format
+
+local allocate, mark = utilities.storage.allocate, utilities.storage.mark
+
+local cleanupreferences, cleanupdestinations = false, true
+
+local attributes, nodes, node = attributes, nodes, node
+
+local nodeinjections = backends.nodeinjections
+local codeinjections = backends.codeinjections
+
+local transparencies = attributes.transparencies
+local colors = attributes.colors
+local references = structures.references
+local tasks = nodes.tasks
+
+local hpack_list = node.hpack
+local list_dimensions = node.dimensions
+
+-- current.glue_set current.glue_sign
+
+local trace_backend = false trackers.register("nodes.backend", function(v) trace_backend = v end)
+local trace_references = false trackers.register("nodes.references", function(v) trace_references = v end)
+local trace_destinations = false trackers.register("nodes.destinations", function(v) trace_destinations = v end)
+
+local report_reference = logs.reporter("backend","references")
+local report_destination = logs.reporter("backend","destinations")
+local report_area = logs.reporter("backend","areas")
+
+local nodecodes = nodes.nodecodes
+local skipcodes = nodes.skipcodes
+local whatcodes = nodes.whatcodes
+local listcodes = nodes.listcodes
+
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local glue_code = nodecodes.glue
+local whatsit_code = nodecodes.whatsit
+
+local leftskip_code = skipcodes.leftskip
+local rightskip_code = skipcodes.rightskip
+local parfillskip_code = skipcodes.parfillskip
+
+local localpar_code = whatcodes.localpar
+local dir_code = whatcodes.dir
+
+local line_code = listcodes.line
+
+local nodepool = nodes.pool
+
+local new_kern = nodepool.kern
+
+local traverse = node.traverse
+local find_node_tail = node.tail or node.slide
+local tosequence = nodes.tosequence
+
+-- local function dimensions(parent,start,stop)
+-- stop = stop and stop.next
+-- if parent then
+-- if stop then
+-- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start,stop)
+-- else
+-- return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start)
+-- end
+-- else
+-- if stop then
+-- return list_dimensions(start,stop)
+-- else
+-- return list_dimensions(start)
+-- end
+-- end
+-- end
+--
+-- -- more compact
+
+local function dimensions(parent,start,stop)
+ if parent then
+ return list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,start,stop and stop.next)
+ else
+ return list_dimensions(start,stop and stop.next)
+ end
+end
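+
+-- passing the parent makes node.dimensions use the parent's glue_set,
+-- glue_sign and glue_order, so glue inside an already packed box is
+-- measured at its effective (stretched or shrunk) width instead of its
+-- natural width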
+
+-- is pardir important at all?
+
+local function inject_range(head,first,last,reference,make,stack,parent,pardir,txtdir)
+ local width, height, depth = dimensions(parent,first,last)
+ if txtdir == "+TRT" or (txtdir == "===" and pardir == "TRT") then -- KH: textdir == "===" test added
+ width = - width
+ end
+ local result, resolved = make(width,height,depth,reference)
+ if result and resolved then
+ if head == first then
+ if trace_backend then
+ report_area("head: %04i %s %s %s => w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved)
+ end
+ result.next = first
+ first.prev = result
+ return result, last
+ else
+ if trace_backend then
+ report_area("middle: %04i %s %s => w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",tosequence(first,last,true),width,height,depth,resolved)
+ end
+ local prev = first.prev
+ if prev then
+ result.next = first
+ result.prev = prev
+ prev.next = result
+ first.prev = result
+ else
+ result.next = first
+ first.prev = result
+ end
+ if first == head.next then
+ head.next = result -- hm, weird
+ end
+ return head, last
+ end
+ else
+ return head, last
+ end
+end
+
+local function inject_list(id,current,reference,make,stack,pardir,txtdir)
+ local width, height, depth, correction = current.width, current.height, current.depth, 0
+ local moveright = false
+ local first = current.list
+ if id == hlist_code then -- box_code line_code
+ -- can be either an explicit hbox or a line and there is no way
+ -- to recognize this; anyway only if ht/dp (then inline)
+ local sr = stack[reference]
+ if first then
+ if sr and sr[2] then
+ local last = find_node_tail(first)
+ if last.id == glue_code and last.subtype == rightskip_code then
+ local prev = last.prev
+ moveright = first.id == glue_code and first.subtype == leftskip_code
+ if prev and prev.id == glue_code and prev.subtype == parfillskip_code then
+ width = dimensions(current,first,prev.prev) -- maybe not current as we already take care of it
+ else
+ if moveright and first.writable then
+ width = width - first.spec.stretch*current.glue_set * current.glue_sign
+ end
+ if last.writable then
+ width = width - last.spec.stretch*current.glue_set * current.glue_sign
+ end
+ end
+ end
+ else
+ -- also weird
+ end
+ else
+ -- ok
+ end
+ correction = width
+ else
+ correction = height + depth
+ height, depth = depth, height -- ugly hack, needed because pdftex backend does something funny
+ end
+ if pardir == "TRT" then
+ width = - width
+ end
+ local result, resolved = make(width,height,depth,reference)
+ -- todo: only when width is ok
+ if result and resolved then
+ if trace_backend then
+ report_area("box: %04i %s %s: w=%p, h=%p, d=%p, c=%s",reference,pardir or "---",txtdir or "----",width,height,depth,resolved)
+ end
+ if not first then
+ current.list = result
+ elseif moveright then -- brr no prevs done
+ -- result after first
+ local n = first.next
+ result.next = n
+ first.next = result
+ result.prev = first
+ if n then n.prev = result end
+ else
+ -- first after result
+ result.next = first
+ first.prev = result
+ current.list = result
+ end
+ end
+end
+
+-- skip is somewhat messy
+
+local function inject_areas(head,attribute,make,stack,done,skip,parent,pardir,txtdir) -- main
+ if head then
+ local current, first, last, firstdir, reference = head, nil, nil, nil, nil
+ pardir = pardir or "==="
+ txtdir = txtdir or "==="
+ while current do
+ local id = current.id
+ if id == hlist_code or id == vlist_code then
+ local r = current[attribute]
+ -- somehow reference is true so the following fails (second one not done) in
+ -- test \goto{test}[page(2)] test \gotobox{test}[page(2)]
+ -- so let's wait till this fails again
+ -- if not reference and r and (not skip or r > skip) then -- > or ~=
+ if r and (not skip or r > skip) then -- > or ~=
+ inject_list(id,current,r,make,stack,pardir,txtdir)
+ end
+ if r then
+ done[r] = (done[r] or 0) + 1
+ end
+ local list = current.list
+ if list then
+ local _
+ current.list, _, pardir, txtdir = inject_areas(list,attribute,make,stack,done,r or skip or 0,current,pardir,txtdir)
+ end
+ if r then
+ done[r] = done[r] - 1
+ end
+ elseif id == whatsit_code then
+ local subtype = current.subtype
+ if subtype == localpar_code then
+ pardir = current.dir
+ elseif subtype == dir_code then
+ txtdir = current.dir
+ end
+ elseif id == glue_code and current.subtype == leftskip_code then -- any glue at the left?
+ --
+ else
+ local r = current[attribute]
+ if not r then
+ -- just go on, can be kerns
+ elseif not reference then
+ reference, first, last, firstdir = r, current, current, txtdir
+ elseif r == reference then
+ last = current
+ elseif (done[reference] or 0) == 0 then -- or id == glue_code and current.subtype == right_skip_code
+ if not skip or r > skip then -- maybe no > test
+ head, current = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
+ reference, first, last, firstdir = nil, nil, nil, nil
+ end
+ else
+ reference, first, last, firstdir = r, current, current, txtdir
+ end
+ end
+ current = current.next
+ end
+ if reference and (done[reference] or 0) == 0 then
+ head = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
+ end
+ end
+ return head, true, pardir, txtdir
+end
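+
+-- done[r] is raised while we are inside a box that already carries reference
+-- r; the (done[reference] or 0) == 0 tests above then keep a pending inline
+-- range for that same reference from being injected a second time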
+
+local function inject_area(head,attribute,make,stack,done,parent,pardir,txtdir) -- singular !
+ if head then
+ pardir = pardir or "==="
+ txtdir = txtdir or "==="
+ local current = head
+ while current do
+ local id = current.id
+ if id == hlist_code or id == vlist_code then
+ local r = current[attribute]
+ if r and not done[r] then
+ done[r] = true
+ inject_list(id,current,r,make,stack,pardir,txtdir)
+ end
+ local list = current.list
+ if list then
+ current.list = inject_area(list,attribute,make,stack,done,current,pardir,txtdir)
+ end
+ elseif id == whatsit_code then
+ local subtype = current.subtype
+ if subtype == localpar_code then
+ pardir = current.dir
+ elseif subtype == dir_code then
+ txtdir = current.dir
+ end
+ else
+ local r = current[attribute]
+ if r and not done[r] then
+ done[r] = true
+ head, current = inject_range(head,current,current,r,make,stack,parent,pardir,txtdir)
+ end
+ end
+ current = current.next
+ end
+ end
+ return head, true
+end
+
+-- tracing
+
+local nodepool = nodes.pool
+
+local new_rule = nodepool.rule
+local new_kern = nodepool.kern
+
+local set_attribute = node.set_attribute
+local register_color = colors.register
+
+local a_color = attributes.private('color')
+local a_colormodel = attributes.private('colormodel')
+local a_transparency = attributes.private('transparency')
+local u_transparency = nil
+local u_colors = { }
+local force_gray = true
+
+local function colorize(width,height,depth,n,reference,what)
+ if force_gray then n = 0 end
+ u_transparency = u_transparency or transparencies.register(nil,2,.65)
+ local u_color = u_colors[n]
+ if not u_color then
+ if n == 1 then
+ u_color = register_color(nil,'rgb',.75,0,0)
+ elseif n == 2 then
+ u_color = register_color(nil,'rgb',0,.75,0)
+ elseif n == 3 then
+ u_color = register_color(nil,'rgb',0,0,.75)
+ else
+ n = 0
+ u_color = register_color(nil,'gray',.5)
+ end
+ u_colors[n] = u_color
+ end
+ if width == 0 then
+ -- probably a strut as placeholder
+ report_area("%s %s has no %s dimensions, width %p, height %p, depth %p",what,reference,"horizontal",width,height,depth)
+ width = 65536
+ end
+ if height + depth <= 0 then
+ report_area("%s %s has no %s dimensions, width %p, height %p, depth %p",what,reference,"vertical",width,height,depth)
+ height = 65536/2
+ depth = height
+ end
+ local rule = new_rule(width,height,depth)
+ rule[a_colormodel] = 1 -- gray color model
+ rule[a_color] = u_color
+ rule[a_transparency] = u_transparency
+ if width < 0 then
+ local kern = new_kern(width)
+ rule.width = -width
+ kern.next = rule
+ rule.prev = kern
+ return kern
+ else
+ return rule
+ end
+end
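+
+-- the colored, transparent rules made above only show up when the trackers
+-- registered earlier in this file are enabled, e.g. (sketch):
+--
+-- trackers.enable("nodes.references")
+-- trackers.enable("nodes.destinations")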
+
+local nodepool = nodes.pool
+
+local new_kern = nodepool.kern
+
+local texattribute = tex.attribute
+local texcount = tex.count
+
+-- references:
+
+local stack = { }
+local done = { }
+local attribute = attributes.private('reference')
+local nofreferences = 0
+local topofstack = 0
+
+nodes.references = {
+ attribute = attribute,
+ stack = stack,
+ done = done,
+}
+
+-- todo: get rid of n (n is just a number, can be used for tracing, obsolete)
+
+local function setreference(h,d,r)
+ topofstack = topofstack + 1
+ -- the preroll permits us to determine samepage (but delayed also has some advantages)
+ -- so some part of the backend work is already done here
+ stack[topofstack] = { r, h, d, codeinjections.prerollreference(r) }
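+ -- entry layout: [1] = reference set, [2] = height, [3] = depth, while the
+ -- remaining slots come from prerollreference; makereference below reads [4]
+ -- as the annotation data and [5] as the number passed to registerpage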
+ -- texattribute[attribute] = topofstack -- todo -> at tex end
+ texcount.lastreferenceattribute = topofstack
+end
+
+function references.get(n) -- not public so functionality can change
+ local sn = stack[n]
+ return sn and sn[1]
+end
+
+local function makereference(width,height,depth,reference)
+ local sr = stack[reference]
+ if sr then
+ if trace_references then
+ report_reference("resolving attribute %a",reference)
+ end
+ local resolved, ht, dp, set, n = sr[1], sr[2], sr[3], sr[4], sr[5]
+ if ht then
+ if height < ht then height = ht end
+ if depth < dp then depth = dp end
+ end
+ local annot = nodeinjections.reference(width,height,depth,set)
+ if annot then
+ nofreferences = nofreferences + 1
+ local result, current
+ if trace_references then
+ local step = 65536
+ result = hpack_list(colorize(width,height-step,depth-step,2,reference,"reference")) -- step subtracted so that we can see separate links
+ result.width = 0
+ current = result
+ end
+ if current then
+ current.next = annot
+ else
+ result = annot
+ end
+ references.registerpage(n)
+ result = hpack_list(result,0)
+ result.width, result.height, result.depth = 0, 0, 0
+ if cleanupreferences then stack[reference] = nil end
+ return result, resolved
+ elseif trace_references then
+ report_reference("unable to resolve annotation %a",reference)
+ end
+ elseif trace_references then
+ report_reference("unable to resolve attribute %a",reference)
+ end
+end
+
+function nodes.references.handler(head)
+ if topofstack > 0 then
+ return inject_areas(head,attribute,makereference,stack,done)
+ else
+ return head, false
+ end
+end
+
+-- destinations (we can clean up once set, unless tagging!)
+
+local stack = { }
+local done = { }
+local attribute = attributes.private('destination')
+local nofdestinations = 0
+local topofstack = 0
+
+nodes.destinations = {
+ attribute = attribute,
+ stack = stack,
+ done = done,
+}
+
+local function setdestination(n,h,d,name,view) -- n = grouplevel, name == table
+ topofstack = topofstack + 1
+ stack[topofstack] = { n, h, d, name, view }
+ return topofstack
+end
+
+local function makedestination(width,height,depth,reference)
+ local sr = stack[reference]
+ if sr then
+ if trace_destinations then
+ report_destination("resolving attribute %a",reference)
+ end
+ local resolved, ht, dp, name, view = sr[1], sr[2], sr[3], sr[4], sr[5]
+ if ht then
+ if height < ht then height = ht end
+ if depth < dp then depth = dp end
+ end
+ local result, current
+ if trace_destinations then
+ local step = 0
+ if width == 0 then
+ step = 4*65536
+ width, height, depth = 5*step, 5*step, 0
+ end
+ for n=1,#name do
+ local rule = hpack_list(colorize(width,height,depth,3,reference,"destination"))
+ rule.width = 0
+ if not result then
+ result, current = rule, rule
+ else
+ current.next = rule
+ rule.prev = current
+ current = rule
+ end
+ width, height = width - step, height - step
+ end
+ end
+ nofdestinations = nofdestinations + 1
+ for n=1,#name do
+ local annot = nodeinjections.destination(width,height,depth,name[n],view)
+ if not result then
+ result = annot
+ else
+ current.next = annot
+ annot.prev = current
+ end
+ current = find_node_tail(annot)
+ end
+ if result then
+ -- some internal error
+ result = hpack_list(result,0)
+ result.width, result.height, result.depth = 0, 0, 0
+ end
+ if cleanupdestinations then stack[reference] = nil end
+ return result, resolved
+ elseif trace_destinations then
+ report_destination("unable to resolve attribute %a",reference)
+ end
+end
+
+function nodes.destinations.handler(head)
+ if topofstack > 0 then
+ return inject_area(head,attribute,makedestination,stack,done) -- singular
+ else
+ return head, false
+ end
+end
+
+-- will move
+
+function references.mark(reference,h,d,view)
+ return setdestination(tex.currentgrouplevel,h,d,reference,view)
+end
+
+function references.inject(prefix,reference,h,d,highlight,newwindow,layer) -- todo: use currentreference if possible
+ local set, bug = references.identify(prefix,reference)
+ if bug or #set == 0 then
+ -- unknown ref, just don't set it and issue an error
+ else
+ -- check
+ set.highlight, set.newwindow,set.layer = highlight, newwindow, layer
+ setreference(h,d,set) -- sets attribute / todo: for set[*].error
+ end
+end
+
+function references.injectcurrentset(h,d) -- used inside doifelse
+ local currentset = references.currentset
+ if currentset then
+ setreference(h,d,currentset) -- sets attribute / todo: for set[*].error
+ end
+end
+
+commands.injectreference = references.inject
+commands.injectcurrentreference = references.injectcurrentset
+
+--
+
+local function checkboth(open,close)
+ if open and open ~= "" then
+ local set, bug = references.identify("",open)
+ open = not bug and #set > 0 and set
+ end
+ if close and close ~= "" then
+ local set, bug = references.identify("",close)
+ close = not bug and #set > 0 and set
+ end
+ return open, close
+end
+
+-- end temp hack
+
+statistics.register("interactive elements", function()
+ if nofreferences > 0 or nofdestinations > 0 then
+ return format("%s references, %s destinations",nofreferences,nofdestinations)
+ else
+ return nil
+ end
+end)
+
+function references.enableinteraction()
+ tasks.enableaction("shipouts","nodes.references.handler")
+ tasks.enableaction("shipouts","nodes.destinations.handler")
+end
diff --git a/tex/context/base/node-res.lua b/tex/context/base/node-res.lua
index 768aac404..6ec6895c8 100644
--- a/tex/context/base/node-res.lua
+++ b/tex/context/base/node-res.lua
@@ -1,406 +1,406 @@
-if not modules then modules = { } end modules ['node-res'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local gmatch, format = string.gmatch, string.format
-local tonumber, round = tonumber, math.round
-
---[[ldx--
-The next function is not that much needed but in ConTeXt we use it
-for debugging node management.
---ldx]]--
-
-local report_nodes = logs.reporter("nodes","housekeeping")
-
-local nodes, node = nodes, node
-
-local copy_node = node.copy
-local free_node = node.free
-local free_list = node.flush_list
-local new_node = node.new
-
-nodes.pool = nodes.pool or { }
-local pool = nodes.pool
-
-local whatsitcodes = nodes.whatsitcodes
-local skipcodes = nodes.skipcodes
-local kerncodes = nodes.kerncodes
-local nodecodes = nodes.nodecodes
-
-local glyph_code = nodecodes.glyph
-
-local allocate = utilities.storage.allocate
-
-local reserved, nofreserved = { }, 0
-
-local function register_node(n)
- nofreserved = nofreserved + 1
- reserved[nofreserved] = n
- return n
-end
-
-pool.register = register_node
-
-function pool.cleanup(nofboxes) -- todo
- if nodes.tracers.steppers then -- to be resolved
- nodes.tracers.steppers.reset() -- todo: make a registration subsystem
- end
- local nl, nr = 0, nofreserved
- for i=1,nofreserved do
- local ri = reserved[i]
- -- if not (ri.id == glue_spec and not ri.is_writable) then
- free_node(reserved[i])
- -- end
- end
- if nofboxes then
- local tb = tex.box
- for i=0,nofboxes do
- local l = tb[i]
- if l then
- free_node(tb[i])
- nl = nl + 1
- end
- end
- end
- reserved = { }
- nofreserved = 0
- return nr, nl, nofboxes -- can be nil
-end
-
-function pool.usage()
- local t = { }
- for n, tag in gmatch(status.node_mem_usage,"(%d+) ([a-z_]+)") do
- t[tag] = n
- end
- return t
-end
-
-local disc = register_node(new_node("disc"))
-local kern = register_node(new_node("kern",kerncodes.userkern))
-local fontkern = register_node(new_node("kern",kerncodes.fontkern))
-local penalty = register_node(new_node("penalty"))
-local glue = register_node(new_node("glue")) -- glue.spec = nil
-local glue_spec = register_node(new_node("glue_spec"))
-local glyph = register_node(new_node("glyph",0))
-local textdir = register_node(new_node("whatsit",whatsitcodes.dir))
-local latelua = register_node(new_node("whatsit",whatsitcodes.latelua))
-local special = register_node(new_node("whatsit",whatsitcodes.special))
-local user_n = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_n.type = 100 -- 44
-local user_l = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_l.type = 110 -- 44
-local user_s = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_s.type = 115 -- 44
-local user_t = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_t.type = 116 -- 44
-local left_margin_kern = register_node(new_node("margin_kern",0))
-local right_margin_kern = register_node(new_node("margin_kern",1))
-local lineskip = register_node(new_node("glue",skipcodes.lineskip))
-local baselineskip = register_node(new_node("glue",skipcodes.baselineskip))
-local leftskip = register_node(new_node("glue",skipcodes.leftskip))
-local rightskip = register_node(new_node("glue",skipcodes.rightskip))
-local temp = register_node(new_node("temp",0))
-local noad = register_node(new_node("noad"))
-
--- the dir field needs to be set otherwise crash:
-
-local rule = register_node(new_node("rule")) rule .dir = "TLT"
-local hlist = register_node(new_node("hlist")) hlist.dir = "TLT"
-local vlist = register_node(new_node("vlist")) vlist.dir = "TLT"
-
-function pool.zeroglue(n)
- local s = n.spec
- return not writable or (
- s.width == 0
- and s.stretch == 0
- and s.shrink == 0
- and s.stretch_order == 0
- and s.shrink_order == 0
- )
-end
-
-function pool.glyph(fnt,chr)
- local n = copy_node(glyph)
- if fnt then n.font = fnt end
- if chr then n.char = chr end
- return n
-end
-
-function pool.penalty(p)
- local n = copy_node(penalty)
- n.penalty = p
- return n
-end
-
-function pool.kern(k)
- local n = copy_node(kern)
- n.kern = k
- return n
-end
-
-function pool.fontkern(k)
- local n = copy_node(fontkern)
- n.kern = k
- return n
-end
-
-function pool.gluespec(width,stretch,shrink,stretch_order,shrink_order)
- local s = copy_node(glue_spec)
- if width then s.width = width end
- if stretch then s.stretch = stretch end
- if shrink then s.shrink = shrink end
- if stretch_order then s.stretch_order = stretch_order end
- if shrink_order then s.shrink_order = shrink_order end
- return s
-end
-
-local function someskip(skip,width,stretch,shrink,stretch_order,shrink_order)
- local n = copy_node(skip)
- if not width then
- -- no spec
- elseif width == false or tonumber(width) then
- local s = copy_node(glue_spec)
- if width then s.width = width end
- if stretch then s.stretch = stretch end
- if shrink then s.shrink = shrink end
- if stretch_order then s.stretch_order = stretch_order end
- if shrink_order then s.shrink_order = shrink_order end
- n.spec = s
- else
- -- shared
- n.spec = copy_node(width)
- end
- return n
-end
-
-function pool.stretch(a,b)
- local n = copy_node(glue)
- local s = copy_node(glue_spec)
- if b then
- s.stretch = a
- s.stretch_order = b
- else
- s.stretch = 1
- s.stretch_order = a or 1
- end
- n.spec = s
- return n
-end
-
-function pool.shrink(a,b)
- local n = copy_node(glue)
- local s = copy_node(glue_spec)
- if b then
- s.shrink = a
- s.shrink_order = b
- else
- s.shrink = 1
- s.shrink_order = a or 1
- end
- n.spec = s
- return n
-end
-
-
-function pool.glue(width,stretch,shrink,stretch_order,shrink_order)
- return someskip(glue,width,stretch,shrink,stretch_order,shrink_order)
-end
-
-function pool.leftskip(width,stretch,shrink,stretch_order,shrink_order)
- return someskip(leftskip,width,stretch,shrink,stretch_order,shrink_order)
-end
-
-function pool.rightskip(width,stretch,shrink,stretch_order,shrink_order)
- return someskip(rightskip,width,stretch,shrink,stretch_order,shrink_order)
-end
-
-function pool.lineskip(width,stretch,shrink,stretch_order,shrink_order)
- return someskip(lineskip,width,stretch,shrink,stretch_order,shrink_order)
-end
-
-function pool.baselineskip(width,stretch,shrink)
- return someskip(baselineskip,width,stretch,shrink)
-end
-
-function pool.disc()
- return copy_node(disc)
-end
-
-function pool.textdir(dir)
- local t = copy_node(textdir)
- t.dir = dir
- return t
-end
-
-function pool.rule(width,height,depth,dir) -- w/h/d == nil will let them adapt
- local n = copy_node(rule)
- if width then n.width = width end
- if height then n.height = height end
- if depth then n.depth = depth end
- if dir then n.dir = dir end
- return n
-end
-
-if node.has_field(latelua,'string') then
- function pool.latelua(code)
- local n = copy_node(latelua)
- n.string = code
- return n
- end
-else
- function pool.latelua(code)
- local n = copy_node(latelua)
- n.data = code
- return n
- end
-end
-
-function pool.leftmarginkern(glyph,width)
- local n = copy_node(left_margin_kern)
- if not glyph then
- report_nodes("invalid pointer to left margin glyph node")
- elseif glyph.id ~= glyph_code then
- report_nodes("invalid node type %a for %s margin glyph node",nodecodes[glyph],"left")
- else
- n.glyph = glyph
- end
- if width then
- n.width = width
- end
- return n
-end
-
-function pool.rightmarginkern(glyph,width)
- local n = copy_node(right_margin_kern)
- if not glyph then
- report_nodes("invalid pointer to right margin glyph node")
- elseif glyph.id ~= glyph_code then
- report_nodes("invalid node type %a for %s margin glyph node",nodecodes[p],"right")
- else
- n.glyph = glyph
- end
- if width then
- n.width = width
- end
- return n
-end
-
-function pool.temp()
- return copy_node(temp)
-end
-
-function pool.noad()
- return copy_node(noad)
-end
-
-function pool.hlist()
- return copy_node(hlist)
-end
-
-function pool.vlist()
- return copy_node(vlist)
-end
-
---[[
-At some point we ran into a problem that the glue specification
-of the zeropoint dimension was overwritten when adapting a glue spec
-node. This is a side effect of glue specs being shared. After a
-couple of hours tracing and debugging Taco and I came to the
-conclusion that it made no sense to complicate the spec allocator
-and settled on a writable flag. This all is a side effect of the
-fact that some glues use reserved memory slots (with the zeropoint
-glue being a noticeable one). So, next we wrap this into a function
-and hide it for the user. And yes, LuaTeX now gives a warning as
-well.
-]]--
-
-function nodes.writable_spec(n) -- not pool
- local spec = n.spec
- if not spec then
- spec = copy_node(glue_spec)
- n.spec = spec
- elseif not spec.writable then
- spec = copy_node(spec)
- n.spec = spec
- end
- return spec
-end
-
--- local num = userids["my id"]
--- local str = userids[num]
-
-local userids = allocate() pool.userids = userids
-local lastid = 0
-
-setmetatable(userids, {
- __index = function(t,k)
- if type(k) == "string" then
- lastid = lastid + 1
- rawset(userids,lastid,k)
- rawset(userids,k,lastid)
- return lastid
- else
- rawset(userids,k,k)
- return k
- end
- end,
- __call = function(t,k)
- return t[k]
- end
-} )
-
-function pool.usernumber(id,num)
- local n = copy_node(user_n)
- if num then
- n.user_id, n.value = id, num
- elseif id then
- n.value = id
- end
- return n
-end
-
-function pool.userlist(id,list)
- local n = copy_node(user_l)
- if list then
- n.user_id, n.value = id, list
- else
- n.value = id
- end
- return n
-end
-
-function pool.userstring(id,str)
- local n = copy_node(user_s)
- if str then
- n.user_id, n.value = id, str
- else
- n.value = id
- end
- return n
-end
-
-function pool.usertokens(id,tokens)
- local n = copy_node(user_t)
- if tokens then
- n.user_id, n.value = id, tokens
- else
- n.value = id
- end
- return n
-end
-
-function pool.special(str)
- local n = copy_node(special)
- n.data = str
- return n
-end
-
-statistics.register("cleaned up reserved nodes", function()
- return format("%s nodes, %s lists of %s", pool.cleanup(tex.count["c_syst_last_allocated_box"]))
-end) -- \topofboxstack
-
-statistics.register("node memory usage", function() -- comes after cleanup !
- return status.node_mem_usage
-end)
-
-lua.registerfinalizer(pool.cleanup, "cleanup reserved nodes")
+if not modules then modules = { } end modules ['node-res'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local gmatch, format = string.gmatch, string.format
+local tonumber, round = tonumber, math.round
+
+--[[ldx--
+The next function is not that much needed but in ConTeXt we use it
+for debugging node management.
+--ldx]]--
+
+local report_nodes = logs.reporter("nodes","housekeeping")
+
+local nodes, node = nodes, node
+
+local copy_node = node.copy
+local free_node = node.free
+local free_list = node.flush_list
+local new_node = node.new
+
+nodes.pool = nodes.pool or { }
+local pool = nodes.pool
+
+local whatsitcodes = nodes.whatsitcodes
+local skipcodes = nodes.skipcodes
+local kerncodes = nodes.kerncodes
+local nodecodes = nodes.nodecodes
+
+local glyph_code = nodecodes.glyph
+
+local allocate = utilities.storage.allocate
+
+local reserved, nofreserved = { }, 0
+
+local function register_node(n)
+ nofreserved = nofreserved + 1
+ reserved[nofreserved] = n
+ return n
+end
+
+pool.register = register_node
+
+function pool.cleanup(nofboxes) -- todo
+ if nodes.tracers.steppers then -- to be resolved
+ nodes.tracers.steppers.reset() -- todo: make a registration subsystem
+ end
+ local nl, nr = 0, nofreserved
+ for i=1,nofreserved do
+ local ri = reserved[i]
+ -- if not (ri.id == glue_spec and not ri.is_writable) then
+ free_node(reserved[i])
+ -- end
+ end
+ if nofboxes then
+ local tb = tex.box
+ for i=0,nofboxes do
+ local l = tb[i]
+ if l then
+ free_node(tb[i])
+ nl = nl + 1
+ end
+ end
+ end
+ reserved = { }
+ nofreserved = 0
+ return nr, nl, nofboxes -- can be nil
+end
+
+function pool.usage()
+ local t = { }
+ for n, tag in gmatch(status.node_mem_usage,"(%d+) ([a-z_]+)") do
+ t[tag] = n
+ end
+ return t
+end
+
+local disc = register_node(new_node("disc"))
+local kern = register_node(new_node("kern",kerncodes.userkern))
+local fontkern = register_node(new_node("kern",kerncodes.fontkern))
+local penalty = register_node(new_node("penalty"))
+local glue = register_node(new_node("glue")) -- glue.spec = nil
+local glue_spec = register_node(new_node("glue_spec"))
+local glyph = register_node(new_node("glyph",0))
+local textdir = register_node(new_node("whatsit",whatsitcodes.dir))
+local latelua = register_node(new_node("whatsit",whatsitcodes.latelua))
+local special = register_node(new_node("whatsit",whatsitcodes.special))
+local user_n = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_n.type = 100 -- 44
+local user_l = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_l.type = 110 -- 44
+local user_s = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_s.type = 115 -- 44
+local user_t = register_node(new_node("whatsit",whatsitcodes.userdefined)) user_t.type = 116 -- 44
+local left_margin_kern = register_node(new_node("margin_kern",0))
+local right_margin_kern = register_node(new_node("margin_kern",1))
+local lineskip = register_node(new_node("glue",skipcodes.lineskip))
+local baselineskip = register_node(new_node("glue",skipcodes.baselineskip))
+local leftskip = register_node(new_node("glue",skipcodes.leftskip))
+local rightskip = register_node(new_node("glue",skipcodes.rightskip))
+local temp = register_node(new_node("temp",0))
+local noad = register_node(new_node("noad"))
+
+-- the dir field needs to be set otherwise crash:
+
+local rule = register_node(new_node("rule")) rule .dir = "TLT"
+local hlist = register_node(new_node("hlist")) hlist.dir = "TLT"
+local vlist = register_node(new_node("vlist")) vlist.dir = "TLT"
+
+function pool.zeroglue(n)
+ local s = n.spec
+ return not s or ( -- no spec at all counts as zero glue
+ s.width == 0
+ and s.stretch == 0
+ and s.shrink == 0
+ and s.stretch_order == 0
+ and s.shrink_order == 0
+ )
+end
+
+function pool.glyph(fnt,chr)
+ local n = copy_node(glyph)
+ if fnt then n.font = fnt end
+ if chr then n.char = chr end
+ return n
+end
+
+function pool.penalty(p)
+ local n = copy_node(penalty)
+ n.penalty = p
+ return n
+end
+
+function pool.kern(k)
+ local n = copy_node(kern)
+ n.kern = k
+ return n
+end
+
+function pool.fontkern(k)
+ local n = copy_node(fontkern)
+ n.kern = k
+ return n
+end
+
+function pool.gluespec(width,stretch,shrink,stretch_order,shrink_order)
+ local s = copy_node(glue_spec)
+ if width then s.width = width end
+ if stretch then s.stretch = stretch end
+ if shrink then s.shrink = shrink end
+ if stretch_order then s.stretch_order = stretch_order end
+ if shrink_order then s.shrink_order = shrink_order end
+ return s
+end
+
+local function someskip(skip,width,stretch,shrink,stretch_order,shrink_order)
+ local n = copy_node(skip)
+ if not width then
+ -- no spec
+ elseif width == false or tonumber(width) then
+ local s = copy_node(glue_spec)
+ if width then s.width = width end
+ if stretch then s.stretch = stretch end
+ if shrink then s.shrink = shrink end
+ if stretch_order then s.stretch_order = stretch_order end
+ if shrink_order then s.shrink_order = shrink_order end
+ n.spec = s
+ else
+ -- shared
+ n.spec = copy_node(width)
+ end
+ return n
+end
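+
+-- someskip accepts three forms for width: nil (a bare glue without a spec),
+-- a number or false (a fresh spec is built from the numeric arguments), or
+-- an existing spec node (which is copied); a sketch, dimensions in scaled
+-- points:
+--
+-- local interword = pool.glue(12*65536,6*65536,4*65536) -- 12pt plus 6pt minus 4pt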
+
+function pool.stretch(a,b)
+ local n = copy_node(glue)
+ local s = copy_node(glue_spec)
+ if b then
+ s.stretch = a
+ s.stretch_order = b
+ else
+ s.stretch = 1
+ s.stretch_order = a or 1
+ end
+ n.spec = s
+ return n
+end
+
+function pool.shrink(a,b)
+ local n = copy_node(glue)
+ local s = copy_node(glue_spec)
+ if b then
+ s.shrink = a
+ s.shrink_order = b
+ else
+ s.shrink = 1
+ s.shrink_order = a or 1
+ end
+ n.spec = s
+ return n
+end
+
+
+function pool.glue(width,stretch,shrink,stretch_order,shrink_order)
+ return someskip(glue,width,stretch,shrink,stretch_order,shrink_order)
+end
+
+function pool.leftskip(width,stretch,shrink,stretch_order,shrink_order)
+ return someskip(leftskip,width,stretch,shrink,stretch_order,shrink_order)
+end
+
+function pool.rightskip(width,stretch,shrink,stretch_order,shrink_order)
+ return someskip(rightskip,width,stretch,shrink,stretch_order,shrink_order)
+end
+
+function pool.lineskip(width,stretch,shrink,stretch_order,shrink_order)
+ return someskip(lineskip,width,stretch,shrink,stretch_order,shrink_order)
+end
+
+function pool.baselineskip(width,stretch,shrink)
+ return someskip(baselineskip,width,stretch,shrink)
+end
+
+function pool.disc()
+ return copy_node(disc)
+end
+
+function pool.textdir(dir)
+ local t = copy_node(textdir)
+ t.dir = dir
+ return t
+end
+
+function pool.rule(width,height,depth,dir) -- w/h/d == nil will let them adapt
+ local n = copy_node(rule)
+ if width then n.width = width end
+ if height then n.height = height end
+ if depth then n.depth = depth end
+ if dir then n.dir = dir end
+ return n
+end
+
+if node.has_field(latelua,'string') then
+ function pool.latelua(code)
+ local n = copy_node(latelua)
+ n.string = code
+ return n
+ end
+else
+ function pool.latelua(code)
+ local n = copy_node(latelua)
+ n.data = code
+ return n
+ end
+end
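+
+-- the branch above covers the luatex api change from the .data field to the
+-- .string field for late lua whatsits; usage stays the same either way, for
+-- instance (sketch):
+--
+-- local n = pool.latelua("print('page shipped out')")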
+
+function pool.leftmarginkern(glyph,width)
+ local n = copy_node(left_margin_kern)
+ if not glyph then
+ report_nodes("invalid pointer to left margin glyph node")
+ elseif glyph.id ~= glyph_code then
+ report_nodes("invalid node type %a for %s margin glyph node",nodecodes[glyph],"left")
+ else
+ n.glyph = glyph
+ end
+ if width then
+ n.width = width
+ end
+ return n
+end
+
+function pool.rightmarginkern(glyph,width)
+ local n = copy_node(right_margin_kern)
+ if not glyph then
+ report_nodes("invalid pointer to right margin glyph node")
+ elseif glyph.id ~= glyph_code then
+ report_nodes("invalid node type %a for %s margin glyph node",nodecodes[p],"right")
+ else
+ n.glyph = glyph
+ end
+ if width then
+ n.width = width
+ end
+ return n
+end
+
+function pool.temp()
+ return copy_node(temp)
+end
+
+function pool.noad()
+ return copy_node(noad)
+end
+
+function pool.hlist()
+ return copy_node(hlist)
+end
+
+function pool.vlist()
+ return copy_node(vlist)
+end
+
+--[[
+At some point we ran into a problem that the glue specification
+of the zeropoint dimension was overwritten when adapting a glue spec
+node. This is a side effect of glue specs being shared. After a
+couple of hours tracing and debugging Taco and I came to the
+conclusion that it made no sense to complicate the spec allocator
+and settled on a writable flag. This all is a side effect of the
+fact that some glues use reserved memory slots (with the zeropoint
+glue being a noticeable one). So, next we wrap this into a function
+and hide it for the user. And yes, LuaTeX now gives a warning as
+well.
+]]--
+
+function nodes.writable_spec(n) -- not pool
+ local spec = n.spec
+ if not spec then
+ spec = copy_node(glue_spec)
+ n.spec = spec
+ elseif not spec.writable then
+ spec = copy_node(spec)
+ n.spec = spec
+ end
+ return spec
+end
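+
+-- a minimal usage sketch, assuming g is a glue node whose dimensions we want
+-- to change without clobbering a possibly shared spec:
+--
+-- local spec = nodes.writable_spec(g)
+-- spec.width = 2*65536 -- 2pt, safe because the spec is now a private copy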
+
+-- local num = userids["my id"]
+-- local str = userids[num]
+
+local userids = allocate() pool.userids = userids
+local lastid = 0
+
+setmetatable(userids, {
+ __index = function(t,k)
+ if type(k) == "string" then
+ lastid = lastid + 1
+ rawset(userids,lastid,k)
+ rawset(userids,k,lastid)
+ return lastid
+ else
+ rawset(userids,k,k)
+ return k
+ end
+ end,
+ __call = function(t,k)
+ return t[k]
+ end
+} )
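+
+-- string keys allocate a fresh number on first use and the reverse mapping is
+-- stored as well, so (sketch, "my id" being just an example key):
+--
+-- local num = pool.userids["my id"] -- e.g. 1 on the first lookup
+-- local str = pool.userids[num] -- "my id" again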
+
+function pool.usernumber(id,num)
+ local n = copy_node(user_n)
+ if num then
+ n.user_id, n.value = id, num
+ elseif id then
+ n.value = id
+ end
+ return n
+end
+
+function pool.userlist(id,list)
+ local n = copy_node(user_l)
+ if list then
+ n.user_id, n.value = id, list
+ else
+ n.value = id
+ end
+ return n
+end
+
+function pool.userstring(id,str)
+ local n = copy_node(user_s)
+ if str then
+ n.user_id, n.value = id, str
+ else
+ n.value = id
+ end
+ return n
+end
+
+function pool.usertokens(id,tokens)
+ local n = copy_node(user_t)
+ if tokens then
+ n.user_id, n.value = id, tokens
+ else
+ n.value = id
+ end
+ return n
+end
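+
+-- a sketch that combines the id allocator with a user whatsit (the key name
+-- is only an example):
+--
+-- local n = pool.usernumber(pool.userids["my id"],123)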
+
+function pool.special(str)
+ local n = copy_node(special)
+ n.data = str
+ return n
+end
+
+statistics.register("cleaned up reserved nodes", function()
+ return format("%s nodes, %s lists of %s", pool.cleanup(tex.count["c_syst_last_allocated_box"]))
+end) -- \topofboxstack
+
+statistics.register("node memory usage", function() -- comes after cleanup !
+ return status.node_mem_usage
+end)
+
+lua.registerfinalizer(pool.cleanup, "cleanup reserved nodes")
diff --git a/tex/context/base/node-rul.lua b/tex/context/base/node-rul.lua
index 09300964e..00039550c 100644
--- a/tex/context/base/node-rul.lua
+++ b/tex/context/base/node-rul.lua
@@ -1,389 +1,389 @@
-if not modules then modules = { } end modules ['node-rul'] = {
- version = 1.001,
- comment = "companion to node-rul.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- this will go to an auxiliary module
--- beware: rules now have a dir field
---
--- todo: make robust for layers ... order matters
-
-local attributes, nodes, node = attributes, nodes, node
-
-local nodecodes = nodes.nodecodes
-local tasks = nodes.tasks
-
-local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
-local rule_code = nodecodes.rule
-
-function nodes.striprange(first,last) -- todo: dir
- if first and last then -- just to be sure
- if first == last then
- return first, last
- end
- while first and first ~= last do
- local id = first.id
- if id == glyph_code or id == disc_code then -- or id == rule_code
- break
- else
- first = first.next
- end
- end
- if not first then
- return nil, nil
- elseif first == last then
- return first, last
- end
- while last and last ~= first do
- local id = last.id
- if id == glyph_code or id == disc_code then -- or id == rule_code
- break
- else
- local prev = last.prev -- luatex < 0.70 has italic correction kern not prev'd
- if prev then
- last = last.prev
- else
- break
- end
- end
- end
- if not last then
- return nil, nil
- end
- end
- return first, last
-end
-
--- todo: order and maybe other dimensions
-
-local floor = math.floor
-
-local trace_ruled = false trackers.register("nodes.rules", function(v) trace_ruled = v end)
-local report_ruled = logs.reporter("nodes","rules")
-
-local n_tostring = nodes.idstostring
-local n_tosequence = nodes.tosequence
-
-local a_ruled = attributes.private('ruled')
-local a_color = attributes.private('color')
-local a_transparency = attributes.private('transparency')
-local a_colorspace = attributes.private('colormodel')
-
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local striprange = nodes.striprange
-local list_dimensions = node.dimensions
-
-local hpack_nodes = node.hpack
-
-local fontdata = fonts.hashes.identifiers
-local variables = interfaces.variables
-local dimenfactor = fonts.helpers.dimenfactor
-local splitdimen = number.splitdimen
-
-local nodecodes = nodes.nodecodes
-local skipcodes = nodes.skipcodes
-local whatcodes = nodes.whatcodes
-local kerncodes = nodes.kerncodes
-
-local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
-local glue_code = nodecodes.glue
-local penalty_code = nodecodes.penalty
-local kern_code = nodecodes.kern
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local rule_code = nodecodes.rule
-local whatsit_code = nodecodes.whatsit
-
-local userskip_code = skipcodes.userskip
-local spaceskip_code = skipcodes.spaceskip
-local xspaceskip_code = skipcodes.xspaceskip
-
-local dir_code = whatcodes.dir
-
-local kerning_code = kerncodes.kern
-
-local nodepool = nodes.pool
-
-local new_rule = nodepool.rule
-local new_kern = nodepool.kern
-local new_glue = nodepool.glue
-
--- we can use this one elsewhere too
---
--- todo: functions: word, sentence
---
--- glyph rule unset whatsit glue margin_kern kern math disc
-
-local checkdir = true
-
--- we assume {glyphruns} and no funny extra kerning, ok, maybe we need
--- a dummy character as start and end; anyway we only collect glyphs
---
--- this one needs to take layers into account (i.e. we need a list of
--- critical attributes)
-
--- swap class and level -> saves a function call in analyze
-
--- todo: switching inside math
-
-local function processwords(attribute,data,flush,head,parent) -- we have hlistdir and local dir
- local n = head
- if n then
- local f, l, a, d, i, class
- local continue, done, strip, level = false, false, true, -1
- while n do
- local id = n.id
- if id == glyph_code or id == rule_code then
- local aa = n[attribute]
- if aa then
- if aa == a then
- if not f then -- ?
- f = n
- end
- l = n
- else
- -- possible extensions: when in same class then keep spanning
- local newlevel, newclass = floor(aa/1000), aa%1000
---~ strip = not continue or level == 1 -- 0
- if f then
- if class == newclass then -- and newlevel > level then
- head, done = flush(head,f,l,d,level,parent,false), true
- else
- head, done = flush(head,f,l,d,level,parent,strip), true
- end
- end
- f, l, a = n, n, aa
- level, class = newlevel, newclass
- d = data[class]
- continue = d.continue == variables.yes
- end
- else
- if f then
- head, done = flush(head,f,l,d,level,parent,strip), true
- end
- f, l, a = nil, nil, nil
- end
- elseif f and (id == disc_code or (id == kern_code and n.subtype == kerning_code)) then
- l = n
- elseif id == hlist_code or id == vlist_code then
- if f then
- head, done = flush(head,f,l,d,level,parent,strip), true
- f, l, a = nil, nil, nil
- end
- local list = n.list
- if list then
- n.list = processwords(attribute,data,flush,list,n)
- end
- elseif checkdir and id == whatsit_code and n.subtype == dir_code then -- only changes in dir, we assume proper boundaries
- if f and a then
- l = n
- end
- elseif f then
- if continue then
- if id == penalty_code then
- l = n
- elseif id == kern_code then
- l = n
- elseif id == glue_code then
- -- catch \underbar{a} \underbar{a} (subtype test is needed)
- local subtype = n.subtype
- if continue and n[attribute] and
- (subtype == userskip_code or subtype == spaceskip_code or subtype == xspaceskip_code) then
- l = n
- else
- head, done = flush(head,f,l,d,level,parent,strip), true
- f, l, a = nil, nil, nil
- end
- end
- else
- head, done = flush(head,f,l,d,level,parent,strip), true
- f, l, a = nil, nil, nil
- end
- end
- n = n.next
- end
- if f then
- head, done = flush(head,f,l,d,level,parent,strip), true
- end
- return head, true -- todo: done
- else
- return head, false
- end
-end
-
-nodes.processwords = processwords
-
---
-
-nodes.rules = nodes.rules or { }
-nodes.rules.data = nodes.rules.data or { }
-
-storage.register("nodes/rules/data", nodes.rules.data, "nodes.rules.data")
-
-local data = nodes.rules.data
-
-function nodes.rules.define(settings)
- data[#data+1] = settings
- context(#data)
-end
-
-local a_viewerlayer = attributes.private("viewerlayer")
-
-local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but acceptable for this purpose
--- check for f and l
- if f.id ~= glyph_code then
- -- safeguard ... we need to deal with rules and such (math)
- return head
- end
- local r, m
- if strip then
- if trace_ruled then
- local before = n_tosequence(f,l,true)
- f, l = striprange(f,l)
- local after = n_tosequence(f,l,true)
- report_ruled("range stripper, before %a, after %a",before,after)
- else
- f, l = striprange(f,l)
- end
- end
- if not f then
- return head
- end
- local w = list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,f,l.next)
- local method, offset, continue, dy, order, max = d.method, d.offset, d.continue, d.dy, d.order, d.max
- local rulethickness, unit = d.rulethickness, d.unit
- local ma, ca, ta = d.ma, d.ca, d.ta
- local colorspace = (ma > 0 and ma) or f[a_colorspace] or 1
- local color = (ca > 0 and ca) or f[a_color]
- local transparency = (ta > 0 and ta) or f[a_transparency]
- local foreground = order == variables.foreground
-
- local e = dimenfactor(unit,fontdata[f.font]) -- what if no glyph node
-
- local rt = tonumber(rulethickness)
- if rt then
- rulethickness = e * rulethickness / 2
- else
- local n, u = splitdimen(rulethickness)
- if n and u then -- we need to intercept ex and em and % and ...
- rulethickness = n * dimenfactor(u,fontdata[f.font]) / 2
- else
- rulethickness = 1/5
- end
- end
-
- if level > max then
- level = max
- end
- if method == 0 then -- center
- offset = 2*offset
--- m = (offset+(level-1)*dy+rulethickness)*e/2
- m = (offset+(level-1)*dy)*e/2 + rulethickness/2
- else
- m = 0
- end
- for i=1,level do
--- local ht = (offset+(i-1)*dy+rulethickness)*e - m
--- local dp = -(offset+(i-1)*dy-rulethickness)*e + m
- local ht = (offset+(i-1)*dy)*e + rulethickness - m
- local dp = -(offset+(i-1)*dy)*e + rulethickness + m
- local r = new_rule(w,ht,dp)
- local v = f[a_viewerlayer]
- -- quick hack
- if v then
- r[a_viewerlayer] = v
- end
- --
- if color then
- r[a_colorspace] = colorspace
- r[a_color] = color
- end
- if transparency then
- r[a_transparency] = transparency
- end
- local k = new_kern(-w)
- if foreground then
- insert_node_after(head,l,k)
- insert_node_after(head,k,r)
- l = r
- else
- head = insert_node_before(head,f,r)
- insert_node_after(head,r,k)
- end
- if trace_ruled then
- report_ruled("level %a, width %p, height %p, depth %p, nodes %a, text %a",
- level,w,ht,dp,n_tostring(f,l),n_tosequence(f,l,true))
- end
- end
- return head
-end
-
-local process = nodes.processwords
-
-nodes.rules.handler = function(head) return process(a_ruled,data,flush_ruled,head) end
-
-function nodes.rules.enable()
- tasks.enableaction("shipouts","nodes.rules.handler")
-end
-
--- elsewhere:
---
--- tasks.appendaction ("shipouts", "normalizers", "nodes.rules.handler")
--- tasks.disableaction("shipouts", "nodes.rules.handler") -- only kick in when used
-
-local trace_shifted = false trackers.register("nodes.shifting", function(v) trace_shifted = v end)
-
-local report_shifted = logs.reporter("nodes","shifting")
-
-local a_shifted = attributes.private('shifted')
-
-nodes.shifts = nodes.shifts or { }
-nodes.shifts.data = nodes.shifts.data or { }
-
-storage.register("nodes/shifts/data", nodes.shifts.data, "nodes.shifts.data")
-
-local data = nodes.shifts.data
-
-function nodes.shifts.define(settings)
- data[#data+1] = settings
- context(#data)
-end
-
-local function flush_shifted(head,first,last,data,level,parent,strip) -- not that fast but acceptable for this purpose
- if true then
- first, last = striprange(first,last)
- end
- local prev, next = first.prev, last.next
- first.prev, last.next = nil, nil
- local width, height, depth = list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,first,next)
- local list = hpack_nodes(first,width,"exactly")
- if first == head then
- head = list
- end
- if prev then
- prev.next, list.prev = list, prev
- end
- if next then
- next.prev, list.next = list, next
- end
- local raise = data.dy * dimenfactor(data.unit,fontdata[first.font])
- list.shift, list.height, list.depth = raise, height, depth
- if trace_shifted then
- report_shifted("width %p, nodes %a, text %a",width,n_tostring(first,last),n_tosequence(first,last,true))
- end
- return head
-end
-
-local process = nodes.processwords
-
-nodes.shifts.handler = function(head) return process(a_shifted,data,flush_shifted,head) end
-
-function nodes.shifts.enable()
- tasks.enableaction("shipouts","nodes.shifts.handler")
-end
+if not modules then modules = { } end modules ['node-rul'] = {
+ version = 1.001,
+ comment = "companion to node-rul.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- this will go to an auxiliary module
+-- beware: rules now have a dir field
+--
+-- todo: make robust for layers ... order matters
+
+local attributes, nodes, node = attributes, nodes, node
+
+local nodecodes = nodes.nodecodes
+local tasks = nodes.tasks
+
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local rule_code = nodecodes.rule
+
+function nodes.striprange(first,last) -- todo: dir
+ if first and last then -- just to be sure
+ if first == last then
+ return first, last
+ end
+ while first and first ~= last do
+ local id = first.id
+ if id == glyph_code or id == disc_code then -- or id == rule_code
+ break
+ else
+ first = first.next
+ end
+ end
+ if not first then
+ return nil, nil
+ elseif first == last then
+ return first, last
+ end
+ while last and last ~= first do
+ local id = last.id
+ if id == glyph_code or id == disc_code then -- or id == rule_code
+ break
+ else
+ local prev = last.prev -- luatex < 0.70 has italic correction kern not prev'd
+ if prev then
+ last = last.prev
+ else
+ break
+ end
+ end
+ end
+ if not last then
+ return nil, nil
+ end
+ end
+ return first, last
+end
+
+-- todo: order and maybe other dimensions
+
+local floor = math.floor
+
+local trace_ruled = false trackers.register("nodes.rules", function(v) trace_ruled = v end)
+local report_ruled = logs.reporter("nodes","rules")
+
+local n_tostring = nodes.idstostring
+local n_tosequence = nodes.tosequence
+
+local a_ruled = attributes.private('ruled')
+local a_color = attributes.private('color')
+local a_transparency = attributes.private('transparency')
+local a_colorspace = attributes.private('colormodel')
+
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local striprange = nodes.striprange
+local list_dimensions = node.dimensions
+
+local hpack_nodes = node.hpack
+
+local fontdata = fonts.hashes.identifiers
+local variables = interfaces.variables
+local dimenfactor = fonts.helpers.dimenfactor
+local splitdimen = number.splitdimen
+
+local nodecodes = nodes.nodecodes
+local skipcodes = nodes.skipcodes
+local whatcodes = nodes.whatcodes
+local kerncodes = nodes.kerncodes
+
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local glue_code = nodecodes.glue
+local penalty_code = nodecodes.penalty
+local kern_code = nodecodes.kern
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local rule_code = nodecodes.rule
+local whatsit_code = nodecodes.whatsit
+
+local userskip_code = skipcodes.userskip
+local spaceskip_code = skipcodes.spaceskip
+local xspaceskip_code = skipcodes.xspaceskip
+
+local dir_code = whatcodes.dir
+
+local kerning_code = kerncodes.kern
+
+local nodepool = nodes.pool
+
+local new_rule = nodepool.rule
+local new_kern = nodepool.kern
+local new_glue = nodepool.glue
+
+-- we can use this one elsewhere too
+--
+-- todo: functions: word, sentence
+--
+-- glyph rule unset whatsit glue margin_kern kern math disc
+
+local checkdir = true
+
+-- we assume {glyphruns} and no funny extra kerning, ok, maybe we need
+-- a dummy character as start and end; anyway we only collect glyphs
+--
+-- this one needs to take layers into account (i.e. we need a list of
+-- critical attributes)
+
+-- swap class and level -> saves a function call in analyze
+
+-- todo: switching inside math
+
+local function processwords(attribute,data,flush,head,parent) -- we have hlistdir and local dir
+ local n = head
+ if n then
+ local f, l, a, d, i, class
+ local continue, done, strip, level = false, false, true, -1
+ while n do
+ local id = n.id
+ if id == glyph_code or id == rule_code then
+ local aa = n[attribute]
+ if aa then
+ if aa == a then
+ if not f then -- ?
+ f = n
+ end
+ l = n
+ else
+ -- possible extensions: when in same class then keep spanning
+ local newlevel, newclass = floor(aa/1000), aa%1000
+--~ strip = not continue or level == 1 -- 0
+ if f then
+ if class == newclass then -- and newlevel > level then
+ head, done = flush(head,f,l,d,level,parent,false), true
+ else
+ head, done = flush(head,f,l,d,level,parent,strip), true
+ end
+ end
+ f, l, a = n, n, aa
+ level, class = newlevel, newclass
+ d = data[class]
+ continue = d.continue == variables.yes
+ end
+ else
+ if f then
+ head, done = flush(head,f,l,d,level,parent,strip), true
+ end
+ f, l, a = nil, nil, nil
+ end
+ elseif f and (id == disc_code or (id == kern_code and n.subtype == kerning_code)) then
+ l = n
+ elseif id == hlist_code or id == vlist_code then
+ if f then
+ head, done = flush(head,f,l,d,level,parent,strip), true
+ f, l, a = nil, nil, nil
+ end
+ local list = n.list
+ if list then
+ n.list = processwords(attribute,data,flush,list,n)
+ end
+ elseif checkdir and id == whatsit_code and n.subtype == dir_code then -- only changes in dir, we assume proper boundaries
+ if f and a then
+ l = n
+ end
+ elseif f then
+ if continue then
+ if id == penalty_code then
+ l = n
+ elseif id == kern_code then
+ l = n
+ elseif id == glue_code then
+ -- catch \underbar{a} \underbar{a} (subtype test is needed)
+ local subtype = n.subtype
+ if continue and n[attribute] and
+ (subtype == userskip_code or subtype == spaceskip_code or subtype == xspaceskip_code) then
+ l = n
+ else
+ head, done = flush(head,f,l,d,level,parent,strip), true
+ f, l, a = nil, nil, nil
+ end
+ end
+ else
+ head, done = flush(head,f,l,d,level,parent,strip), true
+ f, l, a = nil, nil, nil
+ end
+ end
+ n = n.next
+ end
+ if f then
+ head, done = flush(head,f,l,d,level,parent,strip), true
+ end
+ return head, true -- todo: done
+ else
+ return head, false
+ end
+end
+
+nodes.processwords = processwords
+
+--
+
+nodes.rules = nodes.rules or { }
+nodes.rules.data = nodes.rules.data or { }
+
+storage.register("nodes/rules/data", nodes.rules.data, "nodes.rules.data")
+
+local data = nodes.rules.data
+
+function nodes.rules.define(settings)
+ data[#data+1] = settings
+ context(#data)
+end
+
+local a_viewerlayer = attributes.private("viewerlayer")
+
+local function flush_ruled(head,f,l,d,level,parent,strip) -- not that fast but acceptable for this purpose
+-- check for f and l
+ if f.id ~= glyph_code then
+ -- safeguard ... we need to deal with rules and such (math)
+ return head
+ end
+ local r, m
+ if strip then
+ if trace_ruled then
+ local before = n_tosequence(f,l,true)
+ f, l = striprange(f,l)
+ local after = n_tosequence(f,l,true)
+ report_ruled("range stripper, before %a, after %a",before,after)
+ else
+ f, l = striprange(f,l)
+ end
+ end
+ if not f then
+ return head
+ end
+ local w = list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,f,l.next)
+ local method, offset, continue, dy, order, max = d.method, d.offset, d.continue, d.dy, d.order, d.max
+ local rulethickness, unit = d.rulethickness, d.unit
+ local ma, ca, ta = d.ma, d.ca, d.ta
+ local colorspace = (ma > 0 and ma) or f[a_colorspace] or 1
+ local color = (ca > 0 and ca) or f[a_color]
+ local transparency = (ta > 0 and ta) or f[a_transparency]
+ local foreground = order == variables.foreground
+
+ local e = dimenfactor(unit,fontdata[f.font]) -- what if no glyph node
+
+ local rt = tonumber(rulethickness)
+ if rt then
+ rulethickness = e * rulethickness / 2
+ else
+ local n, u = splitdimen(rulethickness)
+ if n and u then -- we need to intercept ex and em and % and ...
+ rulethickness = n * dimenfactor(u,fontdata[f.font]) / 2
+ else
+ rulethickness = 1/5
+ end
+ end
+
+ if level > max then
+ level = max
+ end
+ if method == 0 then -- center
+ offset = 2*offset
+-- m = (offset+(level-1)*dy+rulethickness)*e/2
+ m = (offset+(level-1)*dy)*e/2 + rulethickness/2
+ else
+ m = 0
+ end
+ for i=1,level do
+-- local ht = (offset+(i-1)*dy+rulethickness)*e - m
+-- local dp = -(offset+(i-1)*dy-rulethickness)*e + m
+ local ht = (offset+(i-1)*dy)*e + rulethickness - m
+ local dp = -(offset+(i-1)*dy)*e + rulethickness + m
+ local r = new_rule(w,ht,dp)
+ local v = f[a_viewerlayer]
+ -- quick hack
+ if v then
+ r[a_viewerlayer] = v
+ end
+ --
+ if color then
+ r[a_colorspace] = colorspace
+ r[a_color] = color
+ end
+ if transparency then
+ r[a_transparency] = transparency
+ end
+ local k = new_kern(-w)
+ if foreground then
+ insert_node_after(head,l,k)
+ insert_node_after(head,k,r)
+ l = r
+ else
+ head = insert_node_before(head,f,r)
+ insert_node_after(head,r,k)
+ end
+ if trace_ruled then
+ report_ruled("level %a, width %p, height %p, depth %p, nodes %a, text %a",
+ level,w,ht,dp,n_tostring(f,l),n_tosequence(f,l,true))
+ end
+ end
+ return head
+end
+
+local process = nodes.processwords
+
+nodes.rules.handler = function(head) return process(a_ruled,data,flush_ruled,head) end
+
+function nodes.rules.enable()
+ tasks.enableaction("shipouts","nodes.rules.handler")
+end
+
+-- elsewhere:
+--
+-- tasks.appendaction ("shipouts", "normalizers", "nodes.rules.handler")
+-- tasks.disableaction("shipouts", "nodes.rules.handler") -- only kick in when used
+
+local trace_shifted = false trackers.register("nodes.shifting", function(v) trace_shifted = v end)
+
+local report_shifted = logs.reporter("nodes","shifting")
+
+local a_shifted = attributes.private('shifted')
+
+nodes.shifts = nodes.shifts or { }
+nodes.shifts.data = nodes.shifts.data or { }
+
+storage.register("nodes/shifts/data", nodes.shifts.data, "nodes.shifts.data")
+
+local data = nodes.shifts.data
+
+function nodes.shifts.define(settings)
+ data[#data+1] = settings
+ context(#data)
+end
+
+local function flush_shifted(head,first,last,data,level,parent,strip) -- not that fast but acceptable for this purpose
+ if true then
+ first, last = striprange(first,last)
+ end
+ local prev, next = first.prev, last.next
+ first.prev, last.next = nil, nil
+ local width, height, depth = list_dimensions(parent.glue_set,parent.glue_sign,parent.glue_order,first,next)
+ local list = hpack_nodes(first,width,"exactly")
+ if first == head then
+ head = list
+ end
+ if prev then
+ prev.next, list.prev = list, prev
+ end
+ if next then
+ next.prev, list.next = list, next
+ end
+ local raise = data.dy * dimenfactor(data.unit,fontdata[first.font])
+ list.shift, list.height, list.depth = raise, height, depth
+ if trace_shifted then
+ report_shifted("width %p, nodes %a, text %a",width,n_tostring(first,last),n_tosequence(first,last,true))
+ end
+ return head
+end
+
+local process = nodes.processwords
+
+nodes.shifts.handler = function(head) return process(a_shifted,data,flush_shifted,head) end
+
+function nodes.shifts.enable()
+ tasks.enableaction("shipouts","nodes.shifts.handler")
+end
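-- for the record, a made up rules definition in the shape flush_ruled above
-- expects; the values are illustrative, only the field names come from the code:
--
-- nodes.rules.define {
--     method        = 0,        -- 0 means: center the rule(s) around the offset
--     offset        = 0.2,      -- expressed in 'unit'
--     dy            = 0.4,      -- distance between stacked rules (levels)
--     max           = 3,        -- maximum number of levels actually drawn
--     rulethickness = "0.1ex",  -- a number or a dimension string
--     unit          = "ex",     -- resolved per font via dimenfactor
--     order         = interfaces.variables.foreground,
--     continue      = interfaces.variables.no,
--     ma = 0, ca = 0, ta = 0,   -- colormodel, color and transparency attributes
-- }
-- nodes.rules.enable()          -- hooks nodes.rules.handler into the shipout task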
diff --git a/tex/context/base/node-ser.lua b/tex/context/base/node-ser.lua
index b0a6e9952..f4ae1e2b2 100644
--- a/tex/context/base/node-ser.lua
+++ b/tex/context/base/node-ser.lua
@@ -1,286 +1,286 @@
-if not modules then modules = { } end modules ['node-ser'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- beware, some field names will change in upcoming releases
--- of luatex; this is pretty old code that needs an overhaul
-
-local type, format, rep = type, string.format, string.rep
-local concat, tohash, sortedkeys, printtable = table.concat, table.tohash, table.sortedkeys, table.print
-
-local allocate = utilities.storage.allocate
-
-local nodes, node = nodes, node
-
-local traverse = node.traverse
-local is_node = node.is_node
-
-local nodecodes = nodes.nodecodes
-local noadcodes = nodes.noadcodes
-local nodefields = nodes.fields
-
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-
-local expand = allocate ( tohash {
- "list", -- list_ptr & ins_ptr & adjust_ptr
- "pre", --
- "post", --
- "spec", -- glue_ptr
- "top_skip", --
- "attr", --
- "replace", -- nobreak
- "components", -- lig_ptr
- "box_left", --
- "box_right", --
- "glyph", -- margin_char
- "leader", -- leader_ptr
- "action", -- action_ptr
- "value", -- user_defined nodes with subtype 'a' en 'n'
- "head",
-} )
-
--- page_insert: "height", "last_ins_ptr", "best_ins_ptr"
--- split_insert: "height", "last_ins_ptr", "best_ins_ptr", "broken_ptr", "broken_ins"
-
-local ignore = allocate ( tohash {
- "page_insert",
- "split_insert",
- "ref_count",
-} )
-
-local dimension = allocate ( tohash {
- "width", "height", "depth", "shift",
- "stretch", "shrink",
- "xoffset", "yoffset",
- "surround",
- "kern",
- "box_left_width", "box_right_width"
-} )
-
--- flat: don't use next, but indexes
--- verbose: also add type
--- can be sped up
-
-nodes.dimensionfields = dimension
-nodes.listablefields = expand
-nodes.ignorablefields = ignore
-
--- not ok yet:
-
-local function astable(n,sparse) -- not yet ok
- local f, t = nodefields(n), { }
- for i=1,#f do
- local v = f[i]
- local d = n[v]
- if d then
- if ignore[v] or v == "id" then
- -- skip
- elseif expand[v] then -- or: type(n[v]) ~= "string" or type(n[v]) ~= "number" or type(n[v]) ~= "table"
- t[v] = "pointer to list"
- elseif sparse then
- if (type(d) == "number" and d ~= 0) or (type(d) == "string" and d ~= "") then
- t[v] = d
- end
- else
- t[v] = d
- end
- end
- end
- t.type = nodecodes[n.id]
- return t
-end
-
-nodes.astable = astable
-
-setinspector(function(v) if is_node(v) then printtable(astable(v),tostring(v)) return true end end)
-
--- under construction:
-
-local function totable(n,flat,verbose,noattributes)
- -- todo: no local function
- local function to_table(n,flat,verbose,noattributes) -- no need to pass
- local f = nodefields(n)
- local tt = { }
- for k=1,#f do
- local v = f[k]
- local nv = v and n[v]
- if nv then
- if ignore[v] then
- -- skip
- elseif noattributes and v == "attr" then
- -- skip
- elseif expand[v] then
- if type(nv) == "number" or type(nv) == "string" then
- tt[v] = nv
- else
- tt[v] = totable(nv,flat,verbose)
- end
- elseif type(nv) == "table" then
- tt[v] = nv -- totable(nv,flat,verbose) -- data
- else
- tt[v] = nv
- end
- end
- end
- if verbose then
- tt.type = nodecodes[tt.id]
- end
- return tt
- end
- if n then
- if flat then
- local t, tn = { }, 0
- while n do
- tn = tn + 1
- t[tn] = to_table(n,flat,verbose,noattributes)
- n = n.next
- end
- return t
- else
- local t = to_table(n)
- if n.next then
- t.next = totable(n.next,flat,verbose,noattributes)
- end
- return t
- end
- else
- return { }
- end
-end
-
-nodes.totable = totable
-
-local function key(k)
- return ((type(k) == "number") and "["..k.."]") or k
-end
-
--- not ok yet; this will become a module
-
--- todo: adapt to nodecodes etc
-
-local function serialize(root,name,handle,depth,m,noattributes)
- handle = handle or print
- if depth then
- depth = depth .. " "
- handle(format("%s%s={",depth,key(name)))
- else
- depth = ""
- local tname = type(name)
- if tname == "string" then
- if name == "return" then
- handle("return {")
- else
- handle(name .. "={")
- end
- elseif tname == "number" then
- handle("[" .. name .. "]={")
- else
- handle("t={")
- end
- end
- if root then
- local fld
- if root.id then
- fld = nodefields(root) -- we can cache these (todo)
- else
- fld = sortedkeys(root)
- end
- if type(root) == 'table' and root['type'] then -- userdata or table
- handle(format("%s %s=%q,",depth,'type',root['type']))
- end
- for f=1,#fld do
- local k = fld[f]
- if k == "ref_count" then
- -- skip
- elseif noattributes and k == "attr" then
- -- skip
- elseif k == "id" then
- local v = root[k]
- handle(format("%s id=%s,",depth,nodecodes[v] or noadcodes[v] or v))
- elseif k then
- local v = root[k]
- local t = type(v)
- if t == "number" then
- if v == 0 then
- -- skip
- else
- handle(format("%s %s=%s,",depth,key(k),v))
- end
- elseif t == "string" then
- if v == "" then
- -- skip
- else
- handle(format("%s %s=%q,",depth,key(k),v))
- end
- elseif t == "boolean" then
- handle(format("%s %s=%q,",depth,key(k),tostring(v)))
- elseif v then -- userdata or table
- serialize(v,k,handle,depth,m+1,noattributes)
- end
- end
- end
- if root['next'] then -- userdata or table
- serialize(root['next'],'next',handle,depth,m+1,noattributes)
- end
- end
- if m and m > 0 then
- handle(format("%s},",depth))
- else
- handle(format("%s}",depth))
- end
-end
-
-function nodes.serialize(root,name,noattributes)
- local t, n = { }, 0
- local function flush(s)
- n = n + 1
- t[n] = s
- end
- serialize(root,name,flush,nil,0,noattributes)
- return concat(t,"\n")
-end
-
-function nodes.serializebox(n,flat,verbose,name)
- return nodes.serialize(nodes.totable(tex.box[n],flat,verbose),name)
-end
-
-function nodes.visualizebox(...) -- to be checked .. will move to module anyway
- context.starttyping()
- context.pushcatcodes("verbatim")
- context(nodes.serializebox(...))
- context.stoptyping()
- context.popcatcodes()
-end
-
-function nodes.list(head,n) -- name might change to nodes.type -- to be checked .. will move to module anyway
- if not n then
- context.starttyping(true)
- end
- while head do
- local id = head.id
- context(rep(" ",n or 0) .. tostring(head) .. "\n")
- if id == hlist_code or id == vlist_code then
- nodes.list(head.list,(n or 0)+1)
- end
- head = head.next
- end
- if not n then
- context.stoptyping(true)
- end
-end
-
-function nodes.print(head,n)
- while head do
- local id = head.id
- logs.writer(string.formatters["%w%S"],n or 0,head)
- if id == hlist_code or id == vlist_code then
- nodes.print(head.list,(n or 0)+1)
- end
- head = head.next
- end
-end
+if not modules then modules = { } end modules ['node-ser'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- beware, some field names will change in upcoming releases
+-- of luatex; this is pretty old code that needs an overhaul
+
+local type, format, rep = type, string.format, string.rep
+local concat, tohash, sortedkeys, printtable = table.concat, table.tohash, table.sortedkeys, table.print
+
+local allocate = utilities.storage.allocate
+
+local nodes, node = nodes, node
+
+local traverse = node.traverse
+local is_node = node.is_node
+
+local nodecodes = nodes.nodecodes
+local noadcodes = nodes.noadcodes
+local nodefields = nodes.fields
+
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+
+local expand = allocate ( tohash {
+ "list", -- list_ptr & ins_ptr & adjust_ptr
+ "pre", --
+ "post", --
+ "spec", -- glue_ptr
+ "top_skip", --
+ "attr", --
+ "replace", -- nobreak
+ "components", -- lig_ptr
+ "box_left", --
+ "box_right", --
+ "glyph", -- margin_char
+ "leader", -- leader_ptr
+ "action", -- action_ptr
+ "value", -- user_defined nodes with subtype 'a' en 'n'
+ "head",
+} )
+
+-- page_insert: "height", "last_ins_ptr", "best_ins_ptr"
+-- split_insert: "height", "last_ins_ptr", "best_ins_ptr", "broken_ptr", "broken_ins"
+
+local ignore = allocate ( tohash {
+ "page_insert",
+ "split_insert",
+ "ref_count",
+} )
+
+local dimension = allocate ( tohash {
+ "width", "height", "depth", "shift",
+ "stretch", "shrink",
+ "xoffset", "yoffset",
+ "surround",
+ "kern",
+ "box_left_width", "box_right_width"
+} )
+
+-- flat: don't use next, but indexes
+-- verbose: also add type
+-- can be sped up
+
+nodes.dimensionfields = dimension
+nodes.listablefields = expand
+nodes.ignorablefields = ignore
+
+-- not ok yet:
+
+local function astable(n,sparse) -- not yet ok
+ local f, t = nodefields(n), { }
+ for i=1,#f do
+ local v = f[i]
+ local d = n[v]
+ if d then
+ if ignore[v] or v == "id" then
+ -- skip
+ elseif expand[v] then -- or: type(n[v]) ~= "string" or type(n[v]) ~= "number" or type(n[v]) ~= "table"
+ t[v] = "pointer to list"
+ elseif sparse then
+ if (type(d) == "number" and d ~= 0) or (type(d) == "string" and d ~= "") then
+ t[v] = d
+ end
+ else
+ t[v] = d
+ end
+ end
+ end
+ t.type = nodecodes[n.id]
+ return t
+end
+
+nodes.astable = astable
+
+setinspector(function(v) if is_node(v) then printtable(astable(v),tostring(v)) return true end end)
+
+-- under construction:
+
+local function totable(n,flat,verbose,noattributes)
+ -- todo: no local function
+ local function to_table(n,flat,verbose,noattributes) -- no need to pass
+ local f = nodefields(n)
+ local tt = { }
+ for k=1,#f do
+ local v = f[k]
+ local nv = v and n[v]
+ if nv then
+ if ignore[v] then
+ -- skip
+ elseif noattributes and v == "attr" then
+ -- skip
+ elseif expand[v] then
+ if type(nv) == "number" or type(nv) == "string" then
+ tt[v] = nv
+ else
+ tt[v] = totable(nv,flat,verbose)
+ end
+ elseif type(nv) == "table" then
+ tt[v] = nv -- totable(nv,flat,verbose) -- data
+ else
+ tt[v] = nv
+ end
+ end
+ end
+ if verbose then
+ tt.type = nodecodes[tt.id]
+ end
+ return tt
+ end
+ if n then
+ if flat then
+ local t, tn = { }, 0
+ while n do
+ tn = tn + 1
+ t[tn] = to_table(n,flat,verbose,noattributes)
+ n = n.next
+ end
+ return t
+ else
+ local t = to_table(n)
+ if n.next then
+ t.next = totable(n.next,flat,verbose,noattributes)
+ end
+ return t
+ end
+ else
+ return { }
+ end
+end
+
+nodes.totable = totable
+
+local function key(k)
+ return ((type(k) == "number") and "["..k.."]") or k
+end
+
+-- not ok yet; this will become a module
+
+-- todo: adapt to nodecodes etc
+
+local function serialize(root,name,handle,depth,m,noattributes)
+ handle = handle or print
+ if depth then
+ depth = depth .. " "
+ handle(format("%s%s={",depth,key(name)))
+ else
+ depth = ""
+ local tname = type(name)
+ if tname == "string" then
+ if name == "return" then
+ handle("return {")
+ else
+ handle(name .. "={")
+ end
+ elseif tname == "number" then
+ handle("[" .. name .. "]={")
+ else
+ handle("t={")
+ end
+ end
+ if root then
+ local fld
+ if root.id then
+ fld = nodefields(root) -- we can cache these (todo)
+ else
+ fld = sortedkeys(root)
+ end
+ if type(root) == 'table' and root['type'] then -- userdata or table
+ handle(format("%s %s=%q,",depth,'type',root['type']))
+ end
+ for f=1,#fld do
+ local k = fld[f]
+ if k == "ref_count" then
+ -- skip
+ elseif noattributes and k == "attr" then
+ -- skip
+ elseif k == "id" then
+ local v = root[k]
+ handle(format("%s id=%s,",depth,nodecodes[v] or noadcodes[v] or v))
+ elseif k then
+ local v = root[k]
+ local t = type(v)
+ if t == "number" then
+ if v == 0 then
+ -- skip
+ else
+ handle(format("%s %s=%s,",depth,key(k),v))
+ end
+ elseif t == "string" then
+ if v == "" then
+ -- skip
+ else
+ handle(format("%s %s=%q,",depth,key(k),v))
+ end
+ elseif t == "boolean" then
+ handle(format("%s %s=%q,",depth,key(k),tostring(v)))
+ elseif v then -- userdata or table
+ serialize(v,k,handle,depth,m+1,noattributes)
+ end
+ end
+ end
+ if root['next'] then -- userdata or table
+ serialize(root['next'],'next',handle,depth,m+1,noattributes)
+ end
+ end
+ if m and m > 0 then
+ handle(format("%s},",depth))
+ else
+ handle(format("%s}",depth))
+ end
+end
+
+function nodes.serialize(root,name,noattributes)
+ local t, n = { }, 0
+ local function flush(s)
+ n = n + 1
+ t[n] = s
+ end
+ serialize(root,name,flush,nil,0,noattributes)
+ return concat(t,"\n")
+end
+
+function nodes.serializebox(n,flat,verbose,name)
+ return nodes.serialize(nodes.totable(tex.box[n],flat,verbose),name)
+end
+
+function nodes.visualizebox(...) -- to be checked .. will move to module anyway
+ context.starttyping()
+ context.pushcatcodes("verbatim")
+ context(nodes.serializebox(...))
+ context.stoptyping()
+ context.popcatcodes()
+end
+
+function nodes.list(head,n) -- name might change to nodes.type -- to be checked .. will move to module anyway
+ if not n then
+ context.starttyping(true)
+ end
+ while head do
+ local id = head.id
+ context(rep(" ",n or 0) .. tostring(head) .. "\n")
+ if id == hlist_code or id == vlist_code then
+ nodes.list(head.list,(n or 0)+1)
+ end
+ head = head.next
+ end
+ if not n then
+ context.stoptyping(true)
+ end
+end
+
+function nodes.print(head,n)
+ while head do
+ local id = head.id
+ logs.writer(string.formatters["%w%S"],n or 0,head)
+ if id == hlist_code or id == vlist_code then
+ nodes.print(head.list,(n or 0)+1)
+ end
+ head = head.next
+ end
+end
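-- a small sketch of the serializer entry points above; box 0 is just an example:
--
-- local t = nodes.totable(tex.box[0],false,true)    -- nested table, verbose adds .type
-- print(nodes.serialize(tex.box[0],"return",true))  -- serialized dump, attributes skipped
-- nodes.visualizebox(0,true,true)                   -- typeset a flat, verbose dump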
diff --git a/tex/context/base/node-shp.lua b/tex/context/base/node-shp.lua
index 8f7a411a7..42084a135 100644
--- a/tex/context/base/node-shp.lua
+++ b/tex/context/base/node-shp.lua
@@ -1,148 +1,148 @@
-if not modules then modules = { } end modules ['node-shp'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local nodes, node = nodes, node
-
-local next, type = next, type
-local format = string.format
-local concat, sortedpairs = table.concat, table.sortedpairs
-local setmetatableindex = table.setmetatableindex
-
-local nodecodes = nodes.nodecodes
-local tasks = nodes.tasks
-local handlers = nodes.handlers
-
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local disc_code = nodecodes.disc
-local mark_code = nodecodes.mark
-local kern_code = nodecodes.kern
-local glue_code = nodecodes.glue
-
-local texbox = tex.box
-
-local free_node = node.free
-local remove_node = node.remove
-local traverse_nodes = node.traverse
-
-local function cleanup(head) -- rough
- local start = head
- while start do
- local id = start.id
- if id == disc_code or (id == glue_code and not start.writable) or (id == kern_code and start.kern == 0) or id == mark_code then
- head, start, tmp = remove_node(head,start)
- free_node(tmp)
- elseif id == hlist_code or id == vlist_code then
- local sl = start.list
- if sl then
- start.list = cleanup(sl)
- start = start.next
- else
- head, start, tmp = remove_node(head,start)
- free_node(tmp)
- end
- else
- start = start.next
- end
- end
- return head
-end
-
-directives.register("backend.cleanup", function()
- tasks.enableaction("shipouts","nodes.handlers.cleanuppage")
-end)
-
-function handlers.cleanuppage(head)
- -- about 10% of the nodes make no sense for the backend
- return cleanup(head), true
-end
-
-local actions = tasks.actions("shipouts") -- no extra arguments
-
-function handlers.finalize(head) -- problem, attr loaded before node, todo ...
- return actions(head)
-end
-
--- handlers.finalize = actions
-
--- interface
-
-function commands.finalizebox(n)
- actions(texbox[n])
-end
-
--- just in case we want to optimize lookups:
-
-local frequencies = { }
-
-nodes.tracers.frequencies = frequencies
-
-local data = { }
-local done = false
-
-setmetatableindex(data,function(t,k)
- local v = { }
- setmetatableindex(v,function(t,k)
- local v = { }
- t[k] = v
- setmetatableindex(v,function(t,k)
- t[k] = 0
- return 0
- end)
- return v
- end)
- t[k] = v
- return v
-end)
-
-local function count(head,data,subcategory)
- -- no components, pre, post, replace .. could maybe be an option .. but
- -- we use this for optimization so it makes sense to look at the
- -- main node only
- for n in traverse_nodes(head) do
- local id = n.id
- local dn = data[nodecodes[n.id]]
- dn[subcategory] = dn[subcategory] + 1
- if id == hlist_code or id == vlist_code then
- count(n.list,data,subcategory)
- end
- end
-end
-
-local function register(category,subcategory)
- return function(head)
- done = true
- count(head,data[category],subcategory)
- return head, false
- end
-end
-
-frequencies.register = register
-frequencies.filename = nil
-
-trackers.register("nodes.frequencies",function(v)
- if type(v) == "string" then
- frequencies.filename = v
- end
- handlers.frequencies_shipouts_before = register("shipouts", "begin")
- handlers.frequencies_shipouts_after = register("shipouts", "end")
- handlers.frequencies_processors_before = register("processors", "begin")
- handlers.frequencies_processors_after = register("processors", "end")
- tasks.prependaction("shipouts", "before", "nodes.handlers.frequencies_shipouts_before")
- tasks.appendaction ("shipouts", "after", "nodes.handlers.frequencies_shipouts_after")
- tasks.prependaction("processors", "before", "nodes.handlers.frequencies_processors_before")
- tasks.appendaction ("processors", "after", "nodes.handlers.frequencies_processors_after")
-end)
-
-statistics.register("node frequencies", function()
- if done then
- local filename = frequencies.filename or (tex.jobname .. "-frequencies.lua")
- io.savedata(filename,table.serialize(data,true))
- return format("saved in %q",filename)
- end
-end)
+if not modules then modules = { } end modules ['node-shp'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local nodes, node = nodes, node
+
+local next, type = next, type
+local format = string.format
+local concat, sortedpairs = table.concat, table.sortedpairs
+local setmetatableindex = table.setmetatableindex
+
+local nodecodes = nodes.nodecodes
+local tasks = nodes.tasks
+local handlers = nodes.handlers
+
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local disc_code = nodecodes.disc
+local mark_code = nodecodes.mark
+local kern_code = nodecodes.kern
+local glue_code = nodecodes.glue
+
+local texbox = tex.box
+
+local free_node = node.free
+local remove_node = node.remove
+local traverse_nodes = node.traverse
+
+local function cleanup(head) -- rough
+ local start = head
+ while start do
+ local id = start.id
+ if id == disc_code or (id == glue_code and not start.writable) or (id == kern_code and start.kern == 0) or id == mark_code then
+ head, start, tmp = remove_node(head,start)
+ free_node(tmp)
+ elseif id == hlist_code or id == vlist_code then
+ local sl = start.list
+ if sl then
+ start.list = cleanup(sl)
+ start = start.next
+ else
+ head, start, tmp = remove_node(head,start)
+ free_node(tmp)
+ end
+ else
+ start = start.next
+ end
+ end
+ return head
+end
+
+directives.register("backend.cleanup", function()
+ tasks.enableaction("shipouts","nodes.handlers.cleanuppage")
+end)
+
+function handlers.cleanuppage(head)
+ -- about 10% of the nodes make no sense for the backend
+ return cleanup(head), true
+end
+
+local actions = tasks.actions("shipouts") -- no extra arguments
+
+function handlers.finalize(head) -- problem, attr loaded before node, todo ...
+ return actions(head)
+end
+
+-- handlers.finalize = actions
+
+-- interface
+
+function commands.finalizebox(n)
+ actions(texbox[n])
+end
+
+-- just in case we want to optimize lookups:
+
+local frequencies = { }
+
+nodes.tracers.frequencies = frequencies
+
+local data = { }
+local done = false
+
+setmetatableindex(data,function(t,k)
+ local v = { }
+ setmetatableindex(v,function(t,k)
+ local v = { }
+ t[k] = v
+ setmetatableindex(v,function(t,k)
+ t[k] = 0
+ return 0
+ end)
+ return v
+ end)
+ t[k] = v
+ return v
+end)
+
+local function count(head,data,subcategory)
+ -- no components, pre, post, replace .. could maybe be an option .. but
+ -- we use this for optimization so it makes sense to look at the
+ -- main node only
+ for n in traverse_nodes(head) do
+ local id = n.id
+ local dn = data[nodecodes[n.id]]
+ dn[subcategory] = dn[subcategory] + 1
+ if id == hlist_code or id == vlist_code then
+ count(n.list,data,subcategory)
+ end
+ end
+end
+
+local function register(category,subcategory)
+ return function(head)
+ done = true
+ count(head,data[category],subcategory)
+ return head, false
+ end
+end
+
+frequencies.register = register
+frequencies.filename = nil
+
+trackers.register("nodes.frequencies",function(v)
+ if type(v) == "string" then
+ frequencies.filename = v
+ end
+ handlers.frequencies_shipouts_before = register("shipouts", "begin")
+ handlers.frequencies_shipouts_after = register("shipouts", "end")
+ handlers.frequencies_processors_before = register("processors", "begin")
+ handlers.frequencies_processors_after = register("processors", "end")
+ tasks.prependaction("shipouts", "before", "nodes.handlers.frequencies_shipouts_before")
+ tasks.appendaction ("shipouts", "after", "nodes.handlers.frequencies_shipouts_after")
+ tasks.prependaction("processors", "before", "nodes.handlers.frequencies_processors_before")
+ tasks.appendaction ("processors", "after", "nodes.handlers.frequencies_processors_after")
+end)
+
+statistics.register("node frequencies", function()
+ if done then
+ local filename = frequencies.filename or (tex.jobname .. "-frequencies.lua")
+ io.savedata(filename,table.serialize(data,true))
+ return format("saved in %q",filename)
+ end
+end)
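-- the counters above initialize themselves via table.setmetatableindex; a self
-- contained illustration of that pattern:
--
-- local counts = setmetatable({ }, { __index = function(t,k) t[k] = 0 return 0 end })
-- counts.glyph = counts.glyph + 1  -- no explicit initialization needed
--
-- the tracker itself is normally enabled at the tex end with
-- \enabletrackers[nodes.frequencies] (a string value is taken as filename)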
diff --git a/tex/context/base/node-snp.lua b/tex/context/base/node-snp.lua
index 31c7771ac..3a764e90a 100644
--- a/tex/context/base/node-snp.lua
+++ b/tex/context/base/node-snp.lua
@@ -1,66 +1,66 @@
-if not modules then modules = { } end modules ['node-snp'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-if not nodes then
- nodes = { } -- also loaded in mtx-timing
-end
-
-local snapshots = { }
-nodes.snapshots = snapshots
-
-local nodeusage = nodes.pool and nodes.pool.usage
-local clock = os.gettimeofday or os.clock -- should go in environment
-local lasttime = clock()
-local samples = { }
-
-local parameters = {
- "cs_count",
- "dyn_used",
- "elapsed_time",
- "luabytecode_bytes",
- "luastate_bytes",
- "max_buf_stack",
- "obj_ptr",
- "pdf_mem_ptr",
- "pdf_mem_size",
- "pdf_os_cntr",
--- "pool_ptr", -- obsolete
- "str_ptr",
-}
-
-function snapshots.takesample(comment)
- if nodeusage then
- local c = clock()
- local t = {
- elapsed_time = c - lasttime,
- node_memory = nodeusage(),
- comment = comment,
- }
- for i=1,#parameters do
- local parameter = parameters[i]
- local ps = status[parameter]
- if ps then
- t[parameter] = ps
- end
- end
- samples[#samples+1] = t
- lasttime = c
- end
-end
-
-function snapshots.getsamples()
- return samples -- one return value !
-end
-
-function snapshots.resetsamples()
- samples = { }
-end
-
-function snapshots.getparameters()
- return parameters
-end
+if not modules then modules = { } end modules ['node-snp'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+if not nodes then
+ nodes = { } -- also loaded in mtx-timing
+end
+
+local snapshots = { }
+nodes.snapshots = snapshots
+
+local nodeusage = nodes.pool and nodes.pool.usage
+local clock = os.gettimeofday or os.clock -- should go in environment
+local lasttime = clock()
+local samples = { }
+
+local parameters = {
+ "cs_count",
+ "dyn_used",
+ "elapsed_time",
+ "luabytecode_bytes",
+ "luastate_bytes",
+ "max_buf_stack",
+ "obj_ptr",
+ "pdf_mem_ptr",
+ "pdf_mem_size",
+ "pdf_os_cntr",
+-- "pool_ptr", -- obsolete
+ "str_ptr",
+}
+
+function snapshots.takesample(comment)
+ if nodeusage then
+ local c = clock()
+ local t = {
+ elapsed_time = c - lasttime,
+ node_memory = nodeusage(),
+ comment = comment,
+ }
+ for i=1,#parameters do
+ local parameter = parameters[i]
+ local ps = status[parameter]
+ if ps then
+ t[parameter] = ps
+ end
+ end
+ samples[#samples+1] = t
+ lasttime = c
+ end
+end
+
+function snapshots.getsamples()
+ return samples -- one return value !
+end
+
+function snapshots.resetsamples()
+ samples = { }
+end
+
+function snapshots.getparameters()
+ return parameters
+end
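-- a usage sketch; samples are only collected when nodes.pool.usage is
-- available, as guarded above:
--
-- nodes.snapshots.takesample("after first pass")
-- -- ... some work ...
-- nodes.snapshots.takesample("after second pass")
-- for i, s in ipairs(nodes.snapshots.getsamples()) do
--     print(i, s.comment, s.elapsed_time)
-- end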
diff --git a/tex/context/base/node-tex.lua b/tex/context/base/node-tex.lua
index 2170e0603..9393eaf79 100644
--- a/tex/context/base/node-tex.lua
+++ b/tex/context/base/node-tex.lua
@@ -1,41 +1,41 @@
-if not modules then modules = { } end modules ['node-tex'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format = string.format
-
-builders = builders or { }
-builders.kernel = builders.kernel or { }
-local kernel = builders.kernel
-
-local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming
-local hyphenate, ligaturing, kerning = lang.hyphenate, node.ligaturing, node.kerning
-
-function kernel.hyphenation(head)
- -- starttiming(kernel)
- local done = hyphenate(head)
- -- stoptiming(kernel)
- return head, done
-end
-
-function kernel.ligaturing(head)
- -- starttiming(kernel)
- local head, tail, done = ligaturing(head) -- todo: check what is returned
- -- stoptiming(kernel)
- return head, done
-end
-
-function kernel.kerning(head)
- -- starttiming(kernel)
- local head, tail, done = kerning(head) -- todo: check what is returned
- -- stoptiming(kernel)
- return head, done
-end
-
-callbacks.register('hyphenate' , false, "normal hyphenation routine, called elsewhere")
-callbacks.register('ligaturing', false, "normal ligaturing routine, called elsewhere")
-callbacks.register('kerning' , false, "normal kerning routine, called elsewhere")
+if not modules then modules = { } end modules ['node-tex'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format = string.format
+
+builders = builders or { }
+builders.kernel = builders.kernel or { }
+local kernel = builders.kernel
+
+local starttiming, stoptiming = statistics.starttiming, statistics.stoptiming
+local hyphenate, ligaturing, kerning = lang.hyphenate, node.ligaturing, node.kerning
+
+function kernel.hyphenation(head)
+ -- starttiming(kernel)
+ local done = hyphenate(head)
+ -- stoptiming(kernel)
+ return head, done
+end
+
+function kernel.ligaturing(head)
+ -- starttiming(kernel)
+ local head, tail, done = ligaturing(head) -- todo: check what is returned
+ -- stoptiming(kernel)
+ return head, done
+end
+
+function kernel.kerning(head)
+ -- starttiming(kernel)
+ local head, tail, done = kerning(head) -- todo: check what is returned
+ -- stoptiming(kernel)
+ return head, done
+end
+
+callbacks.register('hyphenate' , false, "normal hyphenation routine, called elsewhere")
+callbacks.register('ligaturing', false, "normal ligaturing routine, called elsewhere")
+callbacks.register('kerning' , false, "normal kerning routine, called elsewhere")
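-- all three kernel handlers follow the usual head, done = handler(head)
-- protocol so they can also be chained by hand; 'somehead' is assumed to be a
-- node list (for instance tex.box[0].list):
--
-- local head, done = somehead, false
-- head, done = builders.kernel.hyphenation(head)
-- head, done = builders.kernel.ligaturing(head)
-- head, done = builders.kernel.kerning(head)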
diff --git a/tex/context/base/node-tra.lua b/tex/context/base/node-tra.lua
index 916b2143d..f194239bb 100644
--- a/tex/context/base/node-tra.lua
+++ b/tex/context/base/node-tra.lua
@@ -1,529 +1,529 @@
-if not modules then modules = { } end modules ['node-tra'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-
-This is rather experimental. We need more control and some of this
-might become a runtime module instead. This module will be cleaned up!
---ldx]]--
-
-local utfchar = utf.char
-local format, match, gmatch, concat, rep = string.format, string.match, string.gmatch, table.concat, string.rep
-local lpegmatch = lpeg.match
-local clock = os.gettimeofday or os.clock -- should go in environment
-
-local report_nodes = logs.reporter("nodes","tracing")
-
-nodes = nodes or { }
-
-local nodes, node, context = nodes, node, context
-
-local tracers = nodes.tracers or { }
-nodes.tracers = tracers
-
-local tasks = nodes.tasks or { }
-nodes.tasks = tasks
-
-local handlers = nodes.handlers or {}
-nodes.handlers = handlers
-
-local injections = nodes.injections or { }
-nodes.injections = injections
-
-local traverse_nodes = node.traverse
-local traverse_by_id = node.traverse_id
-local count_nodes = nodes.count
-
-local nodecodes = nodes.nodecodes
-local whatcodes = nodes.whatcodes
-local skipcodes = nodes.skipcodes
-local fillcodes = nodes.fillcodes
-
-local glyph_code = nodecodes.glyph
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local disc_code = nodecodes.disc
-local glue_code = nodecodes.glue
-local kern_code = nodecodes.kern
-local rule_code = nodecodes.rule
-local whatsit_code = nodecodes.whatsit
-local spec_code = nodecodes.glue_spec
-
-local localpar_code = whatcodes.localpar
-local dir_code = whatcodes.dir
-
-local nodepool = nodes.pool
-
-local dimenfactors = number.dimenfactors
-local formatters = string.formatters
-
--- this will be reorganized:
-
-function nodes.showlist(head, message)
- if message then
- report_nodes(message)
- end
- for n in traverse_nodes(head) do
- report_nodes(tostring(n))
- end
-end
-
-function nodes.handlers.checkglyphs(head,message)
- local t = { }
- for g in traverse_by_id(glyph_code,head) do
- t[#t+1] = formatters["%U:%s"](g.char,g.subtype)
- end
- if #t > 0 then
- if message and message ~= "" then
- report_nodes("%s, %s glyphs: % t",message,#t,t)
- else
- report_nodes("%s glyphs: % t",#t,t)
- end
- end
- return false
-end
-
-function nodes.handlers.checkforleaks(sparse)
- local l = { }
- local q = node.usedlist()
- for p in traverse_nodes(q) do
- local s = table.serialize(nodes.astable(p,sparse),nodecodes[p.id])
- l[s] = (l[s] or 0) + 1
- end
- node.flush_list(q)
- for k, v in next, l do
- write_nl(formatters["%s * %s"](v,k))
- end
-end
-
-local f_sequence = formatters["U+%04X:%s"]
-
-local function tosequence(start,stop,compact)
- if start then
- local t = { }
- while start do
- local id = start.id
- if id == glyph_code then
- local c = start.char
- if compact then
- if start.components then
- t[#t+1] = tosequence(start.components,nil,compact)
- else
- t[#t+1] = utfchar(c)
- end
- else
- t[#t+1] = f_sequence(c,utfchar(c))
- end
- elseif id == whatsit_code and (start.subtype == localpar_code or start.subtype == dir_code) then
- t[#t+1] = "[" .. start.dir .. "]"
- elseif id == rule_code then
- if compact then
- t[#t+1] = "|"
- else
- t[#t+1] = nodecodes[id]
- end
- else
- if compact then
- t[#t+1] = "[]"
- else
- t[#t+1] = nodecodes[id]
- end
- end
- if start == stop then
- break
- else
- start = start.next
- end
- end
- if compact then
- return concat(t)
- else
- return concat(t," ")
- end
- else
- return "[empty]"
- end
-end
-
-nodes.tosequence = tosequence
-
-function nodes.report(t,done)
- report_nodes("output %a, %changed %a, %s nodes",status.output_active,done,count_nodes(t))
-end
-
-function nodes.packlist(head)
- local t = { }
- for n in traverse_nodes(head) do
- t[#t+1] = tostring(n)
- end
- return t
-end
-
-function nodes.idstostring(head,tail)
- local t, last_id, last_n = { }, nil, 0
- for n in traverse_nodes(head,tail) do -- hm, does not stop at tail
- local id = n.id
- if not last_id then
- last_id, last_n = id, 1
- elseif last_id == id then
- last_n = last_n + 1
- else
- if last_n > 1 then
- t[#t+1] = formatters["[%s*%s]"](last_n,nodecodes[last_id] or "?")
- else
- t[#t+1] = formatters["[%s]"](nodecodes[last_id] or "?")
- end
- last_id, last_n = id, 1
- end
- if n == tail then
- break
- end
- end
- if not last_id then
- t[#t+1] = "no nodes"
- elseif last_n > 1 then
- t[#t+1] = formatters["[%s*%s]"](last_n,nodecodes[last_id] or "?")
- else
- t[#t+1] = formatters["[%s]"](nodecodes[last_id] or "?")
- end
- return concat(t," ")
-end
-
--- function nodes.xidstostring(head,tail) -- only for special tracing of backlinks
--- local n = head
--- while n.next do
--- n = n.next
--- end
--- local t, last_id, last_n = { }, nil, 0
--- while n do
--- local id = n.id
--- if not last_id then
--- last_id, last_n = id, 1
--- elseif last_id == id then
--- last_n = last_n + 1
--- else
--- if last_n > 1 then
--- t[#t+1] = formatters["[%s*%s]"](last_n,nodecodes[last_id] or "?")
--- else
--- t[#t+1] = formatters["[%s]"](nodecodes[last_id] or "?")
--- end
--- last_id, last_n = id, 1
--- end
--- if n == head then
--- break
--- end
--- n = n.prev
--- end
--- if not last_id then
--- t[#t+1] = "no nodes"
--- elseif last_n > 1 then
--- t[#t+1] = formatters["[%s*%s]"](last_n,nodecodes[last_id] or "?")
--- else
--- t[#t+1] = formatters["[%s]"](nodecodes[last_id] or "?")
--- end
--- return table.concat(table.reversed(t)," ")
--- end
-
-local function showsimplelist(h,depth,n)
- while h do
- write_nl(rep(" ",n) .. tostring(h))
- if not depth or n < depth then
- local id = h.id
- if id == hlist_code or id == vlist_code then
- showsimplelist(h.list,depth,n+1)
- end
- end
- h = h.next
- end
-end
-
---~ \startluacode
---~ callback.register('buildpage_filter',function() nodes.showsimplelist(tex.lists.contrib_head) end)
---~ \stopluacode
---~ \vbox{b\footnote{n}a}
---~ \startluacode
---~ callback.register('buildpage_filter',nil)
---~ \stopluacode
-
-nodes.showsimplelist = function(h,depth) showsimplelist(h,depth,0) end
-
-local function listtoutf(h,joiner,textonly,last)
- local joiner = (joiner == true and utfchar(0x200C)) or joiner -- zwnj
- local w = { }
- while h do
- local id = h.id
- if id == glyph_code then -- always true
- w[#w+1] = utfchar(h.char)
- if joiner then
- w[#w+1] = joiner
- end
- elseif id == disc_code then
- local pre, rep, pos = h.pre, h.replace, h.post
- w[#w+1] = formatters["[%s|%s|%s]"] (
- pre and listtoutf(pre,joiner,textonly) or "",
- rep and listtoutf(rep,joiner,textonly) or "",
- pos and listtoutf(pos,joiner,textonly) or ""
- )
- elseif textonly then
- if id == glue_code and h.spec and h.spec.width > 0 then
- w[#w+1] = " "
- end
- else
- w[#w+1] = "[-]"
- end
- if h == last then
- break
- else
- h = h.next
- end
- end
- return concat(w)
-end
-
-nodes.listtoutf = listtoutf
-
-local what = { [0] = "unknown", "line", "box", "indent", "row", "cell" }
-
-local function showboxes(n,symbol,depth)
- depth, symbol = depth or 0, symbol or "."
- for n in traverse_nodes(n) do
- local id = n.id
- if id == hlist_code or id == vlist_code then
- local s = n.subtype
- report_nodes(rep(symbol,depth) .. (what[s] or s))
- showboxes(n.list,symbol,depth+1)
- end
- end
-end
-
-nodes.showboxes = showboxes
-
-local ptfactor = dimenfactors.pt
-local bpfactor = dimenfactors.bp
-local stripper = lpeg.patterns.stripzeros
-
--- start redefinition
---
--- -- if fmt then
--- -- return formatters[fmt](n*dimenfactors[unit],unit)
--- -- else
--- -- return match(formatters["%.20f"](n*dimenfactors[unit]),"(.-0?)0*$") .. unit
--- -- end
---
--- redefined:
-
-local dimenfactors = number.dimenfactors
-
-local function numbertodimen(d,unit,fmt,strip)
- if not d then
- local str = formatters[fmt](0,unit)
- return strip and lpegmatch(stripper,str) or str
- end
- local t = type(d)
- if t == 'string' then
- return d
- end
- if unit == true then
- unit = "pt"
- fmt = "%0.5f%s"
- else
- unit = unit or 'pt'
- if not fmt then
- fmt = "%s%s"
- elseif fmt == true then
- fmt = "%0.5f%s"
- end
- end
- if t == "number" then
- local str = formatters[fmt](d*dimenfactors[unit],unit)
- return strip and lpegmatch(stripper,str) or str
- end
- local id = d.id
- if id == kern_code then
- local str = formatters[fmt](d.width*dimenfactors[unit],unit)
- return strip and lpegmatch(stripper,str) or str
- end
- if id == glue_code then
- d = d.spec
- end
- if not d or d.id ~= spec_code then
- local str = formatters[fmt](0,unit)
- return strip and lpegmatch(stripper,str) or str
- end
- local width = d.width
- local plus = d.stretch_order
- local minus = d.shrink_order
- local stretch = d.stretch
- local shrink = d.shrink
- if plus ~= 0 then
- plus = " plus " .. stretch/65536 .. fillcodes[plus]
- elseif stretch ~= 0 then
- plus = formatters[fmt](stretch*dimenfactors[unit],unit)
- plus = " plus " .. (strip and lpegmatch(stripper,plus) or plus)
- else
- plus = ""
- end
- if minus ~= 0 then
- minus = " minus " .. shrink/65536 .. fillcodes[minus]
- elseif shrink ~= 0 then
- minus = formatters[fmt](shrink*dimenfactors[unit],unit)
- minus = " minus " .. (strip and lpegmatch(stripper,minus) or minus)
- else
- minus = ""
- end
- local str = formatters[fmt](d.width*dimenfactors[unit],unit)
- return (strip and lpegmatch(stripper,str) or str) .. plus .. minus
-end
-
-number.todimen = numbertodimen
-
-function number.topoints (n,fmt) return numbertodimen(n,"pt",fmt) end
-function number.toinches (n,fmt) return numbertodimen(n,"in",fmt) end
-function number.tocentimeters (n,fmt) return numbertodimen(n,"cm",fmt) end
-function number.tomillimeters (n,fmt) return numbertodimen(n,"mm",fmt) end
-function number.toscaledpoints(n,fmt) return numbertodimen(n,"sp",fmt) end
-function number.toscaledpoints(n) return n .. "sp" end
-function number.tobasepoints (n,fmt) return numbertodimen(n,"bp",fmt) end
-function number.topicas (n,fmt) return numbertodimen(n "pc",fmt) end
-function number.todidots (n,fmt) return numbertodimen(n,"dd",fmt) end
-function number.tociceros (n,fmt) return numbertodimen(n,"cc",fmt) end
-function number.tonewdidots (n,fmt) return numbertodimen(n,"nd",fmt) end
-function number.tonewciceros (n,fmt) return numbertodimen(n,"nc",fmt) end
-
--- stop redefinition
-
-local points = function(n)
- if not n or n == 0 then
- return "0pt"
- elseif type(n) == "number" then
- return lpegmatch(stripper,format("%.5fpt",n*ptfactor)) -- faster than formatter
- else
- return numbertodimen(n,"pt",true,true) -- also deals with nodes
- end
-end
-
-local basepoints = function(n)
- if not n or n == 0 then
- return "0bp"
- elseif type(n) == "number" then
- return lpegmatch(stripper,format("%.5fbp",n*bpfactor)) -- faster than formatter
- else
- return numbertodimen(n,"bp",true,true) -- also deals with nodes
- end
-end
-
-local pts = function(n)
- if not n or n == 0 then
- return "0pt"
- elseif type(n) == "number" then
- return format("%.5fpt",n*ptfactor) -- faster than formatter
- else
- return numbertodimen(n,"pt",true) -- also deals with nodes
- end
-end
-
-local nopts = function(n)
- if not n or n == 0 then
- return "0"
- else
- return format("%.5f",n*ptfactor) -- faster than formatter
- end
-end
-
-number.points = points
-number.basepoints = basepoints
-number.pts = pts
-number.nopts = nopts
-
-local colors = { }
-tracers.colors = colors
-
-local unsetvalue = attributes.unsetvalue
-
-local a_color = attributes.private('color')
-local a_colormodel = attributes.private('colormodel')
-local m_color = attributes.list[a_color] or { }
-
-function colors.set(n,c,s)
- local mc = m_color[c]
- if not mc then
- n[a_color] = unsetvalue
- else
- if not n[a_colormodel] then
- n[a_colormodel] = s or 1
- end
- n[a_color] = mc
- end
- return n
-end
-
-function colors.setlist(n,c,s)
- local f = n
- while n do
- local mc = m_color[c]
- if not mc then
- n[a_color] = unsetvalue
- else
- if not n[a_colormodel] then
- n[a_colormodel] = s or 1
- end
- n[a_color] = mc
- end
- n = n.next
- end
- return f
-end
-
-function colors.reset(n)
- n[a_color] = unsetvalue
- return n
-end
-
--- maybe
-
-local transparencies = { }
-tracers.transparencies = transparencies
-
-local a_transparency = attributes.private('transparency')
-local m_transparency = attributes.list[a_transparency] or { }
-
-function transparencies.set(n,t)
- local mt = m_transparency[t]
- if not mt then
- n[a_transparency] = unsetvalue
- else
- n[a_transparency] = mt
- end
- return n
-end
-
-function transparencies.setlist(n,c,s)
- local f = n
- while n do
- local mt = m_transparency[c]
- if not mt then
- n[a_transparency] = unsetvalue
- else
- n[a_transparency] = mt
- end
- n = n.next
- end
- return f
-end
-
-function transparencies.reset(n)
- n[a_transparency] = unsetvalue
- return n
-end
-
--- for the moment here
-
-nodes.visualizers = { }
-
-function nodes.visualizers.handler(head)
- return head, false
-end
+if not modules then modules = { } end modules ['node-tra'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[ldx--
+This is rather experimental. We need more control and some of this
+might become a runtime module instead. This module will be cleaned up!
+--ldx]]--
+
+local utfchar = utf.char
+local format, match, gmatch, concat, rep = string.format, string.match, string.gmatch, table.concat, string.rep
+local lpegmatch = lpeg.match
+local write_nl = texio.write_nl -- used by checkforleaks and showsimplelist below
+local clock = os.gettimeofday or os.clock -- should go in environment
+
+local report_nodes = logs.reporter("nodes","tracing")
+
+nodes = nodes or { }
+
+local nodes, node, context = nodes, node, context
+
+local tracers = nodes.tracers or { }
+nodes.tracers = tracers
+
+local tasks = nodes.tasks or { }
+nodes.tasks = tasks
+
+local handlers = nodes.handlers or {}
+nodes.handlers = handlers
+
+local injections = nodes.injections or { }
+nodes.injections = injections
+
+local traverse_nodes = node.traverse
+local traverse_by_id = node.traverse_id
+local count_nodes = nodes.count
+
+local nodecodes = nodes.nodecodes
+local whatcodes = nodes.whatcodes
+local skipcodes = nodes.skipcodes
+local fillcodes = nodes.fillcodes
+
+local glyph_code = nodecodes.glyph
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local disc_code = nodecodes.disc
+local glue_code = nodecodes.glue
+local kern_code = nodecodes.kern
+local rule_code = nodecodes.rule
+local whatsit_code = nodecodes.whatsit
+local spec_code = nodecodes.glue_spec
+
+local localpar_code = whatcodes.localpar
+local dir_code = whatcodes.dir
+
+local nodepool = nodes.pool
+
+local dimenfactors = number.dimenfactors
+local formatters = string.formatters
+
+-- this will be reorganized:
+
+function nodes.showlist(head, message)
+ if message then
+ report_nodes(message)
+ end
+ for n in traverse_nodes(head) do
+ report_nodes(tostring(n))
+ end
+end
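+
+-- A quick usage sketch (box number and message are illustrative):
+--
+-- nodes.showlist(tex.box[0].list,"content of box 0")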
+
+function nodes.handlers.checkglyphs(head,message)
+ local t = { }
+ for g in traverse_by_id(glyph_code,head) do
+ t[#t+1] = formatters["%U:%s"](g.char,g.subtype)
+ end
+ if #t > 0 then
+ if message and message ~= "" then
+ report_nodes("%s, %s glyphs: % t",message,#t,t)
+ else
+ report_nodes("%s glyphs: % t",#t,t)
+ end
+ end
+ return false
+end
+
+function nodes.handlers.checkforleaks(sparse)
+ local l = { }
+ local q = node.usedlist()
+    for p in traverse_nodes(q) do
+ local s = table.serialize(nodes.astable(p,sparse),nodecodes[p.id])
+ l[s] = (l[s] or 0) + 1
+ end
+ node.flush_list(q)
+ for k, v in next, l do
+ write_nl(formatters["%s * %s"](v,k))
+ end
+end
+
+local f_sequence = formatters["U+%04X:%s"]
+
+local function tosequence(start,stop,compact)
+ if start then
+ local t = { }
+ while start do
+ local id = start.id
+ if id == glyph_code then
+ local c = start.char
+ if compact then
+ if start.components then
+ t[#t+1] = tosequence(start.components,nil,compact)
+ else
+ t[#t+1] = utfchar(c)
+ end
+ else
+ t[#t+1] = f_sequence(c,utfchar(c))
+ end
+        elseif id == whatsit_code and (start.subtype == localpar_code or start.subtype == dir_code) then
+ t[#t+1] = "[" .. start.dir .. "]"
+ elseif id == rule_code then
+ if compact then
+ t[#t+1] = "|"
+ else
+ t[#t+1] = nodecodes[id]
+ end
+ else
+ if compact then
+ t[#t+1] = "[]"
+ else
+ t[#t+1] = nodecodes[id]
+ end
+ end
+ if start == stop then
+ break
+ else
+ start = start.next
+ end
+ end
+ if compact then
+ return concat(t)
+ else
+ return concat(t," ")
+ end
+ else
+ return "[empty]"
+ end
+end
+
+nodes.tosequence = tosequence
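+
+-- Usage sketch (assumes box 0 holds typeset material):
+--
+-- local list = tex.box[0].list
+-- print(nodes.tosequence(list,nil,true))  -- compact: glyphs as characters, rules as "|", the rest as "[]"
+-- print(nodes.tosequence(list))           -- verbose: "U+XXXX:c" per glyph, node names otherwise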
+
+function nodes.report(t,done)
+ report_nodes("output %a, %changed %a, %s nodes",status.output_active,done,count_nodes(t))
+end
+
+function nodes.packlist(head)
+ local t = { }
+    for n in traverse_nodes(head) do
+ t[#t+1] = tostring(n)
+ end
+ return t
+end
+
+function nodes.idstostring(head,tail)
+ local t, last_id, last_n = { }, nil, 0
+ for n in traverse_nodes(head,tail) do -- hm, does not stop at tail
+ local id = n.id
+ if not last_id then
+ last_id, last_n = id, 1
+ elseif last_id == id then
+ last_n = last_n + 1
+ else
+ if last_n > 1 then
+ t[#t+1] = formatters["[%s*%s]"](last_n,nodecodes[last_id] or "?")
+ else
+ t[#t+1] = formatters["[%s]"](nodecodes[last_id] or "?")
+ end
+ last_id, last_n = id, 1
+ end
+ if n == tail then
+ break
+ end
+ end
+ if not last_id then
+ t[#t+1] = "no nodes"
+ elseif last_n > 1 then
+ t[#t+1] = formatters["[%s*%s]"](last_n,nodecodes[last_id] or "?")
+ else
+ t[#t+1] = formatters["[%s]"](nodecodes[last_id] or "?")
+ end
+ return concat(t," ")
+end
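+
+-- Usage sketch: a condensed trace of the node ids in a list, for instance
+-- "[whatsit] [glyph*5] [glue] [glyph*5]":
+--
+-- report_nodes(nodes.idstostring(tex.box[0].list))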
+
+-- function nodes.xidstostring(head,tail) -- only for special tracing of backlinks
+-- local n = head
+-- while n.next do
+-- n = n.next
+-- end
+-- local t, last_id, last_n = { }, nil, 0
+-- while n do
+-- local id = n.id
+-- if not last_id then
+-- last_id, last_n = id, 1
+-- elseif last_id == id then
+-- last_n = last_n + 1
+-- else
+-- if last_n > 1 then
+-- t[#t+1] = formatters["[%s*%s]"](last_n,nodecodes[last_id] or "?")
+-- else
+-- t[#t+1] = formatters["[%s]"](nodecodes[last_id] or "?")
+-- end
+-- last_id, last_n = id, 1
+-- end
+-- if n == head then
+-- break
+-- end
+-- n = n.prev
+-- end
+-- if not last_id then
+-- t[#t+1] = "no nodes"
+-- elseif last_n > 1 then
+-- t[#t+1] = formatters["[%s*%s]"](last_n,nodecodes[last_id] or "?")
+-- else
+-- t[#t+1] = formatters["[%s]"](nodecodes[last_id] or "?")
+-- end
+-- return table.concat(table.reversed(t)," ")
+-- end
+
+local function showsimplelist(h,depth,n)
+ while h do
+ write_nl(rep(" ",n) .. tostring(h))
+ if not depth or n < depth then
+ local id = h.id
+ if id == hlist_code or id == vlist_code then
+ showsimplelist(h.list,depth,n+1)
+ end
+ end
+ h = h.next
+ end
+end
+
+--~ \startluacode
+--~ callback.register('buildpage_filter',function() nodes.show_simple_list(tex.lists.contrib_head) end)
+--~ \stopluacode
+--~ \vbox{b\footnote{n}a}
+--~ \startluacode
+--~ callback.register('buildpage_filter',nil)
+--~ \stopluacode
+
+nodes.showsimplelist = function(h,depth) showsimplelist(h,depth,0) end
+
+local function listtoutf(h,joiner,textonly,last)
+ local joiner = (joiner == true and utfchar(0x200C)) or joiner -- zwnj
+ local w = { }
+ while h do
+ local id = h.id
+ if id == glyph_code then -- always true
+ w[#w+1] = utfchar(h.char)
+ if joiner then
+ w[#w+1] = joiner
+ end
+ elseif id == disc_code then
+ local pre, rep, pos = h.pre, h.replace, h.post
+ w[#w+1] = formatters["[%s|%s|%s]"] (
+ pre and listtoutf(pre,joiner,textonly) or "",
+ rep and listtoutf(rep,joiner,textonly) or "",
+                pos and listtoutf(pos,joiner,textonly) or ""
+ )
+ elseif textonly then
+ if id == glue_code and h.spec and h.spec.width > 0 then
+ w[#w+1] = " "
+ end
+ else
+ w[#w+1] = "[-]"
+ end
+ if h == last then
+ break
+ else
+ h = h.next
+ end
+ end
+ return concat(w)
+end
+
+nodes.listtoutf = listtoutf
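+
+-- For tracing, listtoutf can be hooked into a callback, in the same spirit as the
+-- buildpage_filter example above (sketch only; callback name and return value
+-- follow stock LuaTeX conventions):
+--
+--~ \startluacode
+--~ callback.register('pre_linebreak_filter',function(head)
+--~     texio.write_nl(nodes.listtoutf(head,true,true))
+--~     return true
+--~ end)
+--~ \stopluacode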
+
+local what = { [0] = "unknown", "line", "box", "indent", "row", "cell" }
+
+local function showboxes(n,symbol,depth)
+ depth, symbol = depth or 0, symbol or "."
+ for n in traverse_nodes(n) do
+ local id = n.id
+ if id == hlist_code or id == vlist_code then
+ local s = n.subtype
+            report_nodes(rep(symbol,depth) .. (what[s] or s))
+ showboxes(n.list,symbol,depth+1)
+ end
+ end
+end
+
+nodes.showboxes = showboxes
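+
+-- Usage sketch (assumes box 0 holds a vbox with lines):
+--
+-- nodes.showboxes(tex.box[0])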
+
+local ptfactor = dimenfactors.pt
+local bpfactor = dimenfactors.bp
+local stripper = lpeg.patterns.stripzeros
+
+-- start redefinition
+--
+-- -- if fmt then
+-- -- return formatters[fmt](n*dimenfactors[unit],unit)
+-- -- else
+-- -- return match(formatters["%.20f"](n*dimenfactors[unit]),"(.-0?)0*$") .. unit
+-- -- end
+--
+-- redefined:
+
+local dimenfactors = number.dimenfactors
+
+local function numbertodimen(d,unit,fmt,strip)
+ if not d then
+ local str = formatters[fmt](0,unit)
+ return strip and lpegmatch(stripper,str) or str
+ end
+ local t = type(d)
+ if t == 'string' then
+ return d
+ end
+ if unit == true then
+ unit = "pt"
+ fmt = "%0.5f%s"
+ else
+ unit = unit or 'pt'
+ if not fmt then
+ fmt = "%s%s"
+ elseif fmt == true then
+ fmt = "%0.5f%s"
+ end
+ end
+ if t == "number" then
+ local str = formatters[fmt](d*dimenfactors[unit],unit)
+ return strip and lpegmatch(stripper,str) or str
+ end
+    local id = d.id
+ if id == kern_code then
+ local str = formatters[fmt](d.width*dimenfactors[unit],unit)
+ return strip and lpegmatch(stripper,str) or str
+ end
+ if id == glue_code then
+ d = d.spec
+ end
+    if not d or d.id ~= spec_code then
+ local str = formatters[fmt](0,unit)
+ return strip and lpegmatch(stripper,str) or str
+ end
+ local width = d.width
+ local plus = d.stretch_order
+ local minus = d.shrink_order
+ local stretch = d.stretch
+ local shrink = d.shrink
+ if plus ~= 0 then
+ plus = " plus " .. stretch/65536 .. fillcodes[plus]
+ elseif stretch ~= 0 then
+ plus = formatters[fmt](stretch*dimenfactors[unit],unit)
+ plus = " plus " .. (strip and lpegmatch(stripper,plus) or plus)
+ else
+ plus = ""
+ end
+ if minus ~= 0 then
+ minus = " minus " .. shrink/65536 .. fillcodes[minus]
+ elseif shrink ~= 0 then
+ minus = formatters[fmt](shrink*dimenfactors[unit],unit)
+ minus = " minus " .. (strip and lpegmatch(stripper,minus) or minus)
+ else
+ minus = ""
+ end
+ local str = formatters[fmt](d.width*dimenfactors[unit],unit)
+ return (strip and lpegmatch(stripper,str) or str) .. plus .. minus
+end
+
+number.todimen = numbertodimen
+
+function number.topoints (n,fmt) return numbertodimen(n,"pt",fmt) end
+function number.toinches (n,fmt) return numbertodimen(n,"in",fmt) end
+function number.tocentimeters (n,fmt) return numbertodimen(n,"cm",fmt) end
+function number.tomillimeters (n,fmt) return numbertodimen(n,"mm",fmt) end
+function number.toscaledpoints(n,fmt) return numbertodimen(n,"sp",fmt) end
+function number.toscaledpoints(n) return n .. "sp" end -- overrides the previous definition: sp values are used as-is
+function number.tobasepoints (n,fmt) return numbertodimen(n,"bp",fmt) end
+function number.topicas (n,fmt) return numbertodimen(n,"pc",fmt) end
+function number.todidots (n,fmt) return numbertodimen(n,"dd",fmt) end
+function number.tociceros (n,fmt) return numbertodimen(n,"cc",fmt) end
+function number.tonewdidots (n,fmt) return numbertodimen(n,"nd",fmt) end
+function number.tonewciceros (n,fmt) return numbertodimen(n,"nc",fmt) end
+
+-- stop redefinition
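+
+-- A few indicative conversions (input is in scaled points, 65536sp = 1pt):
+--
+-- number.topoints(65536,true)  -- "1.00000pt"
+-- number.points(65536)         -- stripped variant, e.g. "1pt"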
+
+local points = function(n)
+ if not n or n == 0 then
+ return "0pt"
+ elseif type(n) == "number" then
+ return lpegmatch(stripper,format("%.5fpt",n*ptfactor)) -- faster than formatter
+ else
+ return numbertodimen(n,"pt",true,true) -- also deals with nodes
+ end
+end
+
+local basepoints = function(n)
+ if not n or n == 0 then
+ return "0bp"
+ elseif type(n) == "number" then
+ return lpegmatch(stripper,format("%.5fbp",n*bpfactor)) -- faster than formatter
+ else
+ return numbertodimen(n,"bp",true,true) -- also deals with nodes
+ end
+end
+
+local pts = function(n)
+ if not n or n == 0 then
+ return "0pt"
+ elseif type(n) == "number" then
+ return format("%.5fpt",n*ptfactor) -- faster than formatter
+ else
+ return numbertodimen(n,"pt",true) -- also deals with nodes
+ end
+end
+
+local nopts = function(n)
+ if not n or n == 0 then
+ return "0"
+ else
+ return format("%.5f",n*ptfactor) -- faster than formatter
+ end
+end
+
+number.points = points
+number.basepoints = basepoints
+number.pts = pts
+number.nopts = nopts
+
+local colors = { }
+tracers.colors = colors
+
+local unsetvalue = attributes.unsetvalue
+
+local a_color = attributes.private('color')
+local a_colormodel = attributes.private('colormodel')
+local m_color = attributes.list[a_color] or { }
+
+function colors.set(n,c,s)
+ local mc = m_color[c]
+ if not mc then
+ n[a_color] = unsetvalue
+ else
+ if not n[a_colormodel] then
+ n[a_colormodel] = s or 1
+ end
+ n[a_color] = mc
+ end
+ return n
+end
+
+function colors.setlist(n,c,s)
+ local f = n
+ while n do
+ local mc = m_color[c]
+ if not mc then
+ n[a_color] = unsetvalue
+ else
+ if not n[a_colormodel] then
+ n[a_colormodel] = s or 1
+ end
+ n[a_color] = mc
+ end
+ n = n.next
+ end
+ return f
+end
+
+function colors.reset(n)
+ n[a_color] = unsetvalue
+ return n
+end
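+
+-- Usage sketch: the second argument must be a color name that is registered in
+-- attributes.list[a_color] (set up at the TeX end); "red" is just an assumption:
+--
+-- tracers.colors.setlist(tex.box[0].list,"red")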
+
+-- maybe
+
+local transparencies = { }
+tracers.transparencies = transparencies
+
+local a_transparency = attributes.private('transparency')
+local m_transparency = attributes.list[a_transparency] or { }
+
+function transparencies.set(n,t)
+ local mt = m_transparency[t]
+ if not mt then
+ n[a_transparency] = unsetvalue
+ else
+ n[a_transparency] = mt
+ end
+ return n
+end
+
+function transparencies.setlist(n,c,s)
+ local f = n
+ while n do
+ local mt = m_transparency[c]
+ if not mt then
+ n[a_transparency] = unsetvalue
+ else
+ n[a_transparency] = mt
+ end
+ n = n.next
+ end
+ return f
+end
+
+function transparencies.reset(n)
+ n[a_transparency] = unsetvalue
+ return n
+end
+
+-- for the moment here
+
+nodes.visualizers = { }
+
+function nodes.visualizers.handler(head)
+ return head, false
+end
diff --git a/tex/context/base/node-tsk.lua b/tex/context/base/node-tsk.lua
index 596ac765a..d2686d4d8 100644
--- a/tex/context/base/node-tsk.lua
+++ b/tex/context/base/node-tsk.lua
@@ -1,402 +1,402 @@
-if not modules then modules = { } end modules ['node-tsk'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This might move to task-* and become less code as in sequencers
--- we already have dirty flags as well. On the other hand, nodes are
--- rather specialized and here we focus on node related tasks.
-
-local format = string.format
-
-local trace_tasks = false trackers.register("tasks.creation", function(v) trace_tasks = v end)
-
-local report_tasks = logs.reporter("tasks")
-
-local allocate = utilities.storage.allocate
-
-local nodes = nodes
-
-nodes.tasks = nodes.tasks or { }
-local tasks = nodes.tasks
-
-local tasksdata = { } -- no longer public
-
-local sequencers = utilities.sequencers
-local compile = sequencers.compile
-local nodeprocessor = sequencers.nodeprocessor
-
-local frozengroups = "no"
-
-function tasks.freeze(kind)
- frozengroups = kind or "tolerant" -- todo: hook into jobname
-end
-
-function tasks.new(specification) -- was: name,arguments,list
- local name = specification.name
- local arguments = specification.arguments or 0
- local sequence = specification.sequence
- if name and sequence then
- local tasklist = sequencers.new {
- -- we can move more to the sequencer now .. todo
- }
- tasksdata[name] = {
- list = tasklist,
- runner = false,
- arguments = arguments,
- -- sequence = sequence,
- frozen = { },
- processor = specification.processor or nodeprocessor
- }
- for l=1,#sequence do
- sequencers.appendgroup(tasklist,sequence[l])
- end
- end
-end
-
-local function valid(name)
- local data = tasksdata[name]
- if not data then
- report_tasks("unknown task %a",name)
- else
- return data
- end
-end
-
-local function validgroup(name,group,what)
- local data = tasksdata[name]
- if not data then
- report_tasks("unknown task %a",name)
- else
- local frozen = data.frozen[group]
- if frozen then
- if frozengroup == "no" then
- -- default
- elseif frozengroup == "strict" then
- report_tasks("warning: group %a of task %a is frozen, %a applied but not supported",group,name,what)
- return
- else -- if frozengroup == "tolerant" then
- report_tasks("warning: group %a of task %a is frozen, %a ignored",group,name,what)
- end
- end
- return data
- end
-end
-
-function tasks.freezegroup(name,group)
- local data = valid(name)
- if data then
- data.frozen[group] = true
- end
-end
-
-function tasks.restart(name)
- local data = valid(name)
- if data then
- data.runner = false
- end
-end
-
-function tasks.enableaction(name,action)
- local data = valid(name)
- if data then
- sequencers.enableaction(data.list,action)
- data.runner = false
- end
-end
-
-function tasks.disableaction(name,action)
- local data = valid(name)
- if data then
- sequencers.disableaction(data.list,action)
- data.runner = false
- end
-end
-
-function tasks.enablegroup(name,group)
- local data = validgroup(name,"enable group")
- if data then
- sequencers.enablegroup(data.list,group)
- data.runner = false
- end
-end
-
-function tasks.disablegroup(name,group)
- local data = validgroup(name,"disable group")
- if data then
- sequencers.disablegroup(data.list,group)
- data.runner = false
- end
-end
-
-function tasks.appendaction(name,group,action,where,kind)
- local data = validgroup(name,"append action")
- if data then
- sequencers.appendaction(data.list,group,action,where,kind)
- data.runner = false
- end
-end
-
-function tasks.prependaction(name,group,action,where,kind)
- local data = validgroup(name,"prepend action")
- if data then
- sequencers.prependaction(data.list,group,action,where,kind)
- data.runner = false
- end
-end
-
-function tasks.removeaction(name,group,action)
- local data = validgroup(name,"remove action")
- if data then
- sequencers.removeaction(data.list,group,action)
- data.runner = false
- end
-end
-
-function tasks.showactions(name,group,action,where,kind)
- local data = valid(name)
- if data then
- report_tasks("task %a, list:\n%s",name,nodeprocessor(data.list))
- end
-end
-
--- Optimizing for the number of arguments makes sense, but getting rid of
--- the nested call (no problem but then we also need to register the
--- callback with this mechanism so that it gets updated) does not save
--- much time (24K calls on mk.tex).
-
-local created, total = 0, 0
-
-statistics.register("node list callback tasks", function()
- if total > 0 then
- return format("%s unique task lists, %s instances (re)created, %s calls",table.count(tasksdata),created,total)
- else
- return nil
- end
-end)
-
-function tasks.actions(name) -- we optimize for the number or arguments (no ...)
- local data = tasksdata[name]
- if data then
- local n = data.arguments or 0
- if n == 0 then
- return function(head)
- total = total + 1 -- will go away
- local runner = data.runner
- if not runner then
- created = created + 1
- if trace_tasks then
- report_tasks("creating runner %a",name)
- end
- runner = compile(data.list,data.processor,0)
- data.runner = runner
- end
- return runner(head)
- end
- elseif n == 1 then
- return function(head,one)
- total = total + 1 -- will go away
- local runner = data.runner
- if not runner then
- created = created + 1
- if trace_tasks then
- report_tasks("creating runner %a with %s extra arguments",name,1)
- end
- runner = compile(data.list,data.processor,1)
- data.runner = runner
- end
- return runner(head,one)
- end
- elseif n == 2 then
- return function(head,one,two)
- total = total + 1 -- will go away
- local runner = data.runner
- if not runner then
- created = created + 1
- if trace_tasks then
- report_tasks("creating runner %a with %s extra arguments",name,2)
- end
- runner = compile(data.list,data.processor,2)
- data.runner = runner
- end
- return runner(head,one,two)
- end
- elseif n == 3 then
- return function(head,one,two,three)
- total = total + 1 -- will go away
- local runner = data.runner
- if not runner then
- created = created + 1
- if trace_tasks then
- report_tasks("creating runner %a with %s extra arguments",name,3)
- end
- runner = compile(data.list,data.processor,3)
- data.runner = runner
- end
- return runner(head,one,two,three)
- end
- elseif n == 4 then
- return function(head,one,two,three,four)
- total = total + 1 -- will go away
- local runner = data.runner
- if not runner then
- created = created + 1
- if trace_tasks then
- report_tasks("creating runner %a with %s extra arguments",name,4)
- end
- runner = compile(data.list,data.processor,4)
- data.runner = runner
- end
- return runner(head,one,two,three,four)
- end
- elseif n == 5 then
- return function(head,one,two,three,four,five)
- total = total + 1 -- will go away
- local runner = data.runner
- if not runner then
- created = created + 1
- if trace_tasks then
- report_tasks("creating runner %a with %s extra arguments",name,5)
- end
- runner = compile(data.list,data.processor,5)
- data.runner = runner
- end
- return runner(head,one,two,three,four,five)
- end
- else
- return function(head,...)
- total = total + 1 -- will go away
- local runner = data.runner
- if not runner then
- created = created + 1
- if trace_tasks then
- report_tasks("creating runner %a with %s extra arguments",name,n)
- end
- runner = compile(data.list,data.processor,"n")
- data.runner = runner
- end
- return runner(head,...)
- end
- end
- else
- return nil
- end
-end
-
-function tasks.table(name) --maybe move this to task-deb.lua
- local tsk = tasksdata[name]
- local lst = tsk and tsk.list
- local HL, NC, NR, bold, type = context.HL, context.NC, context.NR, context.bold, context.type
- if lst then
- local list, order = lst.list, lst.order
- if list and order then
- context.starttabulate { "|l|l|" }
- NC() bold("category") NC() bold("function") NC() NR()
- for i=1,#order do
- HL()
- local o = order[i]
- local l = list[o]
- if #l == 0 then
- NC() type(o) NC() context("unset") NC() NR()
- else
- local done = false
- for k, v in table.sortedhash(l) do
- NC() if not done then type(o) done = true end NC() type(v) NC() NR()
- end
- end
- end
- context.stoptabulate()
- end
- end
-end
-
--- this will move
-
-tasks.new {
- name = "processors",
- arguments = 4,
- processor = nodeprocessor,
- sequence = {
- "before", -- for users
- "normalizers",
- "characters",
- "words",
- "fonts",
- "lists",
- "after", -- for users
- }
-}
-
-tasks.new {
- name = "finalizers",
- arguments = 1,
- processor = nodeprocessor,
- sequence = {
- "before", -- for users
- "normalizers",
--- "characters",
--- "finishers",
- "fonts",
- "lists",
- "after", -- for users
- }
-}
-
-tasks.new {
- name = "shipouts",
- arguments = 0,
- processor = nodeprocessor,
- sequence = {
- "before", -- for users
- "normalizers",
- "finishers",
- "after", -- for users
- }
-}
-
-tasks.new {
- name = "mvlbuilders",
- arguments = 1,
- processor = nodeprocessor,
- sequence = {
- "before", -- for users
- "normalizers",
- "after", -- for users
- }
-}
-
-tasks.new {
- name = "vboxbuilders",
- arguments = 5,
- processor = nodeprocessor,
- sequence = {
- "before", -- for users
- "normalizers",
- "after", -- for users
- }
-}
-
--- tasks.new {
--- name = "parbuilders",
--- arguments = 1,
--- processor = nodeprocessor,
--- sequence = {
--- "before", -- for users
--- "lists",
--- "after", -- for users
--- }
--- }
-
--- tasks.new {
--- name = "pagebuilders",
--- arguments = 5,
--- processor = nodeprocessor,
--- sequence = {
--- "before", -- for users
--- "lists",
--- "after", -- for users
--- }
--- }
+if not modules then modules = { } end modules ['node-tsk'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This might move to task-* and become less code as in sequencers
+-- we already have dirty flags as well. On the other hand, nodes are
+-- rather specialized and here we focus on node related tasks.
+
+local format = string.format
+
+local trace_tasks = false trackers.register("tasks.creation", function(v) trace_tasks = v end)
+
+local report_tasks = logs.reporter("tasks")
+
+local allocate = utilities.storage.allocate
+
+local nodes = nodes
+
+nodes.tasks = nodes.tasks or { }
+local tasks = nodes.tasks
+
+local tasksdata = { } -- no longer public
+
+local sequencers = utilities.sequencers
+local compile = sequencers.compile
+local nodeprocessor = sequencers.nodeprocessor
+
+local frozengroups = "no"
+
+function tasks.freeze(kind)
+ frozengroups = kind or "tolerant" -- todo: hook into jobname
+end
+
+function tasks.new(specification) -- was: name,arguments,list
+ local name = specification.name
+ local arguments = specification.arguments or 0
+ local sequence = specification.sequence
+ if name and sequence then
+ local tasklist = sequencers.new {
+ -- we can move more to the sequencer now .. todo
+ }
+ tasksdata[name] = {
+ list = tasklist,
+ runner = false,
+ arguments = arguments,
+ -- sequence = sequence,
+ frozen = { },
+ processor = specification.processor or nodeprocessor
+ }
+ for l=1,#sequence do
+ sequencers.appendgroup(tasklist,sequence[l])
+ end
+ end
+end
+
+local function valid(name)
+ local data = tasksdata[name]
+ if not data then
+ report_tasks("unknown task %a",name)
+ else
+ return data
+ end
+end
+
+local function validgroup(name,group,what)
+ local data = tasksdata[name]
+ if not data then
+ report_tasks("unknown task %a",name)
+ else
+ local frozen = data.frozen[group]
+ if frozen then
+ if frozengroup == "no" then
+ -- default
+ elseif frozengroup == "strict" then
+ report_tasks("warning: group %a of task %a is frozen, %a applied but not supported",group,name,what)
+ return
+ else -- if frozengroup == "tolerant" then
+ report_tasks("warning: group %a of task %a is frozen, %a ignored",group,name,what)
+ end
+ end
+ return data
+ end
+end
+
+function tasks.freezegroup(name,group)
+ local data = valid(name)
+ if data then
+ data.frozen[group] = true
+ end
+end
+
+function tasks.restart(name)
+ local data = valid(name)
+ if data then
+ data.runner = false
+ end
+end
+
+function tasks.enableaction(name,action)
+ local data = valid(name)
+ if data then
+ sequencers.enableaction(data.list,action)
+ data.runner = false
+ end
+end
+
+function tasks.disableaction(name,action)
+ local data = valid(name)
+ if data then
+ sequencers.disableaction(data.list,action)
+ data.runner = false
+ end
+end
+
+function tasks.enablegroup(name,group)
+ local data = validgroup(name,"enable group")
+ if data then
+ sequencers.enablegroup(data.list,group)
+ data.runner = false
+ end
+end
+
+function tasks.disablegroup(name,group)
+ local data = validgroup(name,"disable group")
+ if data then
+ sequencers.disablegroup(data.list,group)
+ data.runner = false
+ end
+end
+
+function tasks.appendaction(name,group,action,where,kind)
+ local data = validgroup(name,"append action")
+ if data then
+ sequencers.appendaction(data.list,group,action,where,kind)
+ data.runner = false
+ end
+end
+
+function tasks.prependaction(name,group,action,where,kind)
+ local data = validgroup(name,"prepend action")
+ if data then
+ sequencers.prependaction(data.list,group,action,where,kind)
+ data.runner = false
+ end
+end
+
+function tasks.removeaction(name,group,action)
+ local data = validgroup(name,"remove action")
+ if data then
+ sequencers.removeaction(data.list,group,action)
+ data.runner = false
+ end
+end
+
+function tasks.showactions(name,group,action,where,kind)
+ local data = valid(name)
+ if data then
+ report_tasks("task %a, list:\n%s",name,nodeprocessor(data.list))
+ end
+end
+
+-- Optimizing for the number of arguments makes sense, but getting rid of
+-- the nested call (no problem but then we also need to register the
+-- callback with this mechanism so that it gets updated) does not save
+-- much time (24K calls on mk.tex).
+
+local created, total = 0, 0
+
+statistics.register("node list callback tasks", function()
+ if total > 0 then
+ return format("%s unique task lists, %s instances (re)created, %s calls",table.count(tasksdata),created,total)
+ else
+ return nil
+ end
+end)
+
+function tasks.actions(name) -- we optimize for the number of arguments (no ...)
+ local data = tasksdata[name]
+ if data then
+ local n = data.arguments or 0
+ if n == 0 then
+ return function(head)
+ total = total + 1 -- will go away
+ local runner = data.runner
+ if not runner then
+ created = created + 1
+ if trace_tasks then
+ report_tasks("creating runner %a",name)
+ end
+ runner = compile(data.list,data.processor,0)
+ data.runner = runner
+ end
+ return runner(head)
+ end
+ elseif n == 1 then
+ return function(head,one)
+ total = total + 1 -- will go away
+ local runner = data.runner
+ if not runner then
+ created = created + 1
+ if trace_tasks then
+ report_tasks("creating runner %a with %s extra arguments",name,1)
+ end
+ runner = compile(data.list,data.processor,1)
+ data.runner = runner
+ end
+ return runner(head,one)
+ end
+ elseif n == 2 then
+ return function(head,one,two)
+ total = total + 1 -- will go away
+ local runner = data.runner
+ if not runner then
+ created = created + 1
+ if trace_tasks then
+ report_tasks("creating runner %a with %s extra arguments",name,2)
+ end
+ runner = compile(data.list,data.processor,2)
+ data.runner = runner
+ end
+ return runner(head,one,two)
+ end
+ elseif n == 3 then
+ return function(head,one,two,three)
+ total = total + 1 -- will go away
+ local runner = data.runner
+ if not runner then
+ created = created + 1
+ if trace_tasks then
+ report_tasks("creating runner %a with %s extra arguments",name,3)
+ end
+ runner = compile(data.list,data.processor,3)
+ data.runner = runner
+ end
+ return runner(head,one,two,three)
+ end
+ elseif n == 4 then
+ return function(head,one,two,three,four)
+ total = total + 1 -- will go away
+ local runner = data.runner
+ if not runner then
+ created = created + 1
+ if trace_tasks then
+ report_tasks("creating runner %a with %s extra arguments",name,4)
+ end
+ runner = compile(data.list,data.processor,4)
+ data.runner = runner
+ end
+ return runner(head,one,two,three,four)
+ end
+ elseif n == 5 then
+ return function(head,one,two,three,four,five)
+ total = total + 1 -- will go away
+ local runner = data.runner
+ if not runner then
+ created = created + 1
+ if trace_tasks then
+ report_tasks("creating runner %a with %s extra arguments",name,5)
+ end
+ runner = compile(data.list,data.processor,5)
+ data.runner = runner
+ end
+ return runner(head,one,two,three,four,five)
+ end
+ else
+ return function(head,...)
+ total = total + 1 -- will go away
+ local runner = data.runner
+ if not runner then
+ created = created + 1
+ if trace_tasks then
+ report_tasks("creating runner %a with %s extra arguments",name,n)
+ end
+ runner = compile(data.list,data.processor,"n")
+ data.runner = runner
+ end
+ return runner(head,...)
+ end
+ end
+ else
+ return nil
+ end
+end
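+
+-- Usage sketch ("mymodule.myhandler" is a made-up global function name; actions
+-- are registered by name so that the runner can be recompiled when needed):
+--
+-- nodes.tasks.appendaction("processors","after","mymodule.myhandler")
+-- local process = nodes.tasks.actions("processors")
+-- -- later, for instance in a callback: head, done = process(head,...)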
+
+function tasks.table(name) --maybe move this to task-deb.lua
+ local tsk = tasksdata[name]
+ local lst = tsk and tsk.list
+ local HL, NC, NR, bold, type = context.HL, context.NC, context.NR, context.bold, context.type
+ if lst then
+ local list, order = lst.list, lst.order
+ if list and order then
+ context.starttabulate { "|l|l|" }
+ NC() bold("category") NC() bold("function") NC() NR()
+ for i=1,#order do
+ HL()
+ local o = order[i]
+ local l = list[o]
+ if #l == 0 then
+ NC() type(o) NC() context("unset") NC() NR()
+ else
+ local done = false
+ for k, v in table.sortedhash(l) do
+ NC() if not done then type(o) done = true end NC() type(v) NC() NR()
+ end
+ end
+ end
+ context.stoptabulate()
+ end
+ end
+end
+
+-- this will move
+
+tasks.new {
+ name = "processors",
+ arguments = 4,
+ processor = nodeprocessor,
+ sequence = {
+ "before", -- for users
+ "normalizers",
+ "characters",
+ "words",
+ "fonts",
+ "lists",
+ "after", -- for users
+ }
+}
+
+tasks.new {
+ name = "finalizers",
+ arguments = 1,
+ processor = nodeprocessor,
+ sequence = {
+ "before", -- for users
+ "normalizers",
+-- "characters",
+-- "finishers",
+ "fonts",
+ "lists",
+ "after", -- for users
+ }
+}
+
+tasks.new {
+ name = "shipouts",
+ arguments = 0,
+ processor = nodeprocessor,
+ sequence = {
+ "before", -- for users
+ "normalizers",
+ "finishers",
+ "after", -- for users
+ }
+}
+
+tasks.new {
+ name = "mvlbuilders",
+ arguments = 1,
+ processor = nodeprocessor,
+ sequence = {
+ "before", -- for users
+ "normalizers",
+ "after", -- for users
+ }
+}
+
+tasks.new {
+ name = "vboxbuilders",
+ arguments = 5,
+ processor = nodeprocessor,
+ sequence = {
+ "before", -- for users
+ "normalizers",
+ "after", -- for users
+ }
+}
+
+-- tasks.new {
+-- name = "parbuilders",
+-- arguments = 1,
+-- processor = nodeprocessor,
+-- sequence = {
+-- "before", -- for users
+-- "lists",
+-- "after", -- for users
+-- }
+-- }
+
+-- tasks.new {
+-- name = "pagebuilders",
+-- arguments = 5,
+-- processor = nodeprocessor,
+-- sequence = {
+-- "before", -- for users
+-- "lists",
+-- "after", -- for users
+-- }
+-- }
diff --git a/tex/context/base/node-tst.lua b/tex/context/base/node-tst.lua
index bfe0051bd..98743ca0d 100644
--- a/tex/context/base/node-tst.lua
+++ b/tex/context/base/node-tst.lua
@@ -1,120 +1,120 @@
-if not modules then modules = { } end modules ['node-tst'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local nodes, node = nodes, node
-
-local chardata = characters.data
-local nodecodes = nodes.nodecodes
-local skipcodes = nodes.skipcodes
-
-local glue_code = nodecodes.glue
-local penalty_code = nodecodes.penalty
-local kern_code = nodecodes.kern
-local glyph_code = nodecodes.glyph
-local whatsit_code = nodecodes.whatsit
-local hlist_code = nodecodes.hlist
-
-local leftskip_code = skipcodes.leftskip
-local rightskip_code = skipcodes.rightskip
-local abovedisplayshortskip_code = skipcodes.abovedisplayshortskip
-local belowdisplayshortskip_code = skipcodes.belowdisplayshortskip
-
-local find_node_tail = node.tail or node.slide
-
-function nodes.leftmarginwidth(n) -- todo: three values
- while n do
- local id = n.id
- if id == glue_code then
- return n.subtype == leftskip_code and n.spec.width or 0
- elseif id == whatsit_code then
- n = n.next
- elseif id == hlist_code then
- return n.width
- else
- break
- end
- end
- return 0
-end
-
-function nodes.rightmarginwidth(n)
- if n then
- n = find_node_tail(n)
- while n do
- local id = n.id
- if id == glue_code then
- return n.subtype == rightskip_code and n.spec.width or 0
- elseif id == whatsit_code then
- n = n.prev
- else
- break
- end
- end
- end
- return false
-end
-
-function nodes.somespace(n,all)
- if n then
- local id = n.id
- if id == glue_code then
- return (all or (n.spec.width ~= 0)) and glue_code
- elseif id == kern_code then
- return (all or (n.kern ~= 0)) and kern
- elseif id == glyph_code then
- local category = chardata[n.char].category
- -- maybe more category checks are needed
- return (category == "zs") and glyph_code
- end
- end
- return false
-end
-
-function nodes.somepenalty(n,value)
- if n then
- local id = n.id
- if id == penalty_code then
- if value then
- return n.penalty == value
- else
- return true
- end
- end
- end
- return false
-end
-
-function nodes.is_display_math(head)
- local n = head.prev
- while n do
- local id = n.id
- if id == penalty_code then
- elseif id == glue_code then
- if n.subtype == abovedisplayshortskip_code then
- return true
- end
- else
- break
- end
- n = n.prev
- end
- n = head.next
- while n do
- local id = n.id
- if id == penalty_code then
- elseif id == glue_code then
- if n.subtype == belowdisplayshortskip_code then
- return true
- end
- else
- break
- end
- n = n.next
- end
- return false
-end
+if not modules then modules = { } end modules ['node-tst'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local nodes, node = nodes, node
+
+local chardata = characters.data
+local nodecodes = nodes.nodecodes
+local skipcodes = nodes.skipcodes
+
+local glue_code = nodecodes.glue
+local penalty_code = nodecodes.penalty
+local kern_code = nodecodes.kern
+local glyph_code = nodecodes.glyph
+local whatsit_code = nodecodes.whatsit
+local hlist_code = nodecodes.hlist
+
+local leftskip_code = skipcodes.leftskip
+local rightskip_code = skipcodes.rightskip
+local abovedisplayshortskip_code = skipcodes.abovedisplayshortskip
+local belowdisplayshortskip_code = skipcodes.belowdisplayshortskip
+
+local find_node_tail = node.tail or node.slide
+
+function nodes.leftmarginwidth(n) -- todo: three values
+ while n do
+ local id = n.id
+ if id == glue_code then
+ return n.subtype == leftskip_code and n.spec.width or 0
+ elseif id == whatsit_code then
+ n = n.next
+ elseif id == hlist_code then
+ return n.width
+ else
+ break
+ end
+ end
+ return 0
+end
+
+function nodes.rightmarginwidth(n)
+ if n then
+ n = find_node_tail(n)
+ while n do
+ local id = n.id
+ if id == glue_code then
+ return n.subtype == rightskip_code and n.spec.width or 0
+ elseif id == whatsit_code then
+ n = n.prev
+ else
+ break
+ end
+ end
+ end
+ return false
+end
+
+function nodes.somespace(n,all)
+ if n then
+ local id = n.id
+ if id == glue_code then
+ return (all or (n.spec.width ~= 0)) and glue_code
+ elseif id == kern_code then
+        return (all or (n.kern ~= 0)) and kern_code
+ elseif id == glyph_code then
+ local category = chardata[n.char].category
+ -- maybe more category checks are needed
+ return (category == "zs") and glyph_code
+ end
+ end
+ return false
+end
+
+function nodes.somepenalty(n,value)
+ if n then
+ local id = n.id
+ if id == penalty_code then
+ if value then
+ return n.penalty == value
+ else
+ return true
+ end
+ end
+ end
+ return false
+end
+
+function nodes.is_display_math(head)
+ local n = head.prev
+ while n do
+ local id = n.id
+ if id == penalty_code then
+ elseif id == glue_code then
+ if n.subtype == abovedisplayshortskip_code then
+ return true
+ end
+ else
+ break
+ end
+ n = n.prev
+ end
+ n = head.next
+ while n do
+ local id = n.id
+ if id == penalty_code then
+ elseif id == glue_code then
+ if n.subtype == belowdisplayshortskip_code then
+ return true
+ end
+ else
+ break
+ end
+ n = n.next
+ end
+ return false
+end
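+
+-- Usage sketch: these predicates are typically combined when cleaning up lists,
+-- for instance to detect a high penalty followed by real space:
+--
+-- if nodes.somepenalty(n,10000) and nodes.somespace(n.next,true) then
+--     -- act on the pair, e.g. remove the glue
+-- end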
diff --git a/tex/context/base/node-typ.lua b/tex/context/base/node-typ.lua
index 6e1a31643..25ad31f83 100644
--- a/tex/context/base/node-typ.lua
+++ b/tex/context/base/node-typ.lua
@@ -1,79 +1,79 @@
-if not modules then modules = { } end modules ['node-typ'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local utfvalues = utf.values
-
-local currentfont = font.current
-local fontparameters = fonts.hashes.parameters
-
-local hpack = node.hpack
-local vpack = node.vpack
-local fast_hpack = nodes.fasthpack
-
-local nodepool = nodes.pool
-
-local newglyph = nodepool.glyph
-local newglue = nodepool.glue
-
-typesetters = typesetters or { }
-
-local function tonodes(str,fontid,spacing) -- quick and dirty
- local head, prev = nil, nil
- if not fontid then
- fontid = currentfont()
- end
- local fp = fontparameters[fontid]
- local s, p, m
- if spacing then
- s, p, m = spacing, 0, 0
- else
- s, p, m = fp.space, fp.space_stretch, fp,space_shrink
- end
- local spacedone = false
- for c in utfvalues(str) do
- local next
- if c == 32 then
- if not spacedone then
- next = newglue(s,p,m)
- spacedone = true
- end
- else
- next = newglyph(fontid or 1,c)
- spacedone = false
- end
- if not next then
- -- nothing
- elseif not head then
- head = next
- else
- prev.next = next
- next.prev = prev
- end
- prev = next
- end
- return head
-end
-
-typesetters.tonodes = tonodes
-
-function typesetters.hpack(str,fontid,spacing)
- return hpack(tonodes(str,fontid,spacing),"exactly")
-end
-
-function typesetters.fast_hpack(str,fontid,spacing)
- return fast_hpack(tonodes(str,fontid,spacing),"exactly")
-end
-
-function typesetters.vpack(str,fontid,spacing)
- -- vpack is just a hack, and a proper implentation is on the agenda
- -- as it needs more info etc than currently available
- return vpack(tonodes(str,fontid,spacing))
-end
-
---~ node.write(typesetters.hpack("Hello World!"))
---~ node.write(typesetters.hpack("Hello World!",1,100*1024*10))
+if not modules then modules = { } end modules ['node-typ'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local utfvalues = utf.values
+
+local currentfont = font.current
+local fontparameters = fonts.hashes.parameters
+
+local hpack = node.hpack
+local vpack = node.vpack
+local fast_hpack = nodes.fasthpack
+
+local nodepool = nodes.pool
+
+local newglyph = nodepool.glyph
+local newglue = nodepool.glue
+
+typesetters = typesetters or { }
+
+local function tonodes(str,fontid,spacing) -- quick and dirty
+ local head, prev = nil, nil
+ if not fontid then
+ fontid = currentfont()
+ end
+ local fp = fontparameters[fontid]
+ local s, p, m
+ if spacing then
+ s, p, m = spacing, 0, 0
+ else
+        s, p, m = fp.space, fp.space_stretch, fp.space_shrink
+ end
+ local spacedone = false
+ for c in utfvalues(str) do
+ local next
+ if c == 32 then
+ if not spacedone then
+ next = newglue(s,p,m)
+ spacedone = true
+ end
+ else
+ next = newglyph(fontid or 1,c)
+ spacedone = false
+ end
+ if not next then
+ -- nothing
+ elseif not head then
+ head = next
+ else
+ prev.next = next
+ next.prev = prev
+ end
+ prev = next
+ end
+ return head
+end
+
+typesetters.tonodes = tonodes
+
+function typesetters.hpack(str,fontid,spacing)
+ return hpack(tonodes(str,fontid,spacing),"exactly")
+end
+
+function typesetters.fast_hpack(str,fontid,spacing)
+ return fast_hpack(tonodes(str,fontid,spacing),"exactly")
+end
+
+function typesetters.vpack(str,fontid,spacing)
+    -- vpack is just a hack, and a proper implementation is on the agenda
+ -- as it needs more info etc than currently available
+ return vpack(tonodes(str,fontid,spacing))
+end
+
+--~ node.write(typesetters.hpack("Hello World!"))
+--~ node.write(typesetters.hpack("Hello World!",1,100*1024*10))
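+
+-- and, in the same spirit, the unpacked list (sketch):
+--
+--~ node.write(typesetters.tonodes("just some nodes",font.current()))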
diff --git a/tex/context/base/pack-obj.lua b/tex/context/base/pack-obj.lua
index 1e4e0f59e..b218a0a5c 100644
--- a/tex/context/base/pack-obj.lua
+++ b/tex/context/base/pack-obj.lua
@@ -1,77 +1,77 @@
-if not modules then modules = { } end modules ['pack-obj'] = {
- version = 1.001,
- comment = "companion to pack-obj.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-We save object references in the main utility table. jobobjects are
-reusable components.
---ldx]]--
-
-local commands, context = commands, context
-
-local texcount = tex.count
-local allocate = utilities.storage.allocate
-
-local collected = allocate()
-local tobesaved = allocate()
-
-local jobobjects = {
- collected = collected,
- tobesaved = tobesaved,
-}
-
-job.objects = jobobjects
-
-local function initializer()
- collected = jobobjects.collected
- tobesaved = jobobjects.tobesaved
-end
-
-job.register('job.objects.collected', tobesaved, initializer, nil)
-
-function jobobjects.save(tag,number,page)
- local t = { number, page }
- tobesaved[tag], collected[tag] = t, t
-end
-
-function jobobjects.set(tag,number,page)
- collected[tag] = { number, page }
-end
-
-function jobobjects.get(tag)
- return collected[tag] or tobesaved[tag]
-end
-
-function jobobjects.number(tag,default)
- local o = collected[tag] or tobesaved[tag]
- return o and o[1] or default
-end
-
-function jobobjects.page(tag,default)
- local o = collected[tag] or tobesaved[tag]
- return o and o[2] or default
-end
-
--- interface
-
-commands.saveobject = jobobjects.save
-commands.setobject = jobobjects.set
-
-function commands.objectnumber(tag,default)
- local o = collected[tag] or tobesaved[tag]
- context(o and o[1] or default)
-end
-
-function commands.objectpage(tag,default)
- local o = collected[tag] or tobesaved[tag]
- context(o and o[2] or default)
-end
-
-function commands.doifobjectreferencefoundelse(tag)
- commands.doifelse(collected[tag] or tobesaved[tag])
-end
-
+if not modules then modules = { } end modules ['pack-obj'] = {
+ version = 1.001,
+ comment = "companion to pack-obj.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[ldx--
+We save object references in the main utility table. jobobjects are
+reusable components.
+--ldx]]--
+
+local commands, context = commands, context
+
+local texcount = tex.count
+local allocate = utilities.storage.allocate
+
+local collected = allocate()
+local tobesaved = allocate()
+
+local jobobjects = {
+ collected = collected,
+ tobesaved = tobesaved,
+}
+
+job.objects = jobobjects
+
+local function initializer()
+ collected = jobobjects.collected
+ tobesaved = jobobjects.tobesaved
+end
+
+job.register('job.objects.collected', tobesaved, initializer, nil)
+
+function jobobjects.save(tag,number,page)
+ local t = { number, page }
+ tobesaved[tag], collected[tag] = t, t
+end
+
+function jobobjects.set(tag,number,page)
+ collected[tag] = { number, page }
+end
+
+function jobobjects.get(tag)
+ return collected[tag] or tobesaved[tag]
+end
+
+function jobobjects.number(tag,default)
+ local o = collected[tag] or tobesaved[tag]
+ return o and o[1] or default
+end
+
+function jobobjects.page(tag,default)
+ local o = collected[tag] or tobesaved[tag]
+ return o and o[2] or default
+end
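+
+-- Usage sketch (tag, number and page are illustrative):
+--
+-- jobobjects.save("myobject",1,12)
+-- jobobjects.number("myobject",0)  -- 1
+-- jobobjects.page("myobject",0)    -- 12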
+
+-- interface
+
+commands.saveobject = jobobjects.save
+commands.setobject = jobobjects.set
+
+function commands.objectnumber(tag,default)
+ local o = collected[tag] or tobesaved[tag]
+ context(o and o[1] or default)
+end
+
+function commands.objectpage(tag,default)
+ local o = collected[tag] or tobesaved[tag]
+ context(o and o[2] or default)
+end
+
+function commands.doifobjectreferencefoundelse(tag)
+ commands.doifelse(collected[tag] or tobesaved[tag])
+end
+
diff --git a/tex/context/base/pack-rul.lua b/tex/context/base/pack-rul.lua
index a990936e7..3dcabc3da 100644
--- a/tex/context/base/pack-rul.lua
+++ b/tex/context/base/pack-rul.lua
@@ -1,109 +1,109 @@
-if not modules then modules = { } end modules ['pack-rul'] = {
- version = 1.001,
- comment = "companion to pack-rul.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-An explanation is given in the history document mk.
---ldx]]--
-
-local texsetdimen, texsetcount, texbox = tex.setdimen, tex.setcount, tex.box
-local hpack, free, copy, traverse_id = node.hpack, node.free, node.copy_list, node.traverse_id
-local texdimen, texcount = tex.dimen, tex.count
-
-local hlist_code = nodes.nodecodes.hlist
-local box_code = nodes.listcodes.box
-local node_dimensions = node.dimensions
-
-function commands.doreshapeframedbox(n)
- local box = texbox[n]
- local noflines = 0
- local firstheight = nil
- local lastdepth = nil
- local lastlinelength = 0
- local minwidth = 0
- local maxwidth = 0
- local totalwidth = 0
- if box.width ~= 0 then
- local list = box.list
- if list then
- for h in traverse_id(hlist_code,list) do -- no dir etc needed
- if not firstheight then
- firstheight = h.height
- end
- lastdepth = h.depth
- noflines = noflines + 1
- local l = h.list
- if l then
- if h.subtype == box_code then -- maybe more
- lastlinelength = h.width
- else
- lastlinelength = node_dimensions(l) -- used to be: hpack(copy(l)).width
- end
- if lastlinelength > maxwidth then
- maxwidth = lastlinelength
- end
- if lastlinelength < minwidth or minwidth == 0 then
- minwidth = lastlinelength
- end
- totalwidth = totalwidth + lastlinelength
- end
- end
- if firstheight then
- if maxwidth ~= 0 then
- for h in traverse_id(hlist_code,list) do
- local l = h.list
- if l then
- if h.subtype == box_code then
- -- explicit box, no 'line'
- else
- -- if h.width ~= maxwidth then -- else no display math handling (uses shift)
- -- challenge: adapt glue_set
- -- h.glue_set = h.glue_set * h.width/maxwidth -- interesting ... doesn't matter much
- -- h.width = maxwidth
- h.list = hpack(l,maxwidth,'exactly',h.dir)
- h.shift = 0 -- needed for display math
- h.width = maxwidth
- -- end
- end
- end
- end
- end
- box.width = maxwidth
- end
- end
- end
- -- print("reshape", noflines, firstheight or 0, lastdepth or 0)
- texsetcount("global","framednoflines", noflines)
- texsetdimen("global","framedfirstheight", firstheight or 0)
- texsetdimen("global","framedlastdepth", lastdepth or 0)
- texsetdimen("global","framedminwidth", minwidth)
- texsetdimen("global","framedmaxwidth", maxwidth)
- texsetdimen("global","framedaveragewidth", noflines > 0 and totalwidth/noflines or 0)
-end
-
-function commands.doanalyzeframedbox(n)
- local box = texbox[n]
- local noflines = 0
- local firstheight = nil
- local lastdepth = nil
- if box.width ~= 0 then
- local list = box.list
- if list then
- for h in traverse_id(hlist_code,list) do
- if not firstheight then
- firstheight = h.height
- end
- lastdepth = h.depth
- noflines = noflines + 1
- end
- end
- end
- -- print("analyze", noflines, firstheight or 0, lastdepth or 0)
- texsetcount("global","framednoflines", noflines)
- texsetdimen("global","framedfirstheight", firstheight or 0)
- texsetdimen("global","framedlastdepth", lastdepth or 0)
-end
+if not modules then modules = { } end modules ['pack-rul'] = {
+ version = 1.001,
+ comment = "companion to pack-rul.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[ldx--
+An explanation is given in the history document mk.
+--ldx]]--
+
+local texsetdimen, texsetcount, texbox = tex.setdimen, tex.setcount, tex.box
+local hpack, free, copy, traverse_id = node.hpack, node.free, node.copy_list, node.traverse_id
+local texdimen, texcount = tex.dimen, tex.count
+
+local hlist_code = nodes.nodecodes.hlist
+local box_code = nodes.listcodes.box
+local node_dimensions = node.dimensions
+
+function commands.doreshapeframedbox(n)
+ local box = texbox[n]
+ local noflines = 0
+ local firstheight = nil
+ local lastdepth = nil
+ local lastlinelength = 0
+ local minwidth = 0
+ local maxwidth = 0
+ local totalwidth = 0
+ if box.width ~= 0 then
+ local list = box.list
+ if list then
+ for h in traverse_id(hlist_code,list) do -- no dir etc needed
+ if not firstheight then
+ firstheight = h.height
+ end
+ lastdepth = h.depth
+ noflines = noflines + 1
+ local l = h.list
+ if l then
+ if h.subtype == box_code then -- maybe more
+ lastlinelength = h.width
+ else
+ lastlinelength = node_dimensions(l) -- used to be: hpack(copy(l)).width
+ end
+ if lastlinelength > maxwidth then
+ maxwidth = lastlinelength
+ end
+ if lastlinelength < minwidth or minwidth == 0 then
+ minwidth = lastlinelength
+ end
+ totalwidth = totalwidth + lastlinelength
+ end
+ end
+ if firstheight then
+ if maxwidth ~= 0 then
+ for h in traverse_id(hlist_code,list) do
+ local l = h.list
+ if l then
+ if h.subtype == box_code then
+ -- explicit box, no 'line'
+ else
+ -- if h.width ~= maxwidth then -- else no display math handling (uses shift)
+ -- challenge: adapt glue_set
+ -- h.glue_set = h.glue_set * h.width/maxwidth -- interesting ... doesn't matter much
+ -- h.width = maxwidth
+ h.list = hpack(l,maxwidth,'exactly',h.dir)
+ h.shift = 0 -- needed for display math
+ h.width = maxwidth
+ -- end
+ end
+ end
+ end
+ end
+ box.width = maxwidth
+ end
+ end
+ end
+ -- print("reshape", noflines, firstheight or 0, lastdepth or 0)
+ texsetcount("global","framednoflines", noflines)
+ texsetdimen("global","framedfirstheight", firstheight or 0)
+ texsetdimen("global","framedlastdepth", lastdepth or 0)
+ texsetdimen("global","framedminwidth", minwidth)
+ texsetdimen("global","framedmaxwidth", maxwidth)
+ texsetdimen("global","framedaveragewidth", noflines > 0 and totalwidth/noflines or 0)
+end
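+
+-- Sketch: the computed values end up in count and dimen registers that are
+-- assumed to be allocated at the TeX end (pack-rul.mkiv), so they can also be
+-- consulted from Lua afterwards:
+--
+-- commands.doreshapeframedbox(0)
+-- print(tex.count.framednoflines,tex.dimen.framedmaxwidth)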
+
+function commands.doanalyzeframedbox(n)
+ local box = texbox[n]
+ local noflines = 0
+ local firstheight = nil
+ local lastdepth = nil
+ if box.width ~= 0 then
+ local list = box.list
+ if list then
+ for h in traverse_id(hlist_code,list) do
+ if not firstheight then
+ firstheight = h.height
+ end
+ lastdepth = h.depth
+ noflines = noflines + 1
+ end
+ end
+ end
+ -- print("analyze", noflines, firstheight or 0, lastdepth or 0)
+ texsetcount("global","framednoflines", noflines)
+ texsetdimen("global","framedfirstheight", firstheight or 0)
+ texsetdimen("global","framedlastdepth", lastdepth or 0)
+end
diff --git a/tex/context/base/page-flt.lua b/tex/context/base/page-flt.lua
index ab7a534eb..cd78b9356 100644
--- a/tex/context/base/page-flt.lua
+++ b/tex/context/base/page-flt.lua
@@ -1,289 +1,289 @@
-if not modules then modules = { } end modules ['page-flt'] = {
- version = 1.001,
- comment = "companion to page-flt.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- floats -> managers.floats
--- some functions are a tex/lua mix so we need a separation
-
-local insert, remove = table.insert, table.remove
-local find = string.find
-local setdimen, setcount, texbox = tex.setdimen, tex.setcount, tex.box
-
-local copy_node_list = node.copy_list
-
-local trace_floats = false trackers.register("graphics.floats", function(v) trace_floats = v end) -- name might change
-
-local report_floats = logs.reporter("structure","floats")
-
-local C, S, P, lpegmatch = lpeg.C, lpeg.S, lpeg.P, lpeg.match
-
--- we use floatbox, floatwidth, floatheight
--- text page leftpage rightpage (todo: top, bottom, margin, order)
-
-floats = floats or { }
-local floats = floats
-
-local noffloats, last, default, pushed = 0, nil, "text", { }
-
-local function initialize()
- return {
- text = { },
- page = { },
- leftpage = { },
- rightpage = { },
- somewhere = { },
- }
-end
-
-local stacks = initialize()
-
--- list location
-
-function floats.stacked(which) -- floats.thenofstacked
- return #stacks[which or default]
-end
-
-function floats.push()
- insert(pushed,stacks)
- stacks = initialize()
- setcount("global","savednoffloats",0)
-end
-
-function floats.pop()
- local popped = remove(pushed)
- if popped then
- for which, stack in next, stacks do
- for i=1,#stack do
- insert(popped[which],stack[i])
- end
- end
- stacks = popped
- setcount("global","savednoffloats",#stacks[default])
- end
-end
-
-local function setdimensions(b)
- local w, h, d = 0, 0, 0
- if b then
- w, h, d = b.width, b.height, b.depth
- end
- setdimen("global","floatwidth", w)
- setdimen("global","floatheight", h+d)
- return w, h, d
-end
-
-local function get(stack,n,bylabel)
- if bylabel then
- for i=1,#stack do
- local s = stack[i]
- local n = string.topattern(tostring(n)) -- to be sure
- if find(s.data.label,n) then
- return s, s.box, i
- end
- end
- else
- n = n or #stack
- if n > 0 then
- local t = stack[n]
- if t then
- return t, t.box, n
- end
- end
- end
-end
-
-function floats.save(which,data)
- which = which or default
- local b = texbox.floatbox
- if b then
- local stack = stacks[which]
- noffloats = noffloats + 1
- local w, h, d = b.width, b.height, b.depth
- local t = {
- n = noffloats,
- data = data or { },
- box = copy_node_list(b),
- }
- texbox.floatbox = nil
- insert(stack,t)
- setcount("global","savednoffloats",#stacks[default])
- if trace_floats then
- report_floats("%s, category %a, number %a, slot %a, width %p, height %p, depth %p","saving",which,noffloats,#stack,w,h,d)
- else
- interfaces.showmessage("floatblocks",2,noffloats)
- end
- else
- report_floats("ignoring empty, category %a, number %a",which,noffloats)
- end
-end
-
-function floats.resave(which)
- if last then
- which = which or default
- local stack = stacks[which]
- local b = texbox.floatbox
- local w, h, d = b.width, b.height, b.depth
- last.box = copy_node_list(b)
- texbox.floatbox = nil
- insert(stack,1,last)
- setcount("global","savednoffloats",#stacks[default])
- if trace_floats then
- report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","resaving",which,noffloats,#stack,w,h,d)
- else
- interfaces.showmessage("floatblocks",2,noffloats)
- end
- else
- report_floats("unable to resave float")
- end
-end
-
-function floats.flush(which,n,bylabel)
- which = which or default
- local stack = stacks[which]
- local t, b, n = get(stack,n or 1,bylabel)
- if t then
- local w, h, d = setdimensions(b)
- if trace_floats then
- report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","flushing",which,t.n,n,w,h,d)
- else
- interfaces.showmessage("floatblocks",3,t.n)
- end
- texbox.floatbox = b
- last = remove(stack,n)
- last.box = nil
- setcount("global","savednoffloats",#stacks[default]) -- default?
- else
- setdimensions()
- end
-end
-
-function floats.consult(which,n)
- which = which or default
- local stack = stacks[which]
- local t, b, n = get(stack,n)
- if t then
- local w, h, d = setdimensions(b)
- if trace_floats then
- report_floats("%s, category %a, number %a, slot %a width %p, height %p, depth %p","consulting",which,t.n,n,w,h,d)
- end
- return t, b, n
- else
- if trace_floats then
- report_floats("nothing to consult")
- end
- setdimensions()
- end
-end
-
-function floats.collect(which,maxwidth,distance)
- which = which or default
- local stack = stacks[which]
- local n, m = #stack, 0
- for i=1,n do
- local t, b, n = get(stack,i)
- if t then
- local w, h, d = setdimensions(b)
- if w + distance < maxwidth then
- m = m + 1
- maxwidth = maxwidth - w - distance
- else
- break
- end
- else
- break
- end
- end
- if m == 0 then
- m = 1
- end
- setcount("global","nofcollectedfloats",m)
-end
-
-function floats.getvariable(name,default)
- local value = last and last.data[name] or default
- return value ~= "" and value
-end
-
-function floats.checkedpagefloat(packed)
- if structures.pages.is_odd() then
- if #stacks.rightpage > 0 then
- return "rightpage"
- elseif #stacks.page > 0 then
- return "page"
- elseif #stacks.leftpage > 0 then
- if packed then
- return "leftpage"
- end
- end
- else
- if #stacks.leftpage > 0 then
- return "leftpage"
- elseif #stacks.page > 0 then
- return "page"
- elseif #stacks.rightpage > 0 then
- if packed then
- return "rightpage"
- end
- end
- end
-end
-
-function floats.nofstacked()
- return #stacks[which or default] or 0
-end
-
--- todo: check for digits !
-
-local method = C((1-S(", :"))^1)
-local position = P(":") * C((1-S("*,"))^1) * (P("*") * C((1-S(","))^1))^0
-local label = P(":") * C((1-S(",*: "))^0)
-
-local pattern = method * (
- label * position * C("")
- + C("") * position * C("")
- + label * C("") * C("")
- + C("") * C("") * C("")
-) + C("") * C("") * C("") * C("")
-
--- inspect { lpegmatch(pattern,"somewhere:blabla,crap") }
--- inspect { lpegmatch(pattern,"somewhere:1*2") }
--- inspect { lpegmatch(pattern,"somewhere:blabla:1*2") }
--- inspect { lpegmatch(pattern,"somewhere::1*2") }
--- inspect { lpegmatch(pattern,"somewhere,") }
--- inspect { lpegmatch(pattern,"somewhere") }
--- inspect { lpegmatch(pattern,"") }
-
-function floats.analysemethod(str) -- will become a more extensive parser
- return lpegmatch(pattern,str or "")
-end
-
--- interface
-
-local context = context
-local setvalue = context.setvalue
-
-commands.flushfloat = floats.flush
-commands.savefloat = floats.save
-commands.resavefloat = floats.resave
-commands.pushfloat = floats.push
-commands.popfloat = floats.pop
-commands.consultfloat = floats.consult
-commands.collectfloat = floats.collect
-
-function commands.getfloatvariable (...) local v = floats.getvariable(...) if v then context(v) end end
-function commands.checkedpagefloat (...) local v = floats.checkedpagefloat(...) if v then context(v) end end
-
-function commands.nofstackedfloats (...) context(floats.nofstacked(...)) end
-function commands.doifelsesavedfloat(...) commands.doifelse(floats.nofstacked(...)>0) end
-
-function commands.analysefloatmethod(str) -- currently only one method
- local method, label, row, column = floats.analysemethod(str)
- setvalue("floatmethod",method or "")
- setvalue("floatlabel", label or "")
- setvalue("floatrow", row or "")
- setvalue("floatcolumn",column or "")
-end
+if not modules then modules = { } end modules ['page-flt'] = {
+ version = 1.001,
+ comment = "companion to page-flt.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- floats -> managers.floats
+-- some functions are a tex/lua mix so we need a separation
+
+local insert, remove = table.insert, table.remove
+local find = string.find
+local setdimen, setcount, texbox = tex.setdimen, tex.setcount, tex.box
+
+local copy_node_list = node.copy_list
+
+local trace_floats = false trackers.register("graphics.floats", function(v) trace_floats = v end) -- name might change
+
+local report_floats = logs.reporter("structure","floats")
+
+local C, S, P, lpegmatch = lpeg.C, lpeg.S, lpeg.P, lpeg.match
+
+-- we use floatbox, floatwidth, floatheight
+-- text page leftpage rightpage (todo: top, bottom, margin, order)
+
+floats = floats or { }
+local floats = floats
+
+local noffloats, last, default, pushed = 0, nil, "text", { }
+
+local function initialize()
+ return {
+ text = { },
+ page = { },
+ leftpage = { },
+ rightpage = { },
+ somewhere = { },
+ }
+end
+
+local stacks = initialize()
+
+-- list location
+
+function floats.stacked(which) -- floats.thenofstacked
+ return #stacks[which or default]
+end
+
+function floats.push()
+ insert(pushed,stacks)
+ stacks = initialize()
+ setcount("global","savednoffloats",0)
+end
+
+function floats.pop()
+ local popped = remove(pushed)
+ if popped then
+ for which, stack in next, stacks do
+ for i=1,#stack do
+ insert(popped[which],stack[i])
+ end
+ end
+ stacks = popped
+ setcount("global","savednoffloats",#stacks[default])
+ end
+end
+
+local function setdimensions(b)
+ local w, h, d = 0, 0, 0
+ if b then
+ w, h, d = b.width, b.height, b.depth
+ end
+ setdimen("global","floatwidth", w)
+ setdimen("global","floatheight", h+d)
+ return w, h, d
+end
+
+local function get(stack,n,bylabel)
+ if bylabel then
+ for i=1,#stack do
+ local s = stack[i]
+ local n = string.topattern(tostring(n)) -- to be sure
+ if find(s.data.label,n) then
+ return s, s.box, i
+ end
+ end
+ else
+ n = n or #stack
+ if n > 0 then
+ local t = stack[n]
+ if t then
+ return t, t.box, n
+ end
+ end
+ end
+end
+
+function floats.save(which,data)
+ which = which or default
+ local b = texbox.floatbox
+ if b then
+ local stack = stacks[which]
+ noffloats = noffloats + 1
+ local w, h, d = b.width, b.height, b.depth
+ local t = {
+ n = noffloats,
+ data = data or { },
+ box = copy_node_list(b),
+ }
+ texbox.floatbox = nil
+ insert(stack,t)
+ setcount("global","savednoffloats",#stacks[default])
+ if trace_floats then
+ report_floats("%s, category %a, number %a, slot %a, width %p, height %p, depth %p","saving",which,noffloats,#stack,w,h,d)
+ else
+ interfaces.showmessage("floatblocks",2,noffloats)
+ end
+ else
+ report_floats("ignoring empty, category %a, number %a",which,noffloats)
+ end
+end
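+
+-- A usage sketch (the box itself is expected in \floatbox on the TeX end; the
+-- label value is invented, "label" is the only data field consulted here):
+--
+-- floats.save("page", { label = "fig:whatever" })
+-- floats.flush("page", "fig:whatever", true) -- flush by label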
+
+function floats.resave(which)
+ if last then
+ which = which or default
+ local stack = stacks[which]
+ local b = texbox.floatbox
+ local w, h, d = b.width, b.height, b.depth
+ last.box = copy_node_list(b)
+ texbox.floatbox = nil
+ insert(stack,1,last)
+ setcount("global","savednoffloats",#stacks[default])
+ if trace_floats then
+ report_floats("%s, category %a, number %a, slot %a, width %p, height %p, depth %p","resaving",which,noffloats,#stack,w,h,d)
+ else
+ interfaces.showmessage("floatblocks",2,noffloats)
+ end
+ else
+ report_floats("unable to resave float")
+ end
+end
+
+function floats.flush(which,n,bylabel)
+ which = which or default
+ local stack = stacks[which]
+ local t, b, n = get(stack,n or 1,bylabel)
+ if t then
+ local w, h, d = setdimensions(b)
+ if trace_floats then
+ report_floats("%s, category %a, number %a, slot %a, width %p, height %p, depth %p","flushing",which,t.n,n,w,h,d)
+ else
+ interfaces.showmessage("floatblocks",3,t.n)
+ end
+ texbox.floatbox = b
+ last = remove(stack,n)
+ last.box = nil
+ setcount("global","savednoffloats",#stacks[default]) -- default?
+ else
+ setdimensions()
+ end
+end
+
+function floats.consult(which,n)
+ which = which or default
+ local stack = stacks[which]
+ local t, b, n = get(stack,n)
+ if t then
+ local w, h, d = setdimensions(b)
+ if trace_floats then
+ report_floats("%s, category %a, number %a, slot %a, width %p, height %p, depth %p","consulting",which,t.n,n,w,h,d)
+ end
+ return t, b, n
+ else
+ if trace_floats then
+ report_floats("nothing to consult")
+ end
+ setdimensions()
+ end
+end
+
+function floats.collect(which,maxwidth,distance)
+ which = which or default
+ local stack = stacks[which]
+ local n, m = #stack, 0
+ for i=1,n do
+ local t, b, n = get(stack,i)
+ if t then
+ local w, h, d = setdimensions(b)
+ if w + distance < maxwidth then
+ m = m + 1
+ maxwidth = maxwidth - w - distance
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ if m == 0 then
+ m = 1
+ end
+ setcount("global","nofcollectedfloats",m)
+end
+
+function floats.getvariable(name,default)
+ local value = last and last.data[name] or default
+ return value ~= "" and value
+end
+
+function floats.checkedpagefloat(packed)
+ if structures.pages.is_odd() then
+ if #stacks.rightpage > 0 then
+ return "rightpage"
+ elseif #stacks.page > 0 then
+ return "page"
+ elseif #stacks.leftpage > 0 then
+ if packed then
+ return "leftpage"
+ end
+ end
+ else
+ if #stacks.leftpage > 0 then
+ return "leftpage"
+ elseif #stacks.page > 0 then
+ return "page"
+ elseif #stacks.rightpage > 0 then
+ if packed then
+ return "rightpage"
+ end
+ end
+ end
+end
+
+function floats.nofstacked(which)
+ return #stacks[which or default] or 0
+end
+
+-- todo: check for digits !
+
+local method = C((1-S(", :"))^1)
+local position = P(":") * C((1-S("*,"))^1) * (P("*") * C((1-S(","))^1))^0
+local label = P(":") * C((1-S(",*: "))^0)
+
+local pattern = method * (
+ label * position * C("")
+ + C("") * position * C("")
+ + label * C("") * C("")
+ + C("") * C("") * C("")
+) + C("") * C("") * C("") * C("")
+
+-- inspect { lpegmatch(pattern,"somewhere:blabla,crap") }
+-- inspect { lpegmatch(pattern,"somewhere:1*2") }
+-- inspect { lpegmatch(pattern,"somewhere:blabla:1*2") }
+-- inspect { lpegmatch(pattern,"somewhere::1*2") }
+-- inspect { lpegmatch(pattern,"somewhere,") }
+-- inspect { lpegmatch(pattern,"somewhere") }
+-- inspect { lpegmatch(pattern,"") }
+
+function floats.analysemethod(str) -- will become a more extensive parser
+ return lpegmatch(pattern,str or "")
+end
+
+-- interface
+
+local context = context
+local setvalue = context.setvalue
+
+commands.flushfloat = floats.flush
+commands.savefloat = floats.save
+commands.resavefloat = floats.resave
+commands.pushfloat = floats.push
+commands.popfloat = floats.pop
+commands.consultfloat = floats.consult
+commands.collectfloat = floats.collect
+
+function commands.getfloatvariable (...) local v = floats.getvariable(...) if v then context(v) end end
+function commands.checkedpagefloat (...) local v = floats.checkedpagefloat(...) if v then context(v) end end
+
+function commands.nofstackedfloats (...) context(floats.nofstacked(...)) end
+function commands.doifelsesavedfloat(...) commands.doifelse(floats.nofstacked(...)>0) end
+
+function commands.analysefloatmethod(str) -- currently only one method
+ local method, label, row, column = floats.analysemethod(str)
+ setvalue("floatmethod",method or "")
+ setvalue("floatlabel", label or "")
+ setvalue("floatrow", row or "")
+ setvalue("floatcolumn",column or "")
+end
diff --git a/tex/context/base/page-inj.lua b/tex/context/base/page-inj.lua
index 5b450d60e..205f8d397 100644
--- a/tex/context/base/page-inj.lua
+++ b/tex/context/base/page-inj.lua
@@ -1,101 +1,101 @@
-if not modules then modules = { } end modules ["page-inj"] = {
- version = 1.000,
- comment = "Page injections",
- author = "Wolfgang Schuster & Hans Hagen",
- copyright = "Wolfgang Schuster & Hans Hagen",
- license = "see context related readme files",
-}
-
--- Adapted a bit by HH: numbered states, tracking, delayed, order, etc.
-
-local injections = pagebuilders.injections or { }
-pagebuilders.injections = injections
-
-local report = logs.reporter("pagebuilder","injections")
-local trace = false trackers.register("pagebuilder.injections",function(v) trace = v end)
-
-local variables = interfaces.variables
-
-local v_yes = variables.yes
-local v_previous = variables.previous
-local v_next = variables.next
-
-local order = 0
-local cache = { }
-
-function injections.save(specification) -- maybe not public, just commands.*
- order = order + 1
- cache[#cache+1] = {
- order = order,
- name = specification.name,
- state = tonumber(specification.state) or specification.state,
- parameters = specification.userdata,
- }
- tex.setcount("global","c_page_boxes_flush_n",#cache)
-end
-
-function injections.flushbefore() -- maybe not public, just commands.*
- if #cache > 0 then
- local delayed = { }
- context.unprotect()
- for i=1,#cache do
- local c = cache[i]
- local oldstate = c.state
- if oldstate == v_previous then
- if trace then
- report("entry %a, order %a, flushing due to state %a",i,c.order,oldstate)
- end
- context.page_injections_flush_saved(c.name,c.parameters)
- elseif type(oldstate) == "number" and oldstate < 0 then
- local newstate = oldstate + 1
- if newstate >= 0 then
- newstate = v_previous
- end
- if trace then
- report("entry %a, order %a, changing state from %a to %a",i,c.order,oldstate,newstate)
- end
- c.state = newstate
- delayed[#delayed+1] = c
- else
- delayed[#delayed+1] = c
- end
- end
- context.unprotect()
- cache = delayed
- tex.setcount("global","c_page_boxes_flush_n",#cache)
- end
-end
-
-function injections.flushafter() -- maybe not public, just commands.*
- if #cache > 0 then
- local delayed = { }
- context.unprotect()
- for i=1,#cache do
- local c = cache[i]
- local oldstate = c.state
- if oldstate == v_next then
- if trace then
- report("entry %a, order %a, flushing due to state %a",i,c.order,oldstate)
- end
- context.page_injections_flush_saved(c.name,c.parameters)
- elseif type(oldstate) == "number" and oldstate> 0 then
- local newstate = oldstate- 1
- if newstate <= 0 then
- newstate = v_next
- end
- if trace then
- report("entry %a, order %a, changing state from %a to %a",i,c.order,oldstate,newstate)
- end
- c.state = newstate
- delayed[#delayed+1] = c
- end
- end
- context.protect()
- cache = delayed
- tex.setcount("global","c_page_boxes_flush_n",#cache)
- end
-end
-
-commands.page_injections_save = injections.save
-commands.page_injections_flush_after = injections.flushafter
-commands.page_injections_flush_before = injections.flushbefore
+if not modules then modules = { } end modules ["page-inj"] = {
+ version = 1.000,
+ comment = "Page injections",
+ author = "Wolfgang Schuster & Hans Hagen",
+ copyright = "Wolfgang Schuster & Hans Hagen",
+ license = "see context related readme files",
+}
+
+-- Adapted a bit by HH: numbered states, tracking, delayed, order, etc.
+
+local injections = pagebuilders.injections or { }
+pagebuilders.injections = injections
+
+local report = logs.reporter("pagebuilder","injections")
+local trace = false trackers.register("pagebuilder.injections",function(v) trace = v end)
+
+local variables = interfaces.variables
+
+local v_yes = variables.yes
+local v_previous = variables.previous
+local v_next = variables.next
+
+local order = 0
+local cache = { }
+
+function injections.save(specification) -- maybe not public, just commands.*
+ order = order + 1
+ cache[#cache+1] = {
+ order = order,
+ name = specification.name,
+ state = tonumber(specification.state) or specification.state,
+ parameters = specification.userdata,
+ }
+ tex.setcount("global","c_page_boxes_flush_n",#cache)
+end
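+
+-- A sketch of the kind of specification this expects (the field names are
+-- taken from the code above, the values are invented):
+--
+-- injections.save {
+--     name     = "myinjection",
+--     state    = "2",                      -- a number, or e.g. v_previous / v_next
+--     userdata = { somekey = "somevalue" }, -- passed back when flushed
+-- }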
+
+function injections.flushbefore() -- maybe not public, just commands.*
+ if #cache > 0 then
+ local delayed = { }
+ context.unprotect()
+ for i=1,#cache do
+ local c = cache[i]
+ local oldstate = c.state
+ if oldstate == v_previous then
+ if trace then
+ report("entry %a, order %a, flushing due to state %a",i,c.order,oldstate)
+ end
+ context.page_injections_flush_saved(c.name,c.parameters)
+ elseif type(oldstate) == "number" and oldstate < 0 then
+ local newstate = oldstate + 1
+ if newstate >= 0 then
+ newstate = v_previous
+ end
+ if trace then
+ report("entry %a, order %a, changing state from %a to %a",i,c.order,oldstate,newstate)
+ end
+ c.state = newstate
+ delayed[#delayed+1] = c
+ else
+ delayed[#delayed+1] = c
+ end
+ end
+ context.protect()
+ cache = delayed
+ tex.setcount("global","c_page_boxes_flush_n",#cache)
+ end
+end
+
+function injections.flushafter() -- maybe not public, just commands.*
+ if #cache > 0 then
+ local delayed = { }
+ context.unprotect()
+ for i=1,#cache do
+ local c = cache[i]
+ local oldstate = c.state
+ if oldstate == v_next then
+ if trace then
+ report("entry %a, order %a, flushing due to state %a",i,c.order,oldstate)
+ end
+ context.page_injections_flush_saved(c.name,c.parameters)
+ elseif type(oldstate) == "number" and oldstate > 0 then
+ local newstate = oldstate - 1
+ if newstate <= 0 then
+ newstate = v_next
+ end
+ if trace then
+ report("entry %a, order %a, changing state from %a to %a",i,c.order,oldstate,newstate)
+ end
+ c.state = newstate
+ delayed[#delayed+1] = c
+ end
+ end
+ context.protect()
+ cache = delayed
+ tex.setcount("global","c_page_boxes_flush_n",#cache)
+ end
+end
+
+commands.page_injections_save = injections.save
+commands.page_injections_flush_after = injections.flushafter
+commands.page_injections_flush_before = injections.flushbefore
diff --git a/tex/context/base/page-ins.lua b/tex/context/base/page-ins.lua
index 7f870735d..15656a231 100644
--- a/tex/context/base/page-ins.lua
+++ b/tex/context/base/page-ins.lua
@@ -1,97 +1,97 @@
-if not modules then modules = { } end modules ['page-ins'] = {
- version = 1.001,
- comment = "companion to page-mix.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
- -- public = {
- -- functions = {
- -- "inserts.define",
- -- "inserts.getdata",
- -- },
- -- commands = {
- -- "defineinsertion",
- -- "inserttionnumber",
- -- }
- -- }
-}
-
--- Maybe we should only register in lua and forget about the tex end.
-
-structures = structures or { }
-structures.inserts = structures.inserts or { }
-local inserts = structures.inserts
-
-local report_inserts = logs.reporter("inserts")
-
-local allocate = utilities.storage.allocate
-
-inserts.stored = inserts.stored or allocate { } -- combining them in one is inefficient in the
-inserts.data = inserts.data or allocate { } -- bytecode storage pool
-
-local variables = interfaces.variables
-local v_page = variables.page
-local v_columns = variables.columns
-local v_firstcolumn = variables.firstcolumn
-local v_lastcolumn = variables.lastcolumn
-local v_text = variables.text
-
-storage.register("structures/inserts/stored", inserts.stored, "structures.inserts.stored")
-
-local data = inserts.data
-local stored = inserts.stored
-
-for name, specification in next, stored do
- data[specification.number] = specification
- data[name] = specification
-end
-
-function inserts.define(name,specification)
- specification.name= name
- local number = specification.number or 0
- data[name] = specification
- data[number] = specification
- -- only needed at runtime as this get stored in a bytecode register
- stored[name] = specification
- if not specification.location then
- specification.location = v_page
- end
- return specification
-end
-
-function inserts.setup(name,settings)
- local specification = data[name]
- for k, v in next, settings do
- -- maybe trace change
- specification[k] = v
- end
- return specification
-end
-
-function inserts.setlocation(name,location) -- a practical fast one
- data[name].location = location
-end
-
-function inserts.getlocation(name,location)
- return data[name].location or v_page
-end
-
-function inserts.getdata(name) -- or number
- return data[name]
-end
-
-function inserts.getname(number)
- return data[name].name
-end
-
-function inserts.getnumber(name)
- return data[name].number
-end
-
--- interface
-
-commands.defineinsertion = inserts.define
-commands.setupinsertion = inserts.setup
-commands.setinsertionlocation = inserts.setlocation
-commands.insertionnumber = function(name) context(data[name].number or 0) end
-
+if not modules then modules = { } end modules ['page-ins'] = {
+ version = 1.001,
+ comment = "companion to page-mix.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+ -- public = {
+ -- functions = {
+ -- "inserts.define",
+ -- "inserts.getdata",
+ -- },
+ -- commands = {
+ -- "defineinsertion",
+ -- "inserttionnumber",
+ -- }
+ -- }
+}
+
+-- Maybe we should only register in lua and forget about the tex end.
+
+structures = structures or { }
+structures.inserts = structures.inserts or { }
+local inserts = structures.inserts
+
+local report_inserts = logs.reporter("inserts")
+
+local allocate = utilities.storage.allocate
+
+inserts.stored = inserts.stored or allocate { } -- combining them in one is inefficient in the
+inserts.data = inserts.data or allocate { } -- bytecode storage pool
+
+local variables = interfaces.variables
+local v_page = variables.page
+local v_columns = variables.columns
+local v_firstcolumn = variables.firstcolumn
+local v_lastcolumn = variables.lastcolumn
+local v_text = variables.text
+
+storage.register("structures/inserts/stored", inserts.stored, "structures.inserts.stored")
+
+local data = inserts.data
+local stored = inserts.stored
+
+for name, specification in next, stored do
+ data[specification.number] = specification
+ data[name] = specification
+end
+
+function inserts.define(name,specification)
+ specification.name = name
+ local number = specification.number or 0
+ data[name] = specification
+ data[number] = specification
+ -- only needed at runtime as this gets stored in a bytecode register
+ stored[name] = specification
+ if not specification.location then
+ specification.location = v_page
+ end
+ return specification
+end
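+
+-- A usage sketch (the name and number are invented, the fields mirror what
+-- the accessors below expect):
+--
+-- inserts.define("footnote", { number = 1 })
+-- inserts.getnumber("footnote")   -- 1
+-- inserts.getlocation("footnote") -- the v_page default set above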
+
+function inserts.setup(name,settings)
+ local specification = data[name]
+ for k, v in next, settings do
+ -- maybe trace change
+ specification[k] = v
+ end
+ return specification
+end
+
+function inserts.setlocation(name,location) -- a practical fast one
+ data[name].location = location
+end
+
+function inserts.getlocation(name,location)
+ return data[name].location or v_page
+end
+
+function inserts.getdata(name) -- or number
+ return data[name]
+end
+
+function inserts.getname(number)
+ return data[number].name
+end
+
+function inserts.getnumber(name)
+ return data[name].number
+end
+
+-- interface
+
+commands.defineinsertion = inserts.define
+commands.setupinsertion = inserts.setup
+commands.setinsertionlocation = inserts.setlocation
+commands.insertionnumber = function(name) context(data[name].number or 0) end
+
diff --git a/tex/context/base/page-lin.lua b/tex/context/base/page-lin.lua
index e6b500e8b..5f7ea7eed 100644
--- a/tex/context/base/page-lin.lua
+++ b/tex/context/base/page-lin.lua
@@ -1,290 +1,290 @@
-if not modules then modules = { } end modules ['page-lin'] = {
- version = 1.001,
- comment = "companion to page-lin.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- experimental -> will become builders
-
-local trace_numbers = false trackers.register("lines.numbers", function(v) trace_numbers = v end)
-
-local report_lines = logs.reporter("lines")
-
-local texbox = tex.box
-
-local attributes, nodes, node, context = attributes, nodes, node, context
-
-nodes.lines = nodes.lines or { }
-local lines = nodes.lines
-
-lines.data = lines.data or { } -- start step tag
-local data = lines.data
-local last = #data
-
-lines.scratchbox = lines.scratchbox or 0
-
-local leftmarginwidth = nodes.leftmarginwidth
-
-storage.register("lines/data", lines.data, "nodes.lines.data")
-
--- if there is demand for it, we can support multiple numbering streams
--- and use more than one attibute
-
-local variables = interfaces.variables
-
-local nodecodes = nodes.nodecodes
-
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local whatsit_code = nodecodes.whatsit
-
-local a_displaymath = attributes.private('displaymath')
-local a_linenumber = attributes.private('linenumber')
-local a_linereference = attributes.private('linereference')
-local a_verbatimline = attributes.private('verbatimline')
-
-local current_list = { }
-local cross_references = { }
-local chunksize = 250 -- not used in boxed
-
-local traverse_id = node.traverse_id
-local traverse = node.traverse
-local copy_node = node.copy
-local hpack_node = node.hpack
-local insert_node_after = node.insert_after
-local insert_node_before = node.insert_before
-
--- cross referencing
-
-function lines.number(n)
- n = tonumber(n)
- local cr = cross_references[n] or 0
- cross_references[n] = nil
- return cr
-end
-
-local function resolve(n,m) -- we can now check the 'line' flag (todo)
- while n do
- local id = n.id
- if id == whatsit_code then -- why whatsit
- local a = n[a_linereference]
- if a then
- cross_references[a] = m
- end
- elseif id == hlist_code or id == vlist_code then
- resolve(n.list,m)
- end
- n = n.next
- end
-end
-
-function lines.finalize(t)
- local getnumber = lines.number
- for _,p in next, t do
- for _,r in next, p do
- local m = r.metadata
- if m and m.kind == "line" then
- local e = r.entries
- local u = r.userdata
- e.linenumber = getnumber(e.text or 0) -- we can nil e.text
- e.conversion = u and u.conversion
- r.userdata = nil -- hack
- end
- end
- end
-end
-
-local filters = structures.references.filters
-local helpers = structures.helpers
-
-structures.references.registerfinalizer(lines.finalize)
-
-filters.line = filters.line or { }
-
-function filters.line.default(data)
--- helpers.title(data.entries.linenumber or "?",data.metadata)
- context.convertnumber(data.entries.conversion or "numbers",data.entries.linenumber or "0")
-end
-
-function filters.line.page(data,prefixspec,pagespec) -- redundant
- helpers.prefixpage(data,prefixspec,pagespec)
-end
-
-function filters.line.linenumber(data) -- raw
- context(data.entries.linenumber or "0")
-end
-
--- boxed variant, todo: use number mechanism
-
-lines.boxed = { }
-local boxed = lines.boxed
-
--- todo: cache setups, and free id no longer used
--- use interfaces.cachesetup(t)
-
-function boxed.register(configuration)
- last = last + 1
- data[last] = configuration
- if trace_numbers then
- report_lines("registering setup %a",last)
- end
- return last
-end
-
-function commands.registerlinenumbering(configuration)
- context(boxed.register(configuration))
-end
-
-function boxed.setup(n,configuration)
- local d = data[n]
- if d then
- if trace_numbers then
- report_lines("updating setup %a",n)
- end
- for k,v in next, configuration do
- d[k] = v
- end
- else
- if trace_numbers then
- report_lines("registering setup %a (br)",n)
- end
- data[n] = configuration
- end
- return n
-end
-
-commands.setuplinenumbering = boxed.setup
-
-local function check_number(n,a,skip,sameline)
- local d = data[a]
- if d then
- local tag, skipflag, s = d.tag or "", 0, d.start or 1
- current_list[#current_list+1] = { n, s }
- if sameline then
- skipflag = 0
- if trace_numbers then
- report_lines("skipping broken line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no")
- end
- elseif not skip and s % d.step == 0 then
- skipflag, d.start = 1, s + 1 -- (d.step or 1)
- if trace_numbers then
- report_lines("making number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no")
- end
- else
- skipflag, d.start = 0, s + 1 -- (d.step or 1)
- if trace_numbers then
- report_lines("skipping line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no")
- end
- end
- context.makelinenumber(tag,skipflag,s,n.shift,n.width,leftmarginwidth(n.list),n.dir)
- end
-end
-
--- xlist
--- xlist
--- hlist
-
-local function identify(list)
- if list then
- for n in traverse_id(hlist_code,list) do
- if n[a_linenumber] then
- return list
- end
- end
- local n = list
- while n do
- local id = n.id
- if id == hlist_code or id == vlist_code then
- local ok = identify(n.list)
- if ok then
- return ok
- end
- end
- n = n.next
- end
- end
-end
-
-function boxed.stage_zero(n)
- return identify(texbox[n].list)
-end
-
--- reset ranges per page
--- store first and last per page
--- maybe just set marks directly
-
-function boxed.stage_one(n,nested)
- current_list = { }
- local head = texbox[n]
- if head then
- local list = head.list
- if nested then
- list = identify(list)
- end
- local last_a, last_v, skip = nil, -1, false
- for n in traverse_id(hlist_code,list) do -- attr test here and quit as soon as zero found
- if n.height == 0 and n.depth == 0 then
- -- skip funny hlists -- todo: check line subtype
- else
- local list = n.list
- local a = list[a_linenumber]
- if a and a > 0 then
- if last_a ~= a then
- local da = data[a]
- local ma = da.method
- if ma == variables.next then
- skip = true
- elseif ma == variables.page then
- da.start = 1 -- eventually we will have a normal counter
- end
- last_a = a
- if trace_numbers then
- report_lines("starting line number range %s: start %s, continue",a,da.start,da.continue or "no")
- end
- end
- if n[a_displaymath] then
- if nodes.is_display_math(n) then
- check_number(n,a,skip)
- end
- else
- local v = list[a_verbatimline]
- if not v or v ~= last_v then
- last_v = v
- check_number(n,a,skip)
- else
- check_number(n,a,skip,true)
- end
- end
- skip = false
- end
- end
- end
- end
-end
-
-function boxed.stage_two(n,m)
- if #current_list > 0 then
- m = m or lines.scratchbox
- local t, tn = { }, 0
- for l in traverse_id(hlist_code,texbox[m].list) do
- tn = tn + 1
- t[tn] = copy_node(l)
- end
- for i=1,#current_list do
- local li = current_list[i]
- local n, m, ti = li[1], li[2], t[i]
- if ti then
- ti.next, n.list = n.list, ti
- resolve(n,m)
- else
- report_lines("error in linenumbering (1)")
- return
- end
- end
- end
-end
-
-commands.linenumbersstageone = boxed.stage_one
-commands.linenumbersstagetwo = boxed.stage_two
+if not modules then modules = { } end modules ['page-lin'] = {
+ version = 1.001,
+ comment = "companion to page-lin.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- experimental -> will become builders
+
+local trace_numbers = false trackers.register("lines.numbers", function(v) trace_numbers = v end)
+
+local report_lines = logs.reporter("lines")
+
+local texbox = tex.box
+
+local attributes, nodes, node, context = attributes, nodes, node, context
+
+nodes.lines = nodes.lines or { }
+local lines = nodes.lines
+
+lines.data = lines.data or { } -- start step tag
+local data = lines.data
+local last = #data
+
+lines.scratchbox = lines.scratchbox or 0
+
+local leftmarginwidth = nodes.leftmarginwidth
+
+storage.register("lines/data", lines.data, "nodes.lines.data")
+
+-- if there is demand for it, we can support multiple numbering streams
+-- and use more than one attribute
+
+local variables = interfaces.variables
+
+local nodecodes = nodes.nodecodes
+
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local whatsit_code = nodecodes.whatsit
+
+local a_displaymath = attributes.private('displaymath')
+local a_linenumber = attributes.private('linenumber')
+local a_linereference = attributes.private('linereference')
+local a_verbatimline = attributes.private('verbatimline')
+
+local current_list = { }
+local cross_references = { }
+local chunksize = 250 -- not used in boxed
+
+local traverse_id = node.traverse_id
+local traverse = node.traverse
+local copy_node = node.copy
+local hpack_node = node.hpack
+local insert_node_after = node.insert_after
+local insert_node_before = node.insert_before
+
+-- cross referencing
+
+function lines.number(n)
+ n = tonumber(n)
+ local cr = cross_references[n] or 0
+ cross_references[n] = nil
+ return cr
+end
+
+local function resolve(n,m) -- we can now check the 'line' flag (todo)
+ while n do
+ local id = n.id
+ if id == whatsit_code then -- why whatsit
+ local a = n[a_linereference]
+ if a then
+ cross_references[a] = m
+ end
+ elseif id == hlist_code or id == vlist_code then
+ resolve(n.list,m)
+ end
+ n = n.next
+ end
+end
+
+function lines.finalize(t)
+ local getnumber = lines.number
+ for _,p in next, t do
+ for _,r in next, p do
+ local m = r.metadata
+ if m and m.kind == "line" then
+ local e = r.entries
+ local u = r.userdata
+ e.linenumber = getnumber(e.text or 0) -- we can nil e.text
+ e.conversion = u and u.conversion
+ r.userdata = nil -- hack
+ end
+ end
+ end
+end
+
+local filters = structures.references.filters
+local helpers = structures.helpers
+
+structures.references.registerfinalizer(lines.finalize)
+
+filters.line = filters.line or { }
+
+function filters.line.default(data)
+-- helpers.title(data.entries.linenumber or "?",data.metadata)
+ context.convertnumber(data.entries.conversion or "numbers",data.entries.linenumber or "0")
+end
+
+function filters.line.page(data,prefixspec,pagespec) -- redundant
+ helpers.prefixpage(data,prefixspec,pagespec)
+end
+
+function filters.line.linenumber(data) -- raw
+ context(data.entries.linenumber or "0")
+end
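+
+-- These filters are called by the reference mechanism with a structure entry;
+-- as a sketch (the fields are the ones consulted above, the values invented):
+--
+-- filters.line.default { entries = { linenumber = "42", conversion = "numbers" } }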
+
+-- boxed variant, todo: use number mechanism
+
+lines.boxed = { }
+local boxed = lines.boxed
+
+-- todo: cache setups, and free id no longer used
+-- use interfaces.cachesetup(t)
+
+function boxed.register(configuration)
+ last = last + 1
+ data[last] = configuration
+ if trace_numbers then
+ report_lines("registering setup %a",last)
+ end
+ return last
+end
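+
+-- A sketch of a registration (the keys mirror those consulted in check_number
+-- and stage_one below, the values are invented):
+--
+-- local id = boxed.register { start = 1, step = 5, tag = "", method = "", continue = "no" }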
+
+function commands.registerlinenumbering(configuration)
+ context(boxed.register(configuration))
+end
+
+function boxed.setup(n,configuration)
+ local d = data[n]
+ if d then
+ if trace_numbers then
+ report_lines("updating setup %a",n)
+ end
+ for k,v in next, configuration do
+ d[k] = v
+ end
+ else
+ if trace_numbers then
+ report_lines("registering setup %a (br)",n)
+ end
+ data[n] = configuration
+ end
+ return n
+end
+
+commands.setuplinenumbering = boxed.setup
+
+local function check_number(n,a,skip,sameline)
+ local d = data[a]
+ if d then
+ local tag, skipflag, s = d.tag or "", 0, d.start or 1
+ current_list[#current_list+1] = { n, s }
+ if sameline then
+ skipflag = 0
+ if trace_numbers then
+ report_lines("skipping broken line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no")
+ end
+ elseif not skip and s % d.step == 0 then
+ skipflag, d.start = 1, s + 1 -- (d.step or 1)
+ if trace_numbers then
+ report_lines("making number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no")
+ end
+ else
+ skipflag, d.start = 0, s + 1 -- (d.step or 1)
+ if trace_numbers then
+ report_lines("skipping line number %s for setup %a: %s (%s)",#current_list,a,s,d.continue or "no")
+ end
+ end
+ context.makelinenumber(tag,skipflag,s,n.shift,n.width,leftmarginwidth(n.list),n.dir)
+ end
+end
+
+-- xlist
+-- xlist
+-- hlist
+
+local function identify(list)
+ if list then
+ for n in traverse_id(hlist_code,list) do
+ if n[a_linenumber] then
+ return list
+ end
+ end
+ local n = list
+ while n do
+ local id = n.id
+ if id == hlist_code or id == vlist_code then
+ local ok = identify(n.list)
+ if ok then
+ return ok
+ end
+ end
+ n = n.next
+ end
+ end
+end
+
+function boxed.stage_zero(n)
+ return identify(texbox[n].list)
+end
+
+-- reset ranges per page
+-- store first and last per page
+-- maybe just set marks directly
+
+function boxed.stage_one(n,nested)
+ current_list = { }
+ local head = texbox[n]
+ if head then
+ local list = head.list
+ if nested then
+ list = identify(list)
+ end
+ local last_a, last_v, skip = nil, -1, false
+ for n in traverse_id(hlist_code,list) do -- attr test here and quit as soon as zero found
+ if n.height == 0 and n.depth == 0 then
+ -- skip funny hlists -- todo: check line subtype
+ else
+ local list = n.list
+ local a = list[a_linenumber]
+ if a and a > 0 then
+ if last_a ~= a then
+ local da = data[a]
+ local ma = da.method
+ if ma == variables.next then
+ skip = true
+ elseif ma == variables.page then
+ da.start = 1 -- eventually we will have a normal counter
+ end
+ last_a = a
+ if trace_numbers then
+ report_lines("starting line number range %s: start %s, continue",a,da.start,da.continue or "no")
+ end
+ end
+ if n[a_displaymath] then
+ if nodes.is_display_math(n) then
+ check_number(n,a,skip)
+ end
+ else
+ local v = list[a_verbatimline]
+ if not v or v ~= last_v then
+ last_v = v
+ check_number(n,a,skip)
+ else
+ check_number(n,a,skip,true)
+ end
+ end
+ skip = false
+ end
+ end
+ end
+ end
+end
+
+function boxed.stage_two(n,m)
+ if #current_list > 0 then
+ m = m or lines.scratchbox
+ local t, tn = { }, 0
+ for l in traverse_id(hlist_code,texbox[m].list) do
+ tn = tn + 1
+ t[tn] = copy_node(l)
+ end
+ for i=1,#current_list do
+ local li = current_list[i]
+ local n, m, ti = li[1], li[2], t[i]
+ if ti then
+ ti.next, n.list = n.list, ti
+ resolve(n,m)
+ else
+ report_lines("error in linenumbering (1)")
+ return
+ end
+ end
+ end
+end
+
+commands.linenumbersstageone = boxed.stage_one
+commands.linenumbersstagetwo = boxed.stage_two
diff --git a/tex/context/base/page-mix.lua b/tex/context/base/page-mix.lua
index cf0094787..999427b8f 100644
--- a/tex/context/base/page-mix.lua
+++ b/tex/context/base/page-mix.lua
@@ -1,695 +1,695 @@
-if not modules then modules = { } end modules ["page-mix"] = {
- version = 1.001,
- comment = "companion to page-mix.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- inserts.getname(name)
-
--- local node, tex = node, tex
--- local nodes, interfaces, utilities = nodes, interfaces, utilities
--- local trackers, logs, storage = trackers, logs, storage
--- local number, table = number, table
-
-local concat = table.concat
-
-local nodecodes = nodes.nodecodes
-local gluecodes = nodes.gluecodes
-local nodepool = nodes.pool
-
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local kern_code = nodecodes.kern
-local glue_code = nodecodes.glue
-local penalty_code = nodecodes.penalty
-local insert_code = nodecodes.ins
-local mark_code = nodecodes.mark
-
-local new_hlist = nodepool.hlist
-local new_vlist = nodepool.vlist
-local new_glue = nodepool.glue
-
-local hpack = node.hpack
-local vpack = node.vpack
-local freenode = node.free
-
-local texbox = tex.box
-local texskip = tex.skip
-local texdimen = tex.dimen
-local points = number.points
-local settings_to_hash = utilities.parsers.settings_to_hash
-
-local variables = interfaces.variables
-local v_yes = variables.yes
-local v_global = variables["global"]
-local v_local = variables["local"]
-local v_columns = variables.columns
-
-local trace_state = false trackers.register("mixedcolumns.trace", function(v) trace_state = v end)
-local trace_detail = false trackers.register("mixedcolumns.detail", function(v) trace_detail = v end)
-
-local report_state = logs.reporter("mixed columns")
-
-pagebuilders = pagebuilders or { }
-pagebuilders.mixedcolumns = pagebuilders.mixedcolumns or { }
-local mixedcolumns = pagebuilders.mixedcolumns
-
-local forcedbreak = -123
-
--- initializesplitter(specification)
--- cleanupsplitter()
-
--- Inserts complicate matters a lot. In order to deal with them well, we need to
--- distinguish several cases.
---
--- (1) full page columns: firstcolumn, columns, lastcolumn, page
--- (2) mid page columns : firstcolumn, columns, lastcolumn, page
---
--- We need to collect them accordingly.
-
-local function collectinserts(result,nxt,nxtid)
- local inserts, currentskips, nextskips, inserttotal = { }, 0, 0, 0
- while nxt do
- if nxtid == insert_code then
- inserttotal = inserttotal + nxt.height + nxt.depth
- local s = nxt.subtype
- local c = inserts[s]
- if not c then
- c = { }
- inserts[s] = c
- local width = texskip[s].width
- if not result.inserts[s] then
- currentskips = currentskips + width
- end
- nextskips = nextskips + width
- end
- c[#c+1] = nxt
- if trace_detail then
- report_state("insert of class %s found",s)
- end
- elseif nxtid == mark_code then
- if trace_detail then
- report_state("mark found")
- end
- else
- break
- end
- nxt = nxt.next
- if nxt then
- nxtid = nxt.id
- else
- break
- end
- end
- return nxt, inserts, currentskips, nextskips, inserttotal
-end
-
-local function appendinserts(ri,inserts)
- for class, collected in next, inserts do
- local ric = ri[class]
- if not ric then
- -- assign to collected
- ri[class] = collected
- else
- -- append to collected
- for j=1,#collected do
- ric[#ric+1] = collected[j]
- end
- end
- end
-end
-
-local function discardtopglue(current,discarded)
- local size = 0
- while current do
- local id = current.id
- if id == glue_code then
- size = size + current.spec.width
- discarded[#discarded+1] = current
- current = current.next
- elseif id == penalty_code then
- if current.penalty == forcedbreak then
- discarded[#discarded+1] = current
- current = current.next
- while current do
- local id = current.id
- if id == glue_code then
- size = size + current.spec.width
- discarded[#discarded+1] = current
- current = current.next
- else
- break
- end
- end
- else
- discarded[#discarded+1] = current
- current = current.next
- end
- else
- break
- end
- end
- return current, size
-end
-
-local function stripbottomglue(results,discarded)
- local height = 0
- for i=1,#results do
- local r = results[i]
- local t = r.tail
- while t and t ~= r.head do
- local prev = t.prev
- if not prev then
- break
- end
- local id = t.id
- if id == penalty_code then
- if t.penalty == forcedbreak then
- break
- else
- discarded[#discarded+1] = t
- r.tail = prev
- t = prev
- end
- elseif id == glue_code then
- discarded[#discarded+1] = t
- local width = t.spec.width
- if trace_state then
- report_state("columns %s, discarded bottom glue %p",i,width)
- end
- r.height = r.height - width
- r.tail = prev
- t = prev
- else
- break
- end
- end
- if r.height > height then
- height = r.height
- end
- end
- return height
-end
-
-local function setsplit(specification) -- a rather large function
- local box = specification.box
- if not box then
- report_state("fatal error, no box")
- return
- end
- local list = texbox[box]
- if not list then
- report_state("fatal error, no list")
- return
- end
- local head = list.head or specification.originalhead
- if not head then
- report_state("fatal error, no head")
- return
- end
- local discarded = { }
- local originalhead = head
- local originalwidth = specification.originalwidth or list.width
- local originalheight = specification.originalheight or list.height
- local current = head
- local skipped = 0
- local height = 0
- local depth = 0
- local skip = 0
- local options = settings_to_hash(specification.option or "")
- local stripbottom = specification.alternative == v_local
- local cycle = specification.cycle or 1
- local nofcolumns = specification.nofcolumns or 1
- if nofcolumns == 0 then
- nofcolumns = 1
- end
- local preheight = specification.preheight or 0
- local extra = specification.extra or 0
- local maxheight = specification.maxheight
- local optimal = originalheight/nofcolumns
- if specification.balance ~= v_yes then
- optimal = maxheight
- end
- local target = optimal + extra
- local overflow = target > maxheight - preheight
- local threshold = specification.threshold or 0
- if overflow then
- target = maxheight - preheight
- end
- if trace_state then
- report_state("cycle %s, maxheight %p, preheight %p, target %p, overflow %a, extra %p",
- cycle, maxheight, preheight , target, overflow, extra)
- end
- local results = { }
- for i=1,nofcolumns do
- results[i] = {
- head = false,
- tail = false,
- height = 0,
- depth = 0,
- inserts = { },
- delta = 0,
- }
- end
- local column = 1
- local line = 0
- local result = results[column]
- local lasthead = nil
- local rest = nil
- local function gotonext()
- if head == lasthead then
- if trace_state then
- report_state("empty column %s, needs more work",column)
- end
- rest = current
- return false, 0
- else
- lasthead = head
- result.head = head
- if current == head then
- result.tail = head
- else
- result.tail = current.prev
- end
- result.height = height
- result.depth = depth
- end
- head = current
- height = 0
- depth = 0
- if column == nofcolumns then
- column = 0 -- nicer in trace
- rest = head
- -- lasthead = head
- return false, 0
- else
- local skipped
- column = column + 1
- result = results[column]
- current, skipped = discardtopglue(current,discarded)
- head = current
- -- lasthead = head
- return true, skipped
- end
- end
- local function checked(advance,where)
- local total = skip + height + depth + advance
- local delta = total - target
- local state = "same"
- local okay = false
- local skipped = 0
- local curcol = column
- if delta > threshold then
- result.delta = delta
- okay, skipped = gotonext()
- if okay then
- state = "next"
- else
- state = "quit"
- end
- end
- if trace_detail then
- report_state("%-7s > column %s, delta %p, threshold %p, advance %p, total %p, target %p, discarded %p => %a (height %p, depth %p, skip %p)",
- where,curcol,delta,threshold,advance,total,target,state,skipped,height,depth,skip)
- end
- return state, skipped
- end
- current, skipped = discardtopglue(current,discarded)
- if trace_detail and skipped ~= 0 then
- report_state("check > column 1, discarded %p",skipped)
- end
- head = current
- while current do
- local id = current.id
- local nxt = current.next
-local lastcolumn = column
- if id == hlist_code or id == vlist_code then
- line = line + 1
- local nxtid = nxt and nxt.id
- local inserts, currentskips, nextskips, inserttotal = nil, 0, 0, 0
- local advance = current.height -- + current.depth
- if nxt and (nxtid == insert_code or nxtid == mark_code) then
- nxt, inserts, localskips, insertskips, inserttotal = collectinserts(result,nxt,nxtid)
- end
- local state, skipped = checked(advance+inserttotal+currentskips,"line")
- if trace_state then
- report_state("%-7s > column %s, state %a, line %s, advance %p, insert %p, height %p","line",column,state,line,advance,inserttotal,height)
- if skipped ~= 0 then
- report_state("%-7s > column %s, discarded %p","line",column,skipped)
- end
- end
- if state == "quit" then
- break
- else
- height = height + depth + skip + advance + inserttotal
- if state == "next" then
- height = height + nextskips
- else
- height = height + currentskips
- end
- end
- depth = current.depth
- skip = 0
- if inserts then
- appendinserts(result.inserts,inserts)
- end
- elseif id == glue_code then
- local advance = current.spec.width
- if advance ~= 0 then
- local state, skipped = checked(advance,"glue")
- if trace_state then
- report_state("%-7s > column %s, state %a, advance %p, height %p","glue",column,state,advance,height)
- if skipped ~= 0 then
- report_state("%-7s > column %s, discarded %p","glue",column,skipped)
- end
- end
- if state == "quit" then
- break
- end
- height = height + depth + skip
- depth = 0
- skip = height > 0 and advance or 0
- end
- elseif id == kern_code then
- local advance = current.kern
- if advance ~= 0 then
- local state, skipped = checked(advance,"kern")
- if trace_state then
- report_state("%-7s > column %s, state %a, advance %p, height %p, state %a","kern",column,state,advance,height)
- if skipped ~= 0 then
- report_state("%-7s > column %s, discarded %p","kern",column,skipped)
- end
- end
- if state == "quit" then
- break
- end
- height = height + depth + skip + advance
- depth = 0
- skip = 0
- end
- elseif id == penalty_code then
- local penalty = current.penalty
- if penalty == 0 then
- -- don't bother
- elseif penalty == forcedbreak then
- local okay, skipped = gotonext()
- if okay then
- if trace_state then
- report_state("cycle: %s, forced column break (same page)",cycle)
- if skipped ~= 0 then
- report_state("%-7s > column %s, discarded %p","penalty",column,skipped)
- end
- end
- else
- if trace_state then
- report_state("cycle: %s, forced column break (next page)",cycle)
- if skipped ~= 0 then
- report_state("%-7s > column %s, discarded %p","penalty",column,skipped)
- end
- end
- break
- end
- else
- -- todo: nobreak etc ... we might need to backtrack so we need to remember
- -- the last acceptable break
- -- club and widow and such i.e. resulting penalties (if we care)
- end
- end
-if lastcolumn == column then
- nxt = current.next -- can have changed
-end
- if nxt then
- current = nxt
- elseif head == lasthead then
- -- to be checked but break needed as otherwise we have a loop
- if trace_state then
- report_state("quit as head is lasthead")
- end
- break
- else
- local r = results[column]
- r.head = head
- r.tail = current
- r.height = height
- r.depth = depth
- break
- end
- end
- if not current then
- if trace_state then
- report_state("nilling rest")
- end
- rest = nil
- elseif rest == lasthead then
- if trace_state then
- report_state("nilling rest as rest is lasthead")
- end
- rest = nil
- end
-
- if stripbottom then
- local height = stripbottomglue(results,discarded)
- if height > 0 then
- target = height
- end
- end
-
- specification.results = results
- specification.height = target
- specification.originalheight = originalheight
- specification.originalwidth = originalwidth
- specification.originalhead = originalhead
- specification.targetheight = target or 0
- specification.rest = rest
- specification.overflow = overflow
- specification.discarded = discarded
-
- texbox[specification.box].head = nil
-
- return specification
-end
-
-function mixedcolumns.finalize(result)
- if result then
- local results = result.results
- for i=1,result.nofcolumns do
- local r = results[i]
- local h = r.head
- if h then
- h.prev = nil
- local t = r.tail
- if t then
- t.next = nil
- else
- h.next = nil
- r.tail = h
- end
- for c, list in next, r.inserts do
- local t = { }
- for i=1,#list do
- local l = list[i]
- local h = new_hlist()
- t[i] = h
- h.head = l.head
- h.height = l.height
- h.depth = l.depth
- l.head = nil
- end
- t[1].prev = nil -- needs checking
- t[#t].next = nil -- needs checking
- r.inserts[c] = t
- end
- end
- end
- end
-end
-
-local splitruns = 0
-
-local function report_deltas(result,str)
- local t = { }
- for i=1,result.nofcolumns do
- t[#t+1] = points(result.results[i].delta or 0)
- end
- report_state("%s, cycles %s, deltas % | t",str,result.cycle or 1,t)
-end
-
-function mixedcolumns.setsplit(specification)
- splitruns = splitruns + 1
- if trace_state then
- report_state("split run %s",splitruns)
- end
- local result = setsplit(specification)
- if result then
- if result.overflow then
- if trace_state then
- report_deltas(result,"overflow")
- end
- -- we might have some rest
- elseif result.rest and specification.balance == v_yes then
- local step = specification.step or 65536*2
- local cycle = 1
- local cycles = specification.cycles or 100
- while result.rest and cycle <= cycles do
- specification.extra = cycle * step
- result = setsplit(specification) or result
- if trace_state then
- report_state("cycle: %s.%s, original height %p, total height %p",
- splitruns,cycle,result.originalheight,result.nofcolumns*result.targetheight)
- end
- cycle = cycle + 1
- specification.cycle = cycle
- end
- if cycle > cycles then
- report_deltas(result,"too many balancing cycles")
- elseif trace_state then
- report_deltas(result,"balanced")
- end
- elseif trace_state then
- report_deltas(result,"done")
- end
- return result
- elseif trace_state then
- report_state("no result")
- end
-end
-
-local topskip_code = gluecodes.topskip
-local baselineskip_code = gluecodes.baselineskip
-
-function mixedcolumns.getsplit(result,n)
- if not result then
- report_state("flush, column %s, no result",n)
- return
- end
- local r = result.results[n]
- if not r then
- report_state("flush, column %s, empty",n)
- end
- local h = r.head
- if not h then
- return new_glue(result.originalwidth)
- end
-
- h.prev = nil -- move up
- local strutht = result.strutht
- local strutdp = result.strutdp
- local lineheight = strutht + strutdp
-
- local v = new_vlist()
- v.head = h
-
- -- local v = vpack(h,"exactly",height)
-
- if result.alternative == v_global then -- option
- result.height = result.maxheight
- end
-
- local ht = 0
- local dp = 0
- local wd = result.originalwidth
-
- local grid = result.grid
-
- if grid then
- ht = lineheight * math.ceil(result.height/lineheight) - strutdp
- dp = strutdp
- else
- ht = result.height
- dp = result.depth
- end
-
- v.width = wd
- v.height = ht
- v.depth = dp
-
- if trace_state then
- local id = h.id
- if id == hlist_code then
- report_state("flush, column %s, grid %a, width %p, height %p, depth %p, %s: %s",n,grid,wd,ht,dp,"top line",nodes.toutf(h.list))
- else
- report_state("flush, column %s, grid %a, width %p, height %p, depth %p, %s: %s",n,grid,wd,ht,dp,"head node",nodecodes[id])
- end
- end
-
- for c, list in next, r.inserts do
- -- tex.setbox("global",c,vpack(nodes.concat(list)))
- -- tex.setbox(c,vpack(nodes.concat(list)))
- texbox[c] = vpack(nodes.concat(list))
- r.inserts[c] = nil
- end
-
- return v
-end
-
-function mixedcolumns.getrest(result)
- local rest = result and result.rest
- result.rest = nil -- to be sure
- return rest
-end
-
-function mixedcolumns.getlist(result)
- local originalhead = result and result.originalhead
- result.originalhead = nil -- to be sure
- return originalhead
-end
-
-function mixedcolumns.cleanup(result)
- local discarded = result.discarded
- for i=1,#discarded do
- freenode(discarded[i])
- end
- result.discarded = { }
-end
-
--- interface --
-
-local result
-
-function commands.mixsetsplit(specification)
- if result then
- for k, v in next, specification do
- result[k] = v
- end
- result = mixedcolumns.setsplit(result)
- else
- result = mixedcolumns.setsplit(specification)
- end
-end
-
-function commands.mixgetsplit(n)
- if result then
- context(mixedcolumns.getsplit(result,n))
- end
-end
-
-function commands.mixfinalize()
- if result then
- mixedcolumns.finalize(result)
- end
-end
-
-function commands.mixflushrest()
- if result then
- context(mixedcolumns.getrest(result))
- end
-end
-
-function commands.mixflushlist()
- if result then
- context(mixedcolumns.getlist(result))
- end
-end
-
-function commands.mixstate()
- context(result and result.rest and 1 or 0)
-end
-
-function commands.mixcleanup()
- if result then
- mixedcolumns.cleanup(result)
- result = nil
- end
-end
+if not modules then modules = { } end modules ["page-mix"] = {
+ version = 1.001,
+ comment = "companion to page-mix.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- inserts.getname(name)
+
+-- local node, tex = node, tex
+-- local nodes, interfaces, utilities = nodes, interfaces, utilities
+-- local trackers, logs, storage = trackers, logs, storage
+-- local number, table = number, table
+
+local concat = table.concat
+
+local nodecodes = nodes.nodecodes
+local gluecodes = nodes.gluecodes
+local nodepool = nodes.pool
+
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local kern_code = nodecodes.kern
+local glue_code = nodecodes.glue
+local penalty_code = nodecodes.penalty
+local insert_code = nodecodes.ins
+local mark_code = nodecodes.mark
+
+local new_hlist = nodepool.hlist
+local new_vlist = nodepool.vlist
+local new_glue = nodepool.glue
+
+local hpack = node.hpack
+local vpack = node.vpack
+local freenode = node.free
+
+local texbox = tex.box
+local texskip = tex.skip
+local texdimen = tex.dimen
+local points = number.points
+local settings_to_hash = utilities.parsers.settings_to_hash
+
+local variables = interfaces.variables
+local v_yes = variables.yes
+local v_global = variables["global"]
+local v_local = variables["local"]
+local v_columns = variables.columns
+
+local trace_state = false trackers.register("mixedcolumns.trace", function(v) trace_state = v end)
+local trace_detail = false trackers.register("mixedcolumns.detail", function(v) trace_detail = v end)
+
+local report_state = logs.reporter("mixed columns")
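+
+-- The two trackers above follow the usual pattern: they stay disabled until
+-- switched on at runtime, for instance (just an illustration, not something
+-- this file needs) with:
+--
+--   trackers.enable("mixedcolumns.trace")  -- or "mixedcolumns.*" for both
+--
+-- which makes report_state show what the splitter decides per column.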
+
+pagebuilders = pagebuilders or { }
+pagebuilders.mixedcolumns = pagebuilders.mixedcolumns or { }
+local mixedcolumns = pagebuilders.mixedcolumns
+
+local forcedbreak = -123
+
+-- initializesplitter(specification)
+-- cleanupsplitter()
+
+-- Inserts complicate matters a lot. In order to deal with them well, we need to
+-- distinguish several cases.
+--
+-- (1) full page columns: firstcolumn, columns, lastcolumn, page
+-- (2) mid page columns : firstcolumn, columns, lastcolumn, page
+--
+-- We need to collect them accordingly.
+
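+-- The collector below returns five values: the node after the insert/mark run,
+-- the collected inserts per class, the class skip that still has to be added
+-- when the line stays in the current column, the skip that applies once we
+-- move to the next column, and the total height+depth of the inserts.
+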
+local function collectinserts(result,nxt,nxtid)
+ local inserts, currentskips, nextskips, inserttotal = { }, 0, 0, 0
+ while nxt do
+ if nxtid == insert_code then
+ inserttotal = inserttotal + nxt.height + nxt.depth
+ local s = nxt.subtype
+ local c = inserts[s]
+ if not c then
+ c = { }
+ inserts[s] = c
+ local width = texskip[s].width
+ if not result.inserts[s] then
+ currentskips = currentskips + width
+ end
+ nextskips = nextskips + width
+ end
+ c[#c+1] = nxt
+ if trace_detail then
+ report_state("insert of class %s found",s)
+ end
+ elseif nxtid == mark_code then
+ if trace_detail then
+ report_state("mark found")
+ end
+ else
+ break
+ end
+ nxt = nxt.next
+ if nxt then
+ nxtid = nxt.id
+ else
+ break
+ end
+ end
+ return nxt, inserts, currentskips, nextskips, inserttotal
+end
+
+local function appendinserts(ri,inserts)
+ for class, collected in next, inserts do
+ local ric = ri[class]
+ if not ric then
+ -- assign to collected
+ ri[class] = collected
+ else
+ -- append to collected
+ for j=1,#collected do
+ ric[#ric+1] = collected[j]
+ end
+ end
+ end
+end
+
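+-- discardtopglue drops leading glue and penalties at the top of a fresh column
+-- (including the glue that follows a forced break) and moves them to the shared
+-- discarded list; it returns the new head plus the total glue width removed.
+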
+local function discardtopglue(current,discarded)
+ local size = 0
+ while current do
+ local id = current.id
+ if id == glue_code then
+ size = size + current.spec.width
+ discarded[#discarded+1] = current
+ current = current.next
+ elseif id == penalty_code then
+ if current.penalty == forcedbreak then
+ discarded[#discarded+1] = current
+ current = current.next
+ while current do
+ local id = current.id
+ if id == glue_code then
+ size = size + current.spec.width
+ discarded[#discarded+1] = current
+ current = current.next
+ else
+ break
+ end
+ end
+ else
+ discarded[#discarded+1] = current
+ current = current.next
+ end
+ else
+ break
+ end
+ end
+ return current, size
+end
+
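+-- stripbottomglue does the opposite at the column ends: trailing glue and
+-- penalties (up to a forced break) are discarded, the recorded column heights
+-- are lowered accordingly, and the largest remaining height is returned.
+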
+local function stripbottomglue(results,discarded)
+ local height = 0
+ for i=1,#results do
+ local r = results[i]
+ local t = r.tail
+ while t and t ~= r.head do
+ local prev = t.prev
+ if not prev then
+ break
+ end
+ local id = t.id
+ if id == penalty_code then
+ if t.penalty == forcedbreak then
+ break
+ else
+ discarded[#discarded+1] = t
+ r.tail = prev
+ t = prev
+ end
+ elseif id == glue_code then
+ discarded[#discarded+1] = t
+ local width = t.spec.width
+ if trace_state then
+ report_state("columns %s, discarded bottom glue %p",i,width)
+ end
+ r.height = r.height - width
+ r.tail = prev
+ t = prev
+ else
+ break
+ end
+ end
+ if r.height > height then
+ height = r.height
+ end
+ end
+ return height
+end
+
+local function setsplit(specification) -- a rather large function
+ local box = specification.box
+ if not box then
+ report_state("fatal error, no box")
+ return
+ end
+ local list = texbox[box]
+ if not list then
+ report_state("fatal error, no list")
+ return
+ end
+ local head = list.head or specification.originalhead
+ if not head then
+ report_state("fatal error, no head")
+ return
+ end
+ local discarded = { }
+ local originalhead = head
+ local originalwidth = specification.originalwidth or list.width
+ local originalheight = specification.originalheight or list.height
+ local current = head
+ local skipped = 0
+ local height = 0
+ local depth = 0
+ local skip = 0
+ local options = settings_to_hash(specification.option or "")
+ local stripbottom = specification.alternative == v_local
+ local cycle = specification.cycle or 1
+ local nofcolumns = specification.nofcolumns or 1
+ if nofcolumns == 0 then
+ nofcolumns = 1
+ end
+ local preheight = specification.preheight or 0
+ local extra = specification.extra or 0
+ local maxheight = specification.maxheight
+ local optimal = originalheight/nofcolumns
+ if specification.balance ~= v_yes then
+ optimal = maxheight
+ end
+ local target = optimal + extra
+ local overflow = target > maxheight - preheight
+ local threshold = specification.threshold or 0
+ if overflow then
+ target = maxheight - preheight
+ end
+ if trace_state then
+ report_state("cycle %s, maxheight %p, preheight %p, target %p, overflow %a, extra %p",
+ cycle, maxheight, preheight , target, overflow, extra)
+ end
+ local results = { }
+ for i=1,nofcolumns do
+ results[i] = {
+ head = false,
+ tail = false,
+ height = 0,
+ depth = 0,
+ inserts = { },
+ delta = 0,
+ }
+ end
+ local column = 1
+ local line = 0
+ local result = results[column]
+ local lasthead = nil
+ local rest = nil
+ local function gotonext()
+ if head == lasthead then
+ if trace_state then
+ report_state("empty column %s, needs more work",column)
+ end
+ rest = current
+ return false, 0
+ else
+ lasthead = head
+ result.head = head
+ if current == head then
+ result.tail = head
+ else
+ result.tail = current.prev
+ end
+ result.height = height
+ result.depth = depth
+ end
+ head = current
+ height = 0
+ depth = 0
+ if column == nofcolumns then
+ column = 0 -- nicer in trace
+ rest = head
+ -- lasthead = head
+ return false, 0
+ else
+ local skipped
+ column = column + 1
+ result = results[column]
+ current, skipped = discardtopglue(current,discarded)
+ head = current
+ -- lasthead = head
+ return true, skipped
+ end
+ end
+ local function checked(advance,where)
+ local total = skip + height + depth + advance
+ local delta = total - target
+ local state = "same"
+ local okay = false
+ local skipped = 0
+ local curcol = column
+ if delta > threshold then
+ result.delta = delta
+ okay, skipped = gotonext()
+ if okay then
+ state = "next"
+ else
+ state = "quit"
+ end
+ end
+ if trace_detail then
+ report_state("%-7s > column %s, delta %p, threshold %p, advance %p, total %p, target %p, discarded %p => %a (height %p, depth %p, skip %p)",
+                where,curcol,delta,threshold,advance,total,target,skipped,state,height,depth,skip)
+ end
+ return state, skipped
+ end
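+    -- checked() measures whether some upcoming advance still fits: when
+    -- skip+height+depth+advance exceeds the target by more than the threshold
+    -- we move on to the next column ("next") or, if no column is left, give up
+    -- ("quit"); otherwise the material stays where it is ("same").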
+ current, skipped = discardtopglue(current,discarded)
+ if trace_detail and skipped ~= 0 then
+ report_state("check > column 1, discarded %p",skipped)
+ end
+ head = current
+ while current do
+ local id = current.id
+ local nxt = current.next
+        local lastcolumn = column
+ if id == hlist_code or id == vlist_code then
+ line = line + 1
+ local nxtid = nxt and nxt.id
+ local inserts, currentskips, nextskips, inserttotal = nil, 0, 0, 0
+ local advance = current.height -- + current.depth
+ if nxt and (nxtid == insert_code or nxtid == mark_code) then
+                nxt, inserts, currentskips, nextskips, inserttotal = collectinserts(result,nxt,nxtid)
+ end
+ local state, skipped = checked(advance+inserttotal+currentskips,"line")
+ if trace_state then
+ report_state("%-7s > column %s, state %a, line %s, advance %p, insert %p, height %p","line",column,state,line,advance,inserttotal,height)
+ if skipped ~= 0 then
+ report_state("%-7s > column %s, discarded %p","line",column,skipped)
+ end
+ end
+ if state == "quit" then
+ break
+ else
+ height = height + depth + skip + advance + inserttotal
+ if state == "next" then
+ height = height + nextskips
+ else
+ height = height + currentskips
+ end
+ end
+ depth = current.depth
+ skip = 0
+ if inserts then
+ appendinserts(result.inserts,inserts)
+ end
+ elseif id == glue_code then
+ local advance = current.spec.width
+ if advance ~= 0 then
+ local state, skipped = checked(advance,"glue")
+ if trace_state then
+ report_state("%-7s > column %s, state %a, advance %p, height %p","glue",column,state,advance,height)
+ if skipped ~= 0 then
+ report_state("%-7s > column %s, discarded %p","glue",column,skipped)
+ end
+ end
+ if state == "quit" then
+ break
+ end
+ height = height + depth + skip
+ depth = 0
+ skip = height > 0 and advance or 0
+ end
+ elseif id == kern_code then
+ local advance = current.kern
+ if advance ~= 0 then
+ local state, skipped = checked(advance,"kern")
+ if trace_state then
+                    report_state("%-7s > column %s, state %a, advance %p, height %p","kern",column,state,advance,height)
+ if skipped ~= 0 then
+ report_state("%-7s > column %s, discarded %p","kern",column,skipped)
+ end
+ end
+ if state == "quit" then
+ break
+ end
+ height = height + depth + skip + advance
+ depth = 0
+ skip = 0
+ end
+ elseif id == penalty_code then
+ local penalty = current.penalty
+ if penalty == 0 then
+ -- don't bother
+ elseif penalty == forcedbreak then
+ local okay, skipped = gotonext()
+ if okay then
+ if trace_state then
+ report_state("cycle: %s, forced column break (same page)",cycle)
+ if skipped ~= 0 then
+ report_state("%-7s > column %s, discarded %p","penalty",column,skipped)
+ end
+ end
+ else
+ if trace_state then
+ report_state("cycle: %s, forced column break (next page)",cycle)
+ if skipped ~= 0 then
+ report_state("%-7s > column %s, discarded %p","penalty",column,skipped)
+ end
+ end
+ break
+ end
+ else
+ -- todo: nobreak etc ... we might need to backtrack so we need to remember
+ -- the last acceptable break
+ -- club and widow and such i.e. resulting penalties (if we care)
+ end
+ end
+        if lastcolumn == column then
+            nxt = current.next -- can have changed
+        end
+ if nxt then
+ current = nxt
+ elseif head == lasthead then
+ -- to be checked but break needed as otherwise we have a loop
+ if trace_state then
+ report_state("quit as head is lasthead")
+ end
+ break
+ else
+ local r = results[column]
+ r.head = head
+ r.tail = current
+ r.height = height
+ r.depth = depth
+ break
+ end
+ end
+ if not current then
+ if trace_state then
+ report_state("nilling rest")
+ end
+ rest = nil
+ elseif rest == lasthead then
+ if trace_state then
+ report_state("nilling rest as rest is lasthead")
+ end
+ rest = nil
+ end
+
+ if stripbottom then
+ local height = stripbottomglue(results,discarded)
+ if height > 0 then
+ target = height
+ end
+ end
+
+ specification.results = results
+ specification.height = target
+ specification.originalheight = originalheight
+ specification.originalwidth = originalwidth
+ specification.originalhead = originalhead
+ specification.targetheight = target or 0
+ specification.rest = rest
+ specification.overflow = overflow
+ specification.discarded = discarded
+
+ texbox[specification.box].head = nil
+
+ return specification
+end
+
+function mixedcolumns.finalize(result)
+ if result then
+ local results = result.results
+ for i=1,result.nofcolumns do
+ local r = results[i]
+ local h = r.head
+ if h then
+ h.prev = nil
+ local t = r.tail
+ if t then
+ t.next = nil
+ else
+ h.next = nil
+ r.tail = h
+ end
+ for c, list in next, r.inserts do
+ local t = { }
+ for i=1,#list do
+ local l = list[i]
+ local h = new_hlist()
+ t[i] = h
+ h.head = l.head
+ h.height = l.height
+ h.depth = l.depth
+ l.head = nil
+ end
+ t[1].prev = nil -- needs checking
+ t[#t].next = nil -- needs checking
+ r.inserts[c] = t
+ end
+ end
+ end
+ end
+end
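+
+-- finalize (above) disconnects each column's head and tail from the original
+-- list and repacks the collected inserts of every class as hlists, so that
+-- getsplit can later vpack them into the insert boxes.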
+
+local splitruns = 0
+
+local function report_deltas(result,str)
+ local t = { }
+ for i=1,result.nofcolumns do
+ t[#t+1] = points(result.results[i].delta or 0)
+ end
+ report_state("%s, cycles %s, deltas % | t",str,result.cycle or 1,t)
+end
+
+function mixedcolumns.setsplit(specification)
+ splitruns = splitruns + 1
+ if trace_state then
+ report_state("split run %s",splitruns)
+ end
+ local result = setsplit(specification)
+ if result then
+ if result.overflow then
+ if trace_state then
+ report_deltas(result,"overflow")
+ end
+ -- we might have some rest
+ elseif result.rest and specification.balance == v_yes then
+ local step = specification.step or 65536*2
+ local cycle = 1
+ local cycles = specification.cycles or 100
+ while result.rest and cycle <= cycles do
+ specification.extra = cycle * step
+ result = setsplit(specification) or result
+ if trace_state then
+ report_state("cycle: %s.%s, original height %p, total height %p",
+ splitruns,cycle,result.originalheight,result.nofcolumns*result.targetheight)
+ end
+ cycle = cycle + 1
+ specification.cycle = cycle
+ end
+ if cycle > cycles then
+ report_deltas(result,"too many balancing cycles")
+ elseif trace_state then
+ report_deltas(result,"balanced")
+ end
+ elseif trace_state then
+ report_deltas(result,"done")
+ end
+ return result
+ elseif trace_state then
+ report_state("no result")
+ end
+end
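+
+-- A minimal calling sketch, pieced together from the fields that setsplit reads
+-- (the concrete values are only an illustration, not prescribed defaults):
+--
+--   local result = pagebuilders.mixedcolumns.setsplit {
+--       box        = 0,                        -- box register with the collected material
+--       nofcolumns = 2,
+--       maxheight  = tex.dimen.textheight,     -- assumed target height
+--       balance    = interfaces.variables.yes,
+--       strutht    = tex.dimen.strutht,        -- assumed strut dimensions, used
+--       strutdp    = tex.dimen.strutdp,        -- for grid snapping in getsplit
+--   }
+--
+-- after which getsplit(result,n) delivers column n and getrest(result) whatever
+-- did not fit.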
+
+local topskip_code = gluecodes.topskip
+local baselineskip_code = gluecodes.baselineskip
+
+function mixedcolumns.getsplit(result,n)
+ if not result then
+ report_state("flush, column %s, no result",n)
+ return
+ end
+ local r = result.results[n]
+ if not r then
+ report_state("flush, column %s, empty",n)
+ end
+ local h = r.head
+ if not h then
+ return new_glue(result.originalwidth)
+ end
+
+ h.prev = nil -- move up
+ local strutht = result.strutht
+ local strutdp = result.strutdp
+ local lineheight = strutht + strutdp
+
+ local v = new_vlist()
+ v.head = h
+
+ -- local v = vpack(h,"exactly",height)
+
+ if result.alternative == v_global then -- option
+ result.height = result.maxheight
+ end
+
+ local ht = 0
+ local dp = 0
+ local wd = result.originalwidth
+
+ local grid = result.grid
+
+ if grid then
+ ht = lineheight * math.ceil(result.height/lineheight) - strutdp
+ dp = strutdp
+ else
+ ht = result.height
+ dp = result.depth
+ end
+
+ v.width = wd
+ v.height = ht
+ v.depth = dp
+
+ if trace_state then
+ local id = h.id
+ if id == hlist_code then
+ report_state("flush, column %s, grid %a, width %p, height %p, depth %p, %s: %s",n,grid,wd,ht,dp,"top line",nodes.toutf(h.list))
+ else
+ report_state("flush, column %s, grid %a, width %p, height %p, depth %p, %s: %s",n,grid,wd,ht,dp,"head node",nodecodes[id])
+ end
+ end
+
+ for c, list in next, r.inserts do
+ -- tex.setbox("global",c,vpack(nodes.concat(list)))
+ -- tex.setbox(c,vpack(nodes.concat(list)))
+ texbox[c] = vpack(nodes.concat(list))
+ r.inserts[c] = nil
+ end
+
+ return v
+end
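+
+-- In grid mode the height is rounded up to whole lines: with a 12pt/3pt strut
+-- (lineheight 15pt) a column holding 100pt of material becomes ceil(100/15) = 7
+-- lines, so 7*15pt - 3pt = 102pt of height plus 3pt of depth, which keeps all
+-- columns on the same baseline grid.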
+
+function mixedcolumns.getrest(result)
+ local rest = result and result.rest
+ result.rest = nil -- to be sure
+ return rest
+end
+
+function mixedcolumns.getlist(result)
+ local originalhead = result and result.originalhead
+ result.originalhead = nil -- to be sure
+ return originalhead
+end
+
+function mixedcolumns.cleanup(result)
+ local discarded = result.discarded
+ for i=1,#discarded do
+ freenode(discarded[i])
+ end
+ result.discarded = { }
+end
+
+-- interface --
+
+local result
+
+function commands.mixsetsplit(specification)
+ if result then
+ for k, v in next, specification do
+ result[k] = v
+ end
+ result = mixedcolumns.setsplit(result)
+ else
+ result = mixedcolumns.setsplit(specification)
+ end
+end
+
+function commands.mixgetsplit(n)
+ if result then
+ context(mixedcolumns.getsplit(result,n))
+ end
+end
+
+function commands.mixfinalize()
+ if result then
+ mixedcolumns.finalize(result)
+ end
+end
+
+function commands.mixflushrest()
+ if result then
+ context(mixedcolumns.getrest(result))
+ end
+end
+
+function commands.mixflushlist()
+ if result then
+ context(mixedcolumns.getlist(result))
+ end
+end
+
+function commands.mixstate()
+ context(result and result.rest and 1 or 0)
+end
+
+function commands.mixcleanup()
+ if result then
+ mixedcolumns.cleanup(result)
+ result = nil
+ end
+end
diff --git a/tex/context/base/page-pst.lua b/tex/context/base/page-pst.lua
index 8586830cf..1256d4067 100644
--- a/tex/context/base/page-pst.lua
+++ b/tex/context/base/page-pst.lua
@@ -1,78 +1,78 @@
-if not modules then modules = { } end modules ['page-pst'] = {
- version = 1.001,
- comment = "companion to page-pst.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- todo: adapt message
-
-local format, validstring = string.format, string.valid
-local sortedkeys = table.sortedkeys
-
-local cache = { }
-
-local function flush(page)
- local c = cache[page]
- if c then
- for i=1,#c do
- context.viafile(c[i],format("page.%s",validstring(page,"nopage")))
- end
- cache[page] = nil
- end
-end
-
-local function setnextpage()
- local n = next(cache) and sortedkeys(cache)[1]
- if not n then
- n = 0 -- nothing in the cache
- elseif n == 0 then
- n = -1 -- generic buffer (0)
- elseif n > 0 then
- -- upcoming page (realpageno)
- end
- tex.setcount("global","c_page_postponed_blocks_next_page",n)
-end
-
-function commands.flushpostponedblocks(page)
- -- we need to flush previously pending pages as well and the zero
- -- slot is the generic one so that one is always flushed
- local t = sortedkeys(cache)
- local p = tonumber(page) or tex.count.realpageno or 0
- for i=1,#t do
- local ti = t[i]
- if ti <= p then
- flush(ti)
- else
- break
- end
- end
- setnextpage()
-end
-
-function commands.registerpostponedblock(page)
- if type(page) == "string" then
- if string.find(page,"^+") then
- page = tex.count.realpageno + (tonumber(page) or 1) -- future delta page
- else
- page = tonumber(page) or 0 -- preferred page or otherwise first possible occasion
- end
- end
- if not page then
- page = 0
- end
- local c = cache[page]
- if not c then
- c = { }
- cache[page] = c
- end
- c[#c+1] = buffers.raw("postponedblock")
- buffers.erase("postponedblock")
- if page == 0 then
- interfaces.showmessage("layouts",3,#c)
- else
- interfaces.showmessage("layouts",3,string.format("%s (realpage: %s)",#c,page))
- end
- setnextpage()
-end
+if not modules then modules = { } end modules ['page-pst'] = {
+ version = 1.001,
+ comment = "companion to page-pst.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- todo: adapt message
+
+local format, validstring = string.format, string.valid
+local sortedkeys = table.sortedkeys
+
+local cache = { }
+
+local function flush(page)
+ local c = cache[page]
+ if c then
+ for i=1,#c do
+ context.viafile(c[i],format("page.%s",validstring(page,"nopage")))
+ end
+ cache[page] = nil
+ end
+end
+
+local function setnextpage()
+ local n = next(cache) and sortedkeys(cache)[1]
+ if not n then
+ n = 0 -- nothing in the cache
+ elseif n == 0 then
+ n = -1 -- generic buffer (0)
+ elseif n > 0 then
+ -- upcoming page (realpageno)
+ end
+ tex.setcount("global","c_page_postponed_blocks_next_page",n)
+end
+
+function commands.flushpostponedblocks(page)
+ -- we need to flush previously pending pages as well and the zero
+ -- slot is the generic one so that one is always flushed
+ local t = sortedkeys(cache)
+ local p = tonumber(page) or tex.count.realpageno or 0
+ for i=1,#t do
+ local ti = t[i]
+ if ti <= p then
+ flush(ti)
+ else
+ break
+ end
+ end
+ setnextpage()
+end
+
+function commands.registerpostponedblock(page)
+ if type(page) == "string" then
+ if string.find(page,"^+") then
+ page = tex.count.realpageno + (tonumber(page) or 1) -- future delta page
+ else
+ page = tonumber(page) or 0 -- preferred page or otherwise first possible occasion
+ end
+ end
+ if not page then
+ page = 0
+ end
+ local c = cache[page]
+ if not c then
+ c = { }
+ cache[page] = c
+ end
+ c[#c+1] = buffers.raw("postponedblock")
+ buffers.erase("postponedblock")
+ if page == 0 then
+ interfaces.showmessage("layouts",3,#c)
+ else
+ interfaces.showmessage("layouts",3,string.format("%s (realpage: %s)",#c,page))
+ end
+ setnextpage()
+end
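+
+-- Note on the page argument of registerpostponedblock: a plain number targets
+-- that real page (0 being the generic slot that always gets flushed), while a
+-- string starting with "+" is taken relative to the current page, so "+2" ends
+-- up as tex.count.realpageno + 2 and a bare "+" as the next page.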
diff --git a/tex/context/base/page-str.lua b/tex/context/base/page-str.lua
index f6314657f..b9b5086cf 100644
--- a/tex/context/base/page-str.lua
+++ b/tex/context/base/page-str.lua
@@ -1,232 +1,232 @@
-if not modules then modules = { } end modules ['page-str'] = {
- version = 1.001,
- comment = "companion to page-str.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- streams -> managers.streams
-
--- work in progresss .. unfinished
-
-local concat, insert, remove = table.concat, table.insert, table.remove
-
-local find_tail, write_node, free_node, copy_nodelist = node.slide, node.write, node.free, node.copy_list
-local vpack_nodelist, hpack_nodelist = node.vpack, node.hpack
-local texdimen, texbox = tex.dimen, tex.box
-local settings_to_array = utilities.parsers.settings_to_array
-
-local nodes, node = nodes, node
-
-local nodepool = nodes.pool
-local tasks = nodes.tasks
-
-local new_kern = nodepool.kern
-local new_glyph = nodepool.glyph
-
-local trace_collecting = false trackers.register("streams.collecting", function(v) trace_collecting = v end)
-local trace_flushing = false trackers.register("streams.flushing", function(v) trace_flushing = v end)
-
-local report_streams = logs.reporter("streams")
-
-streams = streams or { } -- might move to the builders namespace
-local streams = streams
-
-local data, name, stack = { }, nil, { }
-
-function streams.enable(newname)
- if newname == "default" then
- name = nil
- else
- name = newname
- end
-end
-
-function streams.disable()
- name = stack[#stack]
-end
-
-function streams.start(newname)
- insert(stack,name)
- name = newname
-end
-
-function streams.stop(newname)
- name = remove(stack)
-end
-
-function streams.collect(head,where)
- if name and head and name ~= "default" then
- local tail = node.slide(head)
- local dana = data[name]
- if not dana then
- dana = { }
- data[name] = dana
- end
- local last = dana[#dana]
- if last then
- local tail = find_tail(last)
- tail.next, head.prev = head, tail
- elseif last == false then
- dana[#dana] = head
- else
- dana[1] = head
- end
- if trace_collecting then
- report_streams("appending snippet %a to slot %s",name,#dana)
- end
- return nil, true
- else
- return head, false
- end
-end
-
-function streams.push(thename)
- if not thename or thename == "" then
- thename = name
- end
- if thename and thename ~= "" then
- local dana = data[thename]
- if dana then
- dana[#dana+1] = false
- if trace_collecting then
- report_streams("pushing snippet %a",thename)
- end
- end
- end
-end
-
-function streams.flush(name,copy) -- problem: we need to migrate afterwards
- local dana = data[name]
- if dana then
- local dn = #dana
- if dn == 0 then
- -- nothing to flush
- elseif copy then
- if trace_flushing then
- report_streams("flushing copies of %s slots of %a",dn,name)
- end
- for i=1,dn do
- local di = dana[i]
- if di then
- write_node(copy_nodelist(di.list)) -- list, will be option
- end
- end
- if copy then
- data[name] = nil
- end
- else
- if trace_flushing then
- report_streams("flushing %s slots of %a",dn,name)
- end
- for i=1,dn do
- local di = dana[i]
- if di then
- write_node(di.list) -- list, will be option
- di.list = nil
- free_node(di)
- end
- end
- end
- end
-end
-
-function streams.synchronize(list) -- this is an experiment !
- -- we don't optimize this as we want to trace in detail
- list = settings_to_array(list)
- local max = 0
- if trace_flushing then
- report_streams("synchronizing list: % t",list)
- end
- for i=1,#list do
- local dana = data[list[i]]
- if dana then
- local n = #dana
- if n > max then
- max = n
- end
- end
- end
- if trace_flushing then
- report_streams("maximum number of slots: %s",max)
- end
- for m=1,max do
- local height, depth = 0, 0
- for i=1,#list do
- local name = list[i]
- local dana = data[name]
- local slot = dana[m]
- if slot then
- local vbox = vpack_nodelist(slot)
- local ht, dp = vbox.height, vbox.depth
- if ht > height then
- height = ht
- end
- if dp > depth then
- depth = dp
- end
- dana[m] = vbox
- if trace_flushing then
- report_streams("slot %s of %a is packed to height %p and depth %p",m,name,ht,dp)
- end
- end
- end
- if trace_flushing then
- report_streams("slot %s has max height %p and max depth %p",m,height,depth)
- end
- local strutht, strutdp = texdimen.globalbodyfontstrutheight, texdimen.globalbodyfontstrutdepth
- local struthtdp = strutht + strutdp
- for i=1,#list do
- local name = list[i]
- local dana = data[name]
- local vbox = dana[m]
- if vbox then
- local delta_height = height - vbox.height
- local delta_depth = depth - vbox.depth
- if delta_height > 0 or delta_depth > 0 then
- if false then
- -- actually we need to add glue and repack
- vbox.height, vbox.depth = height, depth
- if trace_flushing then
- report_streams("slot %s of %a with delta (%p,%p) is compensated",m,i,delta_height,delta_depth)
- end
- else
- -- this is not yet ok as we also need to keep an eye on vertical spacing
- -- so we might need to do some splitting or whatever
- local tail = vbox.list and find_tail(vbox.list)
- local n, delta = 0, delta_height -- for tracing
- while delta > 0 do
- -- we need to add some interline penalties
- local line = copy_nodelist(tex.box.strutbox)
- line.height, line.depth = strutht, strutdp
- if tail then
- tail.next, line.prev = line, tail
- end
- tail = line
- n, delta = n +1, delta - struthtdp
- end
- dana[m] = vpack_nodelist(vbox.list)
- vbox.list = nil
- free_node(vbox)
- if trace_flushing then
- report_streams("slot %s:%s with delta (%p,%p) is compensated by %s lines",m,i,delta_height,delta_depth,n)
- end
- end
- end
- else
- -- make dummy
- end
- end
- end
-end
-
-tasks.appendaction("mvlbuilders", "normalizers", "streams.collect")
-
-tasks.disableaction("mvlbuilders", "streams.collect")
-
-function streams.initialize()
- tasks.enableaction ("mvlbuilders", "streams.collect")
-end
-
--- todo: remove empty last { }'s
+if not modules then modules = { } end modules ['page-str'] = {
+ version = 1.001,
+ comment = "companion to page-str.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- streams -> managers.streams
+
+-- work in progress .. unfinished
+
+local concat, insert, remove = table.concat, table.insert, table.remove
+
+local find_tail, write_node, free_node, copy_nodelist = node.slide, node.write, node.free, node.copy_list
+local vpack_nodelist, hpack_nodelist = node.vpack, node.hpack
+local texdimen, texbox = tex.dimen, tex.box
+local settings_to_array = utilities.parsers.settings_to_array
+
+local nodes, node = nodes, node
+
+local nodepool = nodes.pool
+local tasks = nodes.tasks
+
+local new_kern = nodepool.kern
+local new_glyph = nodepool.glyph
+
+local trace_collecting = false trackers.register("streams.collecting", function(v) trace_collecting = v end)
+local trace_flushing = false trackers.register("streams.flushing", function(v) trace_flushing = v end)
+
+local report_streams = logs.reporter("streams")
+
+streams = streams or { } -- might move to the builders namespace
+local streams = streams
+
+local data, name, stack = { }, nil, { }
+
+function streams.enable(newname)
+ if newname == "default" then
+ name = nil
+ else
+ name = newname
+ end
+end
+
+function streams.disable()
+ name = stack[#stack]
+end
+
+function streams.start(newname)
+ insert(stack,name)
+ name = newname
+end
+
+function streams.stop(newname)
+ name = remove(stack)
+end
+
+function streams.collect(head,where)
+ if name and head and name ~= "default" then
+ local tail = node.slide(head)
+ local dana = data[name]
+ if not dana then
+ dana = { }
+ data[name] = dana
+ end
+ local last = dana[#dana]
+ if last then
+ local tail = find_tail(last)
+ tail.next, head.prev = head, tail
+ elseif last == false then
+ dana[#dana] = head
+ else
+ dana[1] = head
+ end
+ if trace_collecting then
+ report_streams("appending snippet %a to slot %s",name,#dana)
+ end
+ return nil, true
+ else
+ return head, false
+ end
+end
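+
+-- streams.collect is hooked into the mvlbuilders normalizers (see the task
+-- registration at the bottom of this file): returning nil, true tells the
+-- caller that the material was absorbed into the current stream, while
+-- head, false leaves the main vertical list alone.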
+
+function streams.push(thename)
+ if not thename or thename == "" then
+ thename = name
+ end
+ if thename and thename ~= "" then
+ local dana = data[thename]
+ if dana then
+ dana[#dana+1] = false
+ if trace_collecting then
+ report_streams("pushing snippet %a",thename)
+ end
+ end
+ end
+end
+
+function streams.flush(name,copy) -- problem: we need to migrate afterwards
+ local dana = data[name]
+ if dana then
+ local dn = #dana
+ if dn == 0 then
+ -- nothing to flush
+ elseif copy then
+ if trace_flushing then
+ report_streams("flushing copies of %s slots of %a",dn,name)
+ end
+ for i=1,dn do
+ local di = dana[i]
+ if di then
+ write_node(copy_nodelist(di.list)) -- list, will be option
+ end
+ end
+ if copy then
+ data[name] = nil
+ end
+ else
+ if trace_flushing then
+ report_streams("flushing %s slots of %a",dn,name)
+ end
+ for i=1,dn do
+ local di = dana[i]
+ if di then
+ write_node(di.list) -- list, will be option
+ di.list = nil
+ free_node(di)
+ end
+ end
+ end
+ end
+end
+
+function streams.synchronize(list) -- this is an experiment !
+ -- we don't optimize this as we want to trace in detail
+ list = settings_to_array(list)
+ local max = 0
+ if trace_flushing then
+ report_streams("synchronizing list: % t",list)
+ end
+ for i=1,#list do
+ local dana = data[list[i]]
+ if dana then
+ local n = #dana
+ if n > max then
+ max = n
+ end
+ end
+ end
+ if trace_flushing then
+ report_streams("maximum number of slots: %s",max)
+ end
+ for m=1,max do
+ local height, depth = 0, 0
+ for i=1,#list do
+ local name = list[i]
+ local dana = data[name]
+ local slot = dana[m]
+ if slot then
+ local vbox = vpack_nodelist(slot)
+ local ht, dp = vbox.height, vbox.depth
+ if ht > height then
+ height = ht
+ end
+ if dp > depth then
+ depth = dp
+ end
+ dana[m] = vbox
+ if trace_flushing then
+ report_streams("slot %s of %a is packed to height %p and depth %p",m,name,ht,dp)
+ end
+ end
+ end
+ if trace_flushing then
+ report_streams("slot %s has max height %p and max depth %p",m,height,depth)
+ end
+ local strutht, strutdp = texdimen.globalbodyfontstrutheight, texdimen.globalbodyfontstrutdepth
+ local struthtdp = strutht + strutdp
+ for i=1,#list do
+ local name = list[i]
+ local dana = data[name]
+ local vbox = dana[m]
+ if vbox then
+ local delta_height = height - vbox.height
+ local delta_depth = depth - vbox.depth
+ if delta_height > 0 or delta_depth > 0 then
+ if false then
+ -- actually we need to add glue and repack
+ vbox.height, vbox.depth = height, depth
+ if trace_flushing then
+ report_streams("slot %s of %a with delta (%p,%p) is compensated",m,i,delta_height,delta_depth)
+ end
+ else
+ -- this is not yet ok as we also need to keep an eye on vertical spacing
+ -- so we might need to do some splitting or whatever
+ local tail = vbox.list and find_tail(vbox.list)
+ local n, delta = 0, delta_height -- for tracing
+ while delta > 0 do
+ -- we need to add some interline penalties
+ local line = copy_nodelist(tex.box.strutbox)
+ line.height, line.depth = strutht, strutdp
+ if tail then
+ tail.next, line.prev = line, tail
+ end
+ tail = line
+ n, delta = n +1, delta - struthtdp
+ end
+ dana[m] = vpack_nodelist(vbox.list)
+ vbox.list = nil
+ free_node(vbox)
+ if trace_flushing then
+ report_streams("slot %s:%s with delta (%p,%p) is compensated by %s lines",m,i,delta_height,delta_depth,n)
+ end
+ end
+ end
+ else
+ -- make dummy
+ end
+ end
+ end
+end
+
+tasks.appendaction("mvlbuilders", "normalizers", "streams.collect")
+
+tasks.disableaction("mvlbuilders", "streams.collect")
+
+function streams.initialize()
+ tasks.enableaction ("mvlbuilders", "streams.collect")
+end
+
+-- todo: remove empty last { }'s
diff --git a/tex/context/base/regi-8859-1.lua b/tex/context/base/regi-8859-1.lua
index ff2182afa..2a3caea54 100644
--- a/tex/context/base/regi-8859-1.lua
+++ b/tex/context/base/regi-8859-1.lua
@@ -1,26 +1,26 @@
-if not modules then modules = { } end modules ['regi-8859-1'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return { [0] =
- 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
- 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
- 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
- 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
- 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
- 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
- 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
- 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
- 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
- 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
- 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF,
- 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00BA, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF,
- 0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF,
- 0x00D0, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x00DD, 0x00DE, 0x00DF,
- 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF,
- 0x00F0, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x00FD, 0x00FE, 0x00FF
-}
+if not modules then modules = { } end modules ['regi-8859-1'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
+ 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
+ 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
+ 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF,
+ 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00BA, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF,
+ 0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF,
+ 0x00D0, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x00DD, 0x00DE, 0x00DF,
+ 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF,
+ 0x00F0, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x00FD, 0x00FE, 0x00FF
+}
diff --git a/tex/context/base/regi-8859-10.lua b/tex/context/base/regi-8859-10.lua
index f23744b4a..1d3888c9e 100644
--- a/tex/context/base/regi-8859-10.lua
+++ b/tex/context/base/regi-8859-10.lua
@@ -1,26 +1,26 @@
-if not modules then modules = { } end modules ['regi-8859-10'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return { [0] =
- 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
- 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
- 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
- 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
- 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
- 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
- 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
- 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
- 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
- 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
- 0x00A0, 0x0104, 0x0112, 0x0122, 0x012A, 0x0128, 0x0136, 0x00A7, 0x013B, 0x0110, 0x0160, 0x0166, 0x017D, 0x00AD, 0x016A, 0x014A,
- 0x00B0, 0x0105, 0x0113, 0x0123, 0x012B, 0x0129, 0x0137, 0x00B7, 0x013C, 0x0111, 0x0161, 0x0167, 0x017E, 0x2015, 0x016B, 0x014B,
- 0x0100, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x012E, 0x010C, 0x00C9, 0x0118, 0x00CB, 0x0116, 0x00CD, 0x00CE, 0x00CF,
- 0x00D0, 0x0145, 0x014C, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x0168, 0x00D8, 0x0172, 0x00DA, 0x00DB, 0x00DC, 0x00DD, 0x00DE, 0x00DF,
- 0x0101, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x012F, 0x010D, 0x00E9, 0x0119, 0x00EB, 0x0117, 0x00ED, 0x00EE, 0x00EF,
- 0x00F0, 0x0146, 0x014D, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x0169, 0x00F8, 0x0173, 0x00FA, 0x00FB, 0x00FC, 0x00FD, 0x00FE, 0x0138
-}
+if not modules then modules = { } end modules ['regi-8859-10'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
+ 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
+ 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
+ 0x00A0, 0x0104, 0x0112, 0x0122, 0x012A, 0x0128, 0x0136, 0x00A7, 0x013B, 0x0110, 0x0160, 0x0166, 0x017D, 0x00AD, 0x016A, 0x014A,
+ 0x00B0, 0x0105, 0x0113, 0x0123, 0x012B, 0x0129, 0x0137, 0x00B7, 0x013C, 0x0111, 0x0161, 0x0167, 0x017E, 0x2015, 0x016B, 0x014B,
+ 0x0100, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x012E, 0x010C, 0x00C9, 0x0118, 0x00CB, 0x0116, 0x00CD, 0x00CE, 0x00CF,
+ 0x00D0, 0x0145, 0x014C, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x0168, 0x00D8, 0x0172, 0x00DA, 0x00DB, 0x00DC, 0x00DD, 0x00DE, 0x00DF,
+ 0x0101, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x012F, 0x010D, 0x00E9, 0x0119, 0x00EB, 0x0117, 0x00ED, 0x00EE, 0x00EF,
+ 0x00F0, 0x0146, 0x014D, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x0169, 0x00F8, 0x0173, 0x00FA, 0x00FB, 0x00FC, 0x00FD, 0x00FE, 0x0138
+}
diff --git a/tex/context/base/regi-8859-11.lua b/tex/context/base/regi-8859-11.lua
index 54e5626c2..f7a87efe9 100644
--- a/tex/context/base/regi-8859-11.lua
+++ b/tex/context/base/regi-8859-11.lua
@@ -1,26 +1,26 @@
-if not modules then modules = { } end modules ['regi-8859-11'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return { [0] =
- 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
- 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
- 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
- 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
- 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
- 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
- 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
- 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
- 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
- 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
- 0x00A0, 0x0E01, 0x0E02, 0x0E03, 0x0E04, 0x0E05, 0x0E06, 0x0E07, 0x0E08, 0x0E09, 0x0E0A, 0x0E0B, 0x0E0C, 0x0E0D, 0x0E0E, 0x0E0F,
- 0x0E10, 0x0E11, 0x0E12, 0x0E13, 0x0E14, 0x0E15, 0x0E16, 0x0E17, 0x0E18, 0x0E19, 0x0E1A, 0x0E1B, 0x0E1C, 0x0E1D, 0x0E1E, 0x0E1F,
- 0x0E20, 0x0E21, 0x0E22, 0x0E23, 0x0E24, 0x0E25, 0x0E26, 0x0E27, 0x0E28, 0x0E29, 0x0E2A, 0x0E2B, 0x0E2C, 0x0E2D, 0x0E2E, 0x0E2F,
- 0x0E30, 0x0E31, 0x0E32, 0x0E33, 0x0E34, 0x0E35, 0x0E36, 0x0E37, 0x0E38, 0x0E39, 0x0E3A, 0x0000, 0x0000, 0x0000, 0x0000, 0x0E3F,
- 0x0E40, 0x0E41, 0x0E42, 0x0E43, 0x0E44, 0x0E45, 0x0E46, 0x0E47, 0x0E48, 0x0E49, 0x0E4A, 0x0E4B, 0x0E4C, 0x0E4D, 0x0E4E, 0x0E4F,
- 0x0E50, 0x0E51, 0x0E52, 0x0E53, 0x0E54, 0x0E55, 0x0E56, 0x0E57, 0x0E58, 0x0E59, 0x0E5A, 0x0E5B, 0x0000, 0x0000, 0x0000, 0x0000
-}
+if not modules then modules = { } end modules ['regi-8859-11'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
+ 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
+ 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
+ 0x00A0, 0x0E01, 0x0E02, 0x0E03, 0x0E04, 0x0E05, 0x0E06, 0x0E07, 0x0E08, 0x0E09, 0x0E0A, 0x0E0B, 0x0E0C, 0x0E0D, 0x0E0E, 0x0E0F,
+ 0x0E10, 0x0E11, 0x0E12, 0x0E13, 0x0E14, 0x0E15, 0x0E16, 0x0E17, 0x0E18, 0x0E19, 0x0E1A, 0x0E1B, 0x0E1C, 0x0E1D, 0x0E1E, 0x0E1F,
+ 0x0E20, 0x0E21, 0x0E22, 0x0E23, 0x0E24, 0x0E25, 0x0E26, 0x0E27, 0x0E28, 0x0E29, 0x0E2A, 0x0E2B, 0x0E2C, 0x0E2D, 0x0E2E, 0x0E2F,
+ 0x0E30, 0x0E31, 0x0E32, 0x0E33, 0x0E34, 0x0E35, 0x0E36, 0x0E37, 0x0E38, 0x0E39, 0x0E3A, 0x0000, 0x0000, 0x0000, 0x0000, 0x0E3F,
+ 0x0E40, 0x0E41, 0x0E42, 0x0E43, 0x0E44, 0x0E45, 0x0E46, 0x0E47, 0x0E48, 0x0E49, 0x0E4A, 0x0E4B, 0x0E4C, 0x0E4D, 0x0E4E, 0x0E4F,
+ 0x0E50, 0x0E51, 0x0E52, 0x0E53, 0x0E54, 0x0E55, 0x0E56, 0x0E57, 0x0E58, 0x0E59, 0x0E5A, 0x0E5B, 0x0000, 0x0000, 0x0000, 0x0000
+}
diff --git a/tex/context/base/regi-8859-13.lua b/tex/context/base/regi-8859-13.lua
index 1646133b5..163b441c7 100644
--- a/tex/context/base/regi-8859-13.lua
+++ b/tex/context/base/regi-8859-13.lua
@@ -1,26 +1,26 @@
-if not modules then modules = { } end modules ['regi-8859-13'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return { [0] =
- 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
- 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
- 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
- 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
- 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
- 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
- 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
- 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
- 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
- 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
- 0x00A0, 0x201D, 0x00A2, 0x00A3, 0x00A4, 0x201E, 0x00A6, 0x00A7, 0x00D8, 0x00A9, 0x0156, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00C6,
- 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x201C, 0x00B5, 0x00B6, 0x00B7, 0x00F8, 0x00B9, 0x0157, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00E6,
- 0x0104, 0x012E, 0x0100, 0x0106, 0x00C4, 0x00C5, 0x0118, 0x0112, 0x010C, 0x00C9, 0x0179, 0x0116, 0x0122, 0x0136, 0x012A, 0x013B,
- 0x0160, 0x0143, 0x0145, 0x00D3, 0x014C, 0x00D5, 0x00D6, 0x00D7, 0x0172, 0x0141, 0x015A, 0x016A, 0x00DC, 0x017B, 0x017D, 0x00DF,
- 0x0105, 0x012F, 0x0101, 0x0107, 0x00E4, 0x00E5, 0x0119, 0x0113, 0x010D, 0x00E9, 0x017A, 0x0117, 0x0123, 0x0137, 0x012B, 0x013C,
- 0x0161, 0x0144, 0x0146, 0x00F3, 0x014D, 0x00F5, 0x00F6, 0x00F7, 0x0173, 0x0142, 0x015B, 0x016B, 0x00FC, 0x017C, 0x017E, 0x2019
-}
+if not modules then modules = { } end modules ['regi-8859-13'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
+ 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
+ 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
+ 0x00A0, 0x201D, 0x00A2, 0x00A3, 0x00A4, 0x201E, 0x00A6, 0x00A7, 0x00D8, 0x00A9, 0x0156, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00C6,
+ 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x201C, 0x00B5, 0x00B6, 0x00B7, 0x00F8, 0x00B9, 0x0157, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00E6,
+ 0x0104, 0x012E, 0x0100, 0x0106, 0x00C4, 0x00C5, 0x0118, 0x0112, 0x010C, 0x00C9, 0x0179, 0x0116, 0x0122, 0x0136, 0x012A, 0x013B,
+ 0x0160, 0x0143, 0x0145, 0x00D3, 0x014C, 0x00D5, 0x00D6, 0x00D7, 0x0172, 0x0141, 0x015A, 0x016A, 0x00DC, 0x017B, 0x017D, 0x00DF,
+ 0x0105, 0x012F, 0x0101, 0x0107, 0x00E4, 0x00E5, 0x0119, 0x0113, 0x010D, 0x00E9, 0x017A, 0x0117, 0x0123, 0x0137, 0x012B, 0x013C,
+ 0x0161, 0x0144, 0x0146, 0x00F3, 0x014D, 0x00F5, 0x00F6, 0x00F7, 0x0173, 0x0142, 0x015B, 0x016B, 0x00FC, 0x017C, 0x017E, 0x2019
+}
diff --git a/tex/context/base/regi-8859-14.lua b/tex/context/base/regi-8859-14.lua
index 2b0c68814..b69eaecea 100644
--- a/tex/context/base/regi-8859-14.lua
+++ b/tex/context/base/regi-8859-14.lua
@@ -1,26 +1,26 @@
-if not modules then modules = { } end modules ['regi-8859-14'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return { [0] =
- 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
- 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
- 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
- 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
- 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
- 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
- 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
- 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
- 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
- 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
- 0x00A0, 0x1E02, 0x1E03, 0x00A3, 0x010A, 0x010B, 0x1E0A, 0x00A7, 0x1E80, 0x00A9, 0x1E82, 0x1E0B, 0x1EF2, 0x00AD, 0x00AE, 0x0178,
- 0x1E1E, 0x1E1F, 0x0120, 0x0121, 0x1E40, 0x1E41, 0x00B6, 0x1E56, 0x1E81, 0x1E57, 0x1E83, 0x1E60, 0x1EF3, 0x1E84, 0x1E85, 0x1E61,
- 0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF,
- 0x0174, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x1E6A, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x00DD, 0x0176, 0x00DF,
- 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF,
- 0x0175, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x1E6B, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x00FD, 0x0177, 0x00FF
-}
+if not modules then modules = { } end modules ['regi-8859-14'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
+ 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
+ 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
+ 0x00A0, 0x1E02, 0x1E03, 0x00A3, 0x010A, 0x010B, 0x1E0A, 0x00A7, 0x1E80, 0x00A9, 0x1E82, 0x1E0B, 0x1EF2, 0x00AD, 0x00AE, 0x0178,
+ 0x1E1E, 0x1E1F, 0x0120, 0x0121, 0x1E40, 0x1E41, 0x00B6, 0x1E56, 0x1E81, 0x1E57, 0x1E83, 0x1E60, 0x1EF3, 0x1E84, 0x1E85, 0x1E61,
+ 0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF,
+ 0x0174, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x1E6A, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x00DD, 0x0176, 0x00DF,
+ 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF,
+ 0x0175, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x1E6B, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x00FD, 0x0177, 0x00FF
+}
diff --git a/tex/context/base/regi-8859-15.lua b/tex/context/base/regi-8859-15.lua
index 48861f396..3bc1d527a 100644
--- a/tex/context/base/regi-8859-15.lua
+++ b/tex/context/base/regi-8859-15.lua
@@ -1,26 +1,26 @@
-if not modules then modules = { } end modules ['regi-8859-15'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return { [0] =
- 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
- 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
- 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
- 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
- 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
- 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
- 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
- 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
- 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
- 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
- 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x20AC, 0x00A5, 0x0160, 0x00A7, 0x0161, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF,
- 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x017D, 0x00B5, 0x00B6, 0x00B7, 0x017E, 0x00B9, 0x00BA, 0x00BB, 0x0152, 0x0153, 0x0178, 0x00BF,
- 0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF,
- 0x00D0, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x00DD, 0x00DE, 0x00DF,
- 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF,
- 0x00F0, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x00FD, 0x00FE, 0x00FF
-}
+if not modules then modules = { } end modules ['regi-8859-15'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
+ 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
+ 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
+ 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x20AC, 0x00A5, 0x0160, 0x00A7, 0x0161, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF,
+ 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x017D, 0x00B5, 0x00B6, 0x00B7, 0x017E, 0x00B9, 0x00BA, 0x00BB, 0x0152, 0x0153, 0x0178, 0x00BF,
+ 0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF,
+ 0x00D0, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x00DD, 0x00DE, 0x00DF,
+ 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF,
+ 0x00F0, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x00FD, 0x00FE, 0x00FF
+}
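
(Note, not part of the patch itself: the regi-*.lua files touched above and below are plain byte-to-Unicode mapping tables, indexed from 0 to 255, where each entry is the Unicode code point for that byte in the corresponding 8-bit encoding; unassigned slots, as in regi-8859-3 or regi-8859-6 further down, hold 0x0000. As a minimal sketch of how such a table could be applied outside of ConTeXt — the helper name eightbit_to_utf8 and the dofile path are made up for illustration, the sketch assumes Lua 5.3's utf8 library, and the real conversion in ConTeXt is handled by the regi-ini.* companion code:)

    -- load one of the mapping tables from this patch; the chunk ends with
    -- "return { [0] = ... }", so dofile hands back the 256-entry table
    local mapping = dofile("tex/context/base/regi-8859-15.lua")

    -- convert an 8-bit encoded string to UTF-8 by looking up each byte
    -- (0..255) in the table and emitting the UTF-8 form of the code point
    local function eightbit_to_utf8(str)
        local t = { }
        for i = 1, #str do
            t[i] = utf8.char(mapping[string.byte(str, i)])
        end
        return table.concat(t)
    end

    -- in ISO 8859-15, byte 0xA4 maps to U+20AC and 0xE9 to U+00E9,
    -- so this prints "100 € café"
    print(eightbit_to_utf8("100 \164 caf\233"))
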
diff --git a/tex/context/base/regi-8859-16.lua b/tex/context/base/regi-8859-16.lua
index e122a2042..c2a235363 100644
--- a/tex/context/base/regi-8859-16.lua
+++ b/tex/context/base/regi-8859-16.lua
@@ -1,26 +1,26 @@
-if not modules then modules = { } end modules ['regi-8859-16'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return { [0] =
- 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
- 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
- 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
- 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
- 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
- 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
- 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
- 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
- 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
- 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
- 0x00A0, 0x0104, 0x0105, 0x0141, 0x20AC, 0x201E, 0x0160, 0x00A7, 0x0161, 0x00A9, 0x0218, 0x00AB, 0x0179, 0x00AD, 0x017A, 0x017B,
- 0x00B0, 0x00B1, 0x010C, 0x0142, 0x017D, 0x201D, 0x00B6, 0x00B7, 0x017E, 0x010D, 0x0219, 0x00BB, 0x0152, 0x0153, 0x0178, 0x017C,
- 0x00C0, 0x00C1, 0x00C2, 0x0102, 0x00C4, 0x0106, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF,
- 0x0110, 0x0143, 0x00D2, 0x00D3, 0x00D4, 0x0150, 0x00D6, 0x015A, 0x0170, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x0118, 0x021A, 0x00DF,
- 0x00E0, 0x00E1, 0x00E2, 0x0103, 0x00E4, 0x0107, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF,
- 0x0111, 0x0144, 0x00F2, 0x00F3, 0x00F4, 0x0151, 0x00F6, 0x015B, 0x0171, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x0119, 0x021B, 0x00FF
-}
+if not modules then modules = { } end modules ['regi-8859-16'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
+ 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
+ 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
+ 0x00A0, 0x0104, 0x0105, 0x0141, 0x20AC, 0x201E, 0x0160, 0x00A7, 0x0161, 0x00A9, 0x0218, 0x00AB, 0x0179, 0x00AD, 0x017A, 0x017B,
+ 0x00B0, 0x00B1, 0x010C, 0x0142, 0x017D, 0x201D, 0x00B6, 0x00B7, 0x017E, 0x010D, 0x0219, 0x00BB, 0x0152, 0x0153, 0x0178, 0x017C,
+ 0x00C0, 0x00C1, 0x00C2, 0x0102, 0x00C4, 0x0106, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF,
+ 0x0110, 0x0143, 0x00D2, 0x00D3, 0x00D4, 0x0150, 0x00D6, 0x015A, 0x0170, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x0118, 0x021A, 0x00DF,
+ 0x00E0, 0x00E1, 0x00E2, 0x0103, 0x00E4, 0x0107, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF,
+ 0x0111, 0x0144, 0x00F2, 0x00F3, 0x00F4, 0x0151, 0x00F6, 0x015B, 0x0171, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x0119, 0x021B, 0x00FF
+}
diff --git a/tex/context/base/regi-8859-2.lua b/tex/context/base/regi-8859-2.lua
index affd6c3ca..f0fe5f404 100644
--- a/tex/context/base/regi-8859-2.lua
+++ b/tex/context/base/regi-8859-2.lua
@@ -1,26 +1,26 @@
-if not modules then modules = { } end modules ['regi-8859-2'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return { [0] =
- 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
- 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
- 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
- 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
- 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
- 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
- 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
- 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
- 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
- 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
- 0x00A0, 0x0104, 0x02D8, 0x0141, 0x00A4, 0x013D, 0x015A, 0x00A7, 0x00A8, 0x0160, 0x015E, 0x0164, 0x0179, 0x00AD, 0x017D, 0x017B,
- 0x00B0, 0x0105, 0x02DB, 0x0142, 0x00B4, 0x013E, 0x015B, 0x02C7, 0x00B8, 0x0161, 0x015F, 0x0165, 0x017A, 0x02DD, 0x017E, 0x017C,
- 0x0154, 0x00C1, 0x00C2, 0x0102, 0x00C4, 0x0139, 0x0106, 0x00C7, 0x010C, 0x00C9, 0x0118, 0x00CB, 0x011A, 0x00CD, 0x00CE, 0x010E,
- 0x0110, 0x0143, 0x0147, 0x00D3, 0x00D4, 0x0150, 0x00D6, 0x00D7, 0x0158, 0x016E, 0x00DA, 0x0170, 0x00DC, 0x00DD, 0x0162, 0x00DF,
- 0x0155, 0x00E1, 0x00E2, 0x0103, 0x00E4, 0x013A, 0x0107, 0x00E7, 0x010D, 0x00E9, 0x0119, 0x00EB, 0x011B, 0x00ED, 0x00EE, 0x010F,
- 0x0111, 0x0144, 0x0148, 0x00F3, 0x00F4, 0x0151, 0x00F6, 0x00F7, 0x0159, 0x016F, 0x00FA, 0x0171, 0x00FC, 0x00FD, 0x0163, 0x02D9
-}
+if not modules then modules = { } end modules ['regi-8859-2'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
+ 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
+ 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
+ 0x00A0, 0x0104, 0x02D8, 0x0141, 0x00A4, 0x013D, 0x015A, 0x00A7, 0x00A8, 0x0160, 0x015E, 0x0164, 0x0179, 0x00AD, 0x017D, 0x017B,
+ 0x00B0, 0x0105, 0x02DB, 0x0142, 0x00B4, 0x013E, 0x015B, 0x02C7, 0x00B8, 0x0161, 0x015F, 0x0165, 0x017A, 0x02DD, 0x017E, 0x017C,
+ 0x0154, 0x00C1, 0x00C2, 0x0102, 0x00C4, 0x0139, 0x0106, 0x00C7, 0x010C, 0x00C9, 0x0118, 0x00CB, 0x011A, 0x00CD, 0x00CE, 0x010E,
+ 0x0110, 0x0143, 0x0147, 0x00D3, 0x00D4, 0x0150, 0x00D6, 0x00D7, 0x0158, 0x016E, 0x00DA, 0x0170, 0x00DC, 0x00DD, 0x0162, 0x00DF,
+ 0x0155, 0x00E1, 0x00E2, 0x0103, 0x00E4, 0x013A, 0x0107, 0x00E7, 0x010D, 0x00E9, 0x0119, 0x00EB, 0x011B, 0x00ED, 0x00EE, 0x010F,
+ 0x0111, 0x0144, 0x0148, 0x00F3, 0x00F4, 0x0151, 0x00F6, 0x00F7, 0x0159, 0x016F, 0x00FA, 0x0171, 0x00FC, 0x00FD, 0x0163, 0x02D9
+}
diff --git a/tex/context/base/regi-8859-3.lua b/tex/context/base/regi-8859-3.lua
index 4b5c54b4f..e84220bde 100644
--- a/tex/context/base/regi-8859-3.lua
+++ b/tex/context/base/regi-8859-3.lua
@@ -1,26 +1,26 @@
-if not modules then modules = { } end modules ['regi-8859-3'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return { [0] =
- 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
- 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
- 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
- 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
- 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
- 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
- 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
- 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
- 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
- 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
- 0x00A0, 0x0126, 0x02D8, 0x00A3, 0x00A4, 0x0000, 0x0124, 0x00A7, 0x00A8, 0x0130, 0x015E, 0x011E, 0x0134, 0x00AD, 0x0000, 0x017B,
- 0x00B0, 0x0127, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x0125, 0x00B7, 0x00B8, 0x0131, 0x015F, 0x011F, 0x0135, 0x00BD, 0x0000, 0x017C,
- 0x00C0, 0x00C1, 0x00C2, 0x0000, 0x00C4, 0x010A, 0x0108, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF,
- 0x0000, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x0120, 0x00D6, 0x00D7, 0x011C, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x016C, 0x015C, 0x00DF,
- 0x00E0, 0x00E1, 0x00E2, 0x0000, 0x00E4, 0x010B, 0x0109, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF,
- 0x0000, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x0121, 0x00F6, 0x00F7, 0x011D, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x016D, 0x015D, 0x02D9
-}
+if not modules then modules = { } end modules ['regi-8859-3'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
+ 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
+ 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
+ 0x00A0, 0x0126, 0x02D8, 0x00A3, 0x00A4, 0x0000, 0x0124, 0x00A7, 0x00A8, 0x0130, 0x015E, 0x011E, 0x0134, 0x00AD, 0x0000, 0x017B,
+ 0x00B0, 0x0127, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x0125, 0x00B7, 0x00B8, 0x0131, 0x015F, 0x011F, 0x0135, 0x00BD, 0x0000, 0x017C,
+ 0x00C0, 0x00C1, 0x00C2, 0x0000, 0x00C4, 0x010A, 0x0108, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF,
+ 0x0000, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x0120, 0x00D6, 0x00D7, 0x011C, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x016C, 0x015C, 0x00DF,
+ 0x00E0, 0x00E1, 0x00E2, 0x0000, 0x00E4, 0x010B, 0x0109, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF,
+ 0x0000, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x0121, 0x00F6, 0x00F7, 0x011D, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x016D, 0x015D, 0x02D9
+}
diff --git a/tex/context/base/regi-8859-4.lua b/tex/context/base/regi-8859-4.lua
index 774ec2e10..9fdc39a40 100644
--- a/tex/context/base/regi-8859-4.lua
+++ b/tex/context/base/regi-8859-4.lua
@@ -1,26 +1,26 @@
-if not modules then modules = { } end modules ['regi-8859-4'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return { [0] =
- 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
- 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
- 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
- 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
- 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
- 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
- 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
- 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
- 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
- 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
- 0x00A0, 0x0104, 0x0138, 0x0156, 0x00A4, 0x0128, 0x013B, 0x00A7, 0x00A8, 0x0160, 0x0112, 0x0122, 0x0166, 0x00AD, 0x017D, 0x00AF,
- 0x00B0, 0x0105, 0x02DB, 0x0157, 0x00B4, 0x0129, 0x013C, 0x02C7, 0x00B8, 0x0161, 0x0113, 0x0123, 0x0167, 0x014A, 0x017E, 0x014B,
- 0x0100, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x012E, 0x010C, 0x00C9, 0x0118, 0x00CB, 0x0116, 0x00CD, 0x00CE, 0x012A,
- 0x0110, 0x0145, 0x014C, 0x0136, 0x00D4, 0x00D5, 0x00D6, 0x00D7, 0x00D8, 0x0172, 0x00DA, 0x00DB, 0x00DC, 0x0168, 0x016A, 0x00DF,
- 0x0101, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x012F, 0x010D, 0x00E9, 0x0119, 0x00EB, 0x0117, 0x00ED, 0x00EE, 0x012B,
- 0x0111, 0x0146, 0x014D, 0x0137, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x0173, 0x00FA, 0x00FB, 0x00FC, 0x0169, 0x016B, 0x02D9
-}
+if not modules then modules = { } end modules ['regi-8859-4'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
+ 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
+ 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
+ 0x00A0, 0x0104, 0x0138, 0x0156, 0x00A4, 0x0128, 0x013B, 0x00A7, 0x00A8, 0x0160, 0x0112, 0x0122, 0x0166, 0x00AD, 0x017D, 0x00AF,
+ 0x00B0, 0x0105, 0x02DB, 0x0157, 0x00B4, 0x0129, 0x013C, 0x02C7, 0x00B8, 0x0161, 0x0113, 0x0123, 0x0167, 0x014A, 0x017E, 0x014B,
+ 0x0100, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x012E, 0x010C, 0x00C9, 0x0118, 0x00CB, 0x0116, 0x00CD, 0x00CE, 0x012A,
+ 0x0110, 0x0145, 0x014C, 0x0136, 0x00D4, 0x00D5, 0x00D6, 0x00D7, 0x00D8, 0x0172, 0x00DA, 0x00DB, 0x00DC, 0x0168, 0x016A, 0x00DF,
+ 0x0101, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x012F, 0x010D, 0x00E9, 0x0119, 0x00EB, 0x0117, 0x00ED, 0x00EE, 0x012B,
+ 0x0111, 0x0146, 0x014D, 0x0137, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x0173, 0x00FA, 0x00FB, 0x00FC, 0x0169, 0x016B, 0x02D9
+}
diff --git a/tex/context/base/regi-8859-5.lua b/tex/context/base/regi-8859-5.lua
index 1137f37bb..af35a71b8 100644
--- a/tex/context/base/regi-8859-5.lua
+++ b/tex/context/base/regi-8859-5.lua
@@ -1,26 +1,26 @@
-if not modules then modules = { } end modules ['regi-8859-5'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return { [0] =
- 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
- 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
- 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
- 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
- 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
- 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
- 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
- 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
- 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
- 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
- 0x00A0, 0x0401, 0x0402, 0x0403, 0x0404, 0x0405, 0x0406, 0x0407, 0x0408, 0x0409, 0x040A, 0x040B, 0x040C, 0x00AD, 0x040E, 0x040F,
- 0x0410, 0x0411, 0x0412, 0x0413, 0x0414, 0x0415, 0x0416, 0x0417, 0x0418, 0x0419, 0x041A, 0x041B, 0x041C, 0x041D, 0x041E, 0x041F,
- 0x0420, 0x0421, 0x0422, 0x0423, 0x0424, 0x0425, 0x0426, 0x0427, 0x0428, 0x0429, 0x042A, 0x042B, 0x042C, 0x042D, 0x042E, 0x042F,
- 0x0430, 0x0431, 0x0432, 0x0433, 0x0434, 0x0435, 0x0436, 0x0437, 0x0438, 0x0439, 0x043A, 0x043B, 0x043C, 0x043D, 0x043E, 0x043F,
- 0x0440, 0x0441, 0x0442, 0x0443, 0x0444, 0x0445, 0x0446, 0x0447, 0x0448, 0x0449, 0x044A, 0x044B, 0x044C, 0x044D, 0x044E, 0x044F,
- 0x2116, 0x0451, 0x0452, 0x0453, 0x0454, 0x0455, 0x0456, 0x0457, 0x0458, 0x0459, 0x045A, 0x045B, 0x045C, 0x00A7, 0x045E, 0x045F
-}
+if not modules then modules = { } end modules ['regi-8859-5'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
+ 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
+ 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
+ 0x00A0, 0x0401, 0x0402, 0x0403, 0x0404, 0x0405, 0x0406, 0x0407, 0x0408, 0x0409, 0x040A, 0x040B, 0x040C, 0x00AD, 0x040E, 0x040F,
+ 0x0410, 0x0411, 0x0412, 0x0413, 0x0414, 0x0415, 0x0416, 0x0417, 0x0418, 0x0419, 0x041A, 0x041B, 0x041C, 0x041D, 0x041E, 0x041F,
+ 0x0420, 0x0421, 0x0422, 0x0423, 0x0424, 0x0425, 0x0426, 0x0427, 0x0428, 0x0429, 0x042A, 0x042B, 0x042C, 0x042D, 0x042E, 0x042F,
+ 0x0430, 0x0431, 0x0432, 0x0433, 0x0434, 0x0435, 0x0436, 0x0437, 0x0438, 0x0439, 0x043A, 0x043B, 0x043C, 0x043D, 0x043E, 0x043F,
+ 0x0440, 0x0441, 0x0442, 0x0443, 0x0444, 0x0445, 0x0446, 0x0447, 0x0448, 0x0449, 0x044A, 0x044B, 0x044C, 0x044D, 0x044E, 0x044F,
+ 0x2116, 0x0451, 0x0452, 0x0453, 0x0454, 0x0455, 0x0456, 0x0457, 0x0458, 0x0459, 0x045A, 0x045B, 0x045C, 0x00A7, 0x045E, 0x045F
+}
diff --git a/tex/context/base/regi-8859-6.lua b/tex/context/base/regi-8859-6.lua
index 651ae79ff..89ca3ce7f 100644
--- a/tex/context/base/regi-8859-6.lua
+++ b/tex/context/base/regi-8859-6.lua
@@ -1,26 +1,26 @@
-if not modules then modules = { } end modules ['regi-8859-6'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return { [0] =
- 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
- 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
- 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
- 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
- 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
- 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
- 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
- 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
- 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
- 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
- 0x00A0, 0x0000, 0x0000, 0x0000, 0x00A4, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x060C, 0x00AD, 0x0000, 0x0000,
- 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x061B, 0x0000, 0x0000, 0x0000, 0x061F,
- 0x0000, 0x0621, 0x0622, 0x0623, 0x0624, 0x0625, 0x0626, 0x0627, 0x0628, 0x0629, 0x062A, 0x062B, 0x062C, 0x062D, 0x062E, 0x062F,
- 0x0630, 0x0631, 0x0632, 0x0633, 0x0634, 0x0635, 0x0636, 0x0637, 0x0638, 0x0639, 0x063A, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000,
- 0x0640, 0x0641, 0x0642, 0x0643, 0x0644, 0x0645, 0x0646, 0x0647, 0x0648, 0x0649, 0x064A, 0x064B, 0x064C, 0x064D, 0x064E, 0x064F,
- 0x0650, 0x0651, 0x0652, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000
-}
+if not modules then modules = { } end modules ['regi-8859-6'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
+ 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
+ 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
+ 0x00A0, 0x0000, 0x0000, 0x0000, 0x00A4, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x060C, 0x00AD, 0x0000, 0x0000,
+ 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x061B, 0x0000, 0x0000, 0x0000, 0x061F,
+ 0x0000, 0x0621, 0x0622, 0x0623, 0x0624, 0x0625, 0x0626, 0x0627, 0x0628, 0x0629, 0x062A, 0x062B, 0x062C, 0x062D, 0x062E, 0x062F,
+ 0x0630, 0x0631, 0x0632, 0x0633, 0x0634, 0x0635, 0x0636, 0x0637, 0x0638, 0x0639, 0x063A, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000,
+ 0x0640, 0x0641, 0x0642, 0x0643, 0x0644, 0x0645, 0x0646, 0x0647, 0x0648, 0x0649, 0x064A, 0x064B, 0x064C, 0x064D, 0x064E, 0x064F,
+ 0x0650, 0x0651, 0x0652, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000
+}
diff --git a/tex/context/base/regi-8859-7.lua b/tex/context/base/regi-8859-7.lua
index 08cbbab6e..8769b0483 100644
--- a/tex/context/base/regi-8859-7.lua
+++ b/tex/context/base/regi-8859-7.lua
@@ -1,26 +1,26 @@
-if not modules then modules = { } end modules ['regi-8859-7'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return { [0] =
- 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
- 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
- 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
- 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
- 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
- 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
- 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
- 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
- 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
- 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
- 0x00A0, 0x2018, 0x2019, 0x00A3, 0x20AC, 0x20AF, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x037A, 0x00AB, 0x00AC, 0x00AD, 0x0000, 0x2015,
- 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x0384, 0x0385, 0x0386, 0x00B7, 0x0388, 0x0389, 0x038A, 0x00BB, 0x038C, 0x00BD, 0x038E, 0x038F,
- 0x0390, 0x0391, 0x0392, 0x0393, 0x0394, 0x0395, 0x0396, 0x0397, 0x0398, 0x0399, 0x039A, 0x039B, 0x039C, 0x039D, 0x039E, 0x039F,
- 0x03A0, 0x03A1, 0x0000, 0x03A3, 0x03A4, 0x03A5, 0x03A6, 0x03A7, 0x03A8, 0x03A9, 0x03AA, 0x03AB, 0x03AC, 0x03AD, 0x03AE, 0x03AF,
- 0x03B0, 0x03B1, 0x03B2, 0x03B3, 0x03B4, 0x03B5, 0x03B6, 0x03B7, 0x03B8, 0x03B9, 0x03BA, 0x03BB, 0x03BC, 0x03BD, 0x03BE, 0x03BF,
- 0x03C0, 0x03C1, 0x03C2, 0x03C3, 0x03C4, 0x03C5, 0x03C6, 0x03C7, 0x03C8, 0x03C9, 0x03CA, 0x03CB, 0x03CC, 0x03CD, 0x03CE, 0x0000
-}
+if not modules then modules = { } end modules ['regi-8859-7'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
+ 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
+ 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
+ 0x00A0, 0x2018, 0x2019, 0x00A3, 0x20AC, 0x20AF, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x037A, 0x00AB, 0x00AC, 0x00AD, 0x0000, 0x2015,
+ 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x0384, 0x0385, 0x0386, 0x00B7, 0x0388, 0x0389, 0x038A, 0x00BB, 0x038C, 0x00BD, 0x038E, 0x038F,
+ 0x0390, 0x0391, 0x0392, 0x0393, 0x0394, 0x0395, 0x0396, 0x0397, 0x0398, 0x0399, 0x039A, 0x039B, 0x039C, 0x039D, 0x039E, 0x039F,
+ 0x03A0, 0x03A1, 0x0000, 0x03A3, 0x03A4, 0x03A5, 0x03A6, 0x03A7, 0x03A8, 0x03A9, 0x03AA, 0x03AB, 0x03AC, 0x03AD, 0x03AE, 0x03AF,
+ 0x03B0, 0x03B1, 0x03B2, 0x03B3, 0x03B4, 0x03B5, 0x03B6, 0x03B7, 0x03B8, 0x03B9, 0x03BA, 0x03BB, 0x03BC, 0x03BD, 0x03BE, 0x03BF,
+ 0x03C0, 0x03C1, 0x03C2, 0x03C3, 0x03C4, 0x03C5, 0x03C6, 0x03C7, 0x03C8, 0x03C9, 0x03CA, 0x03CB, 0x03CC, 0x03CD, 0x03CE, 0x0000
+}
diff --git a/tex/context/base/regi-8859-8.lua b/tex/context/base/regi-8859-8.lua
index b69609991..e72d7c7fb 100644
--- a/tex/context/base/regi-8859-8.lua
+++ b/tex/context/base/regi-8859-8.lua
@@ -1,26 +1,26 @@
-if not modules then modules = { } end modules ['regi-8859-8'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return { [0] =
- 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
- 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
- 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
- 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
- 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
- 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
- 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
- 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
- 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
- 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
- 0x00A0, 0x0000, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00D7, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF,
- 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00F7, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x0000,
- 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000,
- 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x2017,
- 0x05D0, 0x05D1, 0x05D2, 0x05D3, 0x05D4, 0x05D5, 0x05D6, 0x05D7, 0x05D8, 0x05D9, 0x05DA, 0x05DB, 0x05DC, 0x05DD, 0x05DE, 0x05DF,
- 0x05E0, 0x05E1, 0x05E2, 0x05E3, 0x05E4, 0x05E5, 0x05E6, 0x05E7, 0x05E8, 0x05E9, 0x05EA, 0x0000, 0x0000, 0x200E, 0x200F, 0x0000
-}
+if not modules then modules = { } end modules ['regi-8859-8'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
+ 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
+ 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
+ 0x00A0, 0x0000, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00D7, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF,
+ 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00F7, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x0000,
+ 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000,
+ 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x2017,
+ 0x05D0, 0x05D1, 0x05D2, 0x05D3, 0x05D4, 0x05D5, 0x05D6, 0x05D7, 0x05D8, 0x05D9, 0x05DA, 0x05DB, 0x05DC, 0x05DD, 0x05DE, 0x05DF,
+ 0x05E0, 0x05E1, 0x05E2, 0x05E3, 0x05E4, 0x05E5, 0x05E6, 0x05E7, 0x05E8, 0x05E9, 0x05EA, 0x0000, 0x0000, 0x200E, 0x200F, 0x0000
+}
diff --git a/tex/context/base/regi-8859-9.lua b/tex/context/base/regi-8859-9.lua
index 773307fff..eb9515af9 100644
--- a/tex/context/base/regi-8859-9.lua
+++ b/tex/context/base/regi-8859-9.lua
@@ -1,26 +1,26 @@
-if not modules then modules = { } end modules ['regi-8859-9'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return { [0] =
- 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
- 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
- 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
- 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
- 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
- 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
- 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
- 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
- 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
- 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
- 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF,
- 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00BA, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF,
- 0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF,
- 0x011E, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x0130, 0x015E, 0x00DF,
- 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF,
- 0x011F, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x0131, 0x015F, 0x00FF
-}
+if not modules then modules = { } end modules ['regi-8859-9'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
+ 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x0080, 0x0081, 0x0082, 0x0083, 0x0084, 0x0085, 0x0086, 0x0087, 0x0088, 0x0089, 0x008A, 0x008B, 0x008C, 0x008D, 0x008E, 0x008F,
+ 0x0090, 0x0091, 0x0092, 0x0093, 0x0094, 0x0095, 0x0096, 0x0097, 0x0098, 0x0099, 0x009A, 0x009B, 0x009C, 0x009D, 0x009E, 0x009F,
+ 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF,
+ 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00BA, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF,
+ 0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF,
+ 0x011E, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x0130, 0x015E, 0x00DF,
+ 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF,
+ 0x011F, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x0131, 0x015F, 0x00FF
+}
diff --git a/tex/context/base/regi-cp1250.lua b/tex/context/base/regi-cp1250.lua
index 00d55d1b8..80a4b8639 100644
--- a/tex/context/base/regi-cp1250.lua
+++ b/tex/context/base/regi-cp1250.lua
@@ -1,26 +1,26 @@
-if not modules then modules = { } end modules ['regi-cp1250'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return { [0] =
- 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
- 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
- 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
- 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
- 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
- 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
- 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
- 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
- 0x20AC, 0x0000, 0x201A, 0x0000, 0x201E, 0x2026, 0x2020, 0x2021, 0x0000, 0x2030, 0x0160, 0x2039, 0x015A, 0x0164, 0x017D, 0x0179,
- 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x0000, 0x2122, 0x0161, 0x203A, 0x015B, 0x0165, 0x017E, 0x017A,
- 0x00A0, 0x02C7, 0x02D8, 0x0141, 0x00A4, 0x0104, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x015E, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x017B,
- 0x00B0, 0x00B1, 0x02DB, 0x0142, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x0105, 0x015F, 0x00BB, 0x013D, 0x02DD, 0x013E, 0x017C,
- 0x0154, 0x00C1, 0x00C2, 0x0102, 0x00C4, 0x0139, 0x0106, 0x00C7, 0x010C, 0x00C9, 0x0118, 0x00CB, 0x011A, 0x00CD, 0x00CE, 0x010E,
- 0x0110, 0x0143, 0x0147, 0x00D3, 0x00D4, 0x0150, 0x00D6, 0x00D7, 0x0158, 0x016E, 0x00DA, 0x0170, 0x00DC, 0x00DD, 0x0162, 0x00DF,
- 0x0155, 0x00E1, 0x00E2, 0x0103, 0x00E4, 0x013A, 0x0107, 0x00E7, 0x010D, 0x00E9, 0x0119, 0x00EB, 0x011B, 0x00ED, 0x00EE, 0x010F,
- 0x0111, 0x0144, 0x0148, 0x00F3, 0x00F4, 0x0151, 0x00F6, 0x00F7, 0x0159, 0x016F, 0x00FA, 0x0171, 0x00FC, 0x00FD, 0x0163, 0x02D9
-}
+if not modules then modules = { } end modules ['regi-cp1250'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
+ 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x20AC, 0x0000, 0x201A, 0x0000, 0x201E, 0x2026, 0x2020, 0x2021, 0x0000, 0x2030, 0x0160, 0x2039, 0x015A, 0x0164, 0x017D, 0x0179,
+ 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x0000, 0x2122, 0x0161, 0x203A, 0x015B, 0x0165, 0x017E, 0x017A,
+ 0x00A0, 0x02C7, 0x02D8, 0x0141, 0x00A4, 0x0104, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x015E, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x017B,
+ 0x00B0, 0x00B1, 0x02DB, 0x0142, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x0105, 0x015F, 0x00BB, 0x013D, 0x02DD, 0x013E, 0x017C,
+ 0x0154, 0x00C1, 0x00C2, 0x0102, 0x00C4, 0x0139, 0x0106, 0x00C7, 0x010C, 0x00C9, 0x0118, 0x00CB, 0x011A, 0x00CD, 0x00CE, 0x010E,
+ 0x0110, 0x0143, 0x0147, 0x00D3, 0x00D4, 0x0150, 0x00D6, 0x00D7, 0x0158, 0x016E, 0x00DA, 0x0170, 0x00DC, 0x00DD, 0x0162, 0x00DF,
+ 0x0155, 0x00E1, 0x00E2, 0x0103, 0x00E4, 0x013A, 0x0107, 0x00E7, 0x010D, 0x00E9, 0x0119, 0x00EB, 0x011B, 0x00ED, 0x00EE, 0x010F,
+ 0x0111, 0x0144, 0x0148, 0x00F3, 0x00F4, 0x0151, 0x00F6, 0x00F7, 0x0159, 0x016F, 0x00FA, 0x0171, 0x00FC, 0x00FD, 0x0163, 0x02D9
+}
diff --git a/tex/context/base/regi-cp1251.lua b/tex/context/base/regi-cp1251.lua
index 7bb72e0cc..07f1d81ad 100644
--- a/tex/context/base/regi-cp1251.lua
+++ b/tex/context/base/regi-cp1251.lua
@@ -1,26 +1,26 @@
-if not modules then modules = { } end modules ['regi-cp1251'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return { [0] =
- 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
- 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
- 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
- 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
- 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
- 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
- 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
- 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
- 0x0402, 0x0403, 0x201A, 0x0453, 0x201E, 0x2026, 0x2020, 0x2021, 0x20AC, 0x2030, 0x0409, 0x2039, 0x040A, 0x040C, 0x040B, 0x040F,
- 0x0452, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x0000, 0x2122, 0x0459, 0x203A, 0x045A, 0x045C, 0x045B, 0x045F,
- 0x00A0, 0x040E, 0x045E, 0x0408, 0x00A4, 0x0490, 0x00A6, 0x00A7, 0x0401, 0x00A9, 0x0404, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x0407,
- 0x00B0, 0x00B1, 0x0406, 0x0456, 0x0491, 0x00B5, 0x00B6, 0x00B7, 0x0451, 0x2116, 0x0454, 0x00BB, 0x0458, 0x0405, 0x0455, 0x0457,
- 0x0410, 0x0411, 0x0412, 0x0413, 0x0414, 0x0415, 0x0416, 0x0417, 0x0418, 0x0419, 0x041A, 0x041B, 0x041C, 0x041D, 0x041E, 0x041F,
- 0x0420, 0x0421, 0x0422, 0x0423, 0x0424, 0x0425, 0x0426, 0x0427, 0x0428, 0x0429, 0x042A, 0x042B, 0x042C, 0x042D, 0x042E, 0x042F,
- 0x0430, 0x0431, 0x0432, 0x0433, 0x0434, 0x0435, 0x0436, 0x0437, 0x0438, 0x0439, 0x043A, 0x043B, 0x043C, 0x043D, 0x043E, 0x043F,
- 0x0440, 0x0441, 0x0442, 0x0443, 0x0444, 0x0445, 0x0446, 0x0447, 0x0448, 0x0449, 0x044A, 0x044B, 0x044C, 0x044D, 0x044E, 0x044F
-}
+if not modules then modules = { } end modules ['regi-cp1251'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
+ 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x0402, 0x0403, 0x201A, 0x0453, 0x201E, 0x2026, 0x2020, 0x2021, 0x20AC, 0x2030, 0x0409, 0x2039, 0x040A, 0x040C, 0x040B, 0x040F,
+ 0x0452, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x0000, 0x2122, 0x0459, 0x203A, 0x045A, 0x045C, 0x045B, 0x045F,
+ 0x00A0, 0x040E, 0x045E, 0x0408, 0x00A4, 0x0490, 0x00A6, 0x00A7, 0x0401, 0x00A9, 0x0404, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x0407,
+ 0x00B0, 0x00B1, 0x0406, 0x0456, 0x0491, 0x00B5, 0x00B6, 0x00B7, 0x0451, 0x2116, 0x0454, 0x00BB, 0x0458, 0x0405, 0x0455, 0x0457,
+ 0x0410, 0x0411, 0x0412, 0x0413, 0x0414, 0x0415, 0x0416, 0x0417, 0x0418, 0x0419, 0x041A, 0x041B, 0x041C, 0x041D, 0x041E, 0x041F,
+ 0x0420, 0x0421, 0x0422, 0x0423, 0x0424, 0x0425, 0x0426, 0x0427, 0x0428, 0x0429, 0x042A, 0x042B, 0x042C, 0x042D, 0x042E, 0x042F,
+ 0x0430, 0x0431, 0x0432, 0x0433, 0x0434, 0x0435, 0x0436, 0x0437, 0x0438, 0x0439, 0x043A, 0x043B, 0x043C, 0x043D, 0x043E, 0x043F,
+ 0x0440, 0x0441, 0x0442, 0x0443, 0x0444, 0x0445, 0x0446, 0x0447, 0x0448, 0x0449, 0x044A, 0x044B, 0x044C, 0x044D, 0x044E, 0x044F
+}
diff --git a/tex/context/base/regi-cp1252.lua b/tex/context/base/regi-cp1252.lua
index 86954c9af..08bd22bf6 100644
--- a/tex/context/base/regi-cp1252.lua
+++ b/tex/context/base/regi-cp1252.lua
@@ -1,26 +1,26 @@
-if not modules then modules = { } end modules ['regi-cp1252'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return { [0] =
- 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
- 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
- 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
- 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
- 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
- 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
- 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
- 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
- 0x20AC, 0x0000, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021, 0x02C6, 0x2030, 0x0160, 0x2039, 0x0152, 0x0000, 0x017D, 0x0000,
- 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x02DC, 0x2122, 0x0161, 0x203A, 0x0153, 0x0000, 0x017E, 0x0178,
- 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF,
- 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00BA, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF,
- 0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF,
- 0x00D0, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x00DD, 0x00DE, 0x00DF,
- 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF,
- 0x00F0, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x00FD, 0x00FE, 0x00FF
-}
+if not modules then modules = { } end modules ['regi-cp1252'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
+ 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x20AC, 0x0000, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021, 0x02C6, 0x2030, 0x0160, 0x2039, 0x0152, 0x0000, 0x017D, 0x0000,
+ 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x02DC, 0x2122, 0x0161, 0x203A, 0x0153, 0x0000, 0x017E, 0x0178,
+ 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF,
+ 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00BA, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF,
+ 0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF,
+ 0x00D0, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x00DD, 0x00DE, 0x00DF,
+ 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF,
+ 0x00F0, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x00FD, 0x00FE, 0x00FF
+}
diff --git a/tex/context/base/regi-cp1253.lua b/tex/context/base/regi-cp1253.lua
index 31a411efe..d272692cf 100644
--- a/tex/context/base/regi-cp1253.lua
+++ b/tex/context/base/regi-cp1253.lua
@@ -1,26 +1,26 @@
-if not modules then modules = { } end modules ['regi-cp1253'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return { [0] =
- 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
- 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
- 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
- 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
- 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
- 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
- 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
- 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
- 0x20AC, 0x0000, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021, 0x0000, 0x2030, 0x0000, 0x2039, 0x0000, 0x0000, 0x0000, 0x0000,
- 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x0000, 0x2122, 0x0000, 0x203A, 0x0000, 0x0000, 0x0000, 0x0000,
- 0x00A0, 0x0385, 0x0386, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x0000, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x2015,
- 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x0384, 0x00B5, 0x00B6, 0x00B7, 0x0388, 0x0389, 0x038A, 0x00BB, 0x038C, 0x00BD, 0x038E, 0x038F,
- 0x0390, 0x0391, 0x0392, 0x0393, 0x0394, 0x0395, 0x0396, 0x0397, 0x0398, 0x0399, 0x039A, 0x039B, 0x039C, 0x039D, 0x039E, 0x039F,
- 0x03A0, 0x03A1, 0x0000, 0x03A3, 0x03A4, 0x03A5, 0x03A6, 0x03A7, 0x03A8, 0x03A9, 0x03AA, 0x03AB, 0x03AC, 0x03AD, 0x03AE, 0x03AF,
- 0x03B0, 0x03B1, 0x03B2, 0x03B3, 0x03B4, 0x03B5, 0x03B6, 0x03B7, 0x03B8, 0x03B9, 0x03BA, 0x03BB, 0x03BC, 0x03BD, 0x03BE, 0x03BF,
- 0x03C0, 0x03C1, 0x03C2, 0x03C3, 0x03C4, 0x03C5, 0x03C6, 0x03C7, 0x03C8, 0x03C9, 0x03CA, 0x03CB, 0x03CC, 0x03CD, 0x03CE, 0x0000
-}
+if not modules then modules = { } end modules ['regi-cp1253'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
+ 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x20AC, 0x0000, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021, 0x0000, 0x2030, 0x0000, 0x2039, 0x0000, 0x0000, 0x0000, 0x0000,
+ 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x0000, 0x2122, 0x0000, 0x203A, 0x0000, 0x0000, 0x0000, 0x0000,
+ 0x00A0, 0x0385, 0x0386, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x0000, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x2015,
+ 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x0384, 0x00B5, 0x00B6, 0x00B7, 0x0388, 0x0389, 0x038A, 0x00BB, 0x038C, 0x00BD, 0x038E, 0x038F,
+ 0x0390, 0x0391, 0x0392, 0x0393, 0x0394, 0x0395, 0x0396, 0x0397, 0x0398, 0x0399, 0x039A, 0x039B, 0x039C, 0x039D, 0x039E, 0x039F,
+ 0x03A0, 0x03A1, 0x0000, 0x03A3, 0x03A4, 0x03A5, 0x03A6, 0x03A7, 0x03A8, 0x03A9, 0x03AA, 0x03AB, 0x03AC, 0x03AD, 0x03AE, 0x03AF,
+ 0x03B0, 0x03B1, 0x03B2, 0x03B3, 0x03B4, 0x03B5, 0x03B6, 0x03B7, 0x03B8, 0x03B9, 0x03BA, 0x03BB, 0x03BC, 0x03BD, 0x03BE, 0x03BF,
+ 0x03C0, 0x03C1, 0x03C2, 0x03C3, 0x03C4, 0x03C5, 0x03C6, 0x03C7, 0x03C8, 0x03C9, 0x03CA, 0x03CB, 0x03CC, 0x03CD, 0x03CE, 0x0000
+}
diff --git a/tex/context/base/regi-cp1254.lua b/tex/context/base/regi-cp1254.lua
index 73b9927c6..c8ef03da9 100644
--- a/tex/context/base/regi-cp1254.lua
+++ b/tex/context/base/regi-cp1254.lua
@@ -1,26 +1,26 @@
-if not modules then modules = { } end modules ['regi-cp1254'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return { [0] =
- 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
- 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
- 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
- 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
- 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
- 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
- 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
- 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
- 0x20AC, 0x0000, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021, 0x02C6, 0x2030, 0x0160, 0x2039, 0x0152, 0x0000, 0x0000, 0x0000,
- 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x02DC, 0x2122, 0x0161, 0x203A, 0x0153, 0x0000, 0x0000, 0x0178,
- 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF,
- 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00BA, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF,
- 0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF,
- 0x011E, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x0130, 0x015E, 0x00DF,
- 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF,
- 0x011F, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x0131, 0x015F, 0x00FF
-}
+if not modules then modules = { } end modules ['regi-cp1254'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
+ 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x20AC, 0x0000, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021, 0x02C6, 0x2030, 0x0160, 0x2039, 0x0152, 0x0000, 0x0000, 0x0000,
+ 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x02DC, 0x2122, 0x0161, 0x203A, 0x0153, 0x0000, 0x0000, 0x0178,
+ 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF,
+ 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00BA, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF,
+ 0x00C0, 0x00C1, 0x00C2, 0x00C3, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x00CC, 0x00CD, 0x00CE, 0x00CF,
+ 0x011E, 0x00D1, 0x00D2, 0x00D3, 0x00D4, 0x00D5, 0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x0130, 0x015E, 0x00DF,
+ 0x00E0, 0x00E1, 0x00E2, 0x00E3, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x00EC, 0x00ED, 0x00EE, 0x00EF,
+ 0x011F, 0x00F1, 0x00F2, 0x00F3, 0x00F4, 0x00F5, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x0131, 0x015F, 0x00FF
+}
diff --git a/tex/context/base/regi-cp1255.lua b/tex/context/base/regi-cp1255.lua
index 2abb16b54..7f33b67a8 100644
--- a/tex/context/base/regi-cp1255.lua
+++ b/tex/context/base/regi-cp1255.lua
@@ -1,26 +1,26 @@
-if not modules then modules = { } end modules ['regi-cp1255'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return { [0] =
- 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
- 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
- 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
- 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
- 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
- 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
- 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
- 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
- 0x20AC, 0x0000, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021, 0x02C6, 0x2030, 0x0000, 0x2039, 0x0000, 0x0000, 0x0000, 0x0000,
- 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x02DC, 0x2122, 0x0000, 0x203A, 0x0000, 0x0000, 0x0000, 0x0000,
- 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x20AA, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00D7, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF,
- 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00F7, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF,
- 0x05B0, 0x05B1, 0x05B2, 0x05B3, 0x05B4, 0x05B5, 0x05B6, 0x05B7, 0x05B8, 0x05B9, 0x0000, 0x05BB, 0x05BC, 0x05BD, 0x05BE, 0x05BF,
- 0x05C0, 0x05C1, 0x05C2, 0x05C3, 0x05F0, 0x05F1, 0x05F2, 0x05F3, 0x05F4, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000,
- 0x05D0, 0x05D1, 0x05D2, 0x05D3, 0x05D4, 0x05D5, 0x05D6, 0x05D7, 0x05D8, 0x05D9, 0x05DA, 0x05DB, 0x05DC, 0x05DD, 0x05DE, 0x05DF,
- 0x05E0, 0x05E1, 0x05E2, 0x05E3, 0x05E4, 0x05E5, 0x05E6, 0x05E7, 0x05E8, 0x05E9, 0x05EA, 0x0000, 0x0000, 0x200E, 0x200F, 0x0000
-}
+if not modules then modules = { } end modules ['regi-cp1255'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
+ 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x20AC, 0x0000, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021, 0x02C6, 0x2030, 0x0000, 0x2039, 0x0000, 0x0000, 0x0000, 0x0000,
+ 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x02DC, 0x2122, 0x0000, 0x203A, 0x0000, 0x0000, 0x0000, 0x0000,
+ 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x20AA, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00D7, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF,
+ 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00F7, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF,
+ 0x05B0, 0x05B1, 0x05B2, 0x05B3, 0x05B4, 0x05B5, 0x05B6, 0x05B7, 0x05B8, 0x05B9, 0x0000, 0x05BB, 0x05BC, 0x05BD, 0x05BE, 0x05BF,
+ 0x05C0, 0x05C1, 0x05C2, 0x05C3, 0x05F0, 0x05F1, 0x05F2, 0x05F3, 0x05F4, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000, 0x0000,
+ 0x05D0, 0x05D1, 0x05D2, 0x05D3, 0x05D4, 0x05D5, 0x05D6, 0x05D7, 0x05D8, 0x05D9, 0x05DA, 0x05DB, 0x05DC, 0x05DD, 0x05DE, 0x05DF,
+ 0x05E0, 0x05E1, 0x05E2, 0x05E3, 0x05E4, 0x05E5, 0x05E6, 0x05E7, 0x05E8, 0x05E9, 0x05EA, 0x0000, 0x0000, 0x200E, 0x200F, 0x0000
+}
diff --git a/tex/context/base/regi-cp1256.lua b/tex/context/base/regi-cp1256.lua
index a0697c321..e9a4363c7 100644
--- a/tex/context/base/regi-cp1256.lua
+++ b/tex/context/base/regi-cp1256.lua
@@ -1,26 +1,26 @@
-if not modules then modules = { } end modules ['regi-cp1256'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return { [0] =
- 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
- 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
- 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
- 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
- 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
- 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
- 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
- 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
- 0x20AC, 0x067E, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021, 0x02C6, 0x2030, 0x0679, 0x2039, 0x0152, 0x0686, 0x0698, 0x0688,
- 0x06AF, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x06A9, 0x2122, 0x0691, 0x203A, 0x0153, 0x200C, 0x200D, 0x06BA,
- 0x00A0, 0x060C, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x06BE, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF,
- 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x061B, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x061F,
- 0x06C1, 0x0621, 0x0622, 0x0623, 0x0624, 0x0625, 0x0626, 0x0627, 0x0628, 0x0629, 0x062A, 0x062B, 0x062C, 0x062D, 0x062E, 0x062F,
- 0x0630, 0x0631, 0x0632, 0x0633, 0x0634, 0x0635, 0x0636, 0x00D7, 0x0637, 0x0638, 0x0639, 0x063A, 0x0640, 0x0641, 0x0642, 0x0643,
- 0x00E0, 0x0644, 0x00E2, 0x0645, 0x0646, 0x0647, 0x0648, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x0649, 0x064A, 0x00EE, 0x00EF,
- 0x064B, 0x064C, 0x064D, 0x064E, 0x00F4, 0x064F, 0x0650, 0x00F7, 0x0651, 0x00F9, 0x0652, 0x00FB, 0x00FC, 0x200E, 0x200F, 0x06D2
-}
+if not modules then modules = { } end modules ['regi-cp1256'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
+ 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x20AC, 0x067E, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021, 0x02C6, 0x2030, 0x0679, 0x2039, 0x0152, 0x0686, 0x0698, 0x0688,
+ 0x06AF, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x06A9, 0x2122, 0x0691, 0x203A, 0x0153, 0x200C, 0x200D, 0x06BA,
+ 0x00A0, 0x060C, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x06BE, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF,
+ 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x061B, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x061F,
+ 0x06C1, 0x0621, 0x0622, 0x0623, 0x0624, 0x0625, 0x0626, 0x0627, 0x0628, 0x0629, 0x062A, 0x062B, 0x062C, 0x062D, 0x062E, 0x062F,
+ 0x0630, 0x0631, 0x0632, 0x0633, 0x0634, 0x0635, 0x0636, 0x00D7, 0x0637, 0x0638, 0x0639, 0x063A, 0x0640, 0x0641, 0x0642, 0x0643,
+ 0x00E0, 0x0644, 0x00E2, 0x0645, 0x0646, 0x0647, 0x0648, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x0649, 0x064A, 0x00EE, 0x00EF,
+ 0x064B, 0x064C, 0x064D, 0x064E, 0x00F4, 0x064F, 0x0650, 0x00F7, 0x0651, 0x00F9, 0x0652, 0x00FB, 0x00FC, 0x200E, 0x200F, 0x06D2
+}
diff --git a/tex/context/base/regi-cp1257.lua b/tex/context/base/regi-cp1257.lua
index 6e39c10d4..a4a492a13 100644
--- a/tex/context/base/regi-cp1257.lua
+++ b/tex/context/base/regi-cp1257.lua
@@ -1,26 +1,26 @@
-if not modules then modules = { } end modules ['regi-cp1257'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return { [0] =
- 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
- 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
- 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
- 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
- 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
- 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
- 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
- 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
- 0x20AC, 0x0000, 0x201A, 0x0000, 0x201E, 0x2026, 0x2020, 0x2021, 0x0000, 0x2030, 0x0000, 0x2039, 0x0000, 0x00A8, 0x02C7, 0x00B8,
- 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x0000, 0x2122, 0x0000, 0x203A, 0x0000, 0x00AF, 0x02DB, 0x0000,
- 0x00A0, 0x0000, 0x00A2, 0x00A3, 0x00A4, 0x0000, 0x00A6, 0x00A7, 0x00D8, 0x00A9, 0x0156, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00C6,
- 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00F8, 0x00B9, 0x0157, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00E6,
- 0x0104, 0x012E, 0x0100, 0x0106, 0x00C4, 0x00C5, 0x0118, 0x0112, 0x010C, 0x00C9, 0x0179, 0x0116, 0x0122, 0x0136, 0x012A, 0x013B,
- 0x0160, 0x0143, 0x0145, 0x00D3, 0x014C, 0x00D5, 0x00D6, 0x00D7, 0x0172, 0x0141, 0x015A, 0x016A, 0x00DC, 0x017B, 0x017D, 0x00DF,
- 0x0105, 0x012F, 0x0101, 0x0107, 0x00E4, 0x00E5, 0x0119, 0x0113, 0x010D, 0x00E9, 0x017A, 0x0117, 0x0123, 0x0137, 0x012B, 0x013C,
- 0x0161, 0x0144, 0x0146, 0x00F3, 0x014D, 0x00F5, 0x00F6, 0x00F7, 0x0173, 0x0142, 0x015B, 0x016B, 0x00FC, 0x017C, 0x017E, 0x02D9
-}
+if not modules then modules = { } end modules ['regi-cp1257'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
+ 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x20AC, 0x0000, 0x201A, 0x0000, 0x201E, 0x2026, 0x2020, 0x2021, 0x0000, 0x2030, 0x0000, 0x2039, 0x0000, 0x00A8, 0x02C7, 0x00B8,
+ 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x0000, 0x2122, 0x0000, 0x203A, 0x0000, 0x00AF, 0x02DB, 0x0000,
+ 0x00A0, 0x0000, 0x00A2, 0x00A3, 0x00A4, 0x0000, 0x00A6, 0x00A7, 0x00D8, 0x00A9, 0x0156, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00C6,
+ 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00F8, 0x00B9, 0x0157, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00E6,
+ 0x0104, 0x012E, 0x0100, 0x0106, 0x00C4, 0x00C5, 0x0118, 0x0112, 0x010C, 0x00C9, 0x0179, 0x0116, 0x0122, 0x0136, 0x012A, 0x013B,
+ 0x0160, 0x0143, 0x0145, 0x00D3, 0x014C, 0x00D5, 0x00D6, 0x00D7, 0x0172, 0x0141, 0x015A, 0x016A, 0x00DC, 0x017B, 0x017D, 0x00DF,
+ 0x0105, 0x012F, 0x0101, 0x0107, 0x00E4, 0x00E5, 0x0119, 0x0113, 0x010D, 0x00E9, 0x017A, 0x0117, 0x0123, 0x0137, 0x012B, 0x013C,
+ 0x0161, 0x0144, 0x0146, 0x00F3, 0x014D, 0x00F5, 0x00F6, 0x00F7, 0x0173, 0x0142, 0x015B, 0x016B, 0x00FC, 0x017C, 0x017E, 0x02D9
+}
diff --git a/tex/context/base/regi-cp1258.lua b/tex/context/base/regi-cp1258.lua
index cf64d2ab6..a4630e7e9 100644
--- a/tex/context/base/regi-cp1258.lua
+++ b/tex/context/base/regi-cp1258.lua
@@ -1,26 +1,26 @@
-if not modules then modules = { } end modules ['regi-cp1258'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-return { [0] =
- 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
- 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
- 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
- 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
- 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
- 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
- 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
- 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
- 0x20AC, 0x0000, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021, 0x02C6, 0x2030, 0x0000, 0x2039, 0x0152, 0x0000, 0x0000, 0x0000,
- 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x02DC, 0x2122, 0x0000, 0x203A, 0x0153, 0x0000, 0x0000, 0x0178,
- 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF,
- 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00BA, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF,
- 0x00C0, 0x00C1, 0x00C2, 0x0102, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x0300, 0x00CD, 0x00CE, 0x00CF,
- 0x0110, 0x00D1, 0x0309, 0x00D3, 0x00D4, 0x01A0, 0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x01AF, 0x0303, 0x00DF,
- 0x00E0, 0x00E1, 0x00E2, 0x0103, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x0301, 0x00ED, 0x00EE, 0x00EF,
- 0x0111, 0x00F1, 0x0323, 0x00F3, 0x00F4, 0x01A1, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x01B0, 0x20AB, 0x00FF
-}
+if not modules then modules = { } end modules ['regi-cp1258'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+return { [0] =
+ 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000A, 0x000B, 0x000C, 0x000D, 0x000E, 0x000F,
+ 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0016, 0x0017, 0x0018, 0x0019, 0x001A, 0x001B, 0x001C, 0x001D, 0x001E, 0x001F,
+ 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002A, 0x002B, 0x002C, 0x002D, 0x002E, 0x002F,
+ 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003A, 0x003B, 0x003C, 0x003D, 0x003E, 0x003F,
+ 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004A, 0x004B, 0x004C, 0x004D, 0x004E, 0x004F,
+ 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005A, 0x005B, 0x005C, 0x005D, 0x005E, 0x005F,
+ 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, 0x006B, 0x006C, 0x006D, 0x006E, 0x006F,
+ 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007A, 0x007B, 0x007C, 0x007D, 0x007E, 0x007F,
+ 0x20AC, 0x0000, 0x201A, 0x0192, 0x201E, 0x2026, 0x2020, 0x2021, 0x02C6, 0x2030, 0x0000, 0x2039, 0x0152, 0x0000, 0x0000, 0x0000,
+ 0x0000, 0x2018, 0x2019, 0x201C, 0x201D, 0x2022, 0x2013, 0x2014, 0x02DC, 0x2122, 0x0000, 0x203A, 0x0153, 0x0000, 0x0000, 0x0178,
+ 0x00A0, 0x00A1, 0x00A2, 0x00A3, 0x00A4, 0x00A5, 0x00A6, 0x00A7, 0x00A8, 0x00A9, 0x00AA, 0x00AB, 0x00AC, 0x00AD, 0x00AE, 0x00AF,
+ 0x00B0, 0x00B1, 0x00B2, 0x00B3, 0x00B4, 0x00B5, 0x00B6, 0x00B7, 0x00B8, 0x00B9, 0x00BA, 0x00BB, 0x00BC, 0x00BD, 0x00BE, 0x00BF,
+ 0x00C0, 0x00C1, 0x00C2, 0x0102, 0x00C4, 0x00C5, 0x00C6, 0x00C7, 0x00C8, 0x00C9, 0x00CA, 0x00CB, 0x0300, 0x00CD, 0x00CE, 0x00CF,
+ 0x0110, 0x00D1, 0x0309, 0x00D3, 0x00D4, 0x01A0, 0x00D6, 0x00D7, 0x00D8, 0x00D9, 0x00DA, 0x00DB, 0x00DC, 0x01AF, 0x0303, 0x00DF,
+ 0x00E0, 0x00E1, 0x00E2, 0x0103, 0x00E4, 0x00E5, 0x00E6, 0x00E7, 0x00E8, 0x00E9, 0x00EA, 0x00EB, 0x0301, 0x00ED, 0x00EE, 0x00EF,
+ 0x0111, 0x00F1, 0x0323, 0x00F3, 0x00F4, 0x01A1, 0x00F6, 0x00F7, 0x00F8, 0x00F9, 0x00FA, 0x00FB, 0x00FC, 0x01B0, 0x20AB, 0x00FF
+}
diff --git a/tex/context/base/regi-demo.lua b/tex/context/base/regi-demo.lua
index 689f44e32..f709a11aa 100644
--- a/tex/context/base/regi-demo.lua
+++ b/tex/context/base/regi-demo.lua
@@ -1,22 +1,22 @@
-if not modules then modules = { } end modules ['regi-demo'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- digits -> *
-
-return {
- [0x0030] = 0x002A,
- [0x0031] = 0x002A,
- [0x0032] = 0x002A,
- [0x0033] = 0x002A,
- [0x0034] = 0x002A,
- [0x0035] = 0x002A,
- [0x0036] = 0x002A,
- [0x0037] = 0x002A,
- [0x0038] = 0x002A,
- [0x0039] = 0x002A,
-}
+if not modules then modules = { } end modules ['regi-demo'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- digits -> *
+
+return {
+ [0x0030] = 0x002A,
+ [0x0031] = 0x002A,
+ [0x0032] = 0x002A,
+ [0x0033] = 0x002A,
+ [0x0034] = 0x002A,
+ [0x0035] = 0x002A,
+ [0x0036] = 0x002A,
+ [0x0037] = 0x002A,
+ [0x0038] = 0x002A,
+ [0x0039] = 0x002A,
+}
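
Unlike the code-page files, regi-demo.lua returns a sparse table: only the ten ASCII digits are remapped, each to 0x002A (an asterisk). It is consumed the same way, since the loader iterates whatever pairs the table provides. A small sketch along the lines of the previous one, again with illustrative names:

-- sketch only: apply the sparse demo vector to a line of input
local data = dofile("regi-demo.lua")                 -- { [0x30] = 0x2A, ..., [0x39] = 0x2A }

local map = { }
for byte, unicode in next, data do
    map[string.char(byte)] = string.char(unicode)    -- all values are plain ASCII here
end

local masked = ("call 555-0100 now"):gsub(".", map)  -- bytes without an entry pass through unchanged
print(masked)                                        -- call ***-**** now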
diff --git a/tex/context/base/regi-ini.lua b/tex/context/base/regi-ini.lua
index d5d278b16..784a1ed46 100644
--- a/tex/context/base/regi-ini.lua
+++ b/tex/context/base/regi-ini.lua
@@ -1,388 +1,388 @@
-if not modules then modules = { } end modules ['regi-ini'] = {
- version = 1.001,
- comment = "companion to regi-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-Regimes take care of converting the input characters into utf
-sequences. The conversion tables are loaded at runtime.
-
-We will hook regime handling code into the input methods.
---ldx]]--
-
-local trace_translating = false trackers.register("regimes.translating", function(v) trace_translating = v end)
-
-local report_loading = logs.reporter("regimes","loading")
-local report_translating = logs.reporter("regimes","translating")
-
-regimes = regimes or { }
-local regimes = regimes
-
-local mapping = allocate {
- utf = false
-}
-
-local backmapping = allocate {
-}
-
--- regimes.mapping = mapping
-
-local synonyms = { -- backward compatibility list
-
- ["windows-1250"] = "cp1250",
- ["windows-1251"] = "cp1251",
- ["windows-1252"] = "cp1252",
- ["windows-1253"] = "cp1253",
- ["windows-1254"] = "cp1254",
- ["windows-1255"] = "cp1255",
- ["windows-1256"] = "cp1256",
- ["windows-1257"] = "cp1257",
- ["windows-1258"] = "cp1258",
-
- ["il1"] = "8859-1",
- ["il2"] = "8859-2",
- ["il3"] = "8859-3",
- ["il4"] = "8859-4",
- ["il5"] = "8859-9",
- ["il6"] = "8859-10",
- ["il7"] = "8859-13",
- ["il8"] = "8859-14",
- ["il9"] = "8859-15",
- ["il10"] = "8859-16",
-
- ["iso-8859-1"] = "8859-1",
- ["iso-8859-2"] = "8859-2",
- ["iso-8859-3"] = "8859-3",
- ["iso-8859-4"] = "8859-4",
- ["iso-8859-9"] = "8859-9",
- ["iso-8859-10"] = "8859-10",
- ["iso-8859-13"] = "8859-13",
- ["iso-8859-14"] = "8859-14",
- ["iso-8859-15"] = "8859-15",
- ["iso-8859-16"] = "8859-16",
-
- ["latin1"] = "8859-1",
- ["latin2"] = "8859-2",
- ["latin3"] = "8859-3",
- ["latin4"] = "8859-4",
- ["latin5"] = "8859-9",
- ["latin6"] = "8859-10",
- ["latin7"] = "8859-13",
- ["latin8"] = "8859-14",
- ["latin9"] = "8859-15",
- ["latin10"] = "8859-16",
-
- ["utf-8"] = "utf",
- ["utf8"] = "utf",
- [""] = "utf",
-
- ["windows"] = "cp1252",
-
-}
-
-local currentregime = "utf"
-
-local function loadregime(mapping,regime)
- local name = resolvers.findfile(format("regi-%s.lua",regime)) or ""
- local data = name ~= "" and dofile(name)
- if data then
- vector = { }
- for eightbit, unicode in next, data do
- vector[char(eightbit)] = utfchar(unicode)
- end
- report_loading("vector %a is loaded",regime)
- else
- vector = false
- report_loading("vector %a is unknown",regime)
- end
- mapping[regime] = vector
- return vector
-end
-
-local function loadreverse(t,k)
- local t = { }
- for k, v in next, mapping[k] do
- t[v] = k
- end
- backmapping[k] = t
- return t
-end
-
-setmetatableindex(mapping, loadregime)
-setmetatableindex(backmapping,loadreverse)
-
-local function translate(line,regime)
- if line and #line > 0 then
- local map = mapping[regime and synonyms[regime] or regime or currentregime]
- if map then
- line = gsub(line,".",map)
- end
- end
- return line
-end
-
--- local remappers = { }
---
--- local function toregime(vector,str,default) -- toregime('8859-1',"abcde Ä","?")
--- local t = backmapping[vector]
--- local remapper = remappers[vector]
--- if not remapper then
--- remapper = utf.remapper(t)
--- remappers[t] = remapper
--- end
--- local m = getmetatable(t)
--- setmetatableindex(t, function(t,k)
--- local v = default or "?"
--- t[k] = v
--- return v
--- end)
--- str = remapper(str)
--- setmetatable(t,m)
--- return str
--- end
---
--- -- much faster (but only matters when we have > 10K calls
-
-local cache = { } -- if really needed we can copy vectors and hash defaults
-
-setmetatableindex(cache, function(t,k)
- local v = { remappers = { } }
- t[k] = v
- return v
-end)
-
-local function toregime(vector,str,default) -- toregime('8859-1',"abcde Ä","?")
- local d = default or "?"
- local c = cache[vector].remappers
- local r = c[d]
- if not r then
- local t = fastcopy(backmapping[vector])
- setmetatableindex(t, function(t,k)
- local v = d
- t[k] = v
- return v
- end)
- r = utf.remapper(t)
- c[d] = r
- end
- return r(str)
-end
-
-local function disable()
- currentregime = "utf"
- sequencers.disableaction(textlineactions,"regimes.process")
-end
-
-local function enable(regime)
- regime = synonyms[regime] or regime
- if mapping[regime] == false then
- disable()
- else
- currentregime = regime
- sequencers.enableaction(textlineactions,"regimes.process")
- end
-end
-
-regimes.toregime = toregime
-regimes.translate = translate
-regimes.enable = enable
-regimes.disable = disable
-
--- The following function can be used when we want to make sure that
--- utf gets passed unharmed. This is needed for modules.
-
-local level = 0
-
-function regimes.process(str,filename,currentline,noflines,coding)
- if level == 0 and coding ~= "utf-8" then
- str = translate(str,currentregime)
- if trace_translating then
- report_translating("utf: %s",str)
- end
- end
- return str
-end
-
-local function push()
- level = level + 1
- if trace_translating then
- report_translating("pushing level %s",level)
- end
-end
-
-local function pop()
- if level > 0 then
- if trace_translating then
- report_translating("popping level %s",level)
- end
- level = level - 1
- end
-end
-
-regimes.push = push
-regimes.pop = pop
-
-sequencers.prependaction(textlineactions,"system","regimes.process")
-sequencers.disableaction(textlineactions,"regimes.process")
-
--- interface:
-
-commands.enableregime = enable
-commands.disableregime = disable
-
-commands.pushregime = push
-commands.popregime = pop
-
-function commands.currentregime()
- context(currentregime)
-end
-
-local stack = { }
-
-function commands.startregime(regime)
- insert(stack,currentregime)
- if trace_translating then
- report_translating("start using %a",regime)
- end
- enable(regime)
-end
-
-function commands.stopregime()
- if #stack > 0 then
- local regime = remove(stack)
- if trace_translating then
- report_translating("stop using %a",regime)
- end
- enable(regime)
- end
-end
-
--- Next we provide some hacks. Unfortunately we run into crappy encoded
-- (read : mixed) encoded xml files that have these Ã« Ã¤ Ã¶ Ã¼ sequences
--- instead of ë ä ö ü
-
-local patterns = { }
-
--- function regimes.cleanup(regime,str)
--- local p = patterns[regime]
--- if p == nil then
--- regime = regime and synonyms[regime] or regime or currentregime
--- local vector = regime ~= "utf" and mapping[regime]
--- if vector then
--- local list = { }
--- for k, uchar in next, vector do
--- local stream = totable(uchar)
--- for i=1,#stream do
--- stream[i] = vector[stream[i]]
--- end
--- list[concat(stream)] = uchar
--- end
--- p = lpeg.append(list,nil,true)
--- p = Cs((p+1)^0)
--- -- lpeg.print(p) -- size 1604
--- else
--- p = false
--- end
--- patterns[vector] = p
--- end
--- return p and lpegmatch(p,str) or str
--- end
---
--- twice as fast and much less lpeg bytecode
-
-function regimes.cleanup(regime,str)
- local p = patterns[regime]
- if p == nil then
- regime = regime and synonyms[regime] or regime or currentregime
- local vector = regime ~= "utf" and mapping[regime]
- if vector then
- local utfchars = { }
- local firsts = { }
- for k, uchar in next, vector do
- local stream = { }
- local split = totable(uchar)
- local nofsplits = #split
- if nofsplits > 1 then
- local first
- for i=1,nofsplits do
- local u = vector[split[i]]
- if not first then
- first = firsts[u]
- if not first then
- first = { }
- firsts[u] = first
- end
- end
- stream[i] = u
- end
- local nofstream = #stream
- if nofstream > 1 then
- first[#first+1] = concat(stream,2,nofstream)
- utfchars[concat(stream)] = uchar
- end
- end
- end
- p = P(false)
- for k, v in next, firsts do
- local q = P(false)
- for i=1,#v do
- q = q + P(v[i])
- end
- p = p + P(k) * q
- end
- p = Cs(((p+1)/utfchars)^1)
- -- lpeg.print(p) -- size: 1042
- else
- p = false
- end
- patterns[regime] = p
- end
- return p and lpegmatch(p,str) or str
-end
-
--- local map = require("regi-cp1252")
--- local old = [[test ë ä ö ü crap]]
--- local new = correctencoding(map,old)
---
--- print(old,new)
-
--- obsolete:
---
--- function regimes.setsynonym(synonym,target)
--- synonyms[synonym] = target
--- end
---
--- function regimes.truename(regime)
--- return regime and synonyms[regime] or regime or currentregime
--- end
---
--- commands.setregimesynonym = regimes.setsynonym
---
--- function commands.trueregimename(regime)
--- context(regimes.truename(regime))
--- end
---
--- function regimes.load(regime)
--- return mapping[synonyms[regime] or regime]
--- end
+if not modules then modules = { } end modules ['regi-ini'] = {
+ version = 1.001,
+ comment = "companion to regi-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[ldx--
+Regimes take care of converting the input characters into utf
+sequences. The conversion tables are loaded at runtime.
")
- else -- if v == "integerscale" then
- context(tostring(tk))
- end
- context.NC()
- local synonym = (not prefix and synonyms[k]) or (prefix and synonyms[format("%s.%s",prefix,k)])
- if synonym then
- context(format("(%s)",concat(synonym," ")))
- end
- context.NC()
- context.NR()
- elseif nesting == false then
- context("
")
+ else -- if v == "integerscale" then
+ context(tostring(tk))
+ end
+ context.NC()
+ local synonym = (not prefix and synonyms[k]) or (prefix and synonyms[format("%s.%s",prefix,k)])
+ if synonym then
+ context(format("(%s)",concat(synonym," ")))
+ end
+ context.NC()
+ context.NR()
+ elseif nesting == false then
+ context("
")
+ else -- true or nil
+ typesettable(t[k],v,synonyms,nesting,k)
+ end
+ end
+ if not prefix then
+ context.stoptabulate()
+ end
+ end
+end
+
+local function typeset(t,keys,nesting,prefix)
+ local synonyms = keys.synonyms or { }
+ local collected = { }
+ for k, v in next, synonyms do
+ local c = collected[v]
+ if not c then
+ c = { }
+ collected[v] = c
+ end
+ c[#c+1] = k
+ end
+ for k, v in next, collected do
+ table.sort(v)
+ end
+ typesettable(t,keys,collected,nesting,prefix)
+end
+
+tabletracers.typeset = typeset
+
+function tabletracers.showproperties(nesting)
+ local tfmdata = fonts.hashes.identifiers[font.current()]
+ typeset(tfmdata.properties,fonts.constructors.keys.properties,nesting)
+end
+
+function tabletracers.showparameters(nesting)
+ local tfmdata = fonts.hashes.identifiers[font.current()]
+ typeset(tfmdata.parameters,fonts.constructors.keys.parameters,nesting)
+end
+
+function tabletracers.showpositionings()
+ local tfmdata = fonts.hashes.identifiers[font.current()]
+ local resources = tfmdata.resources
+ if resources then
+ local features = resources.features
+ if features then
+ local gpos = features.gpos
+ if gpos and next(gpos) then
+ context.starttabulate { "|Tl|Tl|Tlp|" }
+ for feature, scripts in sortedhash(gpos) do
+ for script, languages in sortedhash(scripts) do
+ context.NC()
+ context(feature)
+ context.NC()
+ context(script)
+ context.NC()
+ context(concat(sortedkeys(languages)," "))
+ context.NC()
+ context.NR()
+ end
+ end
+ context.stoptabulate()
+ else
+ context("no entries")
+ context.par()
+ end
+ end
+ end
+end
+
+local dynamics = true
+
+function tabletracers.showsubstitutions()
+ local tfmdata = fonts.hashes.identifiers[font.current()]
+ local resources = tfmdata.resources
+ if resources then
+ local features = resources.features
+ if features then
+ local gsub = features.gsub
+ if gsub then
+ local makes_sense = { }
+ for feature, scripts in sortedhash(gsub) do
+ for script, languages in sortedhash(scripts) do
+ for language in sortedhash(languages) do
+ local tag = format("dummy-%s-%s-%s",feature,script,language)
+ local fnt = format("file:%s*%s",file.basename(tfmdata.properties.filename),tag)
+ context.definefontfeature (
+ { tag },
+ {
+ mode = "node",
+ script = script,
+ language = language,
+ [feature] = "yes"
+ }
+ )
+ if not dynamics then
+ context.definefont( { fnt }, { fnt } )
+ end
+ makes_sense[#makes_sense+1] = {
+ feature = feature,
+ tag = tag,
+ script = script,
+ language = language,
+ fontname = fnt,
+ }
+ end
+ end
+ end
+ if #makes_sense > 0 then
+ context.starttabulate { "|Tl|Tl|Tl|p|" }
+ for i=1,#makes_sense do
+ local data = makes_sense[i]
+ local script = data.script
+ local language = data.language
+ context.NC()
+ context(data.feature)
+ context.NC()
+ context(script)
+ context.NC()
+ context(language)
+ context.NC()
+ if not dynamics then
+ context.startfont { data.fontname }
+ else
+ context.addff(data.tag)
+ end
+ context.verbatim(samples.lowercase [script][language]) context.par()
+ context.verbatim(samples.uppercase [script][language]) context.par()
+ context.verbatim(samples.digits [script][language]) context.par()
+ context.verbatim(samples.punctuation[script][language]) context.quad()
+ context.verbatim(samples.symbols [script][language])
+ if not dynamics then
+ context.stopfont()
+ end
+ context.NC()
+ context.NR()
+ end
+ context.stoptabulate()
+ else
+ context("no entries")
+ context.par()
+ end
+ end
+ end
+ end
+end
+
+function tabletracers.showall(specification) -- not interfaced
+
+ specification = interfaces.checkedspecification(specification)
+
+ if specification.title then
+ context.starttitle { title = specification.title }
+ end
+
+ context.startsubject { title = "Properties" }
+ tabletracers.showproperties()
+ context.stopsubject()
+
+ context.startsubject { title = "Parameters" }
+ tabletracers.showparameters()
+ context.stopsubject()
+
+ context.startsubject { title = "Positioning features" }
+ tabletracers.showpositionings()
+ context.stopsubject()
+
+ context.startsubject { title = "Substitution features" }
+ tabletracers.showsubstitutions()
+ context.stopsubject()
+
+ if title then
+ context.stoptitle()
+ end
+
+end
diff --git a/tex/context/base/s-fonts-vectors.lua b/tex/context/base/s-fonts-vectors.lua
index 1bac0ae8b..436f3e63d 100644
--- a/tex/context/base/s-fonts-vectors.lua
+++ b/tex/context/base/s-fonts-vectors.lua
@@ -1,104 +1,104 @@
-if not modules then modules = { } end modules ['s-fonts-vectors'] = {
- version = 1.001,
- comment = "companion to s-fonts-vectors.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-moduledata.fonts = moduledata.fonts or { }
-moduledata.fonts.protrusions = moduledata.fonts.protrusions or { }
-moduledata.fonts.expansions = moduledata.fonts.expansions or { }
-
-local NC, NR = context.NC, context.NR
-
-local classes = fonts.protrusions.classes
-local vectors = fonts.protrusions.vectors
-
-function moduledata.fonts.protrusions.showvector(specification)
- specification = interfaces.checkedspecification(specification)
- local vector = vectors[specification.name or "?"]
- if vector then
- context.blank()
- context.startcolumns { n = specification.columns or 3 }
- context.starttabulate { "|T||cw(.5em)||" }
- for unicode, values in table.sortedhash(vector) do
- NC() context("%U",unicode)
- NC() context("%.02f",values[1])
- NC() context("%c",unicode)
- NC() context("%.02f",values[2])
- NC() NR()
- end
- context.stoptabulate()
- context.stopcolumns()
- context.blank()
- end
-end
-
-function moduledata.fonts.protrusions.showclass(specification)
- specification = interfaces.checkedspecification(specification)
- local class = specification.name and classes[specification.name]
- local classes = class and { class} or classes
- context.starttabulate { "|l|l|r|r|r|" }
- NC() context.bold("name")
- NC() context.bold("vector")
- NC() context.bold("factor")
- NC() context.bold("left")
- NC() context.bold("right")
- NC() NR()
- for name, class in table.sortedhash(classes) do
- NC() context(name)
- NC() context(class.vector)
- NC() context("%.02f",class.factor)
- NC() context("%.02f",class.left)
- NC() context("%.02f",class.right)
- NC() NR()
- end
- context.stoptabulate()
-end
-
-local classes = fonts.expansions.classes
-local vectors = fonts.expansions.vectors
-
-function moduledata.fonts.expansions.showvector(specification)
- specification = interfaces.checkedspecification(specification)
- local vector = vectors[specification.name or "?"]
- if vector then
- context.blank()
- context.startcolumns { n = specification.columns or 3 }
- context.starttabulate { "|T|cw(.5em)||" }
- for unicode, value in table.sortedhash(vector) do
- NC() context("%U",unicode)
- NC() context("%c",unicode)
- NC() context("%.02f",value)
- NC() NR()
- end
- context.stoptabulate()
- context.stopcolumns()
- context.blank()
- end
-end
-
-function moduledata.fonts.expansions.showclass(specification)
- specification = interfaces.checkedspecification(specification)
- local class = specification.name and classes[specification.name]
- local classes = class and { class} or classes
- context.starttabulate { "|l|l|r|r|r|" }
- NC() context.bold("name")
- NC() context.bold("vector")
- NC() context.bold("step")
- NC() context.bold("factor")
- NC() context.bold("stretch")
- NC() context.bold("shrink")
- NC() NR()
- for name, class in table.sortedhash(classes) do
- NC() context(name)
- NC() context(class.vector)
- NC() context("%.02f",class.step)
- NC() context("%.02f",class.factor)
- NC() context("% 2i",class.stretch)
- NC() context("% 2i",class.shrink)
- NC() NR()
- end
- context.stoptabulate()
-end
+if not modules then modules = { } end modules ['s-fonts-vectors'] = {
+ version = 1.001,
+ comment = "companion to s-fonts-vectors.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+moduledata.fonts = moduledata.fonts or { }
+moduledata.fonts.protrusions = moduledata.fonts.protrusions or { }
+moduledata.fonts.expansions = moduledata.fonts.expansions or { }
+
+local NC, NR = context.NC, context.NR
+
+local classes = fonts.protrusions.classes
+local vectors = fonts.protrusions.vectors
+
+function moduledata.fonts.protrusions.showvector(specification)
+ specification = interfaces.checkedspecification(specification)
+ local vector = vectors[specification.name or "?"]
+ if vector then
+ context.blank()
+ context.startcolumns { n = specification.columns or 3 }
+ context.starttabulate { "|T||cw(.5em)||" }
+ for unicode, values in table.sortedhash(vector) do
+ NC() context("%U",unicode)
+ NC() context("%.02f",values[1])
+ NC() context("%c",unicode)
+ NC() context("%.02f",values[2])
+ NC() NR()
+ end
+ context.stoptabulate()
+ context.stopcolumns()
+ context.blank()
+ end
+end
+
+function moduledata.fonts.protrusions.showclass(specification)
+ specification = interfaces.checkedspecification(specification)
+ local class = specification.name and classes[specification.name]
+ local classes = class and { class } or classes
+ context.starttabulate { "|l|l|r|r|r|" }
+ NC() context.bold("name")
+ NC() context.bold("vector")
+ NC() context.bold("factor")
+ NC() context.bold("left")
+ NC() context.bold("right")
+ NC() NR()
+ for name, class in table.sortedhash(classes) do
+ NC() context(name)
+ NC() context(class.vector)
+ NC() context("%.02f",class.factor)
+ NC() context("%.02f",class.left)
+ NC() context("%.02f",class.right)
+ NC() NR()
+ end
+ context.stoptabulate()
+end
+
+local classes = fonts.expansions.classes
+local vectors = fonts.expansions.vectors
+
+function moduledata.fonts.expansions.showvector(specification)
+ specification = interfaces.checkedspecification(specification)
+ local vector = vectors[specification.name or "?"]
+ if vector then
+ context.blank()
+ context.startcolumns { n = specification.columns or 3 }
+ context.starttabulate { "|T|cw(.5em)||" }
+ for unicode, value in table.sortedhash(vector) do
+ NC() context("%U",unicode)
+ NC() context("%c",unicode)
+ NC() context("%.02f",value)
+ NC() NR()
+ end
+ context.stoptabulate()
+ context.stopcolumns()
+ context.blank()
+ end
+end
+
+function moduledata.fonts.expansions.showclass(specification)
+ specification = interfaces.checkedspecification(specification)
+ local class = specification.name and classes[specification.name]
+ local classes = class and { class } or classes
+ context.starttabulate { "|l|l|r|r|r|" }
+ NC() context.bold("name")
+ NC() context.bold("vector")
+ NC() context.bold("step")
+ NC() context.bold("factor")
+ NC() context.bold("stretch")
+ NC() context.bold("shrink")
+ NC() NR()
+ for name, class in table.sortedhash(classes) do
+ NC() context(name)
+ NC() context(class.vector)
+ NC() context("%.02f",class.step)
+ NC() context("%.02f",class.factor)
+ NC() context("% 2i",class.stretch)
+ NC() context("% 2i",class.shrink)
+ NC() NR()
+ end
+ context.stoptabulate()
+end
diff --git a/tex/context/base/s-lan-03.mkiv b/tex/context/base/s-lan-03.mkiv
index a490261f4..6b46a49b1 100644
--- a/tex/context/base/s-lan-03.mkiv
+++ b/tex/context/base/s-lan-03.mkiv
@@ -36,5 +36,5 @@ function languages.words.tracers.showwords(filename)
end
\stopluacode
-% \ctxlua{languages.words.tracers.showwords("words-003.words")}
+\ctxlua{languages.words.tracers.showwords("words-003.words")}
diff --git a/tex/context/base/s-languages-sorting.lua b/tex/context/base/s-languages-sorting.lua
new file mode 100644
index 000000000..b7d75f8b8
--- /dev/null
+++ b/tex/context/base/s-languages-sorting.lua
@@ -0,0 +1,118 @@
+if not modules then modules = { } end modules ['s-languages-sorting'] = {
+ version = 1.001,
+ comment = "companion to s-languages-sorting.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+moduledata.languages = moduledata.languages or { }
+moduledata.languages.sorting = moduledata.languages.sorting or { }
+
+local formatters = string.formatters
+local utfbyte, utfcharacters = utf.byte, utf.characters
+local sortedpairs = table.sortedpairs
+
+local definitions = sorters.definitions
+local constants = sorters.constants
+local replacementoffset = constants.replacementoffset
+
+local currentfont = font.current
+local fontchars = fonts.hashes.characters
+
+local c_darkblue = { "darkblue" }
+local c_darkred = { "darkred" }
+local f_chr = formatters["\\tttf%H"]
+
+local function chr(str,done)
+ if done then
+ context.space()
+ end
+ local c = fontchars[currentfont()]
+ for s in utfcharacters(str) do
+ local u = utfbyte(s)
+ if c[u] then
+ context(s)
+ elseif u > replacementoffset then
+ context.color(c_darkblue, f_chr(u))
+ else
+ context.color(c_darkred, f_chr(u))
+ end
+ end
+ return true
+end
+
+local function map(a,b,done)
+ if done then
+ context.space()
+ end
+ -- context.tttf()
+ chr(a)
+ context("=")
+ chr(b)
+ return true
+end
+
+local function nop()
+ -- context.tttf()
+ context("none")
+end
+
+local function key(data,field)
+ context.NC()
+ context(field)
+ context.NC()
+ context(data[field])
+ context.NC()
+ context.NR()
+end
+
+function moduledata.languages.sorting.showinstalled(tag)
+ if not tag or tag == "" or tag == interfaces.variables.all then
+ for tag, data in sortedpairs(definitions) do
+ moduledata.languages.sorting.showinstalled(tag)
+ end
+ else
+ sorters.update() -- syncs data
+ local data = definitions[tag]
+ if data then
+ context.starttabulate { "|lB|pl|" }
+ key(data,"language")
+ key(data,"parent")
+ key(data,"method")
+ context.NC()
+ context("replacements")
+ context.NC()
+ local replacements = data.replacements
+ if #replacements == 0 then
+ nop()
+ else
+ for i=1,#replacements do
+ local r = replacements[i]
+ map(r[1],r[2],i > 1)
+ end
+ end
+ context.NC()
+ context.NR()
+ context.NC()
+ context("order")
+ context.NC()
+ local orders = data.orders
+ for i=1,#orders do
+ chr(orders[i],i > 1)
+ end
+ context.NC()
+ context.NR()
+ context.NC()
+ context("entries")
+ context.NC()
+ local done = false
+ for k, e in sortedpairs(data.entries) do
+ done = map(k,e,done)
+ end
+ context.NC()
+ context.NR()
+ context.stoptabulate()
+ end
+ end
+end
diff --git a/tex/context/base/s-languages-sorting.mkiv b/tex/context/base/s-languages-sorting.mkiv
new file mode 100644
index 000000000..67acda6f9
--- /dev/null
+++ b/tex/context/base/s-languages-sorting.mkiv
@@ -0,0 +1,30 @@
+%D \module
+%D [ file=s-languages-sorting, % s-lan-02.mkiv
+%D version=2010.09.21,
+%D title=\CONTEXT\ Style File,
+%D subtitle=Language Sorting,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\startmodule[languages-sorting]
+
+\registerctxluafile{s-languages-sorting}{}
+
+\installmodulecommandluasingle \showinstalledsorting {moduledata.languages.sorting.showinstalled}
+
+\stopmodule
+
+\continueifinputfile{s-languages-sorting.mkiv}
+
+\usemodule[art-01]
+
+\starttext
+
+ \showinstalledsorting
+
+\stoptext
diff --git a/tex/context/base/s-languages-system.lua b/tex/context/base/s-languages-system.lua
new file mode 100644
index 000000000..4c27b5b2a
--- /dev/null
+++ b/tex/context/base/s-languages-system.lua
@@ -0,0 +1,35 @@
+if not modules then modules = { } end modules ['s-languages-system'] = {
+ version = 1.001,
+ comment = "companion to s-languages-system.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+moduledata.languages = moduledata.languages or { }
+moduledata.languages.system = moduledata.languages.system or { }
+
+local NC, NR, HL = context.NC, context.NR, context.HL
+
+function moduledata.languages.system.showinstalled()
+ local numbers = languages.numbers
+ local registered = languages.registered
+ context.starttabulate { "|r|l|l|l|l|" }
+ NC() context("id")
+ NC() context("tag")
+ NC() context("synonyms")
+ NC() context("parent")
+ NC() context("loaded")
+ NC() NR() HL()
+ for i=1,#numbers do
+ local tag = numbers[i]
+ local data = registered[tag]
+ NC() context(data.number)
+ NC() context(tag)
+ NC() context("% t",table.sortedkeys(data.synonyms))
+ NC() context(data.parent)
+ NC() context("%+t",table.sortedkeys(data.used))
+ NC() NR()
+ end
+ context.stoptabulate()
+end
diff --git a/tex/context/base/s-languages-system.mkiv b/tex/context/base/s-languages-system.mkiv
new file mode 100644
index 000000000..363720374
--- /dev/null
+++ b/tex/context/base/s-languages-system.mkiv
@@ -0,0 +1,30 @@
+%D \module
+%D [ file=s-languages-system, % moved from local s-lan-01
+%D version=2013.05.19,
+%D title=\CONTEXT\ Style File,
+%D subtitle=Installed Languages,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\startmodule[languages-system]
+
+\registerctxluafile{s-languages-system}{}
+
+\installmodulecommandluasingle \showinstalledlanguages {moduledata.languages.system.showinstalled}
+
+\stopmodule
+
+\continueifinputfile{s-languages-system.mkiv}
+
+\usemodule[art-01]
+
+\starttext
+
+ \showinstalledlanguages
+
+\stoptext
diff --git a/tex/context/base/s-math-coverage.lua b/tex/context/base/s-math-coverage.lua
index 258019c9d..52e9b777c 100644
--- a/tex/context/base/s-math-coverage.lua
+++ b/tex/context/base/s-math-coverage.lua
@@ -1,180 +1,180 @@
-if not modules then modules = { } end modules ['s-math-coverage'] = {
- version = 1.001,
- comment = "companion to s-math-coverage.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-moduledata.math = moduledata.math or { }
-moduledata.math.coverage = moduledata.math.coverage or { }
-
-local utfchar, utfbyte = utf.char, utf.byte
-local formatters, lower = string.formatters, string.lower
-local concat = table.concat
-
-local context = context
-local NC, NR, HL = context.NC, context.NR, context.HL
-local char, getglyph, bold = context.char, context.getglyph, context.bold
-
-local ucgreek = {
- 0x0391, 0x0392, 0x0393, 0x0394, 0x0395,
- 0x0396, 0x0397, 0x0398, 0x0399, 0x039A,
- 0x039B, 0x039C, 0x039D, 0x039E, 0x039F,
- 0x03A0, 0x03A1, 0x03A3, 0x03A4, 0x03A5,
- 0x03A6, 0x03A7, 0x03A8, 0x03A9
-}
-
-local lcgreek = {
- 0x03B1, 0x03B2, 0x03B3, 0x03B4, 0x03B5,
- 0x03B6, 0x03B7, 0x03B8, 0x03B9, 0x03BA,
- 0x03BB, 0x03BC, 0x03BD, 0x03BE, 0x03BF,
- 0x03C0, 0x03C1, 0x03C2, 0x03C3, 0x03C4,
- 0x03C5, 0x03C6, 0x03C7, 0x03C8, 0x03C9,
- 0x03D1, 0x03D5, 0x03D6, 0x03F0, 0x03F1,
- 0x03F4, 0x03F5
-}
-
-local ucletters = {
- 0x00041, 0x00042, 0x00043, 0x00044, 0x00045,
- 0x00046, 0x00047, 0x00048, 0x00049, 0x0004A,
- 0x0004B, 0x0004C, 0x0004D, 0x0004E, 0x0004F,
- 0x00050, 0x00051, 0x00052, 0x00053, 0x00054,
- 0x00055, 0x00056, 0x00057, 0x00058, 0x00059,
- 0x0005A,
-}
-
-local lcletters = {
- 0x00061, 0x00062, 0x00063, 0x00064, 0x00065,
- 0x00066, 0x00067, 0x00068, 0x00069, 0x0006A,
- 0x0006B, 0x0006C, 0x0006D, 0x0006E, 0x0006F,
- 0x00070, 0x00071, 0x00072, 0x00073, 0x00074,
- 0x00075, 0x00076, 0x00077, 0x00078, 0x00079,
- 0x0007A,
-}
-
-local digits = {
- 0x00030, 0x00031, 0x00032, 0x00033, 0x00034,
- 0x00035, 0x00036, 0x00037, 0x00038, 0x00039,
-}
-
-local styles = {
- "regular", "sansserif", "monospaced", "fraktur", "script", "blackboard"
-}
-
-local alternatives = {
- "normal", "bold", "italic", "bolditalic"
-}
-
-local alphabets = {
- ucletters, lcletters, ucgreek, lcgreek, digits,
-}
-
-local getboth = mathematics.getboth
-local remapalphabets = mathematics.remapalphabets
-
-local chardata = characters.data
-local superscripts = characters.superscripts
-local subscripts = characters.subscripts
-
-function moduledata.math.coverage.showalphabets()
- context.starttabulate { "|lT|l|Tl|" }
- for i=1,#styles do
- local style = styles[i]
- for i=1,#alternatives do
- local alternative = alternatives[i]
- for i=1,#alphabets do
- local alphabet = alphabets[i]
- NC()
- if i == 1 then
- context("%s %s",style,alternative)
- end
- NC()
- context.startimath()
- context.setmathattribute(style,alternative)
- for i=1,#alphabet do
- local letter = alphabet[i]
- local id = getboth(style,alternative)
- local unicode = remapalphabets(letter,id)
- if not unicode then
- context.underbar(utfchar(letter))
- elseif unicode == letter then
- context(utfchar(unicode))
- else
- context(utfchar(unicode))
- end
- end
- context.stopimath()
- NC()
- local first = alphabet[1]
- local last = alphabet[#alphabet]
- local id = getboth(style,alternative)
- local f_unicode = remapalphabets(first,id) or utfbyte(first)
- local l_unicode = remapalphabets(last,id) or utfbyte(last)
- context("%05X - %05X",f_unicode,l_unicode)
- NC()
- NR()
- end
- end
- end
- context.stoptabulate()
-end
-
-function moduledata.math.coverage.showcharacters()
- context.startcolumns()
- context.setupalign { "nothyphenated" }
- context.starttabulate { "|T|i2|Tpl|" }
- for u, d in table.sortedpairs(chardata) do
- local mathclass = d.mathclass
- local mathspec = d.mathspec
- if mathclass or mathspec then
- NC()
- context("%05X",u)
- NC()
- getglyph("MathRoman",u)
- NC()
- if mathspec then
- local t = { }
- for i=1,#mathspec do
- t[mathspec[i].class] = true
- end
- t = table.sortedkeys(t)
- context("% t",t)
- else
- context(mathclass)
- end
- NC()
- NR()
- end
- end
- context.stoptabulate()
- context.stopcolumns()
-end
-
--- This is a somewhat tricky table as we need to bypass the math machinery.
-
-function moduledata.math.coverage.showscripts()
- context.starttabulate { "|cT|c|cT|c|c|c|l|" }
- for k, v in table.sortedpairs(table.merged(superscripts,subscripts)) do
- local ck = utfchar(k)
- local cv = utfchar(v)
- local ss = superscripts[k] and "^" or "_"
- NC()
- context("%05X",k)
- NC()
- context(ck)
- NC()
- context("%05X",v)
- NC()
- context(cv)
- NC()
- context.formatted.rawmathematics("x%s = x%s%s",ck,ss,cv)
- NC()
- context.formatted.mathematics("x%s = x%s%s",ck,ss,cv)
- NC()
- context(lower(chardata[k].description))
- NC()
- NR()
- end
- context.stoptabulate()
-end
+if not modules then modules = { } end modules ['s-math-coverage'] = {
+ version = 1.001,
+ comment = "companion to s-math-coverage.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+moduledata.math = moduledata.math or { }
+moduledata.math.coverage = moduledata.math.coverage or { }
+
+local utfchar, utfbyte = utf.char, utf.byte
+local formatters, lower = string.formatters, string.lower
+local concat = table.concat
+
+local context = context
+local NC, NR, HL = context.NC, context.NR, context.HL
+local char, getglyph, bold = context.char, context.getglyph, context.bold
+
+local ucgreek = {
+ 0x0391, 0x0392, 0x0393, 0x0394, 0x0395,
+ 0x0396, 0x0397, 0x0398, 0x0399, 0x039A,
+ 0x039B, 0x039C, 0x039D, 0x039E, 0x039F,
+ 0x03A0, 0x03A1, 0x03A3, 0x03A4, 0x03A5,
+ 0x03A6, 0x03A7, 0x03A8, 0x03A9
+}
+
+local lcgreek = {
+ 0x03B1, 0x03B2, 0x03B3, 0x03B4, 0x03B5,
+ 0x03B6, 0x03B7, 0x03B8, 0x03B9, 0x03BA,
+ 0x03BB, 0x03BC, 0x03BD, 0x03BE, 0x03BF,
+ 0x03C0, 0x03C1, 0x03C2, 0x03C3, 0x03C4,
+ 0x03C5, 0x03C6, 0x03C7, 0x03C8, 0x03C9,
+ 0x03D1, 0x03D5, 0x03D6, 0x03F0, 0x03F1,
+ 0x03F4, 0x03F5
+}
+
+local ucletters = {
+ 0x00041, 0x00042, 0x00043, 0x00044, 0x00045,
+ 0x00046, 0x00047, 0x00048, 0x00049, 0x0004A,
+ 0x0004B, 0x0004C, 0x0004D, 0x0004E, 0x0004F,
+ 0x00050, 0x00051, 0x00052, 0x00053, 0x00054,
+ 0x00055, 0x00056, 0x00057, 0x00058, 0x00059,
+ 0x0005A,
+}
+
+local lcletters = {
+ 0x00061, 0x00062, 0x00063, 0x00064, 0x00065,
+ 0x00066, 0x00067, 0x00068, 0x00069, 0x0006A,
+ 0x0006B, 0x0006C, 0x0006D, 0x0006E, 0x0006F,
+ 0x00070, 0x00071, 0x00072, 0x00073, 0x00074,
+ 0x00075, 0x00076, 0x00077, 0x00078, 0x00079,
+ 0x0007A,
+}
+
+local digits = {
+ 0x00030, 0x00031, 0x00032, 0x00033, 0x00034,
+ 0x00035, 0x00036, 0x00037, 0x00038, 0x00039,
+}
+
+local styles = {
+ "regular", "sansserif", "monospaced", "fraktur", "script", "blackboard"
+}
+
+local alternatives = {
+ "normal", "bold", "italic", "bolditalic"
+}
+
+local alphabets = {
+ ucletters, lcletters, ucgreek, lcgreek, digits,
+}
+
+local getboth = mathematics.getboth
+local remapalphabets = mathematics.remapalphabets
+
+local chardata = characters.data
+local superscripts = characters.superscripts
+local subscripts = characters.subscripts
+
+function moduledata.math.coverage.showalphabets()
+ context.starttabulate { "|lT|l|Tl|" }
+ for i=1,#styles do
+ local style = styles[i]
+ for i=1,#alternatives do
+ local alternative = alternatives[i]
+ for i=1,#alphabets do
+ local alphabet = alphabets[i]
+ NC()
+ if i == 1 then
+ context("%s %s",style,alternative)
+ end
+ NC()
+ context.startimath()
+ context.setmathattribute(style,alternative)
+ for i=1,#alphabet do
+ local letter = alphabet[i]
+ local id = getboth(style,alternative)
+ local unicode = remapalphabets(letter,id)
+ if not unicode then
+ context.underbar(utfchar(letter))
+ elseif unicode == letter then
+ context(utfchar(unicode))
+ else
+ context(utfchar(unicode))
+ end
+ end
+ context.stopimath()
+ NC()
+ local first = alphabet[1]
+ local last = alphabet[#alphabet]
+ local id = getboth(style,alternative)
+ local f_unicode = remapalphabets(first,id) or utfbyte(first)
+ local l_unicode = remapalphabets(last,id) or utfbyte(last)
+ context("%05X - %05X",f_unicode,l_unicode)
+ NC()
+ NR()
+ end
+ end
+ end
+ context.stoptabulate()
+end
+
+function moduledata.math.coverage.showcharacters()
+ context.startcolumns()
+ context.setupalign { "nothyphenated" }
+ context.starttabulate { "|T|i2|Tpl|" }
+ for u, d in table.sortedpairs(chardata) do
+ local mathclass = d.mathclass
+ local mathspec = d.mathspec
+ if mathclass or mathspec then
+ NC()
+ context("%05X",u)
+ NC()
+ getglyph("MathRoman",u)
+ NC()
+ if mathspec then
+ local t = { }
+ for i=1,#mathspec do
+ t[mathspec[i].class] = true
+ end
+ t = table.sortedkeys(t)
+ context("% t",t)
+ else
+ context(mathclass)
+ end
+ NC()
+ NR()
+ end
+ end
+ context.stoptabulate()
+ context.stopcolumns()
+end
+
+-- This is a somewhat tricky table as we need to bypass the math machinery.
+
+function moduledata.math.coverage.showscripts()
+ context.starttabulate { "|cT|c|cT|c|c|c|l|" }
+ for k, v in table.sortedpairs(table.merged(superscripts,subscripts)) do
+ local ck = utfchar(k)
+ local cv = utfchar(v)
+ local ss = superscripts[k] and "^" or "_"
+ NC()
+ context("%05X",k)
+ NC()
+ context(ck)
+ NC()
+ context("%05X",v)
+ NC()
+ context(cv)
+ NC()
+ context.formatted.rawmathematics("x%s = x%s%s",ck,ss,cv)
+ NC()
+ context.formatted.mathematics("x%s = x%s%s",ck,ss,cv)
+ NC()
+ context(lower(chardata[k].description))
+ NC()
+ NR()
+ end
+ context.stoptabulate()
+end
diff --git a/tex/context/base/s-math-parameters.lua b/tex/context/base/s-math-parameters.lua
index 8e8c15a2d..50500466a 100644
--- a/tex/context/base/s-math-parameters.lua
+++ b/tex/context/base/s-math-parameters.lua
@@ -1,135 +1,135 @@
-if not modules then modules = { } end modules ['s-math-coverage'] = {
- version = 1.001,
- comment = "companion to s-math-coverage.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-moduledata.math = moduledata.math or { }
-moduledata.math.parameters = moduledata.math.parameters or { }
-
-local tables = utilities.tables.definedtable("math","tracing","spacing","tables")
-
-tables.styleaxis = {
- "ord", "op", "bin", "rel", "open", "close", "punct", "inner",
-}
-
-tables.parameters = {
- "quad", "axis", "operatorsize",
- "overbarkern", "overbarrule", "overbarvgap",
- "underbarkern", "underbarrule", "underbarvgap",
- "radicalkern", "radicalrule", "radicalvgap",
- "radicaldegreebefore", "radicaldegreeafter", "radicaldegreeraise",
- "stackvgap", "stacknumup", "stackdenomdown",
- "fractionrule", "fractionnumvgap", "fractionnumup",
- "fractiondenomvgap", "fractiondenomdown", "fractiondelsize",
- "limitabovevgap", "limitabovebgap", "limitabovekern",
- "limitbelowvgap", "limitbelowbgap", "limitbelowkern",
- "underdelimitervgap", "underdelimiterbgap",
- "overdelimitervgap", "overdelimiterbgap",
- "subshiftdrop", "supshiftdrop", "subshiftdown",
- "subsupshiftdown", "subtopmax", "supshiftup",
- "supbottommin", "supsubbottommax", "subsupvgap",
- "spaceafterscript", "connectoroverlapmin",
-}
-
-tables.styles = {
- "display",
- "text",
- "script",
- "scriptscript",
-}
-
-function tables.stripmu(str)
- str = string.gsub(str,"mu","")
- str = string.gsub(str," ","")
- str = string.gsub(str,"plus","+")
- str = string.gsub(str,"minus","-")
- return str
-end
-
-function tables.strippt(old)
- local new = string.gsub(old,"pt","")
- if new ~= old then
- new = string.format("%0.4f",tonumber(new))
- end
- return new
-end
-
-function moduledata.math.parameters.showspacing()
-
- local styles = tables.styles
- local styleaxis = tables.styleaxis
-
- context.starttabulate { "|Tl|Tl|" .. string.rep("Tc|",(#styles*2)) }
- context.HL()
- context.NC()
- context.NC()
- context.NC()
- for i=1,#styles do
- context.bold(styles[i])
- context.NC()
- context.bold("(cramped)")
- context.NC()
- end
- context.NR()
- context.HL()
- for i=1,#styleaxis do
- -- print(key,tex.getmath(key,"text"))
- local one = styleaxis[i]
- for j=1,#styleaxis do
- local two = styleaxis[j]
- context.NC()
- if j == 1 then
- context.bold(one)
- end
- context.NC()
- context.bold(two)
- context.NC()
- for i=1,#styles do
- context("\\ctxlua{context(math.tracing.spacing.tables.stripmu('\\the\\Umath%s%sspacing\\%sstyle'))}",one,two,styles[i])
- context.NC()
- context("\\ctxlua{context(math.tracing.spacing.tables.stripmu('\\the\\Umath%s%sspacing\\cramped%sstyle'))}",one,two,styles[i])
- context.NC()
- end
- context.NR()
- end
- end
- context.stoptabulate()
-end
-
-function moduledata.math.parameters.showparameters()
-
- local styles = tables.styles
- local parameters = tables.parameters
-
- context.starttabulate { "|l|" .. string.rep("Tc|",(#styles*2)) }
- context.HL()
- context.NC()
- context.NC()
- for i=1,#styles do
- context.bold(styles[i])
- context.NC()
- context.bold("(cramped)")
- context.NC()
- end
- context.NR()
- context.HL()
- for i=1,#parameters do
- local parameter = parameters[i]
- -- print(parameter,tex.getmath(parameter,"text"))
- context.NC()
- context.type(parameter)
- context.NC()
- for i=1,#styles do
- context("\\ctxlua{context(math.tracing.spacing.tables.strippt('\\the\\Umath%s\\%sstyle'))}",parameter,styles[i])
- context.NC()
- context("\\ctxlua{context(math.tracing.spacing.tables.strippt('\\the\\Umath%s\\cramped%sstyle'))}",parameter,styles[i])
- context.NC()
- end
- context.NR()
- end
- context.stoptabulate()
-
-end
+if not modules then modules = { } end modules ['s-math-parameters'] = {
+ version = 1.001,
+ comment = "companion to s-math-parameters.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+moduledata.math = moduledata.math or { }
+moduledata.math.parameters = moduledata.math.parameters or { }
+
+local tables = utilities.tables.definedtable("math","tracing","spacing","tables")
+
+tables.styleaxis = {
+ "ord", "op", "bin", "rel", "open", "close", "punct", "inner",
+}
+
+tables.parameters = {
+ "quad", "axis", "operatorsize",
+ "overbarkern", "overbarrule", "overbarvgap",
+ "underbarkern", "underbarrule", "underbarvgap",
+ "radicalkern", "radicalrule", "radicalvgap",
+ "radicaldegreebefore", "radicaldegreeafter", "radicaldegreeraise",
+ "stackvgap", "stacknumup", "stackdenomdown",
+ "fractionrule", "fractionnumvgap", "fractionnumup",
+ "fractiondenomvgap", "fractiondenomdown", "fractiondelsize",
+ "limitabovevgap", "limitabovebgap", "limitabovekern",
+ "limitbelowvgap", "limitbelowbgap", "limitbelowkern",
+ "underdelimitervgap", "underdelimiterbgap",
+ "overdelimitervgap", "overdelimiterbgap",
+ "subshiftdrop", "supshiftdrop", "subshiftdown",
+ "subsupshiftdown", "subtopmax", "supshiftup",
+ "supbottommin", "supsubbottommax", "subsupvgap",
+ "spaceafterscript", "connectoroverlapmin",
+}
+
+tables.styles = {
+ "display",
+ "text",
+ "script",
+ "scriptscript",
+}
+
+function tables.stripmu(str)
+ str = string.gsub(str,"mu","")
+ str = string.gsub(str," ","")
+ str = string.gsub(str,"plus","+")
+ str = string.gsub(str,"minus","-")
+ return str
+end
+
+function tables.strippt(old)
+ local new = string.gsub(old,"pt","")
+ if new ~= old then
+ new = string.format("%0.4f",tonumber(new))
+ end
+ return new
+end
+
+function moduledata.math.parameters.showspacing()
+
+ local styles = tables.styles
+ local styleaxis = tables.styleaxis
+
+ context.starttabulate { "|Tl|Tl|" .. string.rep("Tc|",(#styles*2)) }
+ context.HL()
+ context.NC()
+ context.NC()
+ context.NC()
+ for i=1,#styles do
+ context.bold(styles[i])
+ context.NC()
+ context.bold("(cramped)")
+ context.NC()
+ end
+ context.NR()
+ context.HL()
+ for i=1,#styleaxis do
+ -- print(key,tex.getmath(key,"text"))
+ local one = styleaxis[i]
+ for j=1,#styleaxis do
+ local two = styleaxis[j]
+ context.NC()
+ if j == 1 then
+ context.bold(one)
+ end
+ context.NC()
+ context.bold(two)
+ context.NC()
+ for i=1,#styles do
+ context("\\ctxlua{context(math.tracing.spacing.tables.stripmu('\\the\\Umath%s%sspacing\\%sstyle'))}",one,two,styles[i])
+ context.NC()
+ context("\\ctxlua{context(math.tracing.spacing.tables.stripmu('\\the\\Umath%s%sspacing\\cramped%sstyle'))}",one,two,styles[i])
+ context.NC()
+ end
+ context.NR()
+ end
+ end
+ context.stoptabulate()
+end
+
+function moduledata.math.parameters.showparameters()
+
+ local styles = tables.styles
+ local parameters = tables.parameters
+
+ context.starttabulate { "|l|" .. string.rep("Tc|",(#styles*2)) }
+ context.HL()
+ context.NC()
+ context.NC()
+ for i=1,#styles do
+ context.bold(styles[i])
+ context.NC()
+ context.bold("(cramped)")
+ context.NC()
+ end
+ context.NR()
+ context.HL()
+ for i=1,#parameters do
+ local parameter = parameters[i]
+ -- print(parameter,tex.getmath(parameter,"text"))
+ context.NC()
+ context.type(parameter)
+ context.NC()
+ for i=1,#styles do
+ context("\\ctxlua{context(math.tracing.spacing.tables.strippt('\\the\\Umath%s\\%sstyle'))}",parameter,styles[i])
+ context.NC()
+ context("\\ctxlua{context(math.tracing.spacing.tables.strippt('\\the\\Umath%s\\cramped%sstyle'))}",parameter,styles[i])
+ context.NC()
+ end
+ context.NR()
+ end
+ context.stoptabulate()
+
+end
diff --git a/tex/context/base/s-pre-71.lua b/tex/context/base/s-pre-71.lua
index bfa45a705..7d5c011f1 100644
--- a/tex/context/base/s-pre-71.lua
+++ b/tex/context/base/s-pre-71.lua
@@ -1,63 +1,63 @@
-if not modules then modules = { } end modules ['steps'] = {
- version = 1.001,
- comment = "companion to steps.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-moduledata.steps = moduledata.steps or { }
-local steps = moduledata.steps
-
-local locations = {
- 'lefttop',
- 'middletop',
- 'righttop',
- 'middleleft',
- 'middle',
- 'middleright',
- 'leftbottom',
- 'middlebottom',
- 'rightbottom',
-}
-
-local done, current, previous, n
-
-function steps.reset_locations()
- done, current, previous, n = table.tohash(locations,false), 0, 0, 0
-end
-
-function steps.next_location(loc)
- previous = current
- n = n + 1
- loc = loc and loc ~= "" and tonumber(loc)
- while true do
- current = loc or math.random(1,#locations)
- if not done[current] then
- done[current] = true
- break
- end
- end
-end
-
-function steps.current_location()
- context(locations[current] or "")
-end
-
-function steps.previous_location()
- context(locations[previous] or "")
-end
-
-function steps.current_n()
- context(current)
-end
-
-function steps.previous_n()
- context(previous)
-end
-
-function steps.step()
- context(n)
-end
-
-steps.reset_locations()
+if not modules then modules = { } end modules ['steps'] = {
+ version = 1.001,
+ comment = "companion to steps.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+moduledata.steps = moduledata.steps or { }
+local steps = moduledata.steps
+
+local locations = {
+ 'lefttop',
+ 'middletop',
+ 'righttop',
+ 'middleleft',
+ 'middle',
+ 'middleright',
+ 'leftbottom',
+ 'middlebottom',
+ 'rightbottom',
+}
+
+local done, current, previous, n
+
+function steps.reset_locations()
+ done, current, previous, n = table.tohash(locations,false), 0, 0, 0
+end
+
+function steps.next_location(loc)
+ previous = current
+ n = n + 1
+ loc = loc and loc ~= "" and tonumber(loc)
+ while true do
+ current = loc or math.random(1,#locations)
+ if not done[current] then
+ done[current] = true
+ break
+ end
+ end
+end
+
+function steps.current_location()
+ context(locations[current] or "")
+end
+
+function steps.previous_location()
+ context(locations[previous] or "")
+end
+
+function steps.current_n()
+ context(current)
+end
+
+function steps.previous_n()
+ context(previous)
+end
+
+function steps.step()
+ context(n)
+end
+
+steps.reset_locations()
diff --git a/tex/context/base/scrn-but.lua b/tex/context/base/scrn-but.lua
index e49372ce9..4766df9d7 100644
--- a/tex/context/base/scrn-but.lua
+++ b/tex/context/base/scrn-but.lua
@@ -1,19 +1,19 @@
-if not modules then modules = { } end modules ['scrn-but'] = {
- version = 1.001,
- comment = "companion to scrn-but.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local f_two_colon = string.formatters["%s:%s"]
-
-function commands.registerbuttons(tag,register,language)
- local data = sorters.definitions[language]
- local orders = daya and data.orders or sorters.definitions.default.orders
- local tag = tag == "" and { "" } or { tag }
- for i=1,#orders do
- local order = orders[i]
- context.menubutton(tag,f_two_colon(register,order),order)
- end
-end
+if not modules then modules = { } end modules ['scrn-but'] = {
+ version = 1.001,
+ comment = "companion to scrn-but.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local f_two_colon = string.formatters["%s:%s"]
+
+function commands.registerbuttons(tag,register,language)
+ local data = sorters.definitions[language]
+ local orders = data and data.orders or sorters.definitions.default.orders
+ local tag = tag == "" and { "" } or { tag }
+ for i=1,#orders do
+ local order = orders[i]
+ context.menubutton(tag,f_two_colon(register,order),order)
+ end
+end
diff --git a/tex/context/base/scrn-fld.lua b/tex/context/base/scrn-fld.lua
index 9836cbebe..846385686 100644
--- a/tex/context/base/scrn-fld.lua
+++ b/tex/context/base/scrn-fld.lua
@@ -1,85 +1,85 @@
-if not modules then modules = { } end modules ['scrn-fld'] = {
- version = 1.001,
- comment = "companion to scrn-fld.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- we should move some code from lpdf-fld to here
-
-local variables = interfaces.variables
-local v_yes = variables.yes
-
-local fields = { }
-interactions.fields = fields
-
-local codeinjections = backends.codeinjections
-local nodeinjections = backends.nodeinjections
-
-local function define(specification)
- codeinjections.definefield(specification)
-end
-
-local function defineset(name,set)
- codeinjections.definefield(name,set)
-end
-
-local function clone(specification)
- codeinjections.clonefield(specification)
-end
-
-local function insert(name,specification)
- return nodeinjections.typesetfield(name,specification)
-end
-
-fields.define = define
-fields.defineset = defineset
-fields.clone = clone
-fields.insert = insert
-
-commands.definefield = define
-commands.definefieldset = defineset
-commands.clonefield = clone
-
-function commands.insertfield(name,specification)
- tex.box["b_scrn_field_body"] = insert(name,specification)
-end
-
--- (for the monent) only tex interface
-
-function commands.getfieldcategory(name)
- local g = codeinjections.getfieldcategory(name)
- if g then
- context(g)
- end
-end
-
-function commands.getdefaultfieldvalue(name)
- local d = codeinjections.getdefaultfieldvalue(name)
- if d then
- context(d)
- end
-end
-
-function commands.exportformdata(export)
- if export == v_yes then
- codeinjections.exportformdata()
- end
-end
-
-function commands.setformsmethod(method)
- codeinjections.setformsmethod(method)
-end
-
-function commands.doiffieldcategoryelse(name)
- commands.doifelse(codeinjections.validfieldcategory(name))
-end
-
-function commands.doiffieldsetelse(tag)
- commands.doifelse(codeinjections.validfieldset(name))
-end
-
-function commands.doiffieldelse(name)
- commands.doifelse(codeinjections.validfield(name))
-end
+if not modules then modules = { } end modules ['scrn-fld'] = {
+ version = 1.001,
+ comment = "companion to scrn-fld.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- we should move some code from lpdf-fld to here
+
+local variables = interfaces.variables
+local v_yes = variables.yes
+
+local fields = { }
+interactions.fields = fields
+
+local codeinjections = backends.codeinjections
+local nodeinjections = backends.nodeinjections
+
+local function define(specification)
+ codeinjections.definefield(specification)
+end
+
+local function defineset(name,set)
+ codeinjections.definefield(name,set)
+end
+
+local function clone(specification)
+ codeinjections.clonefield(specification)
+end
+
+local function insert(name,specification)
+ return nodeinjections.typesetfield(name,specification)
+end
+
+fields.define = define
+fields.defineset = defineset
+fields.clone = clone
+fields.insert = insert
+
+commands.definefield = define
+commands.definefieldset = defineset
+commands.clonefield = clone
+
+function commands.insertfield(name,specification)
+ tex.box["b_scrn_field_body"] = insert(name,specification)
+end
+
+-- (for the moment) only tex interface
+
+function commands.getfieldcategory(name)
+ local g = codeinjections.getfieldcategory(name)
+ if g then
+ context(g)
+ end
+end
+
+function commands.getdefaultfieldvalue(name)
+ local d = codeinjections.getdefaultfieldvalue(name)
+ if d then
+ context(d)
+ end
+end
+
+function commands.exportformdata(export)
+ if export == v_yes then
+ codeinjections.exportformdata()
+ end
+end
+
+function commands.setformsmethod(method)
+ codeinjections.setformsmethod(method)
+end
+
+function commands.doiffieldcategoryelse(name)
+ commands.doifelse(codeinjections.validfieldcategory(name))
+end
+
+function commands.doiffieldsetelse(tag)
+ commands.doifelse(codeinjections.validfieldset(tag))
+end
+
+function commands.doiffieldelse(name)
+ commands.doifelse(codeinjections.validfield(name))
+end
diff --git a/tex/context/base/scrn-hlp.lua b/tex/context/base/scrn-hlp.lua
index 5f8368c6d..06abb3237 100644
--- a/tex/context/base/scrn-hlp.lua
+++ b/tex/context/base/scrn-hlp.lua
@@ -1,119 +1,119 @@
-if not modules then modules = { } end modules ['scrn-hlp'] = {
- version = 1.001,
- comment = "companion to scrn-hlp.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format = string.format
-
-local help = { }
-interactions.help = help
-
-local a_help = attributes.private("help")
-
-local copy_nodelist = node.copy_list
-local hpack_nodelist = node.hpack
-
-local register_list = nodes.pool.register
-
-local nodecodes = nodes.nodecodes
-
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-
-local data, references = { }, { }
-
-local helpscript = [[
- function Hide_All_Help(prefix) {
- var n = 0
- while (true) {
- n += 1 ;
- v = this.getField(prefix + n) ;
- if (v) {
- v.hidden = true ;
- this.dirty = false ;
- } else {
- return ;
- }
- }
- }
-]]
-
-local template = "javascript(Hide_All_Help{help:}),action(show{help:%s})"
-
-function help.register(number,name,box)
- if helpscript then
- interactions.javascripts.setpreamble("HelpTexts",helpscript)
- helpscript = false
- end
- local b = copy_nodelist(tex.box[box])
- register_list(b)
- data[number] = b
- if name and name ~= "" then
- references[name] = number
- structures.references.define("",name,format(template,number))
- end
-end
-
-local function collect(head,used)
- while head do
- local id = head.id
- if id == hlist_code then
- local a = head[a_help]
- if a then
- if not used then
- used = { a }
- else
- used[#used+1] = a
- end
- else
- used = collect(head.list,used)
- end
- elseif id == vlist_code then
- used = collect(head.list,used)
- end
- head = head.next
- end
- return used
-end
-
-function help.collect(box)
- if next(data) then
- return collect(tex.box[box].list)
- end
-end
-
-commands.registerhelp = help.register
-
-function commands.collecthelp(box)
- local used = help.collect(box)
- if used then
- local done = { }
- context.startoverlay()
- for i=1,#used do
- local d = data[used[i]]
- if d and not done[d] then
- local box = hpack_nodelist(copy_nodelist(d))
- context(false,box)
- done[d] = true
- else
- -- error
- end
- end
- context.stopoverlay()
- end
-end
-
-function help.reference(name)
- return references[name] or tonumber(name) or 0
-end
-
-function commands.helpreference(name)
- context(references[name] or tonumber(name) or 0)
-end
-
-function commands.helpaction(name)
- context(template,references[name] or tonumber(name) or 0)
-end
+if not modules then modules = { } end modules ['scrn-hlp'] = {
+ version = 1.001,
+ comment = "companion to scrn-hlp.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format = string.format
+
+local help = { }
+interactions.help = help
+
+local a_help = attributes.private("help")
+
+local copy_nodelist = node.copy_list
+local hpack_nodelist = node.hpack
+
+local register_list = nodes.pool.register
+
+local nodecodes = nodes.nodecodes
+
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+
+local data, references = { }, { }
+
+local helpscript = [[
+ function Hide_All_Help(prefix) {
+ var n = 0
+ while (true) {
+ n += 1 ;
+ v = this.getField(prefix + n) ;
+ if (v) {
+ v.hidden = true ;
+ this.dirty = false ;
+ } else {
+ return ;
+ }
+ }
+ }
+]]
+
+local template = "javascript(Hide_All_Help{help:}),action(show{help:%s})"
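+-- For illustration (the number is just an example): a help text registered as
+-- number 1 yields "javascript(Hide_All_Help{help:}),action(show{help:1})", that
+-- is: first hide all help fields, then show the requested one.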
+
+function help.register(number,name,box)
+ if helpscript then
+ interactions.javascripts.setpreamble("HelpTexts",helpscript)
+ helpscript = false
+ end
+ local b = copy_nodelist(tex.box[box])
+ register_list(b)
+ data[number] = b
+ if name and name ~= "" then
+ references[name] = number
+ structures.references.define("",name,format(template,number))
+ end
+end
+
+local function collect(head,used)
+ while head do
+ local id = head.id
+ if id == hlist_code then
+ local a = head[a_help]
+ if a then
+ if not used then
+ used = { a }
+ else
+ used[#used+1] = a
+ end
+ else
+ used = collect(head.list,used)
+ end
+ elseif id == vlist_code then
+ used = collect(head.list,used)
+ end
+ head = head.next
+ end
+ return used
+end
+
+function help.collect(box)
+ if next(data) then
+ return collect(tex.box[box].list)
+ end
+end
+
+commands.registerhelp = help.register
+
+function commands.collecthelp(box)
+ local used = help.collect(box)
+ if used then
+ local done = { }
+ context.startoverlay()
+ for i=1,#used do
+ local d = data[used[i]]
+ if d and not done[d] then
+ local box = hpack_nodelist(copy_nodelist(d))
+ context(false,box)
+ done[d] = true
+ else
+ -- error
+ end
+ end
+ context.stopoverlay()
+ end
+end
+
+function help.reference(name)
+ return references[name] or tonumber(name) or 0
+end
+
+function commands.helpreference(name)
+ context(references[name] or tonumber(name) or 0)
+end
+
+function commands.helpaction(name)
+ context(template,references[name] or tonumber(name) or 0)
+end
diff --git a/tex/context/base/scrn-ini.lua b/tex/context/base/scrn-ini.lua
index 4831408f9..deca9cbbb 100644
--- a/tex/context/base/scrn-ini.lua
+++ b/tex/context/base/scrn-ini.lua
@@ -1,32 +1,32 @@
-if not modules then modules = { } end modules ['scrn-ini'] = {
- version = 1.001,
- comment = "companion to scrn-int.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local next = next
-
-interactions = { }
-interactions.general = interactions.general or { }
-local general = interactions.general
-
-local codeinjections = backends.codeinjections
-
-local identitydata = { }
-
-local function setupidentity(specification)
- for k, v in next, specification do
- identitydata[k] = v
- end
- codeinjections.setupidentity(specification)
-end
-
-function general.getidentity()
- return identitydata
-end
-
-general.setupidentity = setupidentity
-
-commands.setupidentity = setupidentity
+if not modules then modules = { } end modules ['scrn-ini'] = {
+ version = 1.001,
+ comment = "companion to scrn-int.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local next = next
+
+interactions = { }
+interactions.general = interactions.general or { }
+local general = interactions.general
+
+local codeinjections = backends.codeinjections
+
+local identitydata = { }
+
+local function setupidentity(specification)
+ for k, v in next, specification do
+ identitydata[k] = v
+ end
+ codeinjections.setupidentity(specification)
+end
+
+function general.getidentity()
+ return identitydata
+end
+
+general.setupidentity = setupidentity
+
+commands.setupidentity = setupidentity
diff --git a/tex/context/base/scrn-pag.lua b/tex/context/base/scrn-pag.lua
index 7003d0285..2a44ffbcd 100644
--- a/tex/context/base/scrn-pag.lua
+++ b/tex/context/base/scrn-pag.lua
@@ -1,27 +1,27 @@
-if not modules then modules = { } end modules ['scrn-pag'] = {
- version = 1.001,
- comment = "companion to scrn-pag.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-interactions = interactions or { }
-interactions.pages = interactions.pages or { }
-local pages = interactions.pages
-
-local codeinjections = backends.codeinjections
-
-local function setupcanvas(specification)
- codeinjections.setupcanvas(specification)
-end
-
-local function setpagetransition(specification)
- codeinjections.setpagetransition(specification)
-end
-
-pages.setupcanvas = setupcanvas
-pages.setpagetransition = setpagetransition
-
-commands.setupcanvas = setupcanvas
-commands.setpagetransition = setpagetransition
+if not modules then modules = { } end modules ['scrn-pag'] = {
+ version = 1.001,
+ comment = "companion to scrn-pag.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+interactions = interactions or { }
+interactions.pages = interactions.pages or { }
+local pages = interactions.pages
+
+local codeinjections = backends.codeinjections
+
+local function setupcanvas(specification)
+ codeinjections.setupcanvas(specification)
+end
+
+local function setpagetransition(specification)
+ codeinjections.setpagetransition(specification)
+end
+
+pages.setupcanvas = setupcanvas
+pages.setpagetransition = setpagetransition
+
+commands.setupcanvas = setupcanvas
+commands.setpagetransition = setpagetransition
diff --git a/tex/context/base/scrn-ref.lua b/tex/context/base/scrn-ref.lua
index df71b6a97..fb79ff6d8 100644
--- a/tex/context/base/scrn-ref.lua
+++ b/tex/context/base/scrn-ref.lua
@@ -1,65 +1,65 @@
-if not modules then modules = { } end modules ['scrn-ref'] = {
- version = 1.001,
- comment = "companion to scrn-int.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-interactions = interactions or { }
-interactions.references = interactions.references or { }
-local references = interactions.references
-
-local codeinjections = backends.codeinjections
-
-local expandcurrent = structures.references.expandcurrent
-local identify = structures.references.identify
-
-local function check(what)
- if what and what ~= "" then
- local set, bug = identify("",what)
- return not bug and #set > 0 and set
- end
-end
-
-local function setopendocumentaction(open)
- local opendocument = check(open)
- if opendocument then
- codeinjections.registerdocumentopenaction(opendocument)
- expandcurrent()
- end
-end
-
-local function setclosedocumentaction(close)
- local closedocument = check(close)
- if closedocument then
- codeinjections.registerdocumentcloseaction(closedocument)
- expandcurrent()
- end
-end
-
-local function setopenpageaction(open)
- local openpage = check(open)
- if openpage then
- codeinjections.registerpageopenaction(openpage)
- expandcurrent()
- end
-end
-
-local function setclosepageaction(close)
- local closepage = check(close)
- if closepage then
- codeinjections.registerpagecloseaction(closepage)
- expandcurrent()
- end
-end
-
-references.setopendocument = setopendocumentaction
-references.setclosedocument = setclosedocumentaction
-references.setopenpage = setopenpageaction
-references.setclosepage = setclosepageaction
-
-commands.setopendocumentaction = setopendocumentaction
-commands.setclosedocumentaction = setclosedocumentaction
-commands.setopenpageaction = setopenpageaction
-commands.setclosepageaction = setclosepageaction
+if not modules then modules = { } end modules ['scrn-ref'] = {
+ version = 1.001,
+ comment = "companion to scrn-int.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+interactions = interactions or { }
+interactions.references = interactions.references or { }
+local references = interactions.references
+
+local codeinjections = backends.codeinjections
+
+local expandcurrent = structures.references.expandcurrent
+local identify = structures.references.identify
+
+local function check(what)
+ if what and what ~= "" then
+ local set, bug = identify("",what)
+ return not bug and #set > 0 and set
+ end
+end
+
+local function setopendocumentaction(open)
+ local opendocument = check(open)
+ if opendocument then
+ codeinjections.registerdocumentopenaction(opendocument)
+ expandcurrent()
+ end
+end
+
+local function setclosedocumentaction(close)
+ local closedocument = check(close)
+ if closedocument then
+ codeinjections.registerdocumentcloseaction(closedocument)
+ expandcurrent()
+ end
+end
+
+local function setopenpageaction(open)
+ local openpage = check(open)
+ if openpage then
+ codeinjections.registerpageopenaction(openpage)
+ expandcurrent()
+ end
+end
+
+local function setclosepageaction(close)
+ local closepage = check(close)
+ if closepage then
+ codeinjections.registerpagecloseaction(closepage)
+ expandcurrent()
+ end
+end
+
+references.setopendocument = setopendocumentaction
+references.setclosedocument = setclosedocumentaction
+references.setopenpage = setopenpageaction
+references.setclosepage = setclosepageaction
+
+commands.setopendocumentaction = setopendocumentaction
+commands.setclosedocumentaction = setclosedocumentaction
+commands.setopenpageaction = setopenpageaction
+commands.setclosepageaction = setclosepageaction
diff --git a/tex/context/base/scrn-wid.lua b/tex/context/base/scrn-wid.lua
index 4ad46761e..e0c3d54b6 100644
--- a/tex/context/base/scrn-wid.lua
+++ b/tex/context/base/scrn-wid.lua
@@ -1,214 +1,214 @@
-if not modules then modules = { } end modules ['scrn-wid'] = {
- version = 1.001,
- comment = "companion to scrn-wid.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-interactions = interactions or { }
-local interactions = interactions
-
-local attachments = { }
-local comments = { }
-local soundclips = { }
-local renderings = { }
-local linkedlists = { }
-
-interactions.attachments = attachments
-interactions.soundclips = soundclips
-interactions.renderings = renderings
-interactions.linkedlists = linkedlists
-
-local jobpasses = job.passes
-
-local codeinjections = backends.codeinjections
-local nodeinjections = backends.nodeinjections
-
-local variables = interfaces.variables
-local v_auto = variables.auto
-
-local trace_attachments = false trackers.register("widgets.attachments", function(v) trace_attachments = v end)
-
-local report_attachments = logs.reporter("widgets","attachments")
-
--- Symbols
-
-function commands.presetsymbollist(list)
- codeinjections.presetsymbollist(list)
-end
-
--- Attachments
---
--- registered : unique id
--- tag : used at the tex end
--- file : name that the file has on the filesystem
--- name : name that the file will get in the output
--- title : up to the backend
--- subtitle : up to the backend
--- author : up to the backend
--- method : up to the backend (hidden == no rendering)
-
-local nofautoattachments, lastregistered = 0, nil
-
-local function checkregistered(specification)
- local registered = specification.registered
- if not registered or registered == "" or registered == v_auto then
- nofautoattachments = nofautoattachments + 1
- lastregistered = "attachment-" .. nofautoattachments
- specification.registered = lastregistered
- return lastregistered
- else
- return registered
- end
-end
-
-local function checkbuffer(specification)
- local buffer = specification.buffer
- if buffer ~= "" then
- specification.data = buffers.getcontent(buffer) or ""
- end
-end
-
-function attachments.register(specification) -- beware of tag/registered mixup(tag is namespace)
- local registered = checkregistered(specification)
- checkbuffer(specification)
- attachments[registered] = specification
- if trace_attachments then
- report_attachments("registering %a",registered)
- end
- return specification
-end
-
-function attachments.insert(specification)
- local registered = checkregistered(specification)
- local r = attachments[registered]
- if r then
- if trace_attachments then
- report_attachments("including registered %a",registered)
- end
- for k, v in next, r do
- local s = specification[k]
- if s == "" then
- specification[k] = v
- end
- end
- elseif trace_attachments then
- report_attachments("including unregistered %a",registered)
- end
- checkbuffer(specification)
- return nodeinjections.attachfile(specification)
-end
-
-commands.registerattachment = attachments.register
-
-function commands.insertattachment(specification)
- tex.box["b_scrn_attachment_link"] = attachments.insert(specification)
-end
-
--- Comment
-
-function comments.insert(specification)
- local buffer = specification.buffer
- if buffer ~= "" then
- specification.data = buffers.getcontent(buffer) or ""
- end
- return nodeinjections.comment(specification)
-end
-
-function commands.insertcomment(specification)
- tex.box["b_scrn_comment_link"] = comments.insert(specification)
-end
-
--- Soundclips
-
-function soundclips.register(specification)
- local tag = specification.tag
- if tag and tag ~= "" then
- local filename = specification.file
- if not filename or filename == "" then
- filename = tag
- specification.file = filename
- end
- soundclips[tag] = specification
- return specification
- end
-end
-
-function soundclips.insert(tag)
- local sc = soundclips[tag]
- if not sc then
- -- todo: message
- return soundclips.register { tag = tag }
- else
- return sc
- end
-end
-
-commands.registersoundclip = soundclips.register
-commands.insertsoundclip = soundclips.insert
-
--- Renderings
-
-function renderings.register(specification)
- if specification.label then
- renderings[specification.label] = specification
- return specification
- end
-end
-
-function renderings.rendering(label)
- local rn = renderings[label]
- if not rn then
- -- todo: message
- return renderings.register { label = label }
- else
- return rn
- end
-end
-
-local function var(label,key)
- local rn = renderings[label]
- return rn and rn[key] or ""
-end
-
-renderings.var = var
-
-function commands.renderingvar(label,key)
- context(var(label,key))
-end
-
-commands.registerrendering = renderings.register
-
--- Rendering:
-
-function commands.insertrenderingwindow(specification)
- codeinjections.insertrenderingwindow(specification)
-end
-
--- Linkedlists (only a context interface)
-
-function commands.definelinkedlist(tag)
- -- no need
-end
-
-function commands.enhancelinkedlist(tag,n)
- local ll = jobpasses.gettobesaved(tag)
- if ll then
- ll[n] = texcount.realpageno
- end
-end
-
-function commands.addlinklistelement(tag)
- local tobesaved = jobpasses.gettobesaved(tag)
- local collected = jobpasses.getcollected(tag) or { }
- local currentlink = #tobesaved + 1
- local noflinks = #collected
- tobesaved[currentlink] = 0
- local f = collected[1] or 0
- local l = collected[noflinks] or 0
- local p = collected[currentlink-1] or f
- local n = collected[currentlink+1] or l
- context.setlinkedlistproperties(currentlink,noflinks,f,p,n,l)
- -- context.ctxlatelua(function() commands.enhancelinkedlist(tag,currentlink) end)
-end
+if not modules then modules = { } end modules ['scrn-wid'] = {
+ version = 1.001,
+ comment = "companion to scrn-wid.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+interactions = interactions or { }
+local interactions = interactions
+
+local attachments = { }
+local comments = { }
+local soundclips = { }
+local renderings = { }
+local linkedlists = { }
+
+interactions.attachments = attachments
+interactions.soundclips = soundclips
+interactions.renderings = renderings
+interactions.linkedlists = linkedlists
+
+local jobpasses = job.passes
+
+local codeinjections = backends.codeinjections
+local nodeinjections = backends.nodeinjections
+
+local variables = interfaces.variables
+local v_auto = variables.auto
+
+local trace_attachments = false trackers.register("widgets.attachments", function(v) trace_attachments = v end)
+
+local report_attachments = logs.reporter("widgets","attachments")
+
+-- Symbols
+
+function commands.presetsymbollist(list)
+ codeinjections.presetsymbollist(list)
+end
+
+-- Attachments
+--
+-- registered : unique id
+-- tag : used at the tex end
+-- file : name that the file has on the filesystem
+-- name : name that the file will get in the output
+-- title : up to the backend
+-- subtitle : up to the backend
+-- author : up to the backend
+-- method : up to the backend (hidden == no rendering)
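+--
+-- A minimal sketch of such a specification (all values below are placeholders,
+-- only the keys correspond to the fields described above):
+--
+-- attachments.register {
+--     registered = "example-1",
+--     tag        = "example",
+--     file       = "example.txt",
+--     name       = "example.txt",
+--     title      = "An example attachment",
+--     subtitle   = "just a demo",
+--     author     = "some author",
+--     method     = "hidden", -- no rendering
+-- }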
+
+local nofautoattachments, lastregistered = 0, nil
+
+local function checkregistered(specification)
+ local registered = specification.registered
+ if not registered or registered == "" or registered == v_auto then
+ nofautoattachments = nofautoattachments + 1
+ lastregistered = "attachment-" .. nofautoattachments
+ specification.registered = lastregistered
+ return lastregistered
+ else
+ return registered
+ end
+end
+
+local function checkbuffer(specification)
+ local buffer = specification.buffer
+ if buffer ~= "" then
+ specification.data = buffers.getcontent(buffer) or ""
+ end
+end
+
+function attachments.register(specification) -- beware of tag/registered mixup (tag is the namespace)
+ local registered = checkregistered(specification)
+ checkbuffer(specification)
+ attachments[registered] = specification
+ if trace_attachments then
+ report_attachments("registering %a",registered)
+ end
+ return specification
+end
+
+function attachments.insert(specification)
+ local registered = checkregistered(specification)
+ local r = attachments[registered]
+ if r then
+ if trace_attachments then
+ report_attachments("including registered %a",registered)
+ end
+ for k, v in next, r do
+ local s = specification[k]
+ if s == "" then
+ specification[k] = v
+ end
+ end
+ elseif trace_attachments then
+ report_attachments("including unregistered %a",registered)
+ end
+ checkbuffer(specification)
+ return nodeinjections.attachfile(specification)
+end
+
+commands.registerattachment = attachments.register
+
+function commands.insertattachment(specification)
+ tex.box["b_scrn_attachment_link"] = attachments.insert(specification)
+end
+
+-- Comment
+
+function comments.insert(specification)
+ local buffer = specification.buffer
+ if buffer ~= "" then
+ specification.data = buffers.getcontent(buffer) or ""
+ end
+ return nodeinjections.comment(specification)
+end
+
+function commands.insertcomment(specification)
+ tex.box["b_scrn_comment_link"] = comments.insert(specification)
+end
+
+-- Soundclips
+
+function soundclips.register(specification)
+ local tag = specification.tag
+ if tag and tag ~= "" then
+ local filename = specification.file
+ if not filename or filename == "" then
+ filename = tag
+ specification.file = filename
+ end
+ soundclips[tag] = specification
+ return specification
+ end
+end
+
+function soundclips.insert(tag)
+ local sc = soundclips[tag]
+ if not sc then
+ -- todo: message
+ return soundclips.register { tag = tag }
+ else
+ return sc
+ end
+end
+
+commands.registersoundclip = soundclips.register
+commands.insertsoundclip = soundclips.insert
+
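+-- A small usage sketch (tag and file name are just examples): a clip gets
+-- registered under a tag and fetched later; for an unknown tag, insert falls
+-- back on registering one where the tag doubles as file name.
+--
+-- soundclips.register { tag = "ping", file = "ping.wav" }
+-- local clip = soundclips.insert("ping") -- returns the stored specification
+--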
+-- Renderings
+
+function renderings.register(specification)
+ if specification.label then
+ renderings[specification.label] = specification
+ return specification
+ end
+end
+
+function renderings.rendering(label)
+ local rn = renderings[label]
+ if not rn then
+ -- todo: message
+ return renderings.register { label = label }
+ else
+ return rn
+ end
+end
+
+local function var(label,key)
+ local rn = renderings[label]
+ return rn and rn[key] or ""
+end
+
+renderings.var = var
+
+function commands.renderingvar(label,key)
+ context(var(label,key))
+end
+
+commands.registerrendering = renderings.register
+
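+-- Again a made up example: a rendering is registered under a label and queried
+-- with var; an unknown label or key simply yields an empty string.
+--
+-- renderings.register { label = "movie-1", mime = "video/mpeg", filename = "demo.mpg" }
+-- context(var("movie-1","mime"))    -- typesets: video/mpeg
+-- context(var("movie-1","unknown")) -- typesets nothing
+--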
+-- Rendering:
+
+function commands.insertrenderingwindow(specification)
+ codeinjections.insertrenderingwindow(specification)
+end
+
+-- Linkedlists (only a context interface)
+
+function commands.definelinkedlist(tag)
+ -- no need
+end
+
+function commands.enhancelinkedlist(tag,n)
+ local ll = jobpasses.gettobesaved(tag)
+ if ll then
+ ll[n] = texcount.realpageno
+ end
+end
+
+function commands.addlinklistelement(tag)
+ local tobesaved = jobpasses.gettobesaved(tag)
+ local collected = jobpasses.getcollected(tag) or { }
+ local currentlink = #tobesaved + 1
+ local noflinks = #collected
+ tobesaved[currentlink] = 0
+ local f = collected[1] or 0
+ local l = collected[noflinks] or 0
+ local p = collected[currentlink-1] or f
+ local n = collected[currentlink+1] or l
+ context.setlinkedlistproperties(currentlink,noflinks,f,p,n,l)
+ -- context.ctxlatelua(function() commands.enhancelinkedlist(tag,currentlink) end)
+end
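+
+-- A worked example of the bookkeeping above (numbers are invented): assume the
+-- previous run collected pages { 2, 5, 9 } for this tag and two elements have
+-- already been added in the current run. Then currentlink = 3, noflinks = 3,
+-- f = collected[1] = 2, l = collected[3] = 9, p = collected[2] = 5 and
+-- n = collected[4] or l = 9, so the element knows its first, previous, next
+-- and last occurrence.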
diff --git a/tex/context/base/scrp-cjk.lua b/tex/context/base/scrp-cjk.lua
index f7167b45c..083fc4e53 100644
--- a/tex/context/base/scrp-cjk.lua
+++ b/tex/context/base/scrp-cjk.lua
@@ -1,951 +1,951 @@
-if not modules then modules = { } end modules ['scrp-cjk'] = {
- version = 1.001,
- comment = "companion to scrp-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- We can speed this up by preallocating nodes and copying them but the
--- gain is not that large.
-
--- About input line endings: there is no way to distinguish between inline
--- spaces and end-of-line characters that were turned into spaces (which
--- would not make sense anyway, because a wanted space at the end of a line
--- would then have to be hard coded).
-
-local utfchar = utf.char
-
-local insert_node_after = node.insert_after
-local insert_node_before = node.insert_before
-local remove_node = nodes.remove
-
-local nodepool = nodes.pool
-local new_glue = nodepool.glue
-local new_kern = nodepool.kern
-local new_penalty = nodepool.penalty
-
-local nodecodes = nodes.nodecodes
-local skipcodes = nodes.skipcodes
-local glyph_code = nodecodes.glyph
-local glue_code = nodecodes.glue
-local userskip_code = skipcodes.userskip
-
-local a_scriptstatus = attributes.private('scriptstatus')
-local a_scriptinjection = attributes.private('scriptinjection')
-
-local categorytonumber = scripts.categorytonumber
-local numbertocategory = scripts.numbertocategory
-local hash = scripts.hash
-local numbertodataset = scripts.numbertodataset
-
-local fonthashes = fonts.hashes
-local fontdata = fonthashes.identifiers
-local quaddata = fonthashes.quads
-local spacedata = fonthashes.spaces
-
-local trace_details = false trackers.register("scripts.details", function(v) trace_details = v end)
-
-local report_details = logs.reporter("scripts","detail")
-
--- raggedleft is controlled by leftskip and we might end up with a situation
--- where the intercharacter spacing interferes with this; the solution is to
--- patch the nodelist, but it is better to use veryraggedleft
-
-local inter_char_shrink = 0
-local inter_char_stretch = 0
-local inter_char_half_shrink = 0
-local inter_char_half_stretch = 0
-local inter_char_quarter_shrink = 0
-local inter_char_quarter_stretch = 0
-
-local full_char_width = 0
-local half_char_width = 0
-local quarter_char_width = 0
-
-local inter_char_hangul_penalty = 0
-
-local function set_parameters(font,data)
- -- beware: parameters can be nil in e.g. punk variants
- local quad = quaddata[font]
- full_char_width = quad
- half_char_width = quad/2
- quarter_char_width = quad/4
- inter_char_shrink = data.inter_char_shrink_factor * quad
- inter_char_stretch = data.inter_char_stretch_factor * quad
- inter_char_half_shrink = data.inter_char_half_shrink_factor * quad
- inter_char_half_stretch = data.inter_char_half_stretch_factor * quad
- inter_char_quarter_shrink = data.inter_char_quarter_shrink_factor * quad
- inter_char_quarter_stretch = data.inter_char_quarter_stretch_factor * quad
- inter_char_hangul_penalty = data.inter_char_hangul_penalty
-end
-
--- a test version did compensate for crappy halfwidth but we can best do that
--- at font definition time and/or just assume a correct font
-
-local function trace_detail(current,what)
- local prev = current.prev
- local c_id = current.id
- local p_id = prev and prev.id
- if c_id == glyph_code then
- local c_ch = current.char
- if p_id == glyph_code then
- local p_ch = p_id and prev.char
- report_details("[%C %a] [%s] [%C %a]",p_ch,hash[p_ch],what,c_ch,hash[c_ch])
- else
- report_details("[%s] [%C %a]",what,c_ch,hash[c_ch])
- end
- else
- if p_id == glyph_code then
- local p_ch = p_id and prev.char
- report_details("[%C %a] [%s]",p_ch,hash[p_ch],what)
- else
- report_details("[%s]",what)
- end
- end
-end
-
-local function trace_detail_between(p,n,what)
- local p_ch = p.char
- local n_ch = n.char
- report_details("[%C %a] [%s] [%C %a]",p_ch,hash[p_ch],what,n_ch,hash[n_ch])
-end
-
-local function nobreak(head,current)
- if trace_details then
- trace_detail(current,"nobreak")
- end
- insert_node_before(head,current,new_penalty(10000))
-end
-
-local function stretch_break(head,current)
- if trace_details then
- trace_detail(current,"stretch break")
- end
- insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
-end
-
-local function shrink_break(head,current)
- if trace_details then
- trace_detail(current,"shrink break")
- end
- insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
-end
-
-local function nobreak_stretch(head,current)
- if trace_details then
- trace_detail(current,"no break stretch")
- end
- insert_node_before(head,current,new_penalty(10000))
- insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
-end
-
-local function korean_break(head,current)
- if trace_details then
- trace_detail(current,"korean break")
- end
- insert_node_before(head,current,new_penalty(inter_char_hangul_penalty))
-end
-
-local function nobreak_shrink(head,current)
- if trace_details then
- trace_detail(current,"nobreak shrink")
- end
- insert_node_before(head,current,new_penalty(10000))
- insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
-end
-
-local function nobreak_autoshrink(head,current)
- if trace_details then
- trace_detail(current,"nobreak autoshrink")
- end
- insert_node_before(head,current,new_penalty(10000))
- insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
-end
-
-local function nobreak_stretch_nobreak_shrink(head,current)
- if trace_details then
- trace_detail(current,"nobreak stretch nobreak shrink")
- end
- insert_node_before(head,current,new_penalty(10000))
- insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
- insert_node_before(head,current,new_penalty(10000))
- insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
-end
-
-local function nobreak_stretch_nobreak_autoshrink(head,current)
- if trace_details then
- trace_detail(current,"nobreak stretch nobreak autoshrink")
- end
- insert_node_before(head,current,new_penalty(10000))
- insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
- insert_node_before(head,current,new_penalty(10000))
- insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
-end
-
-local function nobreak_shrink_nobreak_stretch(head,current)
- if trace_details then
- trace_detail(current,"nobreak shrink nobreak stretch")
- end
- insert_node_before(head,current,new_penalty(10000))
- insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
- insert_node_before(head,current,new_penalty(10000))
- insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
-end
-
-local function nobreak_autoshrink_nobreak_stretch(head,current)
- if trace_details then
- trace_detail(current,"nobreak autoshrink nobreak stretch")
- end
- insert_node_before(head,current,new_penalty(10000))
- insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
- insert_node_before(head,current,new_penalty(10000))
- insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
-end
-
-local function nobreak_shrink_break_stretch(head,current)
- if trace_details then
- trace_detail(current,"nobreak shrink break stretch")
- end
- insert_node_before(head,current,new_penalty(10000))
- insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
- insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
-end
-
-local function nobreak_autoshrink_break_stretch(head,current)
- if trace_details then
- trace_detail(current,"nobreak autoshrink break stretch")
- end
- insert_node_before(head,current,new_penalty(10000))
- insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
- insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
-end
-
-local function nobreak_shrink_break_stretch_nobreak_shrink(head,current)
- if trace_details then
- trace_detail(current,"nobreak shrink break stretch nobreak shrink")
- end
- insert_node_before(head,current,new_penalty(10000))
- insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
- insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
- insert_node_before(head,current,new_penalty(10000))
- insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
-end
-
-local function japanese_between_full_close_open(head,current) -- todo: check width
- if trace_details then
- trace_detail(current,"japanese between full close open")
- end
- insert_node_before(head,current,new_kern(-half_char_width))
- insert_node_before(head,current,new_glue(half_char_width,0,inter_char_half_shrink))
- insert_node_before(head,current,new_kern(-half_char_width))
-end
-
-local function japanese_between_full_close_full_close(head,current) -- todo: check width
- if trace_details then
- trace_detail(current,"japanese between full close full close")
- end
- insert_node_before(head,current,new_kern(-half_char_width))
- -- insert_node_before(head,current,new_glue(half_char_width,0,inter_char_half_shrink))
-end
-
-local function japanese_before_full_width_punct(head,current) -- todo: check width
- if trace_details then
- trace_detail(current,"japanese before full width punct")
- end
- insert_node_before(head,current,new_penalty(10000))
- insert_node_before(head,current,new_glue(quarter_char_width,0,inter_char_quarter_shrink))
- insert_node_before(head,current,new_kern(-quarter_char_width))
-end
-
-local function japanese_after_full_width_punct(head,current) -- todo: check width
- if trace_details then
- trace_detail(current,"japanese after full width punct")
- end
- insert_node_before(head,current,new_kern(-quarter_char_width))
- insert_node_before(head,current,new_glue(quarter_char_width,0,inter_char_quarter_shrink))
-end
-
-local function nobreak_autoshrink_break_stretch_nobreak_autoshrink(head,current)
- if trace_details then
- trace_detail(current,"nobreak autoshrink break stretch nobreak autoshrink")
- end
- insert_node_before(head,current,new_penalty(10000))
- insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
- insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
- insert_node_before(head,current,new_penalty(10000))
- insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
-end
-
-local function nobreak_autoshrink_break_stretch_nobreak_shrink(head,current)
- if trace_details then
- trace_detail(current,"nobreak autoshrink break stretch nobreak shrink")
- end
- insert_node_before(head,current,new_penalty(10000))
- insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
- insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
- insert_node_before(head,current,new_penalty(10000))
- insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
-end
-
-local function nobreak_shrink_break_stretch_nobreak_autoshrink(head,current)
- if trace_details then
- trace_detail(current,"nobreak shrink break stretch nobreak autoshrink")
- end
- insert_node_before(head,current,new_penalty(10000))
- insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
- insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
- insert_node_before(head,current,new_penalty(10000))
- insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
-end
-
-local function nobreak_stretch_break_shrink(head,current)
- if trace_details then
- trace_detail(current,"nobreak stretch break shrink")
- end
- insert_node_before(head,current,new_penalty(10000))
- insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
- insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
-end
-
-local function nobreak_stretch_break_autoshrink(head,current)
- if trace_details then
- trace_detail(current,"nobreak stretch break autoshrink")
- end
- insert_node_before(head,current,new_penalty(10000))
- insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
- insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
-end
-
--- Korean: hangul
-
-local korean_0 = {
-}
-
-local korean_1 = {
- jamo_initial = korean_break,
- korean = korean_break,
- chinese = korean_break,
- hiragana = korean_break,
- katakana = korean_break,
- half_width_open = stretch_break,
- half_width_close = nobreak,
- full_width_open = stretch_break,
- full_width_close = nobreak,
- full_width_punct = nobreak,
--- hyphen = nil,
- non_starter = korean_break,
- other = korean_break,
-}
-
-local korean_2 = {
- jamo_initial = stretch_break,
- korean = stretch_break,
- chinese = stretch_break,
- hiragana = stretch_break,
- katakana = stretch_break,
- half_width_open = stretch_break,
- half_width_close = nobreak,
- full_width_open = stretch_break,
- full_width_close = nobreak,
- full_width_punct = nobreak,
--- hyphen = nil,
- non_starter = stretch_break,
- other = stretch_break,
-}
-
-local korean_3 = {
- jamo_initial = stretch_break,
- korean = stretch_break,
- chinese = stretch_break,
- hiragana = stretch_break,
- katakana = stretch_break,
- half_width_open = stretch_break,
- half_width_close = nobreak,
- full_width_open = stretch_break,
- full_width_close = nobreak,
- full_width_punct = nobreak,
--- hyphen = nil,
- non_starter = nobreak,
- other = nobreak,
-}
-
-local korean_4 = {
- jamo_initial = nobreak,
- korean = nobreak,
- chinese = nobreak,
- hiragana = nobreak,
- katakana = nobreak,
- half_width_open = nobreak,
- half_width_close = nobreak,
- full_width_open = nobreak,
- full_width_close = nobreak,
- full_width_punct = nobreak,
- hyphen = nobreak,
- non_starter = nobreak,
- other = nobreak,
-}
-
-local korean_5 = {
- jamo_initial = stretch_break,
- korean = stretch_break,
- chinese = stretch_break,
- hiragana = stretch_break,
- katakana = stretch_break,
- half_width_open = stretch_break,
- half_width_close = nobreak_stretch,
- full_width_open = stretch_break,
- full_width_close = nobreak_stretch,
- full_width_punct = nobreak_stretch,
- hyphen = nobreak_stretch,
- non_starter = nobreak_stretch,
- other = stretch_break,
-}
-
-local injectors = { -- [previous] [current]
- jamo_final = korean_1,
- korean = korean_1,
- chinese = korean_1,
- hiragana = korean_1,
- katakana = korean_1,
- hyphen = korean_2,
- start = korean_0,
- other = korean_2,
- non_starter = korean_3,
- full_width_open = korean_4,
- half_width_open = korean_4,
- full_width_close = korean_5,
- full_width_punct = korean_5,
- half_width_close = korean_5,
-}
-
-local function process(head,first,last)
- if first ~= last then
- local lastfont, previous, last = nil, "start", nil
- while true do
- local upcoming, id = first.next, first.id
- if id == glyph_code then
- local a = first[a_scriptstatus]
- local current = numbertocategory[a]
- local action = injectors[previous]
- if action then
- action = action[current]
- if action then
- local font = first.font
- if font ~= lastfont then
- lastfont = font
- set_parameters(font,numbertodataset[first[a_scriptinjection]])
- end
- action(head,first)
- end
- end
- previous = current
- else -- glue
- local p, n = first.prev, upcoming
- if p and n then
- local pid, nid = p.id, n.id
- if pid == glyph_code and nid == glyph_code then
- local pa, na = p[a_scriptstatus], n[a_scriptstatus]
- local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na]
- if not pcjk or not ncjk
- or pcjk == "korean" or ncjk == "korean"
- or pcjk == "other" or ncjk == "other"
- or pcjk == "jamo_final" or ncjk == "jamo_initial" then
- previous = "start"
- else -- if head ~= first then
- remove_node(head,first,true)
- previous = pcjk
- -- else
- -- previous = pcjk
- end
- else
- previous = "start"
- end
- else
- previous = "start"
- end
- end
- if upcoming == last then -- was stop
- break
- else
- first = upcoming
- end
- end
- end
-end
-
-scripts.installmethod {
- name = "hangul",
- injector = process,
- datasets = { -- todo: metatables
- default = {
- inter_char_shrink_factor = 0.50, -- of quad
- inter_char_stretch_factor = 0.50, -- of quad
- inter_char_half_shrink_factor = 0.50, -- of quad
- inter_char_half_stretch_factor = 0.50, -- of quad
- inter_char_quarter_shrink_factor = 0.50, -- of quad
- inter_char_quarter_stretch_factor = 0.50, -- of quad
- inter_char_hangul_penalty = 50,
- },
- },
-}
-
--- Chinese: hanzi
-
-local chinese_0 = {
-}
-
-local chinese_1 = {
- jamo_initial = korean_break,
- korean = korean_break,
- chinese = stretch_break,
- hiragana = stretch_break,
- katakana = stretch_break,
- half_width_open = nobreak_stretch_break_autoshrink,
- half_width_close = nobreak_stretch,
- full_width_open = nobreak_stretch_break_shrink,
- full_width_close = nobreak_stretch,
- full_width_punct = nobreak_stretch,
--- hyphen = nil,
- non_starter = nobreak_stretch,
- other = stretch_break,
-}
-
-local chinese_2 = {
- jamo_initial = korean_break,
- korean = stretch_break,
- chinese = stretch_break,
- hiragana = stretch_break,
- katakana = stretch_break,
- half_width_open = nobreak_stretch_break_autoshrink,
- half_width_close = nobreak_stretch,
- full_width_open = nobreak_stretch_break_shrink,
- full_width_close = nobreak_stretch,
- full_width_punct = nobreak_stretch,
- hyphen = nobreak_stretch,
- non_starter = nobreak_stretch,
- other = stretch_break,
-}
-
-local chinese_3 = {
- jamo_initial = korean_break,
- korean = stretch_break,
- chinese = stretch_break,
- hiragana = stretch_break,
- katakana = stretch_break,
- half_width_open = nobreak_stretch_break_autoshrink,
- half_width_close = nobreak_stretch,
- full_width_open = nobreak_stretch_break_shrink,
- full_width_close = nobreak_stretch,
- full_width_punct = nobreak_stretch,
--- hyphen = nil,
- non_starter = nobreak_stretch,
- other = stretch_break,
-}
-
-local chinese_4 = {
--- jamo_initial = nil,
--- korean = nil,
--- chinese = nil,
--- hiragana = nil,
--- katakana = nil,
- half_width_open = nobreak_autoshrink,
- half_width_close = nil,
- full_width_open = nobreak_shrink,
- full_width_close = nobreak,
- full_width_punct = nobreak,
--- hyphen = nil,
- non_starter = nobreak,
--- other = nil,
-}
-
-local chinese_5 = {
- jamo_initial = stretch_break,
- korean = stretch_break,
- chinese = stretch_break,
- hiragana = stretch_break,
- katakana = stretch_break,
- half_width_open = nobreak_stretch_break_autoshrink,
- half_width_close = nobreak_stretch,
- full_width_open = nobreak_stretch_break_shrink,
- full_width_close = nobreak_stretch,
- full_width_punct = nobreak_stretch,
--- hyphen = nil,
- non_starter = nobreak_stretch,
- other = stretch_break,
-}
-
-local chinese_6 = {
- jamo_initial = nobreak_stretch,
- korean = nobreak_stretch,
- chinese = nobreak_stretch,
- hiragana = nobreak_stretch,
- katakana = nobreak_stretch,
- half_width_open = nobreak_stretch_break_autoshrink,
- half_width_close = nobreak_stretch,
- full_width_open = nobreak_stretch_break_shrink,
- full_width_close = nobreak_stretch,
- full_width_punct = nobreak_stretch,
- hyphen = nobreak_stretch,
- non_starter = nobreak_stretch,
- other = nobreak_stretch,
-}
-
-local chinese_7 = {
- jamo_initial = nobreak_shrink_break_stretch,
- korean = nobreak_shrink_break_stretch,
- chinese = stretch_break, -- nobreak_shrink_break_stretch,
- hiragana = stretch_break, -- nobreak_shrink_break_stretch,
- katakana = stretch_break, -- nobreak_shrink_break_stretch,
- half_width_open = nobreak_shrink_break_stretch_nobreak_autoshrink,
- half_width_close = nobreak_shrink_nobreak_stretch,
- full_width_open = nobreak_shrink_break_stretch_nobreak_shrink,
- full_width_close = nobreak_shrink_nobreak_stretch,
- full_width_punct = nobreak_shrink_nobreak_stretch,
- hyphen = nobreak_shrink_break_stretch,
- non_starter = nobreak_shrink_break_stretch,
- other = nobreak_shrink_break_stretch,
-}
-
-local chinese_8 = {
- jamo_initial = nobreak_shrink_break_stretch,
- korean = nobreak_autoshrink_break_stretch,
- chinese = stretch_break, -- nobreak_autoshrink_break_stretch,
- hiragana = stretch_break, -- nobreak_autoshrink_break_stretch,
- katakana = stretch_break, -- nobreak_autoshrink_break_stretch,
- half_width_open = nobreak_autoshrink_break_stretch_nobreak_autoshrink,
- half_width_close = nobreak_autoshrink_nobreak_stretch,
- full_width_open = nobreak_autoshrink_break_stretch_nobreak_shrink,
- full_width_close = nobreak_autoshrink_nobreak_stretch,
- full_width_punct = nobreak_autoshrink_nobreak_stretch,
- hyphen = nobreak_autoshrink_break_stretch,
- non_starter = nobreak_autoshrink_break_stretch,
- other = nobreak_autoshrink_break_stretch,
-}
-
-local injectors = { -- [previous] [current]
- jamo_final = chinese_1,
- korean = chinese_1,
- chinese = chinese_2,
- hiragana = chinese_2,
- katakana = chinese_2,
- hyphen = chinese_3,
- start = chinese_4,
- other = chinese_5,
- non_starter = chinese_5,
- full_width_open = chinese_6,
- half_width_open = chinese_6,
- full_width_close = chinese_7,
- full_width_punct = chinese_7,
- half_width_close = chinese_8,
-}
-
-local function process(head,first,last)
- if first ~= last then
- local lastfont, previous, last = nil, "start", nil
- while true do
- local upcoming, id = first.next, first.id
- if id == glyph_code then
- local a = first[a_scriptstatus]
- local current = numbertocategory[a]
- local action = injectors[previous]
- if action then
- action = action[current]
- if action then
- local font = first.font
- if font ~= lastfont then
- lastfont = font
- set_parameters(font,numbertodataset[first[a_scriptinjection]])
- end
- action(head,first)
- end
- end
- previous = current
- else -- glue
- local p, n = first.prev, upcoming
- if p and n then
- local pid, nid = p.id, n.id
- if pid == glyph_code and nid == glyph_code then
- local pa, na = p[a_scriptstatus], n[a_scriptstatus]
- local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na]
- if not pcjk or not ncjk
- or pcjk == "korean" or ncjk == "korean"
- or pcjk == "other" or ncjk == "other"
- or pcjk == "jamo_final" or ncjk == "jamo_initial"
- or pcjk == "half_width_close" or ncjk == "half_width_open" then -- extra compared to korean
- previous = "start"
- else -- if head ~= first then
- remove_node(head,first,true)
- previous = pcjk
- -- else
- -- previous = pcjk
- end
- else
- previous = "start"
- end
- else
- previous = "start"
- end
- end
- if upcoming == last then -- was stop
- break
- else
- first = upcoming
- end
- end
- end
-end
-
-scripts.installmethod {
- name = "hanzi",
- injector = process,
- datasets = {
- default = {
- inter_char_shrink_factor = 0.50, -- of quad
- inter_char_stretch_factor = 0.50, -- of quad
- inter_char_half_shrink_factor = 0.50, -- of quad
- inter_char_half_stretch_factor = 0.50, -- of quad
- inter_char_quarter_shrink_factor = 0.50, -- of quad
- inter_char_quarter_stretch_factor = 0.50, -- of quad
- inter_char_hangul_penalty = 50,
- },
- },
-}
-
--- Japanese: ideographic, hiragana, katakana, romaji / jis
-
-local japanese_0 = {
-}
-
-local japanese_1 = {
- jamo_initial = korean_break,
- korean = korean_break,
- chinese = stretch_break,
- hiragana = stretch_break,
- katakana = stretch_break,
- half_width_open = nobreak_stretch_break_autoshrink,
- half_width_close = nobreak_stretch,
- full_width_open = nobreak_stretch_break_shrink,
- full_width_close = nobreak_stretch,
- full_width_punct = nobreak_stretch,
--- hyphen = nil,
- non_starter = nobreak_stretch,
- other = stretch_break,
-}
-
-local japanese_2 = {
- jamo_initial = korean_break,
- korean = stretch_break,
- chinese = stretch_break,
- hiragana = stretch_break,
- katakana = stretch_break,
- half_width_open = nobreak_stretch_break_autoshrink,
- half_width_close = nobreak_stretch,
- full_width_open = nobreak_stretch_break_shrink,
- full_width_close = nobreak_stretch,
- full_width_punct = japanese_before_full_width_punct, -- nobreak_stretch,
- hyphen = nobreak_stretch,
- non_starter = nobreak_stretch,
- other = stretch_break,
-}
-
-local japanese_3 = {
- jamo_initial = korean_break,
- korean = stretch_break,
- chinese = stretch_break,
- hiragana = stretch_break,
- katakana = stretch_break,
- half_width_open = nobreak_stretch_break_autoshrink,
- half_width_close = nobreak_stretch,
- full_width_open = nobreak_stretch_break_shrink,
- full_width_close = nobreak_stretch,
- full_width_punct = nobreak_stretch,
--- hyphen = nil,
- non_starter = nobreak_stretch,
- other = stretch_break,
-}
-
-local japanese_4 = {
--- jamo_initial = nil,
--- korean = nil,
--- chinese = nil,
--- hiragana = nil,
--- katakana = nil,
- half_width_open = nobreak_autoshrink,
- half_width_close = nil,
- full_width_open = nobreak_shrink,
- full_width_close = nobreak,
- full_width_punct = nobreak,
--- hyphen = nil,
- non_starter = nobreak,
--- other = nil,
-}
-
-local japanese_5 = {
- jamo_initial = stretch_break,
- korean = stretch_break,
- chinese = stretch_break,
- hiragana = stretch_break,
- katakana = stretch_break,
- half_width_open = nobreak_stretch_break_autoshrink,
- half_width_close = nobreak_stretch,
- full_width_open = nobreak_stretch_break_shrink,
- full_width_close = nobreak_stretch,
- full_width_punct = nobreak_stretch,
--- hyphen = nil,
- non_starter = nobreak_stretch,
- other = stretch_break,
-}
-
-local japanese_6 = {
- jamo_initial = nobreak_stretch,
- korean = nobreak_stretch,
- chinese = nobreak_stretch,
- hiragana = nobreak_stretch,
- katakana = nobreak_stretch,
- half_width_open = nobreak_stretch_break_autoshrink,
- half_width_close = nobreak_stretch,
- full_width_open = nobreak_stretch_break_shrink,
- full_width_close = nobreak_stretch,
- full_width_punct = nobreak_stretch,
- hyphen = nobreak_stretch,
- non_starter = nobreak_stretch,
- other = nobreak_stretch,
-}
-
-local japanese_7 = {
- jamo_initial = nobreak_shrink_break_stretch,
- korean = nobreak_shrink_break_stretch,
- chinese = japanese_after_full_width_punct, -- stretch_break
- hiragana = japanese_after_full_width_punct, -- stretch_break
- katakana = japanese_after_full_width_punct, -- stretch_break
- half_width_open = nobreak_shrink_break_stretch_nobreak_autoshrink,
- half_width_close = nobreak_shrink_nobreak_stretch,
- full_width_open = japanese_between_full_close_open, -- !!
- full_width_close = japanese_between_full_close_full_close, -- nobreak_shrink_nobreak_stretch,
- full_width_punct = nobreak_shrink_nobreak_stretch,
- hyphen = nobreak_shrink_break_stretch,
- non_starter = nobreak_shrink_break_stretch,
- other = nobreak_shrink_break_stretch,
-}
-
-local japanese_8 = {
- jamo_initial = nobreak_shrink_break_stretch,
- korean = nobreak_autoshrink_break_stretch,
- chinese = stretch_break,
- hiragana = stretch_break,
- katakana = stretch_break,
- half_width_open = nobreak_autoshrink_break_stretch_nobreak_autoshrink,
- half_width_close = nobreak_autoshrink_nobreak_stretch,
- full_width_open = nobreak_autoshrink_break_stretch_nobreak_shrink,
- full_width_close = nobreak_autoshrink_nobreak_stretch,
- full_width_punct = nobreak_autoshrink_nobreak_stretch,
- hyphen = nobreak_autoshrink_break_stretch,
- non_starter = nobreak_autoshrink_break_stretch,
- other = nobreak_autoshrink_break_stretch,
-}
-
-local injectors = { -- [previous] [current]
- jamo_final = japanese_1,
- korean = japanese_1,
- chinese = japanese_2,
- hiragana = japanese_2,
- katakana = japanese_2,
- hyphen = japanese_3,
- start = japanese_4,
- other = japanese_5,
- non_starter = japanese_5,
- full_width_open = japanese_6,
- half_width_open = japanese_6,
- full_width_close = japanese_7,
- full_width_punct = japanese_7,
- half_width_close = japanese_8,
-}
-
-local function process(head,first,last)
- if first ~= last then
- local lastfont, previous, last = nil, "start", nil
- while true do
- local upcoming, id = first.next, first.id
- if id == glyph_code then
- local a = first[a_scriptstatus]
- local current = numbertocategory[a]
- local action = injectors[previous]
- if action then
- action = action[current]
- if action then
- local font = first.font
- if font ~= lastfont then
- lastfont = font
- set_parameters(font,numbertodataset[first[a_scriptinjection]])
- end
- action(head,first)
- end
- end
- previous = current
-
--- elseif id == math_code then
--- upcoming = end_of_math(current).next
--- previous = "start"
-
- else -- glue
- local p, n = first.prev, upcoming -- we should remember prev
- if p and n then
- local pid, nid = p.id, n.id
- if pid == glyph_code and nid == glyph_code then
- local pa, na = p[a_scriptstatus], n[a_scriptstatus]
- local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na]
- if not pcjk or not ncjk
- or pcjk == "korean" or ncjk == "korean"
- or pcjk == "other" or ncjk == "other"
- or pcjk == "jamo_final" or ncjk == "jamo_initial"
- or pcjk == "half_width_close" or ncjk == "half_width_open" then -- extra compared to korean
- previous = "start"
- else -- if head ~= first then
-if id == glue_code and first.subtype == userskip_code then -- also scriptstatus check?
- -- for the moment no distinction possible between space and userskip
- local w = first.spec.width
- local s = spacedata[p.font]
- if w == s then -- could be option
- if trace_details then
- trace_detail_between(p,n,"space removed")
- end
- remove_node(head,first,true)
- end
-end
- previous = pcjk
- -- else
- -- previous = pcjk
- end
- else
- previous = "start"
- end
- else
- previous = "start"
- end
- end
- if upcoming == last then -- was stop
- break
- else
- first = upcoming
- end
- end
- end
-end
-
-scripts.installmethod {
- name = "nihongo", -- what name to use?
- injector = process,
- datasets = {
- default = {
- inter_char_shrink_factor = 0.50, -- of quad
- inter_char_stretch_factor = 0.50, -- of quad
- inter_char_half_shrink_factor = 0.50, -- of quad
- inter_char_half_stretch_factor = 0.50, -- of quad
- inter_char_quarter_shrink_factor = 0.25, -- of quad
- inter_char_quarter_stretch_factor = 0.25, -- of quad
- inter_char_hangul_penalty = 50,
- },
- },
-}
-
+if not modules then modules = { } end modules ['scrp-cjk'] = {
+ version = 1.001,
+ comment = "companion to scrp-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- We can speed this up by preallocating nodes and copying them but the
+-- gain is not that large.
+
+-- About input line endings: there is no way to distinguish between inline
+-- spaces and end-of-line characters that were turned into spaces (which
+-- would not make sense anyway, because a wanted space at the end of a line
+-- would then have to be hard coded).
+
+local utfchar = utf.char
+
+local insert_node_after = node.insert_after
+local insert_node_before = node.insert_before
+local remove_node = nodes.remove
+
+local nodepool = nodes.pool
+local new_glue = nodepool.glue
+local new_kern = nodepool.kern
+local new_penalty = nodepool.penalty
+
+local nodecodes = nodes.nodecodes
+local skipcodes = nodes.skipcodes
+local glyph_code = nodecodes.glyph
+local glue_code = nodecodes.glue
+local userskip_code = skipcodes.userskip
+
+local a_scriptstatus = attributes.private('scriptstatus')
+local a_scriptinjection = attributes.private('scriptinjection')
+
+local categorytonumber = scripts.categorytonumber
+local numbertocategory = scripts.numbertocategory
+local hash = scripts.hash
+local numbertodataset = scripts.numbertodataset
+
+local fonthashes = fonts.hashes
+local fontdata = fonthashes.identifiers
+local quaddata = fonthashes.quads
+local spacedata = fonthashes.spaces
+
+local trace_details = false trackers.register("scripts.details", function(v) trace_details = v end)
+
+local report_details = logs.reporter("scripts","detail")
+
+-- raggedleft is controlled by leftskip and we might end up with a situation
+-- where the intercharacter spacing interferes with this; the solution is to
+-- patch the nodelist, but it is better to use veryraggedleft
+
+local inter_char_shrink = 0
+local inter_char_stretch = 0
+local inter_char_half_shrink = 0
+local inter_char_half_stretch = 0
+local inter_char_quarter_shrink = 0
+local inter_char_quarter_stretch = 0
+
+local full_char_width = 0
+local half_char_width = 0
+local quarter_char_width = 0
+
+local inter_char_hangul_penalty = 0
+
+local function set_parameters(font,data)
+ -- beware: parameters can be nil in e.g. punk variants
+ local quad = quaddata[font]
+ full_char_width = quad
+ half_char_width = quad/2
+ quarter_char_width = quad/4
+ inter_char_shrink = data.inter_char_shrink_factor * quad
+ inter_char_stretch = data.inter_char_stretch_factor * quad
+ inter_char_half_shrink = data.inter_char_half_shrink_factor * quad
+ inter_char_half_stretch = data.inter_char_half_stretch_factor * quad
+ inter_char_quarter_shrink = data.inter_char_quarter_shrink_factor * quad
+ inter_char_quarter_stretch = data.inter_char_quarter_stretch_factor * quad
+ inter_char_hangul_penalty = data.inter_char_hangul_penalty
+end
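+
+-- As an illustration only: with a 10pt font the quad is typically 10pt, so the
+-- factor 0.50 used in the hangul dataset further on gives 5pt of (half) shrink
+-- and stretch, and a quarter factor of 0.25 would give 2.5pt.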
+
+-- a test version did compensate for crappy halfwidth but we can best do that
+-- at font definition time and/or just assume a correct font
+
+local function trace_detail(current,what)
+ local prev = current.prev
+ local c_id = current.id
+ local p_id = prev and prev.id
+ if c_id == glyph_code then
+ local c_ch = current.char
+ if p_id == glyph_code then
+ local p_ch = p_id and prev.char
+ report_details("[%C %a] [%s] [%C %a]",p_ch,hash[p_ch],what,c_ch,hash[c_ch])
+ else
+ report_details("[%s] [%C %a]",what,c_ch,hash[c_ch])
+ end
+ else
+ if p_id == glyph_code then
+ local p_ch = p_id and prev.char
+ report_details("[%C %a] [%s]",p_ch,hash[p_ch],what)
+ else
+ report_details("[%s]",what)
+ end
+ end
+end
+
+local function trace_detail_between(p,n,what)
+ local p_ch = p.char
+ local n_ch = n.char
+ report_details("[%C %a] [%s] [%C %a]",p_ch,hash[p_ch],what,n_ch,hash[n_ch])
+end
+
+local function nobreak(head,current)
+ if trace_details then
+ trace_detail(current,"nobreak")
+ end
+ insert_node_before(head,current,new_penalty(10000))
+end
+
+local function stretch_break(head,current)
+ if trace_details then
+ trace_detail(current,"stretch break")
+ end
+ insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
+end
+
+local function shrink_break(head,current)
+ if trace_details then
+ trace_detail(current,"shrink break")
+ end
+ insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
+end
+
+local function nobreak_stretch(head,current)
+ if trace_details then
+ trace_detail(current,"no break stretch")
+ end
+ insert_node_before(head,current,new_penalty(10000))
+ insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
+end
+
+local function korean_break(head,current)
+ if trace_details then
+ trace_detail(current,"korean break")
+ end
+ insert_node_before(head,current,new_penalty(inter_char_hangul_penalty))
+end
+
+local function nobreak_shrink(head,current)
+ if trace_details then
+ trace_detail(current,"nobreak shrink")
+ end
+ insert_node_before(head,current,new_penalty(10000))
+ insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
+end
+
+local function nobreak_autoshrink(head,current)
+ if trace_details then
+ trace_detail(current,"nobreak autoshrink")
+ end
+ insert_node_before(head,current,new_penalty(10000))
+ insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
+end
+
+local function nobreak_stretch_nobreak_shrink(head,current)
+ if trace_details then
+ trace_detail(current,"nobreak stretch nobreak shrink")
+ end
+ insert_node_before(head,current,new_penalty(10000))
+ insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
+ insert_node_before(head,current,new_penalty(10000))
+ insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
+end
+
+local function nobreak_stretch_nobreak_autoshrink(head,current)
+ if trace_details then
+ trace_detail(current,"nobreak stretch nobreak autoshrink")
+ end
+ insert_node_before(head,current,new_penalty(10000))
+ insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
+ insert_node_before(head,current,new_penalty(10000))
+ insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
+end
+
+local function nobreak_shrink_nobreak_stretch(head,current)
+ if trace_details then
+ trace_detail(current,"nobreak shrink nobreak stretch")
+ end
+ insert_node_before(head,current,new_penalty(10000))
+ insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
+ insert_node_before(head,current,new_penalty(10000))
+ insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
+end
+
+local function nobreak_autoshrink_nobreak_stretch(head,current)
+ if trace_details then
+ trace_detail(current,"nobreak autoshrink nobreak stretch")
+ end
+ insert_node_before(head,current,new_penalty(10000))
+ insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
+ insert_node_before(head,current,new_penalty(10000))
+ insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
+end
+
+local function nobreak_shrink_break_stretch(head,current)
+ if trace_details then
+ trace_detail(current,"nobreak shrink break stretch")
+ end
+ insert_node_before(head,current,new_penalty(10000))
+ insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
+ insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
+end
+
+local function nobreak_autoshrink_break_stretch(head,current)
+ if trace_details then
+ trace_detail(current,"nobreak autoshrink break stretch")
+ end
+ insert_node_before(head,current,new_penalty(10000))
+ insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
+ insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
+end
+
+local function nobreak_shrink_break_stretch_nobreak_shrink(head,current)
+ if trace_details then
+ trace_detail(current,"nobreak shrink break stretch nobreak shrink")
+ end
+ insert_node_before(head,current,new_penalty(10000))
+ insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
+ insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
+ insert_node_before(head,current,new_penalty(10000))
+ insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
+end
+
+local function japanese_between_full_close_open(head,current) -- todo: check width
+ if trace_details then
+ trace_detail(current,"japanese between full close open")
+ end
+ insert_node_before(head,current,new_kern(-half_char_width))
+ insert_node_before(head,current,new_glue(half_char_width,0,inter_char_half_shrink))
+ insert_node_before(head,current,new_kern(-half_char_width))
+end
+
+local function japanese_between_full_close_full_close(head,current) -- todo: check width
+ if trace_details then
+ trace_detail(current,"japanese between full close full close")
+ end
+ insert_node_before(head,current,new_kern(-half_char_width))
+ -- insert_node_before(head,current,new_glue(half_char_width,0,inter_char_half_shrink))
+end
+
+local function japanese_before_full_width_punct(head,current) -- todo: check width
+ if trace_details then
+ trace_detail(current,"japanese before full width punct")
+ end
+ insert_node_before(head,current,new_penalty(10000))
+ insert_node_before(head,current,new_glue(quarter_char_width,0,inter_char_quarter_shrink))
+ insert_node_before(head,current,new_kern(-quarter_char_width))
+end
+
+local function japanese_after_full_width_punct(head,current) -- todo: check width
+ if trace_details then
+ trace_detail(current,"japanese after full width punct")
+ end
+ insert_node_before(head,current,new_kern(-quarter_char_width))
+ insert_node_before(head,current,new_glue(quarter_char_width,0,inter_char_quarter_shrink))
+end
+
+local function nobreak_autoshrink_break_stretch_nobreak_autoshrink(head,current)
+ if trace_details then
+ trace_detail(current,"nobreak autoshrink break stretch nobreak autoshrink")
+ end
+ insert_node_before(head,current,new_penalty(10000))
+ insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
+ insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
+ insert_node_before(head,current,new_penalty(10000))
+ insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
+end
+
+local function nobreak_autoshrink_break_stretch_nobreak_shrink(head,current)
+ if trace_details then
+ trace_detail(current,"nobreak autoshrink break stretch nobreak shrink")
+ end
+ insert_node_before(head,current,new_penalty(10000))
+ insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
+ insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
+ insert_node_before(head,current,new_penalty(10000))
+ insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
+end
+
+local function nobreak_shrink_break_stretch_nobreak_autoshrink(head,current)
+ if trace_details then
+ trace_detail(current,"nobreak shrink break stretch nobreak autoshrink")
+ end
+ insert_node_before(head,current,new_penalty(10000))
+ insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
+ insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
+ insert_node_before(head,current,new_penalty(10000))
+ insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
+end
+
+local function nobreak_stretch_break_shrink(head,current)
+ if trace_details then
+ trace_detail(current,"nobreak stretch break shrink")
+ end
+ insert_node_before(head,current,new_penalty(10000))
+ insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
+ insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
+end
+
+local function nobreak_stretch_break_autoshrink(head,current)
+ if trace_details then
+ trace_detail(current,"nobreak stretch break autoshrink")
+ end
+ insert_node_before(head,current,new_penalty(10000))
+ insert_node_before(head,current,new_glue(0,inter_char_stretch,0))
+ insert_node_before(head,current,new_glue(0,0,inter_char_half_shrink))
+end
+
+-- Korean: hangul
+
+local korean_0 = {
+}
+
+local korean_1 = {
+ jamo_initial = korean_break,
+ korean = korean_break,
+ chinese = korean_break,
+ hiragana = korean_break,
+ katakana = korean_break,
+ half_width_open = stretch_break,
+ half_width_close = nobreak,
+ full_width_open = stretch_break,
+ full_width_close = nobreak,
+ full_width_punct = nobreak,
+-- hyphen = nil,
+ non_starter = korean_break,
+ other = korean_break,
+}
+
+local korean_2 = {
+ jamo_initial = stretch_break,
+ korean = stretch_break,
+ chinese = stretch_break,
+ hiragana = stretch_break,
+ katakana = stretch_break,
+ half_width_open = stretch_break,
+ half_width_close = nobreak,
+ full_width_open = stretch_break,
+ full_width_close = nobreak,
+ full_width_punct = nobreak,
+-- hyphen = nil,
+ non_starter = stretch_break,
+ other = stretch_break,
+}
+
+local korean_3 = {
+ jamo_initial = stretch_break,
+ korean = stretch_break,
+ chinese = stretch_break,
+ hiragana = stretch_break,
+ katakana = stretch_break,
+ half_width_open = stretch_break,
+ half_width_close = nobreak,
+ full_width_open = stretch_break,
+ full_width_close = nobreak,
+ full_width_punct = nobreak,
+-- hyphen = nil,
+ non_starter = nobreak,
+ other = nobreak,
+}
+
+local korean_4 = {
+ jamo_initial = nobreak,
+ korean = nobreak,
+ chinese = nobreak,
+ hiragana = nobreak,
+ katakana = nobreak,
+ half_width_open = nobreak,
+ half_width_close = nobreak,
+ full_width_open = nobreak,
+ full_width_close = nobreak,
+ full_width_punct = nobreak,
+ hyphen = nobreak,
+ non_starter = nobreak,
+ other = nobreak,
+}
+
+local korean_5 = {
+ jamo_initial = stretch_break,
+ korean = stretch_break,
+ chinese = stretch_break,
+ hiragana = stretch_break,
+ katakana = stretch_break,
+ half_width_open = stretch_break,
+ half_width_close = nobreak_stretch,
+ full_width_open = stretch_break,
+ full_width_close = nobreak_stretch,
+ full_width_punct = nobreak_stretch,
+ hyphen = nobreak_stretch,
+ non_starter = nobreak_stretch,
+ other = stretch_break,
+}
+
+local injectors = { -- [previous] [current]
+ jamo_final = korean_1,
+ korean = korean_1,
+ chinese = korean_1,
+ hiragana = korean_1,
+ katakana = korean_1,
+ hyphen = korean_2,
+ start = korean_0,
+ other = korean_2,
+ non_starter = korean_3,
+ full_width_open = korean_4,
+ half_width_open = korean_4,
+ full_width_close = korean_5,
+ full_width_punct = korean_5,
+ half_width_close = korean_5,
+}
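+
+-- Reading the table: the outer key is the category of the previous glyph and
+-- the inner key that of the current one. For instance (hypothetical input), a
+-- hangul syllable followed by a full width open parenthesis resolves to
+-- injectors["korean"]["full_width_open"], i.e. stretch_break, so stretchable
+-- glue is injected and a line break becomes possible before the parenthesis.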
+
+local function process(head,first,last)
+ if first ~= last then
+ local lastfont, previous, last = nil, "start", nil
+ while true do
+ local upcoming, id = first.next, first.id
+ if id == glyph_code then
+ local a = first[a_scriptstatus]
+ local current = numbertocategory[a]
+ local action = injectors[previous]
+ if action then
+ action = action[current]
+ if action then
+ local font = first.font
+ if font ~= lastfont then
+ lastfont = font
+ set_parameters(font,numbertodataset[first[a_scriptinjection]])
+ end
+ action(head,first)
+ end
+ end
+ previous = current
+ else -- glue
+ local p, n = first.prev, upcoming
+ if p and n then
+ local pid, nid = p.id, n.id
+ if pid == glyph_code and nid == glyph_code then
+ local pa, na = p[a_scriptstatus], n[a_scriptstatus]
+ local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na]
+ if not pcjk or not ncjk
+ or pcjk == "korean" or ncjk == "korean"
+ or pcjk == "other" or ncjk == "other"
+ or pcjk == "jamo_final" or ncjk == "jamo_initial" then
+ previous = "start"
+ else -- if head ~= first then
+ remove_node(head,first,true)
+ previous = pcjk
+ -- else
+ -- previous = pcjk
+ end
+ else
+ previous = "start"
+ end
+ else
+ previous = "start"
+ end
+ end
+ if upcoming == last then -- was stop
+ break
+ else
+ first = upcoming
+ end
+ end
+ end
+end
+
+scripts.installmethod {
+ name = "hangul",
+ injector = process,
+ datasets = { -- todo: metatables
+ default = {
+ inter_char_shrink_factor = 0.50, -- of quad
+ inter_char_stretch_factor = 0.50, -- of quad
+ inter_char_half_shrink_factor = 0.50, -- of quad
+ inter_char_half_stretch_factor = 0.50, -- of quad
+ inter_char_quarter_shrink_factor = 0.50, -- of quad
+ inter_char_quarter_stretch_factor = 0.50, -- of quad
+ inter_char_hangul_penalty = 50,
+ },
+ },
+}
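+
+-- A sketch of how an additional method with looser spacing could be installed
+-- next to this one; the name and the values are invented and not part of the
+-- distribution:
+--
+-- scripts.installmethod {
+--     name     = "hangul-loose",
+--     injector = process,
+--     datasets = {
+--         default = {
+--             inter_char_shrink_factor          = 0.75,
+--             inter_char_stretch_factor         = 0.75,
+--             inter_char_half_shrink_factor     = 0.75,
+--             inter_char_half_stretch_factor    = 0.75,
+--             inter_char_quarter_shrink_factor  = 0.375,
+--             inter_char_quarter_stretch_factor = 0.375,
+--             inter_char_hangul_penalty         = 50,
+--         },
+--     },
+-- }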
+
+-- Chinese: hanzi
+
+local chinese_0 = {
+}
+
+local chinese_1 = {
+ jamo_initial = korean_break,
+ korean = korean_break,
+ chinese = stretch_break,
+ hiragana = stretch_break,
+ katakana = stretch_break,
+ half_width_open = nobreak_stretch_break_autoshrink,
+ half_width_close = nobreak_stretch,
+ full_width_open = nobreak_stretch_break_shrink,
+ full_width_close = nobreak_stretch,
+ full_width_punct = nobreak_stretch,
+-- hyphen = nil,
+ non_starter = nobreak_stretch,
+ other = stretch_break,
+}
+
+local chinese_2 = {
+ jamo_initial = korean_break,
+ korean = stretch_break,
+ chinese = stretch_break,
+ hiragana = stretch_break,
+ katakana = stretch_break,
+ half_width_open = nobreak_stretch_break_autoshrink,
+ half_width_close = nobreak_stretch,
+ full_width_open = nobreak_stretch_break_shrink,
+ full_width_close = nobreak_stretch,
+ full_width_punct = nobreak_stretch,
+ hyphen = nobreak_stretch,
+ non_starter = nobreak_stretch,
+ other = stretch_break,
+}
+
+local chinese_3 = {
+ jamo_initial = korean_break,
+ korean = stretch_break,
+ chinese = stretch_break,
+ hiragana = stretch_break,
+ katakana = stretch_break,
+ half_width_open = nobreak_stretch_break_autoshrink,
+ half_width_close = nobreak_stretch,
+ full_width_open = nobreak_stretch_break_shrink,
+ full_width_close = nobreak_stretch,
+ full_width_punct = nobreak_stretch,
+-- hyphen = nil,
+ non_starter = nobreak_stretch,
+ other = stretch_break,
+}
+
+local chinese_4 = {
+-- jamo_initial = nil,
+-- korean = nil,
+-- chinese = nil,
+-- hiragana = nil,
+-- katakana = nil,
+ half_width_open = nobreak_autoshrink,
+ half_width_close = nil,
+ full_width_open = nobreak_shrink,
+ full_width_close = nobreak,
+ full_width_punct = nobreak,
+-- hyphen = nil,
+ non_starter = nobreak,
+-- other = nil,
+}
+
+local chinese_5 = {
+ jamo_initial = stretch_break,
+ korean = stretch_break,
+ chinese = stretch_break,
+ hiragana = stretch_break,
+ katakana = stretch_break,
+ half_width_open = nobreak_stretch_break_autoshrink,
+ half_width_close = nobreak_stretch,
+ full_width_open = nobreak_stretch_break_shrink,
+ full_width_close = nobreak_stretch,
+ full_width_punct = nobreak_stretch,
+-- hyphen = nil,
+ non_starter = nobreak_stretch,
+ other = stretch_break,
+}
+
+local chinese_6 = {
+ jamo_initial = nobreak_stretch,
+ korean = nobreak_stretch,
+ chinese = nobreak_stretch,
+ hiragana = nobreak_stretch,
+ katakana = nobreak_stretch,
+ half_width_open = nobreak_stretch_break_autoshrink,
+ half_width_close = nobreak_stretch,
+ full_width_open = nobreak_stretch_break_shrink,
+ full_width_close = nobreak_stretch,
+ full_width_punct = nobreak_stretch,
+ hyphen = nobreak_stretch,
+ non_starter = nobreak_stretch,
+ other = nobreak_stretch,
+}
+
+local chinese_7 = {
+ jamo_initial = nobreak_shrink_break_stretch,
+ korean = nobreak_shrink_break_stretch,
+ chinese = stretch_break, -- nobreak_shrink_break_stretch,
+ hiragana = stretch_break, -- nobreak_shrink_break_stretch,
+ katakana = stretch_break, -- nobreak_shrink_break_stretch,
+ half_width_open = nobreak_shrink_break_stretch_nobreak_autoshrink,
+ half_width_close = nobreak_shrink_nobreak_stretch,
+ full_width_open = nobreak_shrink_break_stretch_nobreak_shrink,
+ full_width_close = nobreak_shrink_nobreak_stretch,
+ full_width_punct = nobreak_shrink_nobreak_stretch,
+ hyphen = nobreak_shrink_break_stretch,
+ non_starter = nobreak_shrink_break_stretch,
+ other = nobreak_shrink_break_stretch,
+}
+
+local chinese_8 = {
+ jamo_initial = nobreak_shrink_break_stretch,
+ korean = nobreak_autoshrink_break_stretch,
+ chinese = stretch_break, -- nobreak_autoshrink_break_stretch,
+ hiragana = stretch_break, -- nobreak_autoshrink_break_stretch,
+ katakana = stretch_break, -- nobreak_autoshrink_break_stretch,
+ half_width_open = nobreak_autoshrink_break_stretch_nobreak_autoshrink,
+ half_width_close = nobreak_autoshrink_nobreak_stretch,
+ full_width_open = nobreak_autoshrink_break_stretch_nobreak_shrink,
+ full_width_close = nobreak_autoshrink_nobreak_stretch,
+ full_width_punct = nobreak_autoshrink_nobreak_stretch,
+ hyphen = nobreak_autoshrink_break_stretch,
+ non_starter = nobreak_autoshrink_break_stretch,
+ other = nobreak_autoshrink_break_stretch,
+}
+
+local injectors = { -- [previous] [current]
+ jamo_final = chinese_1,
+ korean = chinese_1,
+ chinese = chinese_2,
+ hiragana = chinese_2,
+ katakana = chinese_2,
+ hyphen = chinese_3,
+ start = chinese_4,
+ other = chinese_5,
+ non_starter = chinese_5,
+ full_width_open = chinese_6,
+ half_width_open = chinese_6,
+ full_width_close = chinese_7,
+ full_width_punct = chinese_7,
+ half_width_close = chinese_8,
+}
+
+local function process(head,first,last)
+ if first ~= last then
+ local lastfont, previous, last = nil, "start", nil
+ while true do
+ local upcoming, id = first.next, first.id
+ if id == glyph_code then
+ local a = first[a_scriptstatus]
+ local current = numbertocategory[a]
+ local action = injectors[previous]
+ if action then
+ action = action[current]
+ if action then
+ local font = first.font
+ if font ~= lastfont then
+ lastfont = font
+ set_parameters(font,numbertodataset[first[a_scriptinjection]])
+ end
+ action(head,first)
+ end
+ end
+ previous = current
+ else -- glue
+ local p, n = first.prev, upcoming
+ if p and n then
+ local pid, nid = p.id, n.id
+ if pid == glyph_code and nid == glyph_code then
+ local pa, na = p[a_scriptstatus], n[a_scriptstatus]
+ local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na]
+ if not pcjk or not ncjk
+ or pcjk == "korean" or ncjk == "korean"
+ or pcjk == "other" or ncjk == "other"
+ or pcjk == "jamo_final" or ncjk == "jamo_initial"
+ or pcjk == "half_width_close" or ncjk == "half_width_open" then -- extra compared to korean
+ previous = "start"
+ else -- if head ~= first then
+ remove_node(head,first,true)
+ previous = pcjk
+ -- else
+ -- previous = pcjk
+ end
+ else
+ previous = "start"
+ end
+ else
+ previous = "start"
+ end
+ end
+ if upcoming == last then -- was stop
+ break
+ else
+ first = upcoming
+ end
+ end
+ end
+end
+
+scripts.installmethod {
+ name = "hanzi",
+ injector = process,
+ datasets = {
+ default = {
+ inter_char_shrink_factor = 0.50, -- of quad
+ inter_char_stretch_factor = 0.50, -- of quad
+ inter_char_half_shrink_factor = 0.50, -- of quad
+ inter_char_half_stretch_factor = 0.50, -- of quad
+ inter_char_quarter_shrink_factor = 0.50, -- of quad
+ inter_char_quarter_stretch_factor = 0.50, -- of quad
+ inter_char_hangul_penalty = 50,
+ },
+ },
+}
+
+-- Japanese: ideographic, hiragana, katakana, romaji / jis
+
+local japanese_0 = {
+}
+
+local japanese_1 = {
+ jamo_initial = korean_break,
+ korean = korean_break,
+ chinese = stretch_break,
+ hiragana = stretch_break,
+ katakana = stretch_break,
+ half_width_open = nobreak_stretch_break_autoshrink,
+ half_width_close = nobreak_stretch,
+ full_width_open = nobreak_stretch_break_shrink,
+ full_width_close = nobreak_stretch,
+ full_width_punct = nobreak_stretch,
+-- hyphen = nil,
+ non_starter = nobreak_stretch,
+ other = stretch_break,
+}
+
+local japanese_2 = {
+ jamo_initial = korean_break,
+ korean = stretch_break,
+ chinese = stretch_break,
+ hiragana = stretch_break,
+ katakana = stretch_break,
+ half_width_open = nobreak_stretch_break_autoshrink,
+ half_width_close = nobreak_stretch,
+ full_width_open = nobreak_stretch_break_shrink,
+ full_width_close = nobreak_stretch,
+ full_width_punct = japanese_before_full_width_punct, -- nobreak_stretch,
+ hyphen = nobreak_stretch,
+ non_starter = nobreak_stretch,
+ other = stretch_break,
+}
+
+local japanese_3 = {
+ jamo_initial = korean_break,
+ korean = stretch_break,
+ chinese = stretch_break,
+ hiragana = stretch_break,
+ katakana = stretch_break,
+ half_width_open = nobreak_stretch_break_autoshrink,
+ half_width_close = nobreak_stretch,
+ full_width_open = nobreak_stretch_break_shrink,
+ full_width_close = nobreak_stretch,
+ full_width_punct = nobreak_stretch,
+-- hyphen = nil,
+ non_starter = nobreak_stretch,
+ other = stretch_break,
+}
+
+local japanese_4 = {
+-- jamo_initial = nil,
+-- korean = nil,
+-- chinese = nil,
+-- hiragana = nil,
+-- katakana = nil,
+ half_width_open = nobreak_autoshrink,
+ half_width_close = nil,
+ full_width_open = nobreak_shrink,
+ full_width_close = nobreak,
+ full_width_punct = nobreak,
+-- hyphen = nil,
+ non_starter = nobreak,
+-- other = nil,
+}
+
+local japanese_5 = {
+ jamo_initial = stretch_break,
+ korean = stretch_break,
+ chinese = stretch_break,
+ hiragana = stretch_break,
+ katakana = stretch_break,
+ half_width_open = nobreak_stretch_break_autoshrink,
+ half_width_close = nobreak_stretch,
+ full_width_open = nobreak_stretch_break_shrink,
+ full_width_close = nobreak_stretch,
+ full_width_punct = nobreak_stretch,
+-- hyphen = nil,
+ non_starter = nobreak_stretch,
+ other = stretch_break,
+}
+
+local japanese_6 = {
+ jamo_initial = nobreak_stretch,
+ korean = nobreak_stretch,
+ chinese = nobreak_stretch,
+ hiragana = nobreak_stretch,
+ katakana = nobreak_stretch,
+ half_width_open = nobreak_stretch_break_autoshrink,
+ half_width_close = nobreak_stretch,
+ full_width_open = nobreak_stretch_break_shrink,
+ full_width_close = nobreak_stretch,
+ full_width_punct = nobreak_stretch,
+ hyphen = nobreak_stretch,
+ non_starter = nobreak_stretch,
+ other = nobreak_stretch,
+}
+
+local japanese_7 = {
+ jamo_initial = nobreak_shrink_break_stretch,
+ korean = nobreak_shrink_break_stretch,
+ chinese = japanese_after_full_width_punct, -- stretch_break
+ hiragana = japanese_after_full_width_punct, -- stretch_break
+ katakana = japanese_after_full_width_punct, -- stretch_break
+ half_width_open = nobreak_shrink_break_stretch_nobreak_autoshrink,
+ half_width_close = nobreak_shrink_nobreak_stretch,
+ full_width_open = japanese_between_full_close_open, -- !!
+ full_width_close = japanese_between_full_close_full_close, -- nobreak_shrink_nobreak_stretch,
+ full_width_punct = nobreak_shrink_nobreak_stretch,
+ hyphen = nobreak_shrink_break_stretch,
+ non_starter = nobreak_shrink_break_stretch,
+ other = nobreak_shrink_break_stretch,
+}
+
+local japanese_8 = {
+ jamo_initial = nobreak_shrink_break_stretch,
+ korean = nobreak_autoshrink_break_stretch,
+ chinese = stretch_break,
+ hiragana = stretch_break,
+ katakana = stretch_break,
+ half_width_open = nobreak_autoshrink_break_stretch_nobreak_autoshrink,
+ half_width_close = nobreak_autoshrink_nobreak_stretch,
+ full_width_open = nobreak_autoshrink_break_stretch_nobreak_shrink,
+ full_width_close = nobreak_autoshrink_nobreak_stretch,
+ full_width_punct = nobreak_autoshrink_nobreak_stretch,
+ hyphen = nobreak_autoshrink_break_stretch,
+ non_starter = nobreak_autoshrink_break_stretch,
+ other = nobreak_autoshrink_break_stretch,
+}
+
+local injectors = { -- [previous] [current]
+ jamo_final = japanese_1,
+ korean = japanese_1,
+ chinese = japanese_2,
+ hiragana = japanese_2,
+ katakana = japanese_2,
+ hyphen = japanese_3,
+ start = japanese_4,
+ other = japanese_5,
+ non_starter = japanese_5,
+ full_width_open = japanese_6,
+ half_width_open = japanese_6,
+ full_width_close = japanese_7,
+ full_width_punct = japanese_7,
+ half_width_close = japanese_8,
+}
+
+local function process(head,first,last)
+ if first ~= last then
+ local lastfont, previous, last = nil, "start", nil
+ while true do
+ local upcoming, id = first.next, first.id
+ if id == glyph_code then
+ local a = first[a_scriptstatus]
+ local current = numbertocategory[a]
+ local action = injectors[previous]
+ if action then
+ action = action[current]
+ if action then
+ local font = first.font
+ if font ~= lastfont then
+ lastfont = font
+ set_parameters(font,numbertodataset[first[a_scriptinjection]])
+ end
+ action(head,first)
+ end
+ end
+ previous = current
+
+-- elseif id == math_code then
+-- upcoming = end_of_math(current).next
+-- previous = "start"
+
+ else -- glue
+ local p, n = first.prev, upcoming -- we should remember prev
+ if p and n then
+ local pid, nid = p.id, n.id
+ if pid == glyph_code and nid == glyph_code then
+ local pa, na = p[a_scriptstatus], n[a_scriptstatus]
+ local pcjk, ncjk = pa and numbertocategory[pa], na and numbertocategory[na]
+ if not pcjk or not ncjk
+ or pcjk == "korean" or ncjk == "korean"
+ or pcjk == "other" or ncjk == "other"
+ or pcjk == "jamo_final" or ncjk == "jamo_initial"
+ or pcjk == "half_width_close" or ncjk == "half_width_open" then -- extra compared to korean
+ previous = "start"
+ else -- if head ~= first then
+if id == glue_code and first.subtype == userskip_code then -- also scriptstatus check?
+ -- for the moment no distinction possible between space and userskip
+ local w = first.spec.width
+ local s = spacedata[p.font]
+ if w == s then -- could be option
+ if trace_details then
+ trace_detail_between(p,n,"space removed")
+ end
+ remove_node(head,first,true)
+ end
+end
+ previous = pcjk
+ -- else
+ -- previous = pcjk
+ end
+ else
+ previous = "start"
+ end
+ else
+ previous = "start"
+ end
+ end
+ if upcoming == last then -- was stop
+ break
+ else
+ first = upcoming
+ end
+ end
+ end
+end
+
+scripts.installmethod {
+ name = "nihongo", -- what name to use?
+ injector = process,
+ datasets = {
+ default = {
+ inter_char_shrink_factor = 0.50, -- of quad
+ inter_char_stretch_factor = 0.50, -- of quad
+ inter_char_half_shrink_factor = 0.50, -- of quad
+ inter_char_half_stretch_factor = 0.50, -- of quad
+ inter_char_quarter_shrink_factor = 0.25, -- of quad
+ inter_char_quarter_stretch_factor = 0.25, -- of quad
+ inter_char_hangul_penalty = 50,
+ },
+ },
+}
+
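
The hanzi and nihongo handlers above share one shape: an injectors table indexed as [previous][current], and a process() walk over the glyph run that looks up the action for each pair of neighbouring categories and lets it insert penalties and glue. The following sketch is plain, standalone Lua with an invented category sequence and print calls standing in for the real node insertion; it only illustrates the dispatch.

-- illustrative only: categories and actions are placeholders
local actions = {
    start = {
        chinese = function() print("start of run: nothing inserted") end,
    },
    chinese = {
        chinese         = function() print("inter-character stretch") end,
        full_width_open = function() print("no break, shrink before open") end,
    },
    full_width_open = {
        chinese = function() print("no break after open") end,
    },
}

local function inject(categories)
    local previous = "start"
    for i = 1, #categories do
        local current = categories[i]
        local row     = actions[previous]
        local action  = row and row[current]
        if action then
            action() -- the real handlers insert penalty and glue nodes here
        end
        previous = current
    end
end

inject { "chinese", "chinese", "full_width_open", "chinese" }
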
diff --git a/tex/context/base/scrp-eth.lua b/tex/context/base/scrp-eth.lua
index 597afa1b5..20b00a0ec 100644
--- a/tex/context/base/scrp-eth.lua
+++ b/tex/context/base/scrp-eth.lua
@@ -1,150 +1,150 @@
-if not modules then modules = { } end modules ['scrp-eth'] = {
- version = 1.001,
- comment = "companion to scrp-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- at some point I will review the script code but for the moment we
--- do it this way; so no space settings like with cjk yet
-
-local insert_node_before = node.insert_before
-
-local nodepool = nodes.pool
-
-local new_glue = nodepool.glue
-local new_penalty = nodepool.penalty
-
-local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
-
-local a_scriptstatus = attributes.private('scriptstatus')
-local a_scriptinjection = attributes.private('scriptinjection')
-
-local categorytonumber = scripts.categorytonumber
-local numbertocategory = scripts.numbertocategory
-local hash = scripts.hash
-local numbertodataset = scripts.numbertodataset
-
-local fonthashes = fonts.hashes
-local parameters = fonthashes.parameters
-
-local space, stretch, shrink, lastfont
-
-local inter_character_space_factor = 1
-local inter_character_stretch_factor = 1
-local inter_character_shrink_factor = 1
-
-local function space_glue(current)
- local data = numbertodataset[current[a_scriptinjection]]
- if data then
- inter_character_space_factor = data.inter_character_space_factor or 1
- inter_character_stretch_factor = data.inter_character_stretch_factor or 1
- inter_character_shrink_factor = data.inter_character_shrink_factor or 1
- end
- local font = current.font
- if lastfont ~= font then
- local pf = parameters[font]
- space = pf.space
- stretch = pf.space_stretch
- shrink = pf.space_shrink
- lastfont = font
- end
- return new_glue(
- inter_character_space_factor * space,
- inter_character_stretch_factor * stretch,
- inter_character_shrink_factor * shrink
- )
-end
-
-local function insert_space(head,current)
- insert_node_before(head,current,space_glue(current))
-end
-
-local function insert_zerowidthspace(head,current)
- insert_node_before(head,current,new_glue(0))
-end
-
-local function insert_nobreakspace(head,current)
- insert_node_before(head,current,new_penalty(10000))
- insert_node_before(head,current,space_glue(current))
-end
-
--- syllable [zerowidthspace] syllable
--- syllable [zerowidthspace] word
--- syllable [zerowidthspace] sentence
--- word [nobreakspace] syllable
--- word [space] word
--- word [space] sentence
--- sentence [nobreakspace] syllable
--- sentence [space] word
--- sentence [space] sentence
-
-local injectors = { -- [previous] [current]
- ethiopic_syllable = {
- ethiopic_syllable = insert_zerowidthspace,
- ethiopic_word = insert_nobreakspace,
- ethiopic_sentence = insert_nobreakspace,
- },
- ethiopic_word = {
- ethiopic_syllable = insert_space,
- ethiopic_word = insert_space,
- ethiopic_sentence = insert_space,
- },
- ethiopic_sentence = {
- ethiopic_syllable = insert_space,
- ethiopic_word = insert_space,
- ethiopic_sentence = insert_space,
- },
-}
-
-local function process(head,first,last)
- if first ~= last then
- local injector = false
- local current = first
- while current do
- local id = current.id
- if id == glyph_code then
- local scriptstatus = current[a_scriptstatus]
- local category = numbertocategory[scriptstatus]
- if injector then
- local action = injector[category]
- if action then
- action(head,current)
- end
- end
- injector = injectors[category]
- else
- -- nothing yet
- end
- if current == last then
- break
- else
- current = current.next
- end
- end
- end
-end
-
-scripts.installmethod {
- name = "ethiopic",
- injector = process,
- datasets = {
- default = {
- inter_character_space_factor = 1,
- inter_character_stretch_factor = 1,
- inter_character_shrink_factor = 1,
- },
- half = {
- inter_character_space_factor = 0.5,
- inter_character_stretch_factor = 0.5,
- inter_character_shrink_factor = 0.5,
- },
- quarter = {
- inter_character_space_factor = 0.25,
- inter_character_stretch_factor = 0.25,
- inter_character_shrink_factor = 0.25,
- },
- },
-}
+if not modules then modules = { } end modules ['scrp-eth'] = {
+ version = 1.001,
+ comment = "companion to scrp-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- at some point I will review the script code but for the moment we
+-- do it this way; so no space settings like with cjk yet
+
+local insert_node_before = node.insert_before
+
+local nodepool = nodes.pool
+
+local new_glue = nodepool.glue
+local new_penalty = nodepool.penalty
+
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
+
+local a_scriptstatus = attributes.private('scriptstatus')
+local a_scriptinjection = attributes.private('scriptinjection')
+
+local categorytonumber = scripts.categorytonumber
+local numbertocategory = scripts.numbertocategory
+local hash = scripts.hash
+local numbertodataset = scripts.numbertodataset
+
+local fonthashes = fonts.hashes
+local parameters = fonthashes.parameters
+
+local space, stretch, shrink, lastfont
+
+local inter_character_space_factor = 1
+local inter_character_stretch_factor = 1
+local inter_character_shrink_factor = 1
+
+local function space_glue(current)
+ local data = numbertodataset[current[a_scriptinjection]]
+ if data then
+ inter_character_space_factor = data.inter_character_space_factor or 1
+ inter_character_stretch_factor = data.inter_character_stretch_factor or 1
+ inter_character_shrink_factor = data.inter_character_shrink_factor or 1
+ end
+ local font = current.font
+ if lastfont ~= font then
+ local pf = parameters[font]
+ space = pf.space
+ stretch = pf.space_stretch
+ shrink = pf.space_shrink
+ lastfont = font
+ end
+ return new_glue(
+ inter_character_space_factor * space,
+ inter_character_stretch_factor * stretch,
+ inter_character_shrink_factor * shrink
+ )
+end
+
+local function insert_space(head,current)
+ insert_node_before(head,current,space_glue(current))
+end
+
+local function insert_zerowidthspace(head,current)
+ insert_node_before(head,current,new_glue(0))
+end
+
+local function insert_nobreakspace(head,current)
+ insert_node_before(head,current,new_penalty(10000))
+ insert_node_before(head,current,space_glue(current))
+end
+
+-- syllable [zerowidthspace] syllable
+-- syllable [zerowidthspace] word
+-- syllable [zerowidthspace] sentence
+-- word [nobreakspace] syllable
+-- word [space] word
+-- word [space] sentence
+-- sentence [nobreakspace] syllable
+-- sentence [space] word
+-- sentence [space] sentence
+
+local injectors = { -- [previous] [current]
+ ethiopic_syllable = {
+ ethiopic_syllable = insert_zerowidthspace,
+ ethiopic_word = insert_nobreakspace,
+ ethiopic_sentence = insert_nobreakspace,
+ },
+ ethiopic_word = {
+ ethiopic_syllable = insert_space,
+ ethiopic_word = insert_space,
+ ethiopic_sentence = insert_space,
+ },
+ ethiopic_sentence = {
+ ethiopic_syllable = insert_space,
+ ethiopic_word = insert_space,
+ ethiopic_sentence = insert_space,
+ },
+}
+
+local function process(head,first,last)
+ if first ~= last then
+ local injector = false
+ local current = first
+ while current do
+ local id = current.id
+ if id == glyph_code then
+ local scriptstatus = current[a_scriptstatus]
+ local category = numbertocategory[scriptstatus]
+ if injector then
+ local action = injector[category]
+ if action then
+ action(head,current)
+ end
+ end
+ injector = injectors[category]
+ else
+ -- nothing yet
+ end
+ if current == last then
+ break
+ else
+ current = current.next
+ end
+ end
+ end
+end
+
+scripts.installmethod {
+ name = "ethiopic",
+ injector = process,
+ datasets = {
+ default = {
+ inter_character_space_factor = 1,
+ inter_character_stretch_factor = 1,
+ inter_character_shrink_factor = 1,
+ },
+ half = {
+ inter_character_space_factor = 0.5,
+ inter_character_stretch_factor = 0.5,
+ inter_character_shrink_factor = 0.5,
+ },
+ quarter = {
+ inter_character_space_factor = 0.25,
+ inter_character_stretch_factor = 0.25,
+ inter_character_shrink_factor = 0.25,
+ },
+ },
+}
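
In scrp-eth.lua the central helper is space_glue: it takes the current font's space, space_stretch and space_shrink parameters and scales them by the factors of the selected dataset (default, half or quarter). A rough arithmetic sketch with made-up font parameters (in scaled points) and the half preset from the installmethod call above:

-- hypothetical font parameters, in scaled points
local pf = { space = 218000, space_stretch = 109000, space_shrink = 72000 }
local dataset = { -- the "half" preset
    inter_character_space_factor   = 0.5,
    inter_character_stretch_factor = 0.5,
    inter_character_shrink_factor  = 0.5,
}
local width   = dataset.inter_character_space_factor   * pf.space
local stretch = dataset.inter_character_stretch_factor * pf.space_stretch
local shrink  = dataset.inter_character_shrink_factor  * pf.space_shrink
print(width, stretch, shrink) --> 109000  54500  36000
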
diff --git a/tex/context/base/scrp-ini.lua b/tex/context/base/scrp-ini.lua
index 18f86475f..fbe673db9 100644
--- a/tex/context/base/scrp-ini.lua
+++ b/tex/context/base/scrp-ini.lua
@@ -1,634 +1,634 @@
-if not modules then modules = { } end modules ['scrp-ini'] = {
- version = 1.001,
- comment = "companion to scrp-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- We need to rewrite this a bit ... rather old code ... will be done when japanese
--- is finished.
-
-local attributes, nodes, node = attributes, nodes, node
-
-local trace_analyzing = false trackers.register("scripts.analyzing", function(v) trace_analyzing = v end)
-local trace_injections = false trackers.register("scripts.injections", function(v) trace_injections = v end)
-
-local report_preprocessing = logs.reporter("scripts","preprocessing")
-
-local utfchar = utf.char
-
-local first_glyph = node.first_glyph or node.first_character
-local traverse_id = node.traverse_id
-
-local texsetattribute = tex.setattribute
-
-local nodecodes = nodes.nodecodes
-local unsetvalue = attributes.unsetvalue
-
-local glyph_code = nodecodes.glyph
-local glue_code = nodecodes.glue
-
-local a_scriptinjection = attributes.private('scriptinjection')
-local a_scriptsplitting = attributes.private('scriptsplitting')
-local a_scriptstatus = attributes.private('scriptstatus')
-
-local fontdata = fonts.hashes.identifiers
-local allocate = utilities.storage.allocate
-local setnodecolor = nodes.tracers.colors.set
-local setmetatableindex = table.setmetatableindex
-
-local enableaction = nodes.tasks.enableaction
-local disableaction = nodes.tasks.disableaction
-
-scripts = scripts or { }
-local scripts = scripts
-
-scripts.hash = scripts.hash or { }
-local hash = scripts.hash
-
-local handlers = allocate()
-scripts.handlers = handlers
-
-local injectors = allocate()
-scripts.injectors = injectors
-
-local splitters = allocate()
-scripts.splitters = splitters
-
-local hash = { -- we could put these presets in char-def.lua
- --
- -- half width opening parenthesis
- --
- [0x0028] = "half_width_open",
- [0x005B] = "half_width_open",
- [0x007B] = "half_width_open",
- [0x2018] = "half_width_open", -- ‘
- [0x201C] = "half_width_open", -- “
- --
- -- full width opening parenthesis
- --
- [0x3008] = "full_width_open", -- 〈 Left book quote
- [0x300A] = "full_width_open", -- 《 Left double book quote
- [0x300C] = "full_width_open", -- 「 left quote
- [0x300E] = "full_width_open", -- 『 left double quote
- [0x3010] = "full_width_open", -- 【 left double book quote
- [0x3014] = "full_width_open", -- 〔 left book quote
- [0x3016] = "full_width_open", -- 〖 left double book quote
- [0x3018] = "full_width_open", -- left tortoise bracket
- [0x301A] = "full_width_open", -- left square bracket
- [0x301D] = "full_width_open", -- reverse double prime qm
- [0xFF08] = "full_width_open", -- （ left parenthesis
- [0xFF3B] = "full_width_open", -- ［ left square brackets
- [0xFF5B] = "full_width_open", -- ｛ left curve bracket
- --
- -- half width closing parenthesis
- --
- [0x0029] = "half_width_close",
- [0x005D] = "half_width_close",
- [0x007D] = "half_width_close",
- [0x2019] = "half_width_close", -- ’ right quote, right
- [0x201D] = "half_width_close", -- ” right double quote
- --
- -- full width closing parenthesis
- --
- [0x3009] = "full_width_close", -- 〉 book quote
- [0x300B] = "full_width_close", -- 》 double book quote
- [0x300D] = "full_width_close", -- 」 right quote, right
- [0x300F] = "full_width_close", -- 』 right double quote
- [0x3011] = "full_width_close", -- 】 right double book quote
- [0x3015] = "full_width_close", -- 〕 right book quote
- [0x3017] = "full_width_close", -- 〗 right double book quote
- [0x3019] = "full_width_close", -- right tortoise bracket
- [0x301B] = "full_width_close", -- right square bracket
- [0x301E] = "full_width_close", -- double prime qm
- [0x301F] = "full_width_close", -- low double prime qm
- [0xFF09] = "full_width_close", -- ） right parenthesis
- [0xFF3D] = "full_width_close", -- ］ right square brackets
- [0xFF5D] = "full_width_close", -- ｝ right curve brackets
- --
- [0xFF62] = "half_width_open", -- left corner bracket
- [0xFF63] = "half_width_close", -- right corner bracket
- --
- -- vertical opening vertical
- --
- -- 0xFE35, 0xFE37, 0xFE39, 0xFE3B, 0xFE3D, 0xFE3F, 0xFE41, 0xFE43, 0xFE47,
- --
- -- vertical closing
- --
- -- 0xFE36, 0xFE38, 0xFE3A, 0xFE3C, 0xFE3E, 0xFE40, 0xFE42, 0xFE44, 0xFE48,
- --
- -- half width opening punctuation
- --
- --
- --
- -- full width opening punctuation
- --
- -- 0x2236, -- ∶
- -- 0xFF0C, -- ,
- --
- -- half width closing punctuation_hw
- --
- [0x0021] = "half_width_close", -- !
- [0x002C] = "half_width_close", -- ,
- [0x002E] = "half_width_close", -- .
- [0x003A] = "half_width_close", -- :
- [0x003B] = "half_width_close", -- ;
- [0x003F] = "half_width_close", -- ?
- [0xFF61] = "half_width_close", -- hw full stop
- --
- -- full width closing punctuation
- --
- [0x3001] = "full_width_close", -- 、
- [0x3002] = "full_width_close", -- 。
- [0xFF0C] = "full_width_close", -- ，
- [0xFF0E] = "full_width_close", -- ．
- --
- -- depends on font
- --
- [0xFF01] = "full_width_close", -- ！
- [0xFF1F] = "full_width_close", -- ？
- --
- [0xFF1A] = "full_width_punct", -- ：
- [0xFF1B] = "full_width_punct", -- ；
- --
- -- non starter
- --
- [0x3005] = "non_starter", [0x3041] = "non_starter", [0x3043] = "non_starter", [0x3045] = "non_starter", [0x3047] = "non_starter",
- [0x3049] = "non_starter", [0x3063] = "non_starter", [0x3083] = "non_starter", [0x3085] = "non_starter", [0x3087] = "non_starter",
- [0x308E] = "non_starter", [0x3095] = "non_starter", [0x3096] = "non_starter", [0x309B] = "non_starter", [0x309C] = "non_starter",
- [0x309D] = "non_starter", [0x309E] = "non_starter", [0x30A0] = "non_starter", [0x30A1] = "non_starter", [0x30A3] = "non_starter",
- [0x30A5] = "non_starter", [0x30A7] = "non_starter", [0x30A9] = "non_starter", [0x30C3] = "non_starter", [0x30E3] = "non_starter",
- [0x30E5] = "non_starter", [0x30E7] = "non_starter", [0x30EE] = "non_starter", [0x30F5] = "non_starter", [0x30F6] = "non_starter",
- [0x30FC] = "non_starter", [0x30FD] = "non_starter", [0x30FE] = "non_starter", [0x31F0] = "non_starter", [0x31F1] = "non_starter",
- [0x30F2] = "non_starter", [0x30F3] = "non_starter", [0x30F4] = "non_starter", [0x31F5] = "non_starter", [0x31F6] = "non_starter",
- [0x30F7] = "non_starter", [0x30F8] = "non_starter", [0x30F9] = "non_starter", [0x31FA] = "non_starter", [0x31FB] = "non_starter",
- [0x30FC] = "non_starter", [0x30FD] = "non_starter", [0x30FE] = "non_starter", [0x31FF] = "non_starter",
- --
- -- hyphenation
- --
- [0x2026] = "hyphen", -- … ellipsis
- [0x2014] = "hyphen", -- — hyphen
- --
- [0x1361] = "ethiopic_word",
- [0x1362] = "ethiopic_sentence",
- --
-}
-
-local function provide(t,k)
- local v
- if not tonumber(k) then v = false
- elseif (k >= 0x03040 and k <= 0x030FF)
- or (k >= 0x031F0 and k <= 0x031FF)
- or (k >= 0x032D0 and k <= 0x032FE)
- or (k >= 0x0FF00 and k <= 0x0FFEF) then v = "katakana"
- elseif (k >= 0x03400 and k <= 0x04DFF)
- or (k >= 0x04E00 and k <= 0x09FFF)
- or (k >= 0x0F900 and k <= 0x0FAFF)
- or (k >= 0x20000 and k <= 0x2A6DF)
- or (k >= 0x2F800 and k <= 0x2FA1F) then v = "chinese"
- elseif (k >= 0x0AC00 and k <= 0x0D7A3) then v = "korean"
- elseif (k >= 0x01100 and k <= 0x0115F) then v = "jamo_initial"
- elseif (k >= 0x01160 and k <= 0x011A7) then v = "jamo_medial"
- elseif (k >= 0x011A8 and k <= 0x011FF) then v = "jamo_final"
- elseif (k >= 0x01200 and k <= 0x0139F) then v = "ethiopic_syllable"
- else v = false
- end
- t[k] = v
- return v
-end
-
-setmetatableindex(hash,provide)
-
-scripts.hash = hash
-
-local numbertodataset = allocate()
-local numbertohandler = allocate()
-
---~ storage.register("scripts/hash", hash, "scripts.hash")
-
-scripts.numbertodataset = numbertodataset
-scripts.numbertohandler = numbertohandler
-
-local defaults = {
- inter_char_shrink_factor = 0,
- inter_char_stretch_factor = 0,
- inter_char_half_shrink_factor = 0,
- inter_char_half_stretch_factor = 0,
- inter_char_quarter_shrink_factor = 0,
- inter_char_quarter_stretch_factor = 0,
- inter_char_hangul_penalty = 0,
-
- inter_word_stretch_factor = 0,
-}
-
-scripts.defaults = defaults -- so we can add more
-
-function scripts.installmethod(handler)
- local name = handler.name
- handlers[name] = handler
- local attributes = { }
- local datasets = handler.datasets
- if not datasets or not datasets.default then
- report_preprocessing("missing (default) dataset in script %a",name)
- datasets.default = { } -- slower but an error anyway
- end
- for k, v in next, datasets do
- setmetatableindex(v,defaults)
- end
- setmetatable(attributes, {
- __index = function(t,k)
- local v = datasets[k] or datasets.default
- local a = unsetvalue
- if v then
- v.name = name -- for tracing
- a = #numbertodataset + 1
- numbertodataset[a] = v
- numbertohandler[a] = handler
- end
- t[k] = a
- return a
- end
- } )
- handler.attributes = attributes
-end
-
-function scripts.installdataset(specification) -- global overload
- local method = specification.method
- local name = specification.name
- local dataset = specification.dataset
- if method and name and dataset then
- local parent = specification.parent or ""
- local handler = handlers[method]
- if handler then
- local datasets = handler.datasets
- if datasets then
- local defaultset = datasets.default
- if defaultset then
- if parent ~= "" then
- local p = datasets[parent]
- if p then
- defaultset = p
- else
- report_preprocessing("dataset, unknown parent %a for method %a",parent,method)
- end
- end
- setmetatable(dataset,defaultset)
- local existing = datasets[name]
- if existing then
- for k, v in next, existing do
- existing[k] = dataset
- end
- else
- datasets[name] = dataset
- end
- else
- report_preprocessing("dataset, no default for method %a",method)
- end
- else
- report_preprocessing("dataset, no datasets for method %a",method)
- end
- else
- report_preprocessing("dataset, no method %a",method)
- end
- else
- report_preprocessing("dataset, invalid specification") -- maybe report table
- end
-end
-
-local injectorenabled = false
-local splitterenabled = false
-
-function scripts.set(name,method,preset)
- local handler = handlers[method]
- if handler then
- if handler.injector then
- if not injectorenabled then
- enableaction("processors","scripts.injectors.handler")
- injectorenabled = true
- end
- texsetattribute(a_scriptinjection,handler.attributes[preset] or unsetvalue)
- end
- if handler.splitter then
- if not splitterenabled then
- enableaction("processors","scripts.splitters.handler")
- splitterenabled = true
- end
- texsetattribute(a_scriptsplitting,handler.attributes[preset] or unsetvalue)
- end
- if handler.initializer then
- handler.initializer(handler)
- handler.initializer = nil
- end
- else
- texsetattribute(a_scriptinjection,unsetvalue)
- texsetattribute(a_scriptsplitting,unsetvalue)
- end
-end
-
-function scripts.reset()
- texsetattribute(a_scriptinjection,unsetvalue)
- texsetattribute(a_scriptsplitting,unsetvalue)
-end
-
--- the following tables will become a proper installer (move to cjk/eth)
---
--- 0=gray 1=red 2=green 3=blue 4=yellow 5=magenta 6=cyan 7=x-yellow 8=x-magenta 9=x-cyan
-
-local scriptcolors = allocate { -- todo: just named colors
- korean = "trace:0",
- chinese = "trace:0",
- katakana = "trace:0",
- hiragana = "trace:0",
- full_width_open = "trace:1",
- full_width_close = "trace:2",
- half_width_open = "trace:3",
- half_width_close = "trace:4",
- full_width_punct = "trace:5",
- hyphen = "trace:5",
- non_starter = "trace:6",
- jamo_initial = "trace:7",
- jamo_medial = "trace:8",
- jamo_final = "trace:9",
- ethiopic_syllable = "trace:1",
- ethiopic_word = "trace:2",
- ethiopic_sentence = "trace:3",
-}
-
-scripts.colors = scriptcolors
-
-local numbertocategory = allocate { -- rather bound to cjk ... will be generalized
- "korean",
- "chinese",
- "katakana",
- "hiragana",
- "full_width_open",
- "full_width_close",
- "half_width_open",
- "half_width_close",
- "full_width_punct",
- "hyphen",
- "non_starter",
- "jamo_initial",
- "jamo_medial",
- "jamo_final",
- "ethiopic_syllable",
- "ethiopic_word",
- "ethiopic_sentence",
-}
-
-local categorytonumber = allocate(table.swapped(numbertocategory)) -- could be one table
-
-scripts.categorytonumber = categorytonumber
-scripts.numbertocategory = numbertocategory
-
-local function colorize(start,stop)
- for n in traverse_id(glyph_code,start) do
- local kind = numbertocategory[n[a_scriptstatus]]
- if kind then
- local ac = scriptcolors[kind]
- if ac then
- setnodecolor(n,ac)
- end
- end
- if n == stop then
- break
- end
- end
-end
-
-local function traced_process(head,first,last,process,a)
- if first ~= last then
- local f, l = first, last
- local name = numbertodataset[a]
- name = name and name.name or "?"
- report_preprocessing("before %s: %s",name,nodes.tosequence(f,l))
- process(head,first,last)
- report_preprocessing("after %s: %s", name,nodes.tosequence(f,l))
- end
-end
-
--- eventually we might end up with more extensive parsing
--- todo: pass t[start..stop] == original
---
--- one of the time consuming functions:
-
--- we can have a fonts.hashes.originals
-
-function scripts.injectors.handler(head)
- local start = first_glyph(head) -- we already have glyphs here (subtype 1)
- if not start then
- return head, false
- else
- local last_a, normal_process, lastfont, originals = nil, nil, nil, nil
- local done, first, last, ok = false, nil, nil, false
- while start do
- local id = start.id
- if id == glyph_code then
- local a = start[a_scriptinjection]
- if a then
- if a ~= last_a then
- if first then
- if ok then
- if trace_analyzing then
- colorize(first,last)
- end
- if trace_injections then
- traced_process(head,first,last,normal_process,last_a)
- else
- normal_process(head,first,last)
- end
- ok, done = false, true
- end
- first, last = nil, nil
- end
- last_a = a
- local handler = numbertohandler[a]
- normal_process = handler.injector
- end
- if normal_process then
- local f = start.font
- if f ~= lastfont then
- local resources = fontdata[f].resources
- if resources then
- originals = resources.originals
- else
- originals = nil -- can't happen
- end
- lastfont = f
- end
- local c = start.char
- if originals then
- c = originals[c] or c
- end
- local h = hash[c]
- if h then
- start[a_scriptstatus] = categorytonumber[h]
- if not first then
- first, last = start, start
- else
- last = start
- end
- -- if cjk == "chinese" or cjk == "korean" then -- we need to prevent too much ( ) processing
- ok = true
- -- end
- elseif first then
- if ok then
- if trace_analyzing then
- colorize(first,last)
- end
- if trace_injections then
- traced_process(head,first,last,normal_process,last_a)
- else
- normal_process(head,first,last)
- end
- ok, done = false, true
- end
- first, last = nil, nil
- end
- end
- elseif first then
- if ok then
- if trace_analyzing then
- colorize(first,last)
- end
- if trace_injections then
- traced_process(head,first,last,normal_process,last_a)
- else
- normal_process(head,first,last)
- end
- ok, done = false, true
- end
- first, last = nil, nil
- end
- elseif id == glue_code then
- if ok then
- -- continue
- elseif first then
- -- no chinese or korean
- first, last = nil, nil
- end
- elseif first then
- if ok then
- -- some chinese or korean
- if trace_analyzing then
- colorize(first,last)
- end
- if trace_injections then
- traced_process(head,first,last,normal_process,last_a)
- else
- normal_process(head,first,last)
- end
- first, last, ok, done = nil, nil, false, true
- elseif first then
- first, last = nil, nil
- end
- end
- start = start.next
- end
- if ok then
- if trace_analyzing then
- colorize(first,last)
- end
- if trace_injections then
- traced_process(head,first,last,normal_process,last_a)
- else
- normal_process(head,first,last)
- end
- done = true
- end
- return head, done
- end
-end
-
-function scripts.splitters.handler(head)
- return head, false
-end
-
--- new plugin:
-
-local registercontext = fonts.specifiers.registercontext
-local mergecontext = fonts.specifiers.mergecontext
-
-local otfscripts = characters.otfscripts
-
-local report_scripts = logs.reporter("scripts","auto feature")
-local trace_scripts = false trackers.register("scripts.autofeature",function(v) trace_scripts = v end)
-
-local autofontfeature = scripts.autofontfeature or { }
-scripts.autofontfeature = autofontfeature
-
-local cache_yes = { }
-local cache_nop = { }
-
-setmetatableindex(cache_yes,function(t,k) local v = { } t[k] = v return v end)
-setmetatableindex(cache_nop,function(t,k) local v = { } t[k] = v return v end)
-
--- beware: we need to tag a done (otherwise too many extra instances ... but how
--- often unpack? wait till we have a bitmap
---
--- we can consider merging this into handlers.characters(head) at some point as that
--- one already checks for the dynamic attribute, so it saves a pass; however, then we also
--- need to check for a_scriptinjection there, which nils the benefit
---
--- we can consider cheating: set all glyphs in a word as the first one but it's not
--- playing nice
-
-function autofontfeature.handler(head)
- for n in traverse_id(glyph_code,head) do
- -- if n[a_scriptinjection] then
- -- -- already tagged by script feature, maybe some day adapt
- -- else
- local char = n.char
- local script = otfscripts[char]
- if script then
- local dynamic = n[0] or 0
- local font = n.font
- if dynamic > 0 then
- local slot = cache_yes[font]
- local attr = slot[script]
- if not attr then
- attr = mergecontext(dynamic,script,2)
- slot[script] = attr
- if trace_scripts then
- report_scripts("script: %s, trigger %C, dynamic: %a, variant: %a",script,char,attr,"extended")
- end
- end
- if attr ~= 0 then
- n[0] = attr
- -- maybe set scriptinjection when associated
- end
- else
- local slot = cache_nop[font]
- local attr = slot[script]
- if not attr then
- attr = registercontext(font,script,2)
- slot[script] = attr
- if trace_scripts then
- report_scripts("script: %s, trigger %C, dynamic: %s, variant: %a",script,char,attr,"normal")
- end
- end
- if attr ~= 0 then
- n[0] = attr
- -- maybe set scriptinjection when associated
- end
- end
- end
- -- end
- end
- return head
-end
-
-function autofontfeature.enable()
- report_scripts("globally enabled")
- enableaction("processors","scripts.autofontfeature.handler")
-end
-
-function autofontfeature.disable()
- report_scripts("globally disabled")
- disableaction("processors","scripts.autofontfeature.handler")
-end
-
-commands.enableautofontscript = autofontfeature.enable
-commands.disableautofontscript = autofontfeature.disable
+if not modules then modules = { } end modules ['scrp-ini'] = {
+ version = 1.001,
+ comment = "companion to scrp-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- We need to rewrite this a bit ... rather old code ... will be done when japanese
+-- is finished.
+
+local attributes, nodes, node = attributes, nodes, node
+
+local trace_analyzing = false trackers.register("scripts.analyzing", function(v) trace_analyzing = v end)
+local trace_injections = false trackers.register("scripts.injections", function(v) trace_injections = v end)
+
+local report_preprocessing = logs.reporter("scripts","preprocessing")
+
+local utfchar = utf.char
+
+local first_glyph = node.first_glyph or node.first_character
+local traverse_id = node.traverse_id
+
+local texsetattribute = tex.setattribute
+
+local nodecodes = nodes.nodecodes
+local unsetvalue = attributes.unsetvalue
+
+local glyph_code = nodecodes.glyph
+local glue_code = nodecodes.glue
+
+local a_scriptinjection = attributes.private('scriptinjection')
+local a_scriptsplitting = attributes.private('scriptsplitting')
+local a_scriptstatus = attributes.private('scriptstatus')
+
+local fontdata = fonts.hashes.identifiers
+local allocate = utilities.storage.allocate
+local setnodecolor = nodes.tracers.colors.set
+local setmetatableindex = table.setmetatableindex
+
+local enableaction = nodes.tasks.enableaction
+local disableaction = nodes.tasks.disableaction
+
+scripts = scripts or { }
+local scripts = scripts
+
+scripts.hash = scripts.hash or { }
+local hash = scripts.hash
+
+local handlers = allocate()
+scripts.handlers = handlers
+
+local injectors = allocate()
+scripts.injectors = injectors
+
+local splitters = allocate()
+scripts.splitters = splitters
+
+local hash = { -- we could put these presets in char-def.lua
+ --
+ -- half width opening parenthesis
+ --
+ [0x0028] = "half_width_open",
+ [0x005B] = "half_width_open",
+ [0x007B] = "half_width_open",
+ [0x2018] = "half_width_open", -- ‘
+ [0x201C] = "half_width_open", -- “
+ --
+ -- full width opening parenthesis
+ --
+ [0x3008] = "full_width_open", -- 〈 Left book quote
+ [0x300A] = "full_width_open", -- 《 Left double book quote
+ [0x300C] = "full_width_open", -- 「 left quote
+ [0x300E] = "full_width_open", -- 『 left double quote
+ [0x3010] = "full_width_open", -- 【 left double book quote
+ [0x3014] = "full_width_open", -- 〔 left book quote
+ [0x3016] = "full_width_open", -- 〖 left double book quote
+ [0x3018] = "full_width_open", -- left tortoise bracket
+ [0x301A] = "full_width_open", -- left square bracket
+ [0x301D] = "full_width_open", -- reverse double prime qm
+ [0xFF08] = "full_width_open", -- （ left parenthesis
+ [0xFF3B] = "full_width_open", -- ［ left square brackets
+ [0xFF5B] = "full_width_open", -- ｛ left curve bracket
+ --
+ -- half width closing parenthesis
+ --
+ [0x0029] = "half_width_close",
+ [0x005D] = "half_width_close",
+ [0x007D] = "half_width_close",
+ [0x2019] = "half_width_close", -- ’ right quote, right
+ [0x201D] = "half_width_close", -- ” right double quote
+ --
+ -- full width closing parenthesis
+ --
+ [0x3009] = "full_width_close", -- 〉 book quote
+ [0x300B] = "full_width_close", -- 》 double book quote
+ [0x300D] = "full_width_close", -- 」 right quote, right
+ [0x300F] = "full_width_close", -- 』 right double quote
+ [0x3011] = "full_width_close", -- 】 right double book quote
+ [0x3015] = "full_width_close", -- 〕 right book quote
+ [0x3017] = "full_width_close", -- 〗 right double book quote
+ [0x3019] = "full_width_close", -- right tortoise bracket
+ [0x301B] = "full_width_close", -- right square bracket
+ [0x301E] = "full_width_close", -- double prime qm
+ [0x301F] = "full_width_close", -- low double prime qm
+ [0xFF09] = "full_width_close", -- ） right parenthesis
+ [0xFF3D] = "full_width_close", -- ］ right square brackets
+ [0xFF5D] = "full_width_close", -- ｝ right curve brackets
+ --
+ [0xFF62] = "half_width_open", -- left corner bracket
+ [0xFF63] = "half_width_close", -- right corner bracket
+ --
+ -- vertical opening vertical
+ --
+ -- 0xFE35, 0xFE37, 0xFE39, 0xFE3B, 0xFE3D, 0xFE3F, 0xFE41, 0xFE43, 0xFE47,
+ --
+ -- vertical closing
+ --
+ -- 0xFE36, 0xFE38, 0xFE3A, 0xFE3C, 0xFE3E, 0xFE40, 0xFE42, 0xFE44, 0xFE48,
+ --
+ -- half width opening punctuation
+ --
+ --
+ --
+ -- full width opening punctuation
+ --
+ -- 0x2236, -- ∶
+ -- 0xFF0C, -- ,
+ --
+ -- half width closing punctuation_hw
+ --
+ [0x0021] = "half_width_close", -- !
+ [0x002C] = "half_width_close", -- ,
+ [0x002E] = "half_width_close", -- .
+ [0x003A] = "half_width_close", -- :
+ [0x003B] = "half_width_close", -- ;
+ [0x003F] = "half_width_close", -- ?
+ [0xFF61] = "half_width_close", -- hw full stop
+ --
+ -- full width closing punctuation
+ --
+ [0x3001] = "full_width_close", -- 、
+ [0x3002] = "full_width_close", -- 。
+ [0xFF0C] = "full_width_close", -- ，
+ [0xFF0E] = "full_width_close", -- ．
+ --
+ -- depends on font
+ --
+ [0xFF01] = "full_width_close", -- ！
+ [0xFF1F] = "full_width_close", -- ？
+ --
+ [0xFF1A] = "full_width_punct", -- ：
+ [0xFF1B] = "full_width_punct", -- ；
+ --
+ -- non starter
+ --
+ [0x3005] = "non_starter", [0x3041] = "non_starter", [0x3043] = "non_starter", [0x3045] = "non_starter", [0x3047] = "non_starter",
+ [0x3049] = "non_starter", [0x3063] = "non_starter", [0x3083] = "non_starter", [0x3085] = "non_starter", [0x3087] = "non_starter",
+ [0x308E] = "non_starter", [0x3095] = "non_starter", [0x3096] = "non_starter", [0x309B] = "non_starter", [0x309C] = "non_starter",
+ [0x309D] = "non_starter", [0x309E] = "non_starter", [0x30A0] = "non_starter", [0x30A1] = "non_starter", [0x30A3] = "non_starter",
+ [0x30A5] = "non_starter", [0x30A7] = "non_starter", [0x30A9] = "non_starter", [0x30C3] = "non_starter", [0x30E3] = "non_starter",
+ [0x30E5] = "non_starter", [0x30E7] = "non_starter", [0x30EE] = "non_starter", [0x30F5] = "non_starter", [0x30F6] = "non_starter",
+ [0x30FC] = "non_starter", [0x30FD] = "non_starter", [0x30FE] = "non_starter", [0x31F0] = "non_starter", [0x31F1] = "non_starter",
+ [0x30F2] = "non_starter", [0x30F3] = "non_starter", [0x30F4] = "non_starter", [0x31F5] = "non_starter", [0x31F6] = "non_starter",
+ [0x30F7] = "non_starter", [0x30F8] = "non_starter", [0x30F9] = "non_starter", [0x31FA] = "non_starter", [0x31FB] = "non_starter",
+ [0x30FC] = "non_starter", [0x30FD] = "non_starter", [0x30FE] = "non_starter", [0x31FF] = "non_starter",
+ --
+ -- hyphenation
+ --
+ [0x2026] = "hyphen", -- … ellipsis
+ [0x2014] = "hyphen", -- — hyphen
+ --
+ [0x1361] = "ethiopic_word",
+ [0x1362] = "ethiopic_sentence",
+ --
+}
+
+local function provide(t,k)
+ local v
+ if not tonumber(k) then v = false
+ elseif (k >= 0x03040 and k <= 0x030FF)
+ or (k >= 0x031F0 and k <= 0x031FF)
+ or (k >= 0x032D0 and k <= 0x032FE)
+ or (k >= 0x0FF00 and k <= 0x0FFEF) then v = "katakana"
+ elseif (k >= 0x03400 and k <= 0x04DFF)
+ or (k >= 0x04E00 and k <= 0x09FFF)
+ or (k >= 0x0F900 and k <= 0x0FAFF)
+ or (k >= 0x20000 and k <= 0x2A6DF)
+ or (k >= 0x2F800 and k <= 0x2FA1F) then v = "chinese"
+ elseif (k >= 0x0AC00 and k <= 0x0D7A3) then v = "korean"
+ elseif (k >= 0x01100 and k <= 0x0115F) then v = "jamo_initial"
+ elseif (k >= 0x01160 and k <= 0x011A7) then v = "jamo_medial"
+ elseif (k >= 0x011A8 and k <= 0x011FF) then v = "jamo_final"
+ elseif (k >= 0x01200 and k <= 0x0139F) then v = "ethiopic_syllable"
+ else v = false
+ end
+ t[k] = v
+ return v
+end
+
+setmetatableindex(hash,provide)
+
+scripts.hash = hash
+
+local numbertodataset = allocate()
+local numbertohandler = allocate()
+
+--~ storage.register("scripts/hash", hash, "scripts.hash")
+
+scripts.numbertodataset = numbertodataset
+scripts.numbertohandler = numbertohandler
+
+local defaults = {
+ inter_char_shrink_factor = 0,
+ inter_char_stretch_factor = 0,
+ inter_char_half_shrink_factor = 0,
+ inter_char_half_stretch_factor = 0,
+ inter_char_quarter_shrink_factor = 0,
+ inter_char_quarter_stretch_factor = 0,
+ inter_char_hangul_penalty = 0,
+
+ inter_word_stretch_factor = 0,
+}
+
+scripts.defaults = defaults -- so we can add more
+
+function scripts.installmethod(handler)
+ local name = handler.name
+ handlers[name] = handler
+ local attributes = { }
+ local datasets = handler.datasets
+ if not datasets or not datasets.default then
+ report_preprocessing("missing (default) dataset in script %a",name)
+ datasets.default = { } -- slower but an error anyway
+ end
+ for k, v in next, datasets do
+ setmetatableindex(v,defaults)
+ end
+ setmetatable(attributes, {
+ __index = function(t,k)
+ local v = datasets[k] or datasets.default
+ local a = unsetvalue
+ if v then
+ v.name = name -- for tracing
+ a = #numbertodataset + 1
+ numbertodataset[a] = v
+ numbertohandler[a] = handler
+ end
+ t[k] = a
+ return a
+ end
+ } )
+ handler.attributes = attributes
+end
+
+function scripts.installdataset(specification) -- global overload
+ local method = specification.method
+ local name = specification.name
+ local dataset = specification.dataset
+ if method and name and dataset then
+ local parent = specification.parent or ""
+ local handler = handlers[method]
+ if handler then
+ local datasets = handler.datasets
+ if datasets then
+ local defaultset = datasets.default
+ if defaultset then
+ if parent ~= "" then
+ local p = datasets[parent]
+ if p then
+ defaultset = p
+ else
+ report_preprocessing("dataset, unknown parent %a for method %a",parent,method)
+ end
+ end
+ setmetatable(dataset,defaultset)
+ local existing = datasets[name]
+ if existing then
+ for k, v in next, dataset do
+ existing[k] = v
+ end
+ else
+ datasets[name] = dataset
+ end
+ else
+ report_preprocessing("dataset, no default for method %a",method)
+ end
+ else
+ report_preprocessing("dataset, no datasets for method %a",method)
+ end
+ else
+ report_preprocessing("dataset, no method %a",method)
+ end
+ else
+ report_preprocessing("dataset, invalid specification") -- maybe report table
+ end
+end
+
+local injectorenabled = false
+local splitterenabled = false
+
+function scripts.set(name,method,preset)
+ local handler = handlers[method]
+ if handler then
+ if handler.injector then
+ if not injectorenabled then
+ enableaction("processors","scripts.injectors.handler")
+ injectorenabled = true
+ end
+ texsetattribute(a_scriptinjection,handler.attributes[preset] or unsetvalue)
+ end
+ if handler.splitter then
+ if not splitterenabled then
+ enableaction("processors","scripts.splitters.handler")
+ splitterenabled = true
+ end
+ texsetattribute(a_scriptsplitting,handler.attributes[preset] or unsetvalue)
+ end
+ if handler.initializer then
+ handler.initializer(handler)
+ handler.initializer = nil
+ end
+ else
+ texsetattribute(a_scriptinjection,unsetvalue)
+ texsetattribute(a_scriptsplitting,unsetvalue)
+ end
+end
+
+function scripts.reset()
+ texsetattribute(a_scriptinjection,unsetvalue)
+ texsetattribute(a_scriptsplitting,unsetvalue)
+end
+
+-- the following tables will become a proper installer (move to cjk/eth)
+--
+-- 0=gray 1=red 2=green 3=blue 4=yellow 5=magenta 6=cyan 7=x-yellow 8=x-magenta 9=x-cyan
+
+local scriptcolors = allocate { -- todo: just named colors
+ korean = "trace:0",
+ chinese = "trace:0",
+ katakana = "trace:0",
+ hiragana = "trace:0",
+ full_width_open = "trace:1",
+ full_width_close = "trace:2",
+ half_width_open = "trace:3",
+ half_width_close = "trace:4",
+ full_width_punct = "trace:5",
+ hyphen = "trace:5",
+ non_starter = "trace:6",
+ jamo_initial = "trace:7",
+ jamo_medial = "trace:8",
+ jamo_final = "trace:9",
+ ethiopic_syllable = "trace:1",
+ ethiopic_word = "trace:2",
+ ethiopic_sentence = "trace:3",
+}
+
+scripts.colors = scriptcolors
+
+local numbertocategory = allocate { -- rather bound to cjk ... will be generalized
+ "korean",
+ "chinese",
+ "katakana",
+ "hiragana",
+ "full_width_open",
+ "full_width_close",
+ "half_width_open",
+ "half_width_close",
+ "full_width_punct",
+ "hyphen",
+ "non_starter",
+ "jamo_initial",
+ "jamo_medial",
+ "jamo_final",
+ "ethiopic_syllable",
+ "ethiopic_word",
+ "ethiopic_sentence",
+}
+
+local categorytonumber = allocate(table.swapped(numbertocategory)) -- could be one table
+
+scripts.categorytonumber = categorytonumber
+scripts.numbertocategory = numbertocategory
+
+local function colorize(start,stop)
+ for n in traverse_id(glyph_code,start) do
+ local kind = numbertocategory[n[a_scriptstatus]]
+ if kind then
+ local ac = scriptcolors[kind]
+ if ac then
+ setnodecolor(n,ac)
+ end
+ end
+ if n == stop then
+ break
+ end
+ end
+end
+
+local function traced_process(head,first,last,process,a)
+ if first ~= last then
+ local f, l = first, last
+ local name = numbertodataset[a]
+ name = name and name.name or "?"
+ report_preprocessing("before %s: %s",name,nodes.tosequence(f,l))
+ process(head,first,last)
+ report_preprocessing("after %s: %s", name,nodes.tosequence(f,l))
+ end
+end
+
+-- eventually we might end up with more extensive parsing
+-- todo: pass t[start..stop] == original
+--
+-- one of the time consuming functions:
+
+-- we can have a fonts.hashes.originals
+
+function scripts.injectors.handler(head)
+ local start = first_glyph(head) -- we already have glyphs here (subtype 1)
+ if not start then
+ return head, false
+ else
+ local last_a, normal_process, lastfont, originals = nil, nil, nil, nil
+ local done, first, last, ok = false, nil, nil, false
+ while start do
+ local id = start.id
+ if id == glyph_code then
+ local a = start[a_scriptinjection]
+ if a then
+ if a ~= last_a then
+ if first then
+ if ok then
+ if trace_analyzing then
+ colorize(first,last)
+ end
+ if trace_injections then
+ traced_process(head,first,last,normal_process,last_a)
+ else
+ normal_process(head,first,last)
+ end
+ ok, done = false, true
+ end
+ first, last = nil, nil
+ end
+ last_a = a
+ local handler = numbertohandler[a]
+ normal_process = handler.injector
+ end
+ if normal_process then
+ local f = start.font
+ if f ~= lastfont then
+ local resources = fontdata[f].resources
+ if resources then
+ originals = resources.originals
+ else
+ originals = nil -- can't happen
+ end
+ lastfont = f
+ end
+ local c = start.char
+ if originals then
+ c = originals[c] or c
+ end
+ local h = hash[c]
+ if h then
+ start[a_scriptstatus] = categorytonumber[h]
+ if not first then
+ first, last = start, start
+ else
+ last = start
+ end
+ -- if cjk == "chinese" or cjk == "korean" then -- we need to prevent too much ( ) processing
+ ok = true
+ -- end
+ elseif first then
+ if ok then
+ if trace_analyzing then
+ colorize(first,last)
+ end
+ if trace_injections then
+ traced_process(head,first,last,normal_process,last_a)
+ else
+ normal_process(head,first,last)
+ end
+ ok, done = false, true
+ end
+ first, last = nil, nil
+ end
+ end
+ elseif first then
+ if ok then
+ if trace_analyzing then
+ colorize(first,last)
+ end
+ if trace_injections then
+ traced_process(head,first,last,normal_process,last_a)
+ else
+ normal_process(head,first,last)
+ end
+ ok, done = false, true
+ end
+ first, last = nil, nil
+ end
+ elseif id == glue_code then
+ if ok then
+ -- continue
+ elseif first then
+ -- no chinese or korean
+ first, last = nil, nil
+ end
+ elseif first then
+ if ok then
+ -- some chinese or korean
+ if trace_analyzing then
+ colorize(first,last)
+ end
+ if trace_injections then
+ traced_process(head,first,last,normal_process,last_a)
+ else
+ normal_process(head,first,last)
+ end
+ first, last, ok, done = nil, nil, false, true
+ elseif first then
+ first, last = nil, nil
+ end
+ end
+ start = start.next
+ end
+ if ok then
+ if trace_analyzing then
+ colorize(first,last)
+ end
+ if trace_injections then
+ traced_process(head,first,last,normal_process,last_a)
+ else
+ normal_process(head,first,last)
+ end
+ done = true
+ end
+ return head, done
+ end
+end
+
+function scripts.splitters.handler(head)
+ return head, false
+end
+
+-- new plugin:
+
+local registercontext = fonts.specifiers.registercontext
+local mergecontext = fonts.specifiers.mergecontext
+
+local otfscripts = characters.otfscripts
+
+local report_scripts = logs.reporter("scripts","auto feature")
+local trace_scripts = false trackers.register("scripts.autofeature",function(v) trace_scripts = v end)
+
+local autofontfeature = scripts.autofontfeature or { }
+scripts.autofontfeature = autofontfeature
+
+local cache_yes = { }
+local cache_nop = { }
+
+setmetatableindex(cache_yes,function(t,k) local v = { } t[k] = v return v end)
+setmetatableindex(cache_nop,function(t,k) local v = { } t[k] = v return v end)
+
+-- beware: we need to tag a done (otherwise too many extra instances ... but how
+-- often unpack? wait till we have a bitmap
+--
+-- we can consider merging this into handlers.characters(head) at some point as that
+-- one already checks for the dynamic attribute, so it saves a pass; however, then we also
+-- need to check for a_scriptinjection there, which nils the benefit
+--
+-- we can consider cheating: set all glyphs in a word as the first one but it's not
+-- playing nice
+
+function autofontfeature.handler(head)
+ for n in traverse_id(glyph_code,head) do
+ -- if n[a_scriptinjection] then
+ -- -- already tagged by script feature, maybe some day adapt
+ -- else
+ local char = n.char
+ local script = otfscripts[char]
+ if script then
+ local dynamic = n[0] or 0
+ local font = n.font
+ if dynamic > 0 then
+ local slot = cache_yes[font]
+ local attr = slot[script]
+ if not attr then
+ attr = mergecontext(dynamic,script,2)
+ slot[script] = attr
+ if trace_scripts then
+ report_scripts("script: %s, trigger %C, dynamic: %a, variant: %a",script,char,attr,"extended")
+ end
+ end
+ if attr ~= 0 then
+ n[0] = attr
+ -- maybe set scriptinjection when associated
+ end
+ else
+ local slot = cache_nop[font]
+ local attr = slot[script]
+ if not attr then
+ attr = registercontext(font,script,2)
+ slot[script] = attr
+ if trace_scripts then
+ report_scripts("script: %s, trigger %C, dynamic: %s, variant: %a",script,char,attr,"normal")
+ end
+ end
+ if attr ~= 0 then
+ n[0] = attr
+ -- maybe set scriptinjection when associated
+ end
+ end
+ end
+ -- end
+ end
+ return head
+end
+
+function autofontfeature.enable()
+ report_scripts("globally enabled")
+ enableaction("processors","scripts.autofontfeature.handler")
+end
+
+function autofontfeature.disable()
+ report_scripts("globally disabled")
+ disableaction("processors","scripts.autofontfeature.handler")
+end
+
+commands.enableautofontscript = autofontfeature.enable
+commands.disableautofontscript = autofontfeature.disable
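
The classification that feeds these handlers comes from the provide() index function: any codepoint not listed explicitly in the hash is assigned a category by Unicode range (katakana, chinese, korean, jamo, ethiopic_syllable) or marked false. A small standalone check of that range logic, using a subset of the ranges copied from the function above:

-- subset of the ranges used by provide(); enough for a quick sanity check
local function classify(k)
    if (k >= 0x3040 and k <= 0x30FF) or (k >= 0xFF00 and k <= 0xFFEF) then
        return "katakana"
    elseif (k >= 0x4E00 and k <= 0x9FFF) then
        return "chinese"
    elseif (k >= 0xAC00 and k <= 0xD7A3) then
        return "korean"
    elseif (k >= 0x1200 and k <= 0x139F) then
        return "ethiopic_syllable"
    end
    return false
end

print(classify(0x4F60)) --> chinese
print(classify(0x30A2)) --> katakana
print(classify(0xAC00)) --> korean
print(classify(0x1200)) --> ethiopic_syllable
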
diff --git a/tex/context/base/sort-ini.lua b/tex/context/base/sort-ini.lua
index 479d1c489..a07cbc6d2 100644
--- a/tex/context/base/sort-ini.lua
+++ b/tex/context/base/sort-ini.lua
@@ -1,665 +1,665 @@
-if not modules then modules = { } end modules ['sort-ini'] = {
- version = 1.001,
- comment = "companion to sort-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- It took a while to get there, but with Fleetwood Mac's "Don't Stop"
--- playing in the background we sort of got it done.
-
---[[
-The code here evolved from the rather old mkii approach. There
-we concatenate the key and (raw) entry into a new string. Numbers and
-special characters get some treatment so that they sort ok. In
-addition some normalization (lowercasing, accent stripping) takes
-place and again data is appended or prepended. Eventually these
-strings are sorted using a regular string sorter. The relative order
-of characters is dealt with by weighting them. It took a while to
-figure this all out but eventually it worked ok for most languages,
-given that the right datatables were provided.
-
-
-Here we do follow a similar approach but this time we don't append
-the manipulated keys and entries but create tables for each of them
-with entries being tables themselves having different properties. In
-these tables characters are represented by numbers and sorting takes
-place using these numbers. Strings are simplified using lowercasing
-as well as shape codes. Numbers are filtered and after getting an offset
-they end up at the right end of the spectrum (more clever parser will
-be added some day). There are definitely more solutions to the problem
-and it is a nice puzzle to solve.
-
-
-In the future more methods can be added, as there is practically no
-limit to what goes into the tables. For that we will provide hooks.
-
-
-Todo: decomposition with specific order of accents, this is
-relatively easy to do.
-
-
-Todo: investigate what standards and conventions there are and see
-how they map onto this mechanism. I've learned that users can come up
-with any demand so nothing here is frozen.
-
-
-In the future index entries will become more clever, i.e. they will
-have language etc properties that then can be used.
-]]--
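
-- A minimal illustration of the scheme described above: each entry is turned
-- into an array of numbers and entries are compared array-wise. The mapping
-- below is invented; in practice the orders come from the language definitions.

local map = { a = 2, ["ä"] = 3, b = 4 } -- "ä" deliberately sorts between a and b

local function split(word)
    local t = { }
    for c in word:gmatch(".[\128-\191]*") do -- iterate utf-8 characters
        t[#t+1] = map[c] or 0
    end
    return t
end

local function compare(x,y)
    local a, b = split(x), split(y)
    for i=1,math.min(#a,#b) do
        if a[i] ~= b[i] then
            return a[i] < b[i]
        end
    end
    return #a < #b
end

local words = { "ba", "ab", "äa" }
table.sort(words,compare)
print(table.concat(words," ")) --> ab äa ba
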
-
-local gsub, rep, sub, sort, concat = string.gsub, string.rep, string.sub, table.sort, table.concat
-local utfbyte, utfchar, utfcharacters, utfvalues = utf.byte, utf.char, utf.characters, utf.values
-local next, type, tonumber, rawget, rawset = next, type, tonumber, rawget, rawset
-
-local allocate = utilities.storage.allocate
-local setmetatableindex = table.setmetatableindex
-
-local trace_tests = false trackers.register("sorters.tests", function(v) trace_tests = v end)
-local trace_methods = false trackers.register("sorters.methods", function(v) trace_methods = v end)
-
-local report_sorters = logs.reporter("languages","sorters")
-
-local comparers = { }
-local splitters = { }
-local definitions = allocate()
-local tracers = allocate()
-local ignoredoffset = 0x10000 -- frozen
-local replacementoffset = 0x10000 -- frozen
-local digitsoffset = 0x20000 -- frozen
-local digitsmaximum = 0xFFFFF -- frozen
-
-local lccodes = characters.lccodes
-local lcchars = characters.lcchars
-local shchars = characters.shchars
-local fscodes = characters.fscodes
-local fschars = characters.fschars
-
-local decomposed = characters.decomposed
-
-local variables = interfaces.variables
-
-local v_numbers = variables.numbers
-local v_default = variables.default
-local v_before = variables.before
-local v_after = variables.after
-local v_first = variables.first
-local v_last = variables.last
-
-local validmethods = table.tohash {
- -- "ch", -- raw character
- "mm", -- minus mapping
- "zm", -- zero mapping
- "pm", -- plus mapping
- "mc", -- lower case - 1
- "zc", -- lower case
- "pc", -- lower case + 1
- "uc", -- unicode
-}
-
-local predefinedmethods = {
- [v_default] = "zc,pc,zm,pm,uc",
- [v_before] = "mm,mc,uc",
- [v_after] = "pm,mc,uc",
- [v_first] = "pc,mm,uc",
- [v_last] = "mc,mm,uc",
-}
-
-sorters = {
- comparers = comparers,
- splitters = splitters,
- definitions = definitions,
- tracers = tracers,
- constants = {
- ignoredoffset = ignoredoffset,
- replacementoffset = replacementoffset,
- digitsoffset = digitsoffset,
- digitsmaximum = digitsmaximum,
- defaultlanguage = v_default,
- defaultmethod = v_default,
- defaultdigits = v_numbers,
- }
-}
-
-local sorters = sorters
-local constants = sorters.constants
-
-local data, language, method, digits
-local replacements, m_mappings, z_mappings, p_mappings, entries, orders, lower, upper, method, sequence
-local thefirstofsplit
-
-local mte = { -- todo: assign to t
- __index = function(t,k)
- if k and k ~= "" and utfbyte(k) < digitsoffset then -- k check really needed (see s-lan-02)
- local el
- if k then
- local l = lower[k] or lcchars[k]
- el = rawget(t,l)
- end
- if not el then
- local l = shchars[k]
- if l and l ~= k then
- if #l > 1 then
- l = sub(l,1,1) -- todo
- end
- el = rawget(t,l)
- if not el then
- l = lower[k] or lcchars[l]
- if l then
- el = rawget(t,l)
- end
- end
- end
- el = el or k
- end
- -- rawset(t,k,el)
- return el
- else
- -- rawset(t,k,k)
- end
- end
-}
-
-local noorder = false
-
-local function preparetables(data)
- local orders, lower, m_mappings, z_mappings, p_mappings = data.orders, data.lower, { }, { }, { }
- for i=1,#orders do
- local oi = orders[i]
- local n = { 2 * i }
- m_mappings[oi], z_mappings[oi], p_mappings[oi] = n, n, n
- end
- local mtm = {
- __index = function(t,k)
- local n, nn
- if k then
- if trace_tests then
- report_sorters("simplifing character %C",k)
- end
- local l = lower[k] or lcchars[k]
- if l then
- if trace_tests then
- report_sorters(" 1 lower: %C",l)
- end
- local ml = rawget(t,l)
- if ml then
- n = { }
- nn = 0
- for i=1,#ml do
- nn = nn + 1
- n[nn] = ml[i] + (t.__delta or 0)
- end
- if trace_tests then
- report_sorters(" 2 order: % t",n)
- end
- end
- end
- if not n then
- local s = shchars[k] -- maybe all components?
- if s and s ~= k then
- if trace_tests then
- report_sorters(" 3 shape: %C",s)
- end
- n = { }
- nn = 0
- for l in utfcharacters(s) do
- local ml = rawget(t,l)
- if ml then
- if trace_tests then
- report_sorters(" 4 keep: %C",l)
- end
- if ml then
- for i=1,#ml do
- nn = nn + 1
- n[nn] = ml[i]
- end
- end
- else
- l = lower[l] or lcchars[l]
- if l then
- if trace_tests then
- report_sorters(" 5 lower: %C",l)
- end
- local ml = rawget(t,l)
- if ml then
- for i=1,#ml do
- nn = nn + 1
- n[nn] = ml[i] + (t.__delta or 0)
- end
- end
- end
- end
- end
- else
- -- -- we probably never enter this branch
- -- -- fschars returns a single char
- --
- -- s = fschars[k]
- -- if s and s ~= k then
- -- if trace_tests then
- -- report_sorters(" 6 split: %s",s)
- -- end
- -- local ml = rawget(t,s)
- -- if ml then
- -- n = { }
- -- nn = 0
- -- for i=1,#ml do
- -- nn = nn + 1
- -- n[nn] = ml[i]
- -- end
- -- end
- -- end
- local b = utfbyte(k)
- n = decomposed[b] or { b }
- if trace_tests then
- report_sorters(" 6 split: %s",utf.tostring(b)) -- todo
- end
- end
- if n then
- if trace_tests then
- report_sorters(" 7 order: % t",n)
- end
- else
- n = noorder
- if trace_tests then
- report_sorters(" 8 order: 0")
- end
- end
- end
- else
- n = noorder
- if trace_tests then
- report_sorters(" 9 order: 0")
- end
- end
- rawset(t,k,n)
- return n
- end
- }
- data.m_mappings = m_mappings
- data.z_mappings = z_mappings
- data.p_mappings = p_mappings
- m_mappings.__delta = -1
- z_mappings.__delta = 0
- p_mappings.__delta = 1
- setmetatable(data.entries,mte)
- setmetatable(data.m_mappings,mtm)
- setmetatable(data.z_mappings,mtm)
- setmetatable(data.p_mappings,mtm)
- thefirstofsplit = data.firstofsplit
-end
-
-local function update() -- prepare parent chains, needed when new languages are added
- for language, data in next, definitions do
- local parent = data.parent or "default"
- if language ~= "default" then
- setmetatableindex(data,definitions[parent] or definitions.default)
- end
- data.language = language
- data.parent = parent
- data.m_mappings = { } -- free temp data
- data.z_mappings = { } -- free temp data
- data.p_mappings = { } -- free temp data
- end
-end
-
-local function setlanguage(l,m,d,u)
- language = (l ~= "" and l) or constants.defaultlanguage
- data = definitions[language or constants.defaultlanguage] or definitions[constants.defaultlanguage]
- method = (m ~= "" and m) or data.method or constants.defaultmethod
- digits = (d ~= "" and d) or data.digits or constants.defaultdigits
- if trace_tests then
- report_sorters("setting language %a, method %a, digits %a",language,method,digits)
- end
- replacements = data.replacements
- entries = data.entries
- orders = data.orders
- lower = data.lower
- upper = data.upper
- preparetables(data)
- m_mappings = data.m_mappings
- z_mappings = data.z_mappings
- p_mappings = data.p_mappings
- --
- method = predefinedmethods[variables[method]] or method
- data.method = method
- --
- data.digits = digits
- --
- local seq = utilities.parsers.settings_to_array(method or "") -- check the list
- sequence = { }
- local nofsequence = 0
- for i=1,#seq do
- local s = seq[i]
- if validmethods[s] then
- nofsequence = nofsequence + 1
- sequence[nofsequence] = s
- else
- report_sorters("invalid sorter method %a in %a",s,method)
- end
- end
- data.sequence = sequence
- if trace_tests then
- report_sorters("using sort sequence: % t",sequence)
- end
- --
- return data
-end
-
-function sorters.update()
- update()
- setlanguage(language,method,numberorder) -- resync current language and method
-end
-
-function sorters.setlanguage(language,method,numberorder)
- update()
- setlanguage(language,method,numberorder) -- new language and method
-end
-
--- tricky: { 0, 0, 0 } vs { 0, 0, 0, 0 } => longer wins and mm, pm, zm can have them
-
-local function basicsort(sort_a,sort_b)
- if sort_a and sort_b then
- local na = #sort_a
- local nb = #sort_b
- if na > nb then
- na = nb
- end
- for i=1,na do
- local ai, bi = sort_a[i], sort_b[i]
- if ai > bi then
- return 1
- elseif ai < bi then
- return -1
- end
- end
- end
- return 0
-end
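-- A standalone sketch (not part of the patch) of the comparison above plus
-- the tie break that comparers.basic applies below: entries are compared over
-- their common length and only on a full tie does the length decide.
local function compare(a,b)
    local n = #a < #b and #a or #b
    for i=1,n do
        if a[i] ~= b[i] then
            return a[i] < b[i] and -1 or 1
        end
    end
    return #a == #b and 0 or (#a < #b and -1 or 1)
end
-- compare({2,4},{2,4})   --> 0
-- compare({2,4},{2,4,6}) --> -1 (the shorter entry sorts first)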
-
-function comparers.basic(a,b) -- trace ea and eb
- local ea, eb = a.split, b.split
- local na, nb = #ea, #eb
- if na == 0 and nb == 0 then
- -- simple variant (single word)
- local result = 0
- for j=1,#sequence do
- local m = sequence[j]
- result = basicsort(ea[m],eb[m])
- if result ~= 0 then
- return result
- end
- end
- if result == 0 then
- local la, lb = #ea.uc, #eb.uc
- if la > lb then
- return 1
- elseif lb > la then
- return -1
- else
- return 0
- end
- else
- return result
- end
- else
- -- complex variant, used in register (multiple words)
- local result = 0
- for i=1,nb < na and nb or na do
- local eai, ebi = ea[i], eb[i]
- for j=1,#sequence do
- local m = sequence[j]
- result = basicsort(eai[m],ebi[m])
- if result ~= 0 then
- return result
- end
- end
- if result == 0 then
- local la, lb = #eai.uc, #ebi.uc
- if la > lb then
- return 1
- elseif lb > la then
- return -1
- end
- else
- return result
- end
- end
- if result ~= 0 then
- return result
- elseif na > nb then
- return 1
- elseif nb > na then
- return -1
- else
- return 0
- end
- end
-end
-
-local function numify(s)
- s = digitsoffset + tonumber(s) -- alternatively we can create range
- if s > digitsmaximum then
- s = digitsmaximum
- end
- return utfchar(s)
-end
-
-function sorters.strip(str) -- todo: only letters and such
- if str and str ~= "" then
- -- todo: make a decent lpeg
- str = gsub(str,"\\[\"\'~^`]*","") -- \"e -- hm, too greedy
- str = gsub(str,"\\%S*","") -- the rest
- str = gsub(str,"%s","\001") -- can be option
- str = gsub(str,"[%s%[%](){}%$\"\']*","")
- if digits == v_numbers then
- str = gsub(str,"(%d+)",numify) -- sort numbers properly
- end
- return str
- else
- return ""
- end
-end
-
-local function firstofsplit(entry)
- -- numbers are left padded by spaces
- local split = entry.split
- if #split > 0 then
- split = split[1].ch
- else
- split = split.ch
- end
- local first = split and split[1] or ""
- if thefirstofsplit then
- return thefirstofsplit(first,data,entry) -- normally the first one is needed
- else
- return first, entries[first] or "\000" -- tag
- end
-end
-
-sorters.firstofsplit = firstofsplit
-
--- for the moment we use an inefficient bunch of tables but once
--- we know what combinations make sense we can optimize this
-
-function splitters.utf(str) -- we could append m and u but this is cleaner, s is for tracing
- if #replacements > 0 then
- -- todo make an lpeg for this
- for k=1,#replacements do
- local v = replacements[k]
- str = gsub(str,v[1],v[2])
- end
- end
- local m_case, z_case, p_case, m_mapping, z_mapping, p_mapping, char, byte, n = { }, { }, { }, { }, { }, { }, { }, { }, 0
- local nm, nz, np = 0, 0, 0
- for sc in utfcharacters(str) do
- local b = utfbyte(sc)
- if b >= digitsoffset then
- if n == 0 then
- -- we need to force number to the top
- z_case[1] = 0
- m_case[1] = 0
- p_case[1] = 0
- char[1] = sc
- byte[1] = 0
- m_mapping[1] = 0
- z_mapping[1] = 0
- p_mapping[1] = 0
- n = 2
- else
- n = n + 1
- end
- z_case[n] = b
- m_case[n] = b
- p_case[n] = b
- char[n] = sc
- byte[n] = b
- nm = nm + 1
- nz = nz + 1
- np = np + 1
- m_mapping[nm] = b
- z_mapping[nz] = b
- p_mapping[np] = b
- else
- n = n + 1
- local l = lower[sc]
- l = l and utfbyte(l) or lccodes[b]
- if type(l) == "table" then
- l = l[1] -- there are currently no tables in lccodes but there can be, some day
- end
- z_case[n] = l
- if l ~= b then
- m_case[n] = l - 1
- p_case[n] = l + 1
- else
- m_case[n] = l
- p_case[n] = l
- end
- char[n], byte[n] = sc, b
- local fs = fscodes[b] or b
- local msc = m_mappings[sc]
- if msc ~= noorder then
- if not msc then
- msc = m_mappings[fs]
- end
- for i=1,#msc do
- nm = nm + 1
- m_mapping[nm] = msc[i]
- end
- end
- local zsc = z_mappings[sc]
- if zsc ~= noorder then
- if not zsc then
- zsc = z_mappings[fs]
- end
- for i=1,#zsc do
- nz = nz + 1
- z_mapping[nz] = zsc[i]
- end
- end
- local psc = p_mappings[sc]
- if psc ~= noorder then
- if not psc then
- psc = p_mappings[fs]
- end
- for i=1,#psc do
- np = np + 1
- p_mapping[np] = psc[i]
- end
- end
- end
- end
- -- -- only those needed that are part of a sequence
- --
- -- local b = byte[1]
- -- if b then
- -- -- we set them to the first split code (korean)
- -- local fs = fscodes[b] or b
- -- if #m_mapping == 0 then
- -- m_mapping = { m_mappings[fs][1] }
- -- end
- -- if #z_mapping == 0 then
- -- z_mapping = { z_mappings[fs][1] }
- -- end
- -- if #p_mapping == 0 then
- -- p_mapping = { p_mappings[fs][1] }
- -- end
- -- end
- local t = {
- ch = char,
- uc = byte,
- mc = m_case,
- zc = z_case,
- pc = p_case,
- mm = m_mapping,
- zm = z_mapping,
- pm = p_mapping,
- }
-
- return t
-end
-
-local function packch(entry)
- local split = entry.split
- if #split > 0 then -- useless test
- local t = { }
- for i=1,#split do
- local tt, li = { }, split[i].ch
- for j=1,#li do
- local lij = li[j]
- tt[j] = utfbyte(lij) > ignoredoffset and "[]" or lij
- end
- t[i] = concat(tt)
- end
- return concat(t," + ")
- else
- local t, li = { }, split.ch
- for j=1,#li do
- local lij = li[j]
- t[j] = utfbyte(lij) > ignoredoffset and "[]" or lij
- end
- return concat(t)
- end
-end
-
-local function packuc(entry)
- local split = entry.split
- if #split > 0 then -- useless test
- local t = { }
- for i=1,#split do
- t[i] = concat(split[i].uc, " ")
- end
- return concat(t," + ")
- else
- return concat(split.uc," ")
- end
-end
-
-function sorters.sort(entries,cmp)
- if trace_tests or trace_methods then
- local nofentries = #entries
- report_sorters("entries: %s, language: %s, method: %s, digits: %s",nofentries,language,method,tostring(digits))
- for i=1,nofentries do
- report_sorters("entry %s",table.serialize(entries[i].split,i,true,true,true))
- end
- end
- if trace_tests then
- sort(entries,function(a,b)
- local r = cmp(a,b)
- local e = (not r and "?") or (r<0 and "<") or (r>0 and ">") or "="
- report_sorters("%s %s %s | %s %s %s",packch(a),e,packch(b),packuc(a),e,packuc(b))
- return r == -1
- end)
- local s
- for i=1,#entries do
- local entry = entries[i]
- local letter, first = firstofsplit(entry)
- if first == s then
- first = " "
- else
- s = first
- report_sorters(">> %C (%C)",first,letter)
- end
- report_sorters(" %s | %s",packch(entry),packuc(entry))
- end
- else
- sort(entries,function(a,b)
- return cmp(a,b) == -1
- end)
- end
-end
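-- A minimal usage sketch (not part of the patch), assuming a sort definition
-- such as "default" has been registered in sorters.definitions elsewhere:
-- strip and split each string once, then sort with the basic comparer.
sorters.setlanguage("default","","")
local entries = {
    { split = sorters.splitters.utf(sorters.strip("beta"))  },
    { split = sorters.splitters.utf(sorters.strip("alpha")) },
}
sorters.sort(entries,sorters.comparers.basic)
-- afterwards entries[1] is the entry built from "alpha"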
+if not modules then modules = { } end modules ['sort-ini'] = {
+ version = 1.001,
+ comment = "companion to sort-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- It took a while to get there, but with Fleetwood Mac's "Don't Stop"
+-- playing in the background we sort of got it done.
+
+--[[ldx--
+The code here evolved from the rather old mkii approach. There
+we concatenate the key and (raw) entry into a new string. Numbers and
+special characters get some treatment so that they sort ok. In
+addition some normalization (lowercasing, accent stripping) takes
+place and again data is appended or prepended. Eventually these
+strings are sorted using a regular string sorter. The relative order
+of characters is dealt with by weighting them. It took a while to
+figure this all out but eventually it worked ok for most languages,
+given that the right data tables were provided.
+
+
+Here we do follow a similar approach but this time we don't append
+the manipulated keys and entries but create tables for each of them
+with entries being tables themselves having different properties. In
+these tables characters are represented by numbers and sorting takes
+place using these numbers. Strings are simplified using lowercasing
+as well as shape codes. Numbers are filtered and after getting an offset
+they end up at the right end of the spectrum (a more clever parser will
+be added some day). There are definitely more solutions to the problem
+and it is a nice puzzle to solve.
+
+
+In the future more methods can be added, as there is practically no
+limit to what goes into the tables. For that we will provide hooks.
+
+
+Todo: decomposition with specific order of accents, this is
+relatively easy to do.
+
+
+Todo: investigate what standards and conventions there are and see
+how they map onto this mechanism. I've learned that users can come up
+with any demand so nothing here is frozen.
+
+
+In the future index entries will become more clever, i.e. they will
+have language etc properties that then can be used.
-For raw 8 bit characters, the offset is 0x110000 (bottom of plane 18) at
-the top of luatex's char range but outside the unicode range.
---ldx]]--
-
-local tonumber = tonumber
-local utfchar = utf.char
-local gsub, format = string.gsub, string.format
-
-function converters.hexstringtonumber(n) return tonumber(n,16) end
-function converters.octstringtonumber(n) return tonumber(n, 8) end
-function converters.rawcharacter (n) return utfchar(0x110000+n) end
-function converters.lchexnumber (n) return format("%x" ,n) end
-function converters.uchexnumber (n) return format("%X" ,n) end
-function converters.lchexnumbers (n) return format("%02x",n) end
-function converters.uchexnumbers (n) return format("%02X",n) end
-function converters.octnumber (n) return format("%03o",n) end
-
-function commands.hexstringtonumber(n) context(tonumber(n,16)) end
-function commands.octstringtonumber(n) context(tonumber(n, 8)) end
-function commands.rawcharacter (n) context(utfchar(0x110000+n)) end
-function commands.lchexnumber (n) context("%x" ,n) end
-function commands.uchexnumber (n) context("%X" ,n) end
-function commands.lchexnumbers (n) context("%02x",n) end
-function commands.uchexnumbers (n) context("%02X",n) end
-function commands.octnumber (n) context("%03o",n) end
-
-function commands.format(fmt,...) -- used ?
- fmt = gsub(fmt,"@","%%")
- context(fmt,...)
-end
-
-local cosd, sind, tand = math.cosd, math.sind, math.tand
-local cos, sin, tan = math.cos, math.sin, math.tan
-
--- unfortunately %s spits out: 6.1230317691119e-017
---
--- function commands.sind(n) context(sind(n)) end
--- function commands.cosd(n) context(cosd(n)) end
--- function commands.tand(n) context(tand(n)) end
---
--- function commands.sin (n) context(sin (n)) end
--- function commands.cos (n) context(cos (n)) end
--- function commands.tan (n) context(tan (n)) end
-
-function commands.sind(n) context("%0.6f",sind(n)) end
-function commands.cosd(n) context("%0.6f",cosd(n)) end
-function commands.tand(n) context("%0.6f",tand(n)) end
-
-function commands.sin (n) context("%0.6f",sin (n)) end
-function commands.cos (n) context("%0.6f",cos (n)) end
-function commands.tan (n) context("%0.6f",tan (n)) end
+if not modules then modules = { } end modules ['syst-con'] = {
+ version = 1.001,
+ comment = "companion to syst-con.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+converters = converters or { }
+
+--[[ldx--
+
+For raw 8 bit characters, the offset is 0x110000 (bottom of plane 18) at
+the top of luatex's char range but outside the unicode range.
+--ldx]]--
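-- A quick illustration (a sketch, not part of the patch) of the offset used
-- by rawcharacter above: adding 0x110000 moves a raw byte above the unicode
-- range, so the stand-in character can never collide with a real one.
local utfchar = utf.char
local function rawslot(b)
    return 0x110000 + b -- same offset as converters.rawcharacter
end
-- utfchar(rawslot(0x41)) encodes the stand-in for the raw byte 0x41 ("A")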
+
+local tonumber = tonumber
+local utfchar = utf.char
+local gsub, format = string.gsub, string.format
+
+function converters.hexstringtonumber(n) return tonumber(n,16) end
+function converters.octstringtonumber(n) return tonumber(n, 8) end
+function converters.rawcharacter (n) return utfchar(0x110000+n) end
+function converters.lchexnumber (n) return format("%x" ,n) end
+function converters.uchexnumber (n) return format("%X" ,n) end
+function converters.lchexnumbers (n) return format("%02x",n) end
+function converters.uchexnumbers (n) return format("%02X",n) end
+function converters.octnumber (n) return format("%03o",n) end
+
+function commands.hexstringtonumber(n) context(tonumber(n,16)) end
+function commands.octstringtonumber(n) context(tonumber(n, 8)) end
+function commands.rawcharacter (n) context(utfchar(0x110000+n)) end
+function commands.lchexnumber (n) context("%x" ,n) end
+function commands.uchexnumber (n) context("%X" ,n) end
+function commands.lchexnumbers (n) context("%02x",n) end
+function commands.uchexnumbers (n) context("%02X",n) end
+function commands.octnumber (n) context("%03o",n) end
+
+function commands.format(fmt,...) -- used ?
+ fmt = gsub(fmt,"@","%%")
+ context(fmt,...)
+end
+
+local cosd, sind, tand = math.cosd, math.sind, math.tand
+local cos, sin, tan = math.cos, math.sin, math.tan
+
+-- unfortunately %s spits out: 6.1230317691119e-017
+--
+-- function commands.sind(n) context(sind(n)) end
+-- function commands.cosd(n) context(cosd(n)) end
+-- function commands.tand(n) context(tand(n)) end
+--
+-- function commands.sin (n) context(sin (n)) end
+-- function commands.cos (n) context(cos (n)) end
+-- function commands.tan (n) context(tan (n)) end
+
+function commands.sind(n) context("%0.6f",sind(n)) end
+function commands.cosd(n) context("%0.6f",cosd(n)) end
+function commands.tand(n) context("%0.6f",tand(n)) end
+
+function commands.sin (n) context("%0.6f",sin (n)) end
+function commands.cos (n) context("%0.6f",cos (n)) end
+function commands.tan (n) context("%0.6f",tan (n)) end
diff --git a/tex/context/base/syst-lua.lua b/tex/context/base/syst-lua.lua
index ef524c339..4795efe68 100644
--- a/tex/context/base/syst-lua.lua
+++ b/tex/context/base/syst-lua.lua
@@ -1,123 +1,123 @@
-if not modules then modules = { } end modules ['syst-lua'] = {
- version = 1.001,
- comment = "companion to syst-lua.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format, find, match, rep = string.format, string.find, string.match, string.rep
-local tonumber = tonumber
-local S, lpegmatch, lpegtsplitat = lpeg.S, lpeg.match, lpeg.tsplitat
-
-local context = context
-
-commands = commands or { }
-
-function commands.writestatus(...) logs.status(...) end -- overloaded later
-
-local firstoftwoarguments = context.firstoftwoarguments -- context.constructcsonly("firstoftwoarguments" )
-local secondoftwoarguments = context.secondoftwoarguments -- context.constructcsonly("secondoftwoarguments")
-local firstofoneargument = context.firstofoneargument -- context.constructcsonly("firstofoneargument" )
-local gobbleoneargument = context.gobbleoneargument -- context.constructcsonly("gobbleoneargument" )
-
--- contextsprint(prtcatcodes,[[\ui_fo]]) -- firstofonearguments
--- contextsprint(prtcatcodes,[[\ui_go]]) -- gobbleonearguments
--- contextsprint(prtcatcodes,[[\ui_ft]]) -- firstoftwoarguments
--- contextsprint(prtcatcodes,[[\ui_st]]) -- secondoftwoarguments
-
-function commands.doifelse(b)
- if b then
- firstoftwoarguments()
- else
- secondoftwoarguments()
- end
-end
-
-function commands.doif(b)
- if b then
- firstofoneargument()
- else
- gobbleoneargument()
- end
-end
-
-function commands.doifnot(b)
- if b then
- gobbleoneargument()
- else
- firstofoneargument()
- end
-end
-
-commands.testcase = commands.doifelse -- obsolete
-
-function commands.boolcase(b)
- context(b and 1 or 0)
-end
-
-function commands.doifelsespaces(str)
- if find(str,"^ +$") then
- firstoftwoarguments()
- else
- secondoftwoarguments()
- end
-end
-
-local s = lpegtsplitat(",")
-local h = { }
-
-function commands.doifcommonelse(a,b) -- often the same test
- local ha = h[a]
- local hb = h[b]
- if not ha then
- ha = lpegmatch(s,a)
- h[a] = ha
- end
- if not hb then
- hb = lpegmatch(s,b)
- h[b] = hb
- end
- local na = #ha
- local nb = #hb
- for i=1,na do
- for j=1,nb do
- if ha[i] == hb[j] then
- firstoftwoarguments()
- return
- end
- end
- end
- secondoftwoarguments()
-end
-
-function commands.doifinsetelse(a,b)
- local hb = h[b]
- if not hb then hb = lpegmatch(s,b) h[b] = hb end
- for i=1,#hb do
- if a == hb[i] then
- firstoftwoarguments()
- return
- end
- end
- secondoftwoarguments()
-end
-
-local pattern = lpeg.patterns.validdimen
-
-function commands.doifdimenstringelse(str)
- if lpegmatch(pattern,str) then
- firstoftwoarguments()
- else
- secondoftwoarguments()
- end
-end
-
-function commands.firstinset(str)
- local first = match(str,"^([^,]+),")
- context(first or str)
-end
-
-function commands.ntimes(str,n)
- context(rep(str,n or 1))
-end
+if not modules then modules = { } end modules ['syst-lua'] = {
+ version = 1.001,
+ comment = "companion to syst-lua.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format, find, match, rep = string.format, string.find, string.match, string.rep
+local tonumber = tonumber
+local S, lpegmatch, lpegtsplitat = lpeg.S, lpeg.match, lpeg.tsplitat
+
+local context = context
+
+commands = commands or { }
+
+function commands.writestatus(...) logs.status(...) end -- overloaded later
+
+local firstoftwoarguments = context.firstoftwoarguments -- context.constructcsonly("firstoftwoarguments" )
+local secondoftwoarguments = context.secondoftwoarguments -- context.constructcsonly("secondoftwoarguments")
+local firstofoneargument = context.firstofoneargument -- context.constructcsonly("firstofoneargument" )
+local gobbleoneargument = context.gobbleoneargument -- context.constructcsonly("gobbleoneargument" )
+
+-- contextsprint(prtcatcodes,[[\ui_fo]]) -- firstofonearguments
+-- contextsprint(prtcatcodes,[[\ui_go]]) -- gobbleonearguments
+-- contextsprint(prtcatcodes,[[\ui_ft]]) -- firstoftwoarguments
+-- contextsprint(prtcatcodes,[[\ui_st]]) -- secondoftwoarguments
+
+function commands.doifelse(b)
+ if b then
+ firstoftwoarguments()
+ else
+ secondoftwoarguments()
+ end
+end
+
+function commands.doif(b)
+ if b then
+ firstofoneargument()
+ else
+ gobbleoneargument()
+ end
+end
+
+function commands.doifnot(b)
+ if b then
+ gobbleoneargument()
+ else
+ firstofoneargument()
+ end
+end
+
+commands.testcase = commands.doifelse -- obsolete
+
+function commands.boolcase(b)
+ context(b and 1 or 0)
+end
+
+function commands.doifelsespaces(str)
+ if find(str,"^ +$") then
+ firstoftwoarguments()
+ else
+ secondoftwoarguments()
+ end
+end
+
+local s = lpegtsplitat(",")
+local h = { }
+
+function commands.doifcommonelse(a,b) -- often the same test
+ local ha = h[a]
+ local hb = h[b]
+ if not ha then
+ ha = lpegmatch(s,a)
+ h[a] = ha
+ end
+ if not hb then
+ hb = lpegmatch(s,b)
+ h[b] = hb
+ end
+ local na = #ha
+ local nb = #hb
+ for i=1,na do
+ for j=1,nb do
+ if ha[i] == hb[j] then
+ firstoftwoarguments()
+ return
+ end
+ end
+ end
+ secondoftwoarguments()
+end
+
+function commands.doifinsetelse(a,b)
+ local hb = h[b]
+ if not hb then hb = lpegmatch(s,b) h[b] = hb end
+ for i=1,#hb do
+ if a == hb[i] then
+ firstoftwoarguments()
+ return
+ end
+ end
+ secondoftwoarguments()
+end
+
+local pattern = lpeg.patterns.validdimen
+
+function commands.doifdimenstringelse(str)
+ if lpegmatch(pattern,str) then
+ firstoftwoarguments()
+ else
+ secondoftwoarguments()
+ end
+end
+
+function commands.firstinset(str)
+ local first = match(str,"^([^,]+),")
+ context(first or str)
+end
+
+function commands.ntimes(str,n)
+ context(rep(str,n or 1))
+end
diff --git a/tex/context/base/tabl-tbl.lua b/tex/context/base/tabl-tbl.lua
index 19548e7b3..224b2fb99 100644
--- a/tex/context/base/tabl-tbl.lua
+++ b/tex/context/base/tabl-tbl.lua
@@ -1,41 +1,41 @@
-if not modules then modules = { } end modules ['tabl-tbl'] = {
- version = 1.001,
- comment = "companion to tabl-tbl.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- A couple of hacks ... easier to do in Lua than in regular TeX. More will
--- follow.
-
-local context, commands = context, commands
-
-local tonumber = tonumber
-local gsub, rep, sub, find = string.gsub, string.rep, string.sub, string.find
-local P, C, Cc, Ct, lpegmatch = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Ct, lpeg.match
-
-local settexcount = tex.setcount
-
-local separator = P("|")
-local nested = lpeg.patterns.nested
-local pattern = Ct((separator * (C(nested) + Cc("")) * C((1-separator)^0))^0)
-
-function commands.presettabulate(preamble)
- preamble = gsub(preamble,"~","d") -- let's get rid of ~ mess here
- if find(preamble,"%*") then
- -- todo: lpeg but not now
- preamble = gsub(preamble, "%*(%b{})(%b{})", function(n,p)
- return rep(sub(p,2,-2),tonumber(sub(n,2,-2)) or 1)
- end)
- end
- local t = lpegmatch(pattern,preamble)
- local m = #t - 2
- settexcount("global","c_tabl_tabulate_nofcolumns", m/2)
- settexcount("global","c_tabl_tabulate_has_rule_spec_first", t[1] == "" and 0 or 1)
- settexcount("global","c_tabl_tabulate_has_rule_spec_last", t[m+1] == "" and 0 or 1)
- for i=1,m,2 do
- context.settabulateentry(t[i],t[i+1])
- end
- context.settabulatelastentry(t[m+1])
-end
+if not modules then modules = { } end modules ['tabl-tbl'] = {
+ version = 1.001,
+ comment = "companion to tabl-tbl.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- A couple of hacks ... easier to do in Lua than in regular TeX. More will
+-- follow.
+
+local context, commands = context, commands
+
+local tonumber = tonumber
+local gsub, rep, sub, find = string.gsub, string.rep, string.sub, string.find
+local P, C, Cc, Ct, lpegmatch = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Ct, lpeg.match
+
+local settexcount = tex.setcount
+
+local separator = P("|")
+local nested = lpeg.patterns.nested
+local pattern = Ct((separator * (C(nested) + Cc("")) * C((1-separator)^0))^0)
+
+function commands.presettabulate(preamble)
+ preamble = gsub(preamble,"~","d") -- let's get rid of ~ mess here
+ if find(preamble,"%*") then
+ -- todo: lpeg but not now
+ preamble = gsub(preamble, "%*(%b{})(%b{})", function(n,p)
+ return rep(sub(p,2,-2),tonumber(sub(n,2,-2)) or 1)
+ end)
+ end
+ local t = lpegmatch(pattern,preamble)
+ local m = #t - 2
+ settexcount("global","c_tabl_tabulate_nofcolumns", m/2)
+ settexcount("global","c_tabl_tabulate_has_rule_spec_first", t[1] == "" and 0 or 1)
+ settexcount("global","c_tabl_tabulate_has_rule_spec_last", t[m+1] == "" and 0 or 1)
+ for i=1,m,2 do
+ context.settabulateentry(t[i],t[i+1])
+ end
+ context.settabulatelastentry(t[m+1])
+end
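-- A rough sketch (not part of the patch, input made up) of what
-- presettabulate does to a preamble: the repetition is expanded first, then
-- the lpeg pattern splits the result into (option,format) pairs per column.
local expanded = string.gsub("|l|*{2}{c|}", "%*(%b{})(%b{})", function(n,p)
    return string.rep(string.sub(p,2,-2), tonumber(string.sub(n,2,-2)) or 1)
end)
-- expanded --> "|l|c|c|", which the pattern above splits into
-- { "", "l", "", "c", "", "c", "", "" }: three (option,format) pairs handed
-- to context.settabulateentry, plus the trailing entry for the last rule.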
diff --git a/tex/context/base/tabl-xtb.lua b/tex/context/base/tabl-xtb.lua
index 3ffe8a219..5b47bf705 100644
--- a/tex/context/base/tabl-xtb.lua
+++ b/tex/context/base/tabl-xtb.lua
@@ -1,988 +1,988 @@
-if not modules then modules = { } end modules ['tabl-xtb'] = {
- version = 1.001,
- comment = "companion to tabl-xtb.mkvi",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[
-
-This table mechanism is a combination of TeX and Lua. We process
-cells at the TeX end and inspect them at the Lua end. After some analysis
-we run a second pass using the calculated widths, and if needed cells
-go through a third pass to get the heights right. That last pass is
-avoided when possible, which is why some code below looks a bit more
-complex than needed. The reason for such optimizations is that each cell
-is actually a framed instance, and because tables like this can span
-hundreds of pages we want to keep processing time reasonable.
-
-To a large extent the behaviour is comparable with the way bTABLE/eTABLE
-works, and there is a module that maps that one onto this one. Eventually
-this mechanism will be improved so that it can replace its older cousin.
-
-]]--
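-- A rough outline (not part of the patch) of the pass order that the
-- commands.x_table_* mapping at the end of this file implies; the actual cell
-- content and dimensions are supplied from the TeX end:
--
--   xtables.create(settings)
--   -- pass 1: per cell, next_row / initialize_reflow_width / set_reflow_width
--   xtables.reflow_width()      -- distribute the available width
--   -- pass 2 (only for columns whose width changed): the *_reflow_height calls
--   xtables.reflow_height()
--   -- pass 3: per cell, initialize_construct / set_construct
--   xtables.construct()
--   xtables.flush(directives)   -- directives.method, directives.vsize
--   xtables.cleanup()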
-
--- todo: use linked list instead of r/c array
-
-local commands, context, tex, node = commands, context, tex, node
-
-local texdimen = tex.dimen
-local texcount = tex.count
-local texbox = tex.box
-local texsetcount = tex.setcount
-local texsetdimen = tex.setdimen
-
-local format = string.format
-local concat = table.concat
-local points = number.points
-
-local context = context
-local context_beginvbox = context.beginvbox
-local context_endvbox = context.endvbox
-local context_blank = context.blank
-local context_nointerlineskip = context.nointerlineskip
-
-local variables = interfaces.variables
-
-local setmetatableindex = table.setmetatableindex
-local settings_to_hash = utilities.parsers.settings_to_hash
-
-local copy_node_list = node.copy_list
-local hpack_node_list = node.hpack
-local vpack_node_list = node.vpack
-local slide_node_list = node.slide
-local flush_node_list = node.flush_list
-
-local nodepool = nodes.pool
-
-local new_glue = nodepool.glue
-local new_kern = nodepool.kern
-local new_penalty = nodepool.penalty
-local new_hlist = nodepool.hlist
-
-local v_stretch = variables.stretch
-local v_normal = variables.normal
-local v_width = variables.width
-local v_height = variables.height
-local v_repeat = variables["repeat"]
-local v_max = variables.max
-local v_fixed = variables.fixed
-
-local xtables = { }
-typesetters.xtables = xtables
-
-local trace_xtable = false
-local report_xtable = logs.reporter("xtable")
-
-trackers.register("xtable.construct", function(v) trace_xtable = v end)
-
-local null_mode = 0
-local head_mode = 1
-local foot_mode = 2
-local more_mode = 3
-local body_mode = 4
-
-local namedmodes = { [0] =
- "null",
- "head",
- "foot",
- "next",
- "body",
-}
-
-local stack, data = { }, nil
-
-function xtables.create(settings)
- table.insert(stack,data)
- local rows = { }
- local widths = { }
- local heights = { }
- local depths = { }
- local spans = { }
- local distances = { }
- local autowidths = { }
- local modes = { }
- local fixedrows = { }
- local fixedcolumns = { }
- local frozencolumns = { }
- local options = { }
- data = {
- rows = rows,
- widths = widths,
- heights = heights,
- depths = depths,
- spans = spans,
- distances = distances,
- modes = modes,
- autowidths = autowidths,
- fixedrows = fixedrows,
- fixedcolumns = fixedcolumns,
- frozencolumns = frozencolumns,
- options = options,
- nofrows = 0,
- nofcolumns = 0,
- currentrow = 0,
- currentcolumn = 0,
- settings = settings or { },
- }
- local function add_zero(t,k)
- t[k] = 0
- return 0
- end
- local function add_table(t,k)
- local v = { }
- t[k] = v
- return v
- end
- local function add_cell(row,c)
- local cell = {
- nx = 0,
- ny = 0,
- list = false,
- }
- row[c] = cell
- if c > data.nofcolumns then
- data.nofcolumns = c
- end
- return cell
- end
- local function add_row(rows,r)
- local row = { }
- setmetatableindex(row,add_cell)
- rows[r] = row
- if r > data.nofrows then
- data.nofrows = r
- end
- return row
- end
- setmetatableindex(rows,add_row)
- setmetatableindex(widths,add_zero)
- setmetatableindex(heights,add_zero)
- setmetatableindex(depths,add_zero)
- setmetatableindex(distances,add_zero)
- setmetatableindex(modes,add_zero)
- setmetatableindex(fixedrows,add_zero)
- setmetatableindex(fixedcolumns,add_zero)
- setmetatableindex(options,add_table)
- --
- settings.columndistance = tonumber(settings.columndistance) or 0
- settings.rowdistance = tonumber(settings.rowdistance) or 0
- settings.leftmargindistance = tonumber(settings.leftmargindistance) or 0
- settings.rightmargindistance = tonumber(settings.rightmargindistance) or 0
- settings.options = settings_to_hash(settings.option)
- settings.textwidth = tonumber(settings.textwidth) or tex.hsize
- settings.lineheight = tonumber(settings.lineheight) or texdimen.lineheight
- settings.maxwidth = tonumber(settings.maxwidth) or settings.textwidth/8
- -- if #stack > 0 then
- -- settings.textwidth = tex.hsize
- -- end
- data.criterium_v = 2 * data.settings.lineheight
- data.criterium_h = .75 * data.settings.textwidth
-
-end
-
-function xtables.initialize_reflow_width(option)
- local r = data.currentrow
- local c = data.currentcolumn + 1
- local drc = data.rows[r][c]
- drc.nx = texcount.c_tabl_x_nx
- drc.ny = texcount.c_tabl_x_ny
- local distances = data.distances
- local distance = texdimen.d_tabl_x_distance
- if distance > distances[c] then
- distances[c] = distance
- end
- if option and option ~= "" then
- local options = settings_to_hash(option)
- data.options[r][c] = options
- if options[v_fixed] then
- data.frozencolumns[c] = true
- end
- end
- data.currentcolumn = c
-end
-
--- local function rather_fixed(n)
--- for n in node.
-
-function xtables.set_reflow_width()
- local r = data.currentrow
- local c = data.currentcolumn
- local rows = data.rows
- local row = rows[r]
- while row[c].span do -- can also be previous row ones
- c = c + 1
- end
- local tb = texbox.b_tabl_x
- local drc = row[c]
- --
- drc.list = true -- we don't need to keep the content around as we're in trial mode (no: copy_node_list(tb))
- --
- local widths, width = data.widths, tb.width
- if width > widths[c] then
- widths[c] = width
- end
- local heights, height = data.heights, tb.height
- if height > heights[r] then
- heights[r] = height
- end
- local depths, depth = data.depths, tb.depth
- if depth > depths[r] then
- depths[r] = depth
- end
- --
- local dimensionstate = texcount.frameddimensionstate
- local fixedcolumns = data.fixedcolumns
- local fixedrows = data.fixedrows
- if dimensionstate == 1 then
- if width > fixedcolumns[c] then -- how about a span here?
- fixedcolumns[c] = width
- end
- elseif dimensionstate == 2 then
- fixedrows[r] = height
- elseif dimensionstate == 3 then
- fixedrows[r] = height -- width
- fixedcolumns[c] = width -- height
- else -- probably something frozen, like an image -- we could parse the list
- if width <= data.criterium_h and height >= data.criterium_v then
- if width > fixedcolumns[c] then -- how about a span here?
- fixedcolumns[c] = width
- end
- end
- end
- drc.dimensionstate = dimensionstate
- --
- local nx, ny = drc.nx, drc.ny
- if nx > 1 or ny > 1 then
- local spans = data.spans
- local self = true
- for y=1,ny do
- for x=1,nx do
- if self then
- self = false
- else
- local ry = r + y - 1
- local cx = c + x - 1
- if y > 1 then
- spans[ry] = true
- end
- rows[ry][cx].span = true
- end
- end
- end
- c = c + nx - 1
- end
- if c > data.nofcolumns then
- data.nofcolumns = c
- end
- data.currentcolumn = c
-end
-
-function xtables.initialize_reflow_height()
- local r = data.currentrow
- local c = data.currentcolumn + 1
- local rows = data.rows
- local row = rows[r]
- while row[c].span do -- can also be previous row ones
- c = c + 1
- end
- data.currentcolumn = c
- local widths = data.widths
- local w = widths[c]
- local drc = row[c]
- for x=1,drc.nx-1 do
- w = w + widths[c+x]
- end
- texdimen.d_tabl_x_width = w
- local dimensionstate = drc.dimensionstate or 0
- if dimensionstate == 1 or dimensionstate == 3 then
- -- width was fixed so height is known
- texcount.c_tabl_x_skip_mode = 1
- elseif dimensionstate == 2 then
- -- height is enforced
- texcount.c_tabl_x_skip_mode = 1
- elseif data.autowidths[c] then
- -- width has changed so we need to recalculate the height
- texcount.c_tabl_x_skip_mode = 0
- else
- texcount.c_tabl_x_skip_mode = 1
- end
-end
-
-function xtables.set_reflow_height()
- local r = data.currentrow
- local c = data.currentcolumn
- local rows = data.rows
- local row = rows[r]
--- while row[c].span do -- we could adapt drc.nx instead
--- c = c + 1
--- end
- local tb = texbox.b_tabl_x
- local drc = row[c]
- if data.fixedrows[r] == 0 then -- and drc.dimensionstate < 2
- local heights, height = data.heights, tb.height
- if height > heights[r] then
- heights[r] = height
- end
- local depths, depth = data.depths, tb.depth
- if depth > depths[r] then
- depths[r] = depth
- end
- end
--- c = c + drc.nx - 1
--- data.currentcolumn = c
-end
-
-function xtables.initialize_construct()
- local r = data.currentrow
- local c = data.currentcolumn + 1
- local rows = data.rows
- local row = rows[r]
- while row[c].span do -- can also be previous row ones
- c = c + 1
- end
- data.currentcolumn = c
- local widths = data.widths
- local heights = data.heights
- local depths = data.depths
- local w = widths[c]
- local h = heights[r]
- local d = depths[r]
- local drc = row[c]
- for x=1,drc.nx-1 do
- w = w + widths[c+x]
- end
- for y=1,drc.ny-1 do
- h = h + heights[r+y]
- d = d + depths[r+y]
- end
- texdimen.d_tabl_x_width = w
- texdimen.d_tabl_x_height = h + d
- texdimen.d_tabl_x_depth = 0
-end
-
-function xtables.set_construct()
- local r = data.currentrow
- local c = data.currentcolumn
- local rows = data.rows
- local row = rows[r]
--- while row[c].span do -- can also be previous row ones
--- c = c + 1
--- end
- local drc = row[c]
- -- this will change as soon as in luatex we can reset a box list without freeing
- drc.list = copy_node_list(texbox.b_tabl_x)
--- c = c + drc.nx - 1
--- data.currentcolumn = c
-end
-
-local function showwidths(where,widths,autowidths)
- local result = { }
- for i=1,#widths do
- result[#result+1] = format("%12s%s",points(widths[i]),autowidths[i] and "*" or " ")
- end
- return report_xtable("%s : %s",where,concat(result," "))
-end
-
-function xtables.reflow_width()
- local nofrows = data.nofrows
- local nofcolumns = data.nofcolumns
- local rows = data.rows
- for r=1,nofrows do
- local row = rows[r]
- for c=1,nofcolumns do
- local drc = row[c]
- if drc.list then
- -- flush_node_list(drc.list)
- drc.list = false
- end
- end
- end
- -- spread
- local settings = data.settings
- local options = settings.options
- local maxwidth = settings.maxwidth
- -- calculate width
- local widths = data.widths
- local distances = data.distances
- local autowidths = data.autowidths
- local fixedcolumns = data.fixedcolumns
- local frozencolumns = data.frozencolumns
- local width = 0
- local distance = 0
- local nofwide = 0
- local widetotal = 0
- local available = settings.textwidth - settings.leftmargindistance - settings.rightmargindistance
- if trace_xtable then
- showwidths("stage 1",widths,autowidths)
- end
- local noffrozen = 0
- if options[v_max] then
- for c=1,nofcolumns do
- width = width + widths[c]
- if width > maxwidth then
- autowidths[c] = true
- nofwide = nofwide + 1
- widetotal = widetotal + widths[c]
- end
- if c < nofcolumns then
- distance = distance + distances[c]
- end
- if frozencolumns[c] then
- noffrozen = noffrozen + 1 -- brr, should be nx or so
- end
- end
- else
- for c=1,nofcolumns do -- also keep track of forced
- local fixedwidth = fixedcolumns[c]
- if fixedwidth > 0 then
- widths[c] = fixedwidth
- width = width + fixedwidth
- else
- width = width + widths[c]
- if width > maxwidth then
- autowidths[c] = true
- nofwide = nofwide + 1
- widetotal = widetotal + widths[c]
- end
- end
- if c < nofcolumns then
- distance = distance + distances[c]
- end
- if frozencolumns[c] then
- noffrozen = noffrozen + 1 -- brr, should be nx or so
- end
- end
- end
- if trace_xtable then
- showwidths("stage 2",widths,autowidths)
- end
- local delta = available - width - distance - (nofcolumns-1) * settings.columndistance
- if delta == 0 then
- -- nothing to be done
- if trace_xtable then
- report_xtable("perfect fit")
- end
- elseif delta > 0 then
- -- we can distribute some
- if not options[v_stretch] then
- -- not needed
- if trace_xtable then
- report_xtable("too wide but no stretch, delta %p",delta)
- end
- elseif options[v_width] then
- local factor = delta / width
- if trace_xtable then
- report_xtable("proportional stretch, delta %p, width %p, factor %a",delta,width,factor)
- end
- for c=1,nofcolumns do
- widths[c] = widths[c] + factor * widths[c]
- end
- else
- -- frozen -> a column with option=fixed will not stretch
- local extra = delta / (nofcolumns - noffrozen)
- if trace_xtable then
- report_xtable("normal stretch, delta %p, extra %p",delta,extra)
- end
- for c=1,nofcolumns do
- if not frozencolumns[c] then
- widths[c] = widths[c] + extra
- end
- end
- end
- elseif nofwide > 0 then
- while true do
- local done = false
- local available = (widetotal + delta) / nofwide
- if trace_xtable then
- report_xtable("shrink check, total %p, delta %p, columns %s, fixed %p",widetotal,delta,nofwide,available)
- end
- for c=1,nofcolumns do
- if autowidths[c] and available >= widths[c] then
- autowidths[c] = nil
- nofwide = nofwide - 1
- widetotal = widetotal - widths[c]
- done = true
- end
- end
- if not done then
- break
- end
- end
- -- maybe also options[v_width] here but tricky as width does not say
- -- much about amount
- if options[v_width] then -- not that much (we could have a clever vpack loop balancing .. no fun)
- local factor = (widetotal + delta) / width
- if trace_xtable then
- report_xtable("proportional shrink used, total %p, delta %p, columns %s, factor %s",widetotal,delta,nofwide,factor)
- end
- for c=1,nofcolumns do
- if autowidths[c] then
- widths[c] = factor * widths[c]
- end
- end
- else
- local available = (widetotal + delta) / nofwide
- if trace_xtable then
- report_xtable("normal shrink used, total %p, delta %p, columns %s, fixed %p",widetotal,delta,nofwide,available)
- end
- for c=1,nofcolumns do
- if autowidths[c] then
- widths[c] = available
- end
- end
- end
- end
- if trace_xtable then
- showwidths("stage 3",widths,autowidths)
- end
- --
- data.currentrow = 0
- data.currentcolumn = 0
-end
-
-function xtables.reflow_height()
- data.currentrow = 0
- data.currentcolumn = 0
- local settings = data.settings
- if settings.options[v_height] then
- local heights = data.heights
- local depths = data.depths
- local nofrows = data.nofrows
- local totalheight = 0
- local totaldepth = 0
- for i=1,nofrows do
- totalheight = totalheight + heights[i]
- totalheight = totalheight + depths [i]
- end
- local total = totalheight + totaldepth
- local leftover = settings.textheight - total
- if leftover > 0 then
- local leftheight = (totalheight / total ) * leftover / #heights
- local leftdepth = (totaldepth / total ) * leftover / #depths
- for i=1,nofrows do
- heights[i] = heights[i] + leftheight
- depths [i] = depths [i] + leftdepth
- end
- end
- end
-end
-
-local function showspans(data)
- local rows = data.rows
- local modes = data.modes
- local nofcolumns = data.nofcolumns
- local nofrows = data.nofrows
- for r=1,nofrows do
- local line = { }
- local row = rows[r]
- for c=1,nofcolumns do
- local cell =row[c]
- if cell.list then
- line[#line+1] = "list"
- elseif cell.span then
- line[#line+1] = "span"
- else
- line[#line+1] = "none"
- end
- end
- report_xtable("%3d : %s : % t",r,namedmodes[modes[r]] or "----",line)
- end
-end
-
-function xtables.construct()
- local rows = data.rows
- local heights = data.heights
- local depths = data.depths
- local widths = data.widths
- local spans = data.spans
- local distances = data.distances
- local modes = data.modes
- local settings = data.settings
- local nofcolumns = data.nofcolumns
- local nofrows = data.nofrows
- local columndistance = settings.columndistance
- local rowdistance = settings.rowdistance
- local leftmargindistance = settings.leftmargindistance
- local rightmargindistance = settings.rightmargindistance
- -- ranges can be mixes so we collect
-
- if trace_xtable then
- showspans(data)
- end
-
- local ranges = {
- [head_mode] = { },
- [foot_mode] = { },
- [more_mode] = { },
- [body_mode] = { },
- }
- for r=1,nofrows do
- local m = modes[r]
- if m == 0 then
- m = body_mode
- end
- local range = ranges[m]
- range[#range+1] = r
- end
- -- todo: hook in the splitter ... the splitter can ask for a chunk of
- -- a certain size ... no longer a split memory issue then and header
- -- footer then has to happen here too .. target height
- local function packaged_column(r)
- local row = rows[r]
- local start = nil
- local stop = nil
- if leftmargindistance > 0 then
- start = new_kern(leftmargindistance)
- stop = start
- end
- local hasspan = false
- for c=1,nofcolumns do
- local drc = row[c]
- if not hasspan then
- hasspan = drc.span
- end
- local list = drc.list
- if list then
- list.shift = list.height + list.depth
- -- list = hpack_node_list(list) -- is somehow needed
- -- list.width = 0
- -- list.height = 0
- -- list.depth = 0
- -- faster:
- local h = new_hlist()
- h.list = list
- list = h
- --
- if start then
- stop.next = list
- list.prev = stop
- else
- start = list
- end
- stop = list -- one node anyway, so not needed: slide_node_list(list)
- end
- local step = widths[c]
- if c < nofcolumns then
- step = step + columndistance + distances[c]
- end
- local kern = new_kern(step)
- if stop then
- stop.prev = kern
- stop.next = kern
- else -- can be first spanning next row (ny=...)
- start = kern
- end
- stop = kern
- end
- if start then
- if rightmargindistance > 0 then
- local kern = new_kern(rightmargindistance)
- stop.next = kern
- kern.prev = stop
- -- stop = kern
- end
- return start, heights[r] + depths[r], hasspan
- end
- end
- local function collect_range(range)
- local result, nofr = { }, 0
- local nofrange = #range
- for i=1,#range do
- local r = range[i]
- -- local row = rows[r]
- local list, size, hasspan = packaged_column(r)
- if list then
- if hasspan and nofr > 0 then
- result[nofr][4] = true
- end
- nofr = nofr + 1
- result[nofr] = {
- hpack_node_list(list),
- size,
- i < nofrange and rowdistance > 0 and rowdistance or false, -- might move
- false
- }
- end
- end
- return result
- end
- local body = collect_range(ranges[body_mode])
- data.results = {
- [head_mode] = collect_range(ranges[head_mode]),
- [foot_mode] = collect_range(ranges[foot_mode]),
- [more_mode] = collect_range(ranges[more_mode]),
- [body_mode] = body,
- }
- if #body == 0 then
- texsetcount("global","c_tabl_x_state",0)
- texsetdimen("global","d_tabl_x_final_width",0)
- else
- texsetcount("global","c_tabl_x_state",1)
- texsetdimen("global","d_tabl_x_final_width",body[1][1].width)
- end
-end
-
-local function inject(row,copy,package)
- local list = row[1]
- if copy then
- row[1] = copy_node_list(list)
- end
- if package then
- context_beginvbox()
- context(list)
- context(new_kern(row[2]))
- context_endvbox()
- context_nointerlineskip() -- figure out a better way
- if row[4] then
- -- nothing as we have a span
- elseif row[3] then
- context_blank(row[3] .. "sp") -- why blank ?
- else
- context(new_glue(0))
- end
- else
- context(list)
- context(new_kern(row[2]))
- if row[3] then
- context(new_glue(row[3]))
- end
- end
-end
-
-local function total(row,distance)
- local n = #row > 0 and rowdistance or 0
- for i=1,#row do
- local ri = row[i]
- n = n + ri[2] + (ri[3] or 0)
- end
- return n
-end
-
--- local function append(list,what)
--- for i=1,#what do
--- local l = what[i]
--- list[#list+1] = l[1]
--- local k = l[2] + (l[3] or 0)
--- if k ~= 0 then
--- list[#list+1] = new_kern(k)
--- end
--- end
--- end
-
-local function spanheight(body,i)
- local height, n = 0, 1
- while true do
- local bi = body[i]
- if bi then
- height = height + bi[2] + (bi[3] or 0)
- if bi[4] then
- n = n + 1
- i = i + 1
- else
- break
- end
- else
- break
- end
- end
- return height, n
-end
-
-function xtables.flush(directives) -- todo split by size / no inbetween then .. glue list kern blank
- local vsize = directives.vsize
- local method = directives.method or v_normal
- local settings = data.settings
- local results = data.results
- local rowdistance = settings.rowdistance
- local head = results[head_mode]
- local foot = results[foot_mode]
- local more = results[more_mode]
- local body = results[body_mode]
- local repeatheader = settings.header == v_repeat
- local repeatfooter = settings.footer == v_repeat
- if vsize and vsize > 0 then
- context_beginvbox()
- local bodystart = data.bodystart or 1
- local bodystop = data.bodystop or #body
- if bodystart > 0 and bodystart <= bodystop then
- local bodysize = vsize
- local footsize = total(foot,rowdistance)
- local headsize = total(head,rowdistance)
- local moresize = total(more,rowdistance)
- local firstsize, firstspans = spanheight(body,bodystart)
- if bodystart == 1 then -- first chunk gets head
- bodysize = bodysize - headsize - footsize
- if headsize > 0 and bodysize >= firstsize then
- for i=1,#head do
- inject(head[i],repeatheader)
- end
- if rowdistance > 0 then
- context(new_glue(rowdistance))
- end
- if not repeatheader then
- results[head_mode] = { }
- end
- end
- elseif moresize > 0 then -- following chunk gets next
- bodysize = bodysize - footsize - moresize
- if bodysize >= firstsize then
- for i=1,#more do
- inject(more[i],true)
- end
- if rowdistance > 0 then
- context(new_glue(rowdistance))
- end
- end
- elseif headsize > 0 and repeatheader then -- following chunk gets head
- bodysize = bodysize - footsize - headsize
- if bodysize >= firstsize then
- for i=1,#head do
- inject(head[i],true)
- end
- if rowdistance > 0 then
- context(new_glue(rowdistance))
- end
- end
- else -- following chunk gets nothing
- bodysize = bodysize - footsize
- end
- if bodysize >= firstsize then
- local i = bodystart
- while i <= bodystop do -- room for improvement
- local total, spans = spanheight(body,i)
- local bs = bodysize - total
- if bs > 0 then
- bodysize = bs
- for s=1,spans do
- inject(body[i])
- body[i] = nil
- i = i + 1
- end
- bodystart = i
- else
- break
- end
- end
- if bodystart > bodystop then
- -- all is flushed and footer fits
- if footsize > 0 then
- if rowdistance > 0 then
- context(new_glue(rowdistance))
- end
- for i=1,#foot do
- inject(foot[i])
- end
- results[foot_mode] = { }
- end
- results[body_mode] = { }
- texsetcount("global","c_tabl_x_state",0)
- else
- -- some is left so footer is delayed
- -- todo: try to flush a few more lines
- if repeatfooter and footsize > 0 then
- if rowdistance > 0 then
- context(new_glue(rowdistance))
- end
- for i=1,#foot do
- inject(foot[i],true)
- end
- else
- -- todo: try to fit more of body
- end
- texsetcount("global","c_tabl_x_state",2)
- end
- else
- if firstsize > vsize then
- -- get rid of the too large cell
- for s=1,firstspans do
- inject(body[bodystart])
- body[bodystart] = nil
- bodystart = bodystart + 1
- end
- end
- texsetcount("global","c_tabl_x_state",2) -- 1
- end
- else
- texsetcount("global","c_tabl_x_state",0)
- end
- data.bodystart = bodystart
- data.bodystop = bodystop
- context_endvbox()
- else
- if method == variables.split then
- -- maybe also a non float mode with header/footer repeat although
- -- we can also use a float without caption
- for i=1,#head do
- inject(head[i],false,true)
- end
- if #head > 0 and rowdistance > 0 then
- context_blank(rowdistance .. "sp")
- end
- for i=1,#body do
- inject(body[i],false,true)
- end
- if #foot > 0 and rowdistance > 0 then
- context_blank(rowdistance .. "sp")
- end
- for i=1,#foot do
- inject(foot[i],false,true)
- end
- else -- normal
- context_beginvbox()
- for i=1,#head do
- inject(head[i])
- end
- if #head > 0 and rowdistance > 0 then
- context(new_glue(rowdistance))
- end
- for i=1,#body do
- inject(body[i])
- end
- if #foot > 0 and rowdistance > 0 then
- context(new_glue(rowdistance))
- end
- for i=1,#foot do
- inject(foot[i])
- end
- context_endvbox()
- end
- results[head_mode] = { }
- results[body_mode] = { }
- results[foot_mode] = { }
- texsetcount("global","c_tabl_x_state",0)
- end
-end
-
-function xtables.cleanup()
- for mode, result in next, data.results do
- for _, r in next, result do
- flush_node_list(r[1])
- end
- end
- data = table.remove(stack)
-end
-
-function xtables.next_row()
- local r = data.currentrow + 1
- data.modes[r] = texcount.c_tabl_x_mode
- data.currentrow = r
- data.currentcolumn = 0
-end
-
--- eventually we might only have commands
-
-commands.x_table_create = xtables.create
-commands.x_table_reflow_width = xtables.reflow_width
-commands.x_table_reflow_height = xtables.reflow_height
-commands.x_table_construct = xtables.construct
-commands.x_table_flush = xtables.flush
-commands.x_table_cleanup = xtables.cleanup
-commands.x_table_next_row = xtables.next_row
-commands.x_table_init_reflow_width = xtables.initialize_reflow_width
-commands.x_table_init_reflow_height = xtables.initialize_reflow_height
-commands.x_table_init_construct = xtables.initialize_construct
-commands.x_table_set_reflow_width = xtables.set_reflow_width
-commands.x_table_set_reflow_height = xtables.set_reflow_height
-commands.x_table_set_construct = xtables.set_construct
+if not modules then modules = { } end modules ['tabl-xtb'] = {
+ version = 1.001,
+ comment = "companion to tabl-xtb.mkvi",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[
+
+This table mechanism is a combination of TeX and Lua. We process
+cells at the TeX end and inspect them at the Lua end. After some analysis
+we run a second pass using the calculated widths, and if needed cells
+go through a third pass to get the heights right. That last pass is
+avoided when possible, which is why some code below looks a bit more
+complex than needed. The reason for such optimizations is that each cell
+is actually a framed instance, and because tables like this can span
+hundreds of pages we want to keep processing time reasonable.
+
+To a large extent the behaviour is comparable with the way bTABLE/eTABLE
+works, and there is a module that maps that one onto this one. Eventually
+this mechanism will be improved so that it can replace its older cousin.
+
+]]--
+
+-- todo: use linked list instead of r/c array
+
+local commands, context, tex, node = commands, context, tex, node
+
+local texdimen = tex.dimen
+local texcount = tex.count
+local texbox = tex.box
+local texsetcount = tex.setcount
+local texsetdimen = tex.setdimen
+
+local format = string.format
+local concat = table.concat
+local points = number.points
+
+local context = context
+local context_beginvbox = context.beginvbox
+local context_endvbox = context.endvbox
+local context_blank = context.blank
+local context_nointerlineskip = context.nointerlineskip
+
+local variables = interfaces.variables
+
+local setmetatableindex = table.setmetatableindex
+local settings_to_hash = utilities.parsers.settings_to_hash
+
+local copy_node_list = node.copy_list
+local hpack_node_list = node.hpack
+local vpack_node_list = node.vpack
+local slide_node_list = node.slide
+local flush_node_list = node.flush_list
+
+local nodepool = nodes.pool
+
+local new_glue = nodepool.glue
+local new_kern = nodepool.kern
+local new_penalty = nodepool.penalty
+local new_hlist = nodepool.hlist
+
+local v_stretch = variables.stretch
+local v_normal = variables.normal
+local v_width = variables.width
+local v_height = variables.height
+local v_repeat = variables["repeat"]
+local v_max = variables.max
+local v_fixed = variables.fixed
+
+local xtables = { }
+typesetters.xtables = xtables
+
+local trace_xtable = false
+local report_xtable = logs.reporter("xtable")
+
+trackers.register("xtable.construct", function(v) trace_xtable = v end)
+
+local null_mode = 0
+local head_mode = 1
+local foot_mode = 2
+local more_mode = 3
+local body_mode = 4
+
+local namedmodes = { [0] =
+ "null",
+ "head",
+ "foot",
+ "next",
+ "body",
+}
+
+local stack, data = { }, nil
+
+function xtables.create(settings)
+ table.insert(stack,data)
+ local rows = { }
+ local widths = { }
+ local heights = { }
+ local depths = { }
+ local spans = { }
+ local distances = { }
+ local autowidths = { }
+ local modes = { }
+ local fixedrows = { }
+ local fixedcolumns = { }
+ local frozencolumns = { }
+ local options = { }
+ data = {
+ rows = rows,
+ widths = widths,
+ heights = heights,
+ depths = depths,
+ spans = spans,
+ distances = distances,
+ modes = modes,
+ autowidths = autowidths,
+ fixedrows = fixedrows,
+ fixedcolumns = fixedcolumns,
+ frozencolumns = frozencolumns,
+ options = options,
+ nofrows = 0,
+ nofcolumns = 0,
+ currentrow = 0,
+ currentcolumn = 0,
+ settings = settings or { },
+ }
+ local function add_zero(t,k)
+ t[k] = 0
+ return 0
+ end
+ local function add_table(t,k)
+ local v = { }
+ t[k] = v
+ return v
+ end
+ local function add_cell(row,c)
+ local cell = {
+ nx = 0,
+ ny = 0,
+ list = false,
+ }
+ row[c] = cell
+ if c > data.nofcolumns then
+ data.nofcolumns = c
+ end
+ return cell
+ end
+ local function add_row(rows,r)
+ local row = { }
+ setmetatableindex(row,add_cell)
+ rows[r] = row
+ if r > data.nofrows then
+ data.nofrows = r
+ end
+ return row
+ end
+ setmetatableindex(rows,add_row)
+ setmetatableindex(widths,add_zero)
+ setmetatableindex(heights,add_zero)
+ setmetatableindex(depths,add_zero)
+ setmetatableindex(distances,add_zero)
+ setmetatableindex(modes,add_zero)
+ setmetatableindex(fixedrows,add_zero)
+ setmetatableindex(fixedcolumns,add_zero)
+ setmetatableindex(options,add_table)
+ --
+ settings.columndistance = tonumber(settings.columndistance) or 0
+ settings.rowdistance = tonumber(settings.rowdistance) or 0
+ settings.leftmargindistance = tonumber(settings.leftmargindistance) or 0
+ settings.rightmargindistance = tonumber(settings.rightmargindistance) or 0
+ settings.options = settings_to_hash(settings.option)
+ settings.textwidth = tonumber(settings.textwidth) or tex.hsize
+ settings.lineheight = tonumber(settings.lineheight) or texdimen.lineheight
+ settings.maxwidth = tonumber(settings.maxwidth) or settings.textwidth/8
+ -- if #stack > 0 then
+ -- settings.textwidth = tex.hsize
+ -- end
+ data.criterium_v = 2 * data.settings.lineheight
+ data.criterium_h = .75 * data.settings.textwidth
+
+end
+
+function xtables.initialize_reflow_width(option)
+ local r = data.currentrow
+ local c = data.currentcolumn + 1
+ local drc = data.rows[r][c]
+ drc.nx = texcount.c_tabl_x_nx
+ drc.ny = texcount.c_tabl_x_ny
+ local distances = data.distances
+ local distance = texdimen.d_tabl_x_distance
+ if distance > distances[c] then
+ distances[c] = distance
+ end
+ if option and option ~= "" then
+ local options = settings_to_hash(option)
+ data.options[r][c] = options
+ if options[v_fixed] then
+ data.frozencolumns[c] = true
+ end
+ end
+ data.currentcolumn = c
+end
+
+-- local function rather_fixed(n)
+-- for n in node.
+
+function xtables.set_reflow_width()
+ local r = data.currentrow
+ local c = data.currentcolumn
+ local rows = data.rows
+ local row = rows[r]
+ while row[c].span do -- can also be previous row ones
+ c = c + 1
+ end
+ local tb = texbox.b_tabl_x
+ local drc = row[c]
+ --
+ drc.list = true -- we don't need to keep the content around as we're in trial mode (no: copy_node_list(tb))
+ --
+ local widths, width = data.widths, tb.width
+ if width > widths[c] then
+ widths[c] = width
+ end
+ local heights, height = data.heights, tb.height
+ if height > heights[r] then
+ heights[r] = height
+ end
+ local depths, depth = data.depths, tb.depth
+ if depth > depths[r] then
+ depths[r] = depth
+ end
+ --
+ local dimensionstate = texcount.frameddimensionstate
+ local fixedcolumns = data.fixedcolumns
+ local fixedrows = data.fixedrows
+ if dimensionstate == 1 then
+ if width > fixedcolumns[c] then -- how about a span here?
+ fixedcolumns[c] = width
+ end
+ elseif dimensionstate == 2 then
+ fixedrows[r] = height
+ elseif dimensionstate == 3 then
+ fixedrows[r] = height -- width
+ fixedcolumns[c] = width -- height
+ else -- probably something frozen, like an image -- we could parse the list
+ if width <= data.criterium_h and height >= data.criterium_v then
+ if width > fixedcolumns[c] then -- how about a span here?
+ fixedcolumns[c] = width
+ end
+ end
+ end
+ drc.dimensionstate = dimensionstate
+ --
+ local nx, ny = drc.nx, drc.ny
+ if nx > 1 or ny > 1 then
+ local spans = data.spans
+ local self = true
+ for y=1,ny do
+ for x=1,nx do
+ if self then
+ self = false
+ else
+ local ry = r + y - 1
+ local cx = c + x - 1
+ if y > 1 then
+ spans[ry] = true
+ end
+ rows[ry][cx].span = true
+ end
+ end
+ end
+ c = c + nx - 1
+ end
+ if c > data.nofcolumns then
+ data.nofcolumns = c
+ end
+ data.currentcolumn = c
+end
+
+function xtables.initialize_reflow_height()
+ local r = data.currentrow
+ local c = data.currentcolumn + 1
+ local rows = data.rows
+ local row = rows[r]
+ while row[c].span do -- can also be previous row ones
+ c = c + 1
+ end
+ data.currentcolumn = c
+ local widths = data.widths
+ local w = widths[c]
+ local drc = row[c]
+ for x=1,drc.nx-1 do
+ w = w + widths[c+x]
+ end
+ texdimen.d_tabl_x_width = w
+ local dimensionstate = drc.dimensionstate or 0
+ if dimensionstate == 1 or dimensionstate == 3 then
+ -- width was fixed so height is known
+ texcount.c_tabl_x_skip_mode = 1
+ elseif dimensionstate == 2 then
+ -- height is enforced
+ texcount.c_tabl_x_skip_mode = 1
+ elseif data.autowidths[c] then
+ -- width has changed so we need to recalculate the height
+ texcount.c_tabl_x_skip_mode = 0
+ else
+ texcount.c_tabl_x_skip_mode = 1
+ end
+end
+
+function xtables.set_reflow_height()
+ local r = data.currentrow
+ local c = data.currentcolumn
+ local rows = data.rows
+ local row = rows[r]
+-- while row[c].span do -- we could adapt drc.nx instead
+-- c = c + 1
+-- end
+ local tb = texbox.b_tabl_x
+ local drc = row[c]
+ if data.fixedrows[r] == 0 then -- and drc.dimensionstate < 2
+ local heights, height = data.heights, tb.height
+ if height > heights[r] then
+ heights[r] = height
+ end
+ local depths, depth = data.depths, tb.depth
+ if depth > depths[r] then
+ depths[r] = depth
+ end
+ end
+-- c = c + drc.nx - 1
+-- data.currentcolumn = c
+end
+
+function xtables.initialize_construct()
+ local r = data.currentrow
+ local c = data.currentcolumn + 1
+ local rows = data.rows
+ local row = rows[r]
+ while row[c].span do -- can also be previous row ones
+ c = c + 1
+ end
+ data.currentcolumn = c
+ local widths = data.widths
+ local heights = data.heights
+ local depths = data.depths
+ local w = widths[c]
+ local h = heights[r]
+ local d = depths[r]
+ local drc = row[c]
+ for x=1,drc.nx-1 do
+ w = w + widths[c+x]
+ end
+ for y=1,drc.ny-1 do
+ h = h + heights[r+y]
+ d = d + depths[r+y]
+ end
+ texdimen.d_tabl_x_width = w
+ texdimen.d_tabl_x_height = h + d
+ texdimen.d_tabl_x_depth = 0
+end
+
+function xtables.set_construct()
+ local r = data.currentrow
+ local c = data.currentcolumn
+ local rows = data.rows
+ local row = rows[r]
+-- while row[c].span do -- can also be previous row ones
+-- c = c + 1
+-- end
+ local drc = row[c]
+ -- this will change as soon as in luatex we can reset a box list without freeing
+ drc.list = copy_node_list(texbox.b_tabl_x)
+-- c = c + drc.nx - 1
+-- data.currentcolumn = c
+end
+
+local function showwidths(where,widths,autowidths)
+ local result = { }
+ for i=1,#widths do
+ result[#result+1] = format("%12s%s",points(widths[i]),autowidths[i] and "*" or " ")
+ end
+ return report_xtable("%s : %s",where,concat(result," "))
+end
+
+function xtables.reflow_width()
+ local nofrows = data.nofrows
+ local nofcolumns = data.nofcolumns
+ local rows = data.rows
+ for r=1,nofrows do
+ local row = rows[r]
+ for c=1,nofcolumns do
+ local drc = row[c]
+ if drc.list then
+ -- flush_node_list(drc.list)
+ drc.list = false
+ end
+ end
+ end
+ -- spread
+ local settings = data.settings
+ local options = settings.options
+ local maxwidth = settings.maxwidth
+ -- calculate width
+ local widths = data.widths
+ local distances = data.distances
+ local autowidths = data.autowidths
+ local fixedcolumns = data.fixedcolumns
+ local frozencolumns = data.frozencolumns
+ local width = 0
+ local distance = 0
+ local nofwide = 0
+ local widetotal = 0
+ local available = settings.textwidth - settings.leftmargindistance - settings.rightmargindistance
+ if trace_xtable then
+ showwidths("stage 1",widths,autowidths)
+ end
+ local noffrozen = 0
+ if options[v_max] then
+ for c=1,nofcolumns do
+ width = width + widths[c]
+ if width > maxwidth then
+ autowidths[c] = true
+ nofwide = nofwide + 1
+ widetotal = widetotal + widths[c]
+ end
+ if c < nofcolumns then
+ distance = distance + distances[c]
+ end
+ if frozencolumns[c] then
+ noffrozen = noffrozen + 1 -- brr, should be nx or so
+ end
+ end
+ else
+ for c=1,nofcolumns do -- also keep track of forced
+ local fixedwidth = fixedcolumns[c]
+ if fixedwidth > 0 then
+ widths[c] = fixedwidth
+ width = width + fixedwidth
+ else
+ width = width + widths[c]
+ if width > maxwidth then
+ autowidths[c] = true
+ nofwide = nofwide + 1
+ widetotal = widetotal + widths[c]
+ end
+ end
+ if c < nofcolumns then
+ distance = distance + distances[c]
+ end
+ if frozencolumns[c] then
+ noffrozen = noffrozen + 1 -- brr, should be nx or so
+ end
+ end
+ end
+ if trace_xtable then
+ showwidths("stage 2",widths,autowidths)
+ end
+ local delta = available - width - distance - (nofcolumns-1) * settings.columndistance
+ if delta == 0 then
+ -- nothing to be done
+ if trace_xtable then
+ report_xtable("perfect fit")
+ end
+ elseif delta > 0 then
+ -- we can distribute some
+ if not options[v_stretch] then
+ -- not needed
+ if trace_xtable then
+ report_xtable("too wide but no stretch, delta %p",delta)
+ end
+ elseif options[v_width] then
+ local factor = delta / width
+ if trace_xtable then
+ report_xtable("proportional stretch, delta %p, width %p, factor %a",delta,width,factor)
+ end
+ for c=1,nofcolumns do
+ widths[c] = widths[c] + factor * widths[c]
+ end
+ else
+ -- frozen -> a column with option=fixed will not stretch
+ local extra = delta / (nofcolumns - noffrozen)
+ if trace_xtable then
+ report_xtable("normal stretch, delta %p, extra %p",delta,extra)
+ end
+ for c=1,nofcolumns do
+ if not frozencolumns[c] then
+ widths[c] = widths[c] + extra
+ end
+ end
+ end
+ elseif nofwide > 0 then
+ while true do
+            local done = false
+ local available = (widetotal + delta) / nofwide
+ if trace_xtable then
+ report_xtable("shrink check, total %p, delta %p, columns %s, fixed %p",widetotal,delta,nofwide,available)
+ end
+ for c=1,nofcolumns do
+ if autowidths[c] and available >= widths[c] then
+ autowidths[c] = nil
+ nofwide = nofwide - 1
+ widetotal = widetotal - widths[c]
+ done = true
+ end
+ end
+ if not done then
+ break
+ end
+ end
+ -- maybe also options[v_width] here but tricky as width does not say
+ -- much about amount
+ if options[v_width] then -- not that much (we could have a clever vpack loop balancing .. no fun)
+ local factor = (widetotal + delta) / width
+ if trace_xtable then
+ report_xtable("proportional shrink used, total %p, delta %p, columns %s, factor %s",widetotal,delta,nofwide,factor)
+ end
+ for c=1,nofcolumns do
+ if autowidths[c] then
+ widths[c] = factor * widths[c]
+ end
+ end
+ else
+ local available = (widetotal + delta) / nofwide
+ if trace_xtable then
+ report_xtable("normal shrink used, total %p, delta %p, columns %s, fixed %p",widetotal,delta,nofwide,available)
+ end
+ for c=1,nofcolumns do
+ if autowidths[c] then
+ widths[c] = available
+ end
+ end
+ end
+ end
+ if trace_xtable then
+ showwidths("stage 3",widths,autowidths)
+ end
+ --
+ data.currentrow = 0
+ data.currentcolumn = 0
+end
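+
+-- Editorial worked example, not part of this patch, for the stretch branch
+-- above. Assume two columns of 40pt and 60pt, no distances, and 120pt
+-- available, so delta = 20pt. With option=width the stretch is proportional:
+-- factor = 20/100 = 0.2 and the columns become 48pt and 72pt. In the default
+-- branch every non-frozen column gets the same extra = 20pt/2 = 10pt, giving
+-- 50pt and 70pt. Either way the total ends up at the available 120pt.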
+
+function xtables.reflow_height()
+ data.currentrow = 0
+ data.currentcolumn = 0
+ local settings = data.settings
+ if settings.options[v_height] then
+ local heights = data.heights
+ local depths = data.depths
+ local nofrows = data.nofrows
+ local totalheight = 0
+ local totaldepth = 0
+ for i=1,nofrows do
+ totalheight = totalheight + heights[i]
+            totaldepth  = totaldepth  + depths [i]
+ end
+ local total = totalheight + totaldepth
+ local leftover = settings.textheight - total
+ if leftover > 0 then
+ local leftheight = (totalheight / total ) * leftover / #heights
+ local leftdepth = (totaldepth / total ) * leftover / #depths
+ for i=1,nofrows do
+ heights[i] = heights[i] + leftheight
+ depths [i] = depths [i] + leftdepth
+ end
+ end
+ end
+end
+
+local function showspans(data)
+ local rows = data.rows
+ local modes = data.modes
+ local nofcolumns = data.nofcolumns
+ local nofrows = data.nofrows
+ for r=1,nofrows do
+ local line = { }
+ local row = rows[r]
+ for c=1,nofcolumns do
+            local cell = row[c]
+ if cell.list then
+ line[#line+1] = "list"
+ elseif cell.span then
+ line[#line+1] = "span"
+ else
+ line[#line+1] = "none"
+ end
+ end
+ report_xtable("%3d : %s : % t",r,namedmodes[modes[r]] or "----",line)
+ end
+end
+
+function xtables.construct()
+ local rows = data.rows
+ local heights = data.heights
+ local depths = data.depths
+ local widths = data.widths
+ local spans = data.spans
+ local distances = data.distances
+ local modes = data.modes
+ local settings = data.settings
+ local nofcolumns = data.nofcolumns
+ local nofrows = data.nofrows
+ local columndistance = settings.columndistance
+ local rowdistance = settings.rowdistance
+ local leftmargindistance = settings.leftmargindistance
+ local rightmargindistance = settings.rightmargindistance
+ -- ranges can be mixes so we collect
+
+ if trace_xtable then
+ showspans(data)
+ end
+
+ local ranges = {
+ [head_mode] = { },
+ [foot_mode] = { },
+ [more_mode] = { },
+ [body_mode] = { },
+ }
+ for r=1,nofrows do
+ local m = modes[r]
+ if m == 0 then
+ m = body_mode
+ end
+ local range = ranges[m]
+ range[#range+1] = r
+ end
+ -- todo: hook in the splitter ... the splitter can ask for a chunk of
+ -- a certain size ... no longer a split memory issue then and header
+ -- footer then has to happen here too .. target height
+ local function packaged_column(r)
+ local row = rows[r]
+ local start = nil
+ local stop = nil
+ if leftmargindistance > 0 then
+ start = new_kern(leftmargindistance)
+ stop = start
+ end
+ local hasspan = false
+ for c=1,nofcolumns do
+ local drc = row[c]
+ if not hasspan then
+ hasspan = drc.span
+ end
+ local list = drc.list
+ if list then
+ list.shift = list.height + list.depth
+ -- list = hpack_node_list(list) -- is somehow needed
+ -- list.width = 0
+ -- list.height = 0
+ -- list.depth = 0
+ -- faster:
+ local h = new_hlist()
+ h.list = list
+ list = h
+ --
+ if start then
+ stop.next = list
+ list.prev = stop
+ else
+ start = list
+ end
+ stop = list -- one node anyway, so not needed: slide_node_list(list)
+ end
+ local step = widths[c]
+ if c < nofcolumns then
+ step = step + columndistance + distances[c]
+ end
+ local kern = new_kern(step)
+ if stop then
+                stop.next = kern
+                kern.prev = stop
+ else -- can be first spanning next row (ny=...)
+ start = kern
+ end
+ stop = kern
+ end
+ if start then
+ if rightmargindistance > 0 then
+ local kern = new_kern(rightmargindistance)
+ stop.next = kern
+ kern.prev = stop
+ -- stop = kern
+ end
+ return start, heights[r] + depths[r], hasspan
+ end
+ end
+ local function collect_range(range)
+ local result, nofr = { }, 0
+ local nofrange = #range
+ for i=1,#range do
+ local r = range[i]
+ -- local row = rows[r]
+ local list, size, hasspan = packaged_column(r)
+ if list then
+ if hasspan and nofr > 0 then
+ result[nofr][4] = true
+ end
+ nofr = nofr + 1
+ result[nofr] = {
+ hpack_node_list(list),
+ size,
+ i < nofrange and rowdistance > 0 and rowdistance or false, -- might move
+ false
+ }
+ end
+ end
+ return result
+ end
+ local body = collect_range(ranges[body_mode])
+ data.results = {
+ [head_mode] = collect_range(ranges[head_mode]),
+ [foot_mode] = collect_range(ranges[foot_mode]),
+ [more_mode] = collect_range(ranges[more_mode]),
+ [body_mode] = body,
+ }
+ if #body == 0 then
+ texsetcount("global","c_tabl_x_state",0)
+ texsetdimen("global","d_tabl_x_final_width",0)
+ else
+ texsetcount("global","c_tabl_x_state",1)
+ texsetdimen("global","d_tabl_x_final_width",body[1][1].width)
+ end
+end
+
+local function inject(row,copy,package)
+ local list = row[1]
+ if copy then
+ row[1] = copy_node_list(list)
+ end
+ if package then
+ context_beginvbox()
+ context(list)
+ context(new_kern(row[2]))
+ context_endvbox()
+ context_nointerlineskip() -- figure out a better way
+ if row[4] then
+ -- nothing as we have a span
+ elseif row[3] then
+ context_blank(row[3] .. "sp") -- why blank ?
+ else
+ context(new_glue(0))
+ end
+ else
+ context(list)
+ context(new_kern(row[2]))
+ if row[3] then
+ context(new_glue(row[3]))
+ end
+ end
+end
+
+local function total(row,distance)
+    local n = #row > 0 and distance or 0
+ for i=1,#row do
+ local ri = row[i]
+ n = n + ri[2] + (ri[3] or 0)
+ end
+ return n
+end
+
+-- local function append(list,what)
+-- for i=1,#what do
+-- local l = what[i]
+-- list[#list+1] = l[1]
+-- local k = l[2] + (l[3] or 0)
+-- if k ~= 0 then
+-- list[#list+1] = new_kern(k)
+-- end
+-- end
+-- end
+
+local function spanheight(body,i)
+ local height, n = 0, 1
+ while true do
+ local bi = body[i]
+ if bi then
+ height = height + bi[2] + (bi[3] or 0)
+ if bi[4] then
+ n = n + 1
+ i = i + 1
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ return height, n
+end
+
+function xtables.flush(directives) -- todo split by size / no inbetween then .. glue list kern blank
+ local vsize = directives.vsize
+ local method = directives.method or v_normal
+ local settings = data.settings
+ local results = data.results
+ local rowdistance = settings.rowdistance
+ local head = results[head_mode]
+ local foot = results[foot_mode]
+ local more = results[more_mode]
+ local body = results[body_mode]
+ local repeatheader = settings.header == v_repeat
+ local repeatfooter = settings.footer == v_repeat
+ if vsize and vsize > 0 then
+ context_beginvbox()
+ local bodystart = data.bodystart or 1
+ local bodystop = data.bodystop or #body
+ if bodystart > 0 and bodystart <= bodystop then
+ local bodysize = vsize
+ local footsize = total(foot,rowdistance)
+ local headsize = total(head,rowdistance)
+ local moresize = total(more,rowdistance)
+ local firstsize, firstspans = spanheight(body,bodystart)
+ if bodystart == 1 then -- first chunk gets head
+ bodysize = bodysize - headsize - footsize
+ if headsize > 0 and bodysize >= firstsize then
+ for i=1,#head do
+ inject(head[i],repeatheader)
+ end
+ if rowdistance > 0 then
+ context(new_glue(rowdistance))
+ end
+ if not repeatheader then
+ results[head_mode] = { }
+ end
+ end
+ elseif moresize > 0 then -- following chunk gets next
+ bodysize = bodysize - footsize - moresize
+ if bodysize >= firstsize then
+ for i=1,#more do
+ inject(more[i],true)
+ end
+ if rowdistance > 0 then
+ context(new_glue(rowdistance))
+ end
+ end
+ elseif headsize > 0 and repeatheader then -- following chunk gets head
+ bodysize = bodysize - footsize - headsize
+ if bodysize >= firstsize then
+ for i=1,#head do
+ inject(head[i],true)
+ end
+ if rowdistance > 0 then
+ context(new_glue(rowdistance))
+ end
+ end
+ else -- following chunk gets nothing
+ bodysize = bodysize - footsize
+ end
+ if bodysize >= firstsize then
+ local i = bodystart
+ while i <= bodystop do -- room for improvement
+ local total, spans = spanheight(body,i)
+ local bs = bodysize - total
+ if bs > 0 then
+ bodysize = bs
+ for s=1,spans do
+ inject(body[i])
+ body[i] = nil
+ i = i + 1
+ end
+ bodystart = i
+ else
+ break
+ end
+ end
+ if bodystart > bodystop then
+ -- all is flushed and footer fits
+ if footsize > 0 then
+ if rowdistance > 0 then
+ context(new_glue(rowdistance))
+ end
+ for i=1,#foot do
+ inject(foot[i])
+ end
+ results[foot_mode] = { }
+ end
+ results[body_mode] = { }
+ texsetcount("global","c_tabl_x_state",0)
+ else
+ -- some is left so footer is delayed
+ -- todo: try to flush a few more lines
+ if repeatfooter and footsize > 0 then
+ if rowdistance > 0 then
+ context(new_glue(rowdistance))
+ end
+ for i=1,#foot do
+ inject(foot[i],true)
+ end
+ else
+ -- todo: try to fit more of body
+ end
+ texsetcount("global","c_tabl_x_state",2)
+ end
+ else
+ if firstsize > vsize then
+ -- get rid of the too large cell
+ for s=1,firstspans do
+ inject(body[bodystart])
+ body[bodystart] = nil
+ bodystart = bodystart + 1
+ end
+ end
+ texsetcount("global","c_tabl_x_state",2) -- 1
+ end
+ else
+ texsetcount("global","c_tabl_x_state",0)
+ end
+ data.bodystart = bodystart
+ data.bodystop = bodystop
+ context_endvbox()
+ else
+ if method == variables.split then
+ -- maybe also a non float mode with header/footer repeat although
+ -- we can also use a float without caption
+ for i=1,#head do
+ inject(head[i],false,true)
+ end
+ if #head > 0 and rowdistance > 0 then
+ context_blank(rowdistance .. "sp")
+ end
+ for i=1,#body do
+ inject(body[i],false,true)
+ end
+ if #foot > 0 and rowdistance > 0 then
+ context_blank(rowdistance .. "sp")
+ end
+ for i=1,#foot do
+ inject(foot[i],false,true)
+ end
+ else -- normal
+ context_beginvbox()
+ for i=1,#head do
+ inject(head[i])
+ end
+ if #head > 0 and rowdistance > 0 then
+ context(new_glue(rowdistance))
+ end
+ for i=1,#body do
+ inject(body[i])
+ end
+ if #foot > 0 and rowdistance > 0 then
+ context(new_glue(rowdistance))
+ end
+ for i=1,#foot do
+ inject(foot[i])
+ end
+ context_endvbox()
+ end
+ results[head_mode] = { }
+ results[body_mode] = { }
+ results[foot_mode] = { }
+ texsetcount("global","c_tabl_x_state",0)
+ end
+end
+
+function xtables.cleanup()
+ for mode, result in next, data.results do
+ for _, r in next, result do
+ flush_node_list(r[1])
+ end
+ end
+ data = table.remove(stack)
+end
+
+function xtables.next_row()
+ local r = data.currentrow + 1
+ data.modes[r] = texcount.c_tabl_x_mode
+ data.currentrow = r
+ data.currentcolumn = 0
+end
+
+-- eventually we might only have commands
+
+commands.x_table_create = xtables.create
+commands.x_table_reflow_width = xtables.reflow_width
+commands.x_table_reflow_height = xtables.reflow_height
+commands.x_table_construct = xtables.construct
+commands.x_table_flush = xtables.flush
+commands.x_table_cleanup = xtables.cleanup
+commands.x_table_next_row = xtables.next_row
+commands.x_table_init_reflow_width = xtables.initialize_reflow_width
+commands.x_table_init_reflow_height = xtables.initialize_reflow_height
+commands.x_table_init_construct = xtables.initialize_construct
+commands.x_table_set_reflow_width = xtables.set_reflow_width
+commands.x_table_set_reflow_height = xtables.set_reflow_height
+commands.x_table_set_construct = xtables.set_construct
diff --git a/tex/context/base/task-ini.lua b/tex/context/base/task-ini.lua
index 0f477cb6e..41f045ac9 100644
--- a/tex/context/base/task-ini.lua
+++ b/tex/context/base/task-ini.lua
@@ -1,191 +1,191 @@
-if not modules then modules = { } end modules ['task-ini'] = {
- version = 1.001,
- comment = "companion to task-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- this is a temporary solution, we need to isolate some modules and then
--- the load order can determine the trickery to be applied to node lists
---
--- we can disable more handlers and enable them when really used (*)
---
--- todo: two finalizers: real shipout (can be imposed page) and page shipout (individual page)
-
-local tasks = nodes.tasks
-local appendaction = tasks.appendaction
-local disableaction = tasks.disableaction
-local freezegroup = tasks.freezegroup
-local freezecallbacks = callbacks.freeze
-
-appendaction("processors", "normalizers", "typesetters.characters.handler") -- always on
-appendaction("processors", "normalizers", "fonts.collections.process") -- disabled
-appendaction("processors", "normalizers", "fonts.checkers.missing") -- disabled
-
-appendaction("processors", "characters", "scripts.autofontfeature.handler")
-appendaction("processors", "characters", "scripts.splitters.handler") -- disabled
-appendaction("processors", "characters", "typesetters.cleaners.handler") -- disabled
-appendaction("processors", "characters", "typesetters.directions.handler") -- disabled
-appendaction("processors", "characters", "typesetters.cases.handler") -- disabled
-appendaction("processors", "characters", "typesetters.breakpoints.handler") -- disabled
-appendaction("processors", "characters", "scripts.injectors.handler") -- disabled
-
-appendaction("processors", "words", "builders.kernel.hyphenation") -- always on
-appendaction("processors", "words", "languages.words.check") -- disabled
-
-appendaction("processors", "fonts", "builders.paragraphs.solutions.splitters.split") -- experimental
-appendaction("processors", "fonts", "nodes.handlers.characters") -- maybe todo
-appendaction("processors", "fonts", "nodes.injections.handler") -- maybe todo
-appendaction("processors", "fonts", "nodes.handlers.protectglyphs", nil, "nohead") -- maybe todo
-appendaction("processors", "fonts", "builders.kernel.ligaturing") -- always on (could be selective: if only node mode)
-appendaction("processors", "fonts", "builders.kernel.kerning") -- always on (could be selective: if only node mode)
-appendaction("processors", "fonts", "nodes.handlers.stripping") -- disabled (might move)
-------------("processors", "fonts", "typesetters.italics.handler") -- disabled (after otf/kern handling)
-
-appendaction("processors", "lists", "typesetters.spacings.handler") -- disabled
-appendaction("processors", "lists", "typesetters.kerns.handler") -- disabled
-appendaction("processors", "lists", "typesetters.digits.handler") -- disabled (after otf handling)
-appendaction("processors", "lists", "typesetters.italics.handler") -- disabled (after otf/kern handling)
-appendaction("processors", "lists", "typesetters.paragraphs.handler") -- disabled
-
-appendaction("shipouts", "normalizers", "nodes.handlers.cleanuppage") -- disabled
-appendaction("shipouts", "normalizers", "typesetters.alignments.handler")
-appendaction("shipouts", "normalizers", "nodes.references.handler") -- disabled
-appendaction("shipouts", "normalizers", "nodes.destinations.handler") -- disabled
-appendaction("shipouts", "normalizers", "nodes.rules.handler") -- disabled
-appendaction("shipouts", "normalizers", "nodes.shifts.handler") -- disabled
-appendaction("shipouts", "normalizers", "structures.tags.handler") -- disabled
-appendaction("shipouts", "normalizers", "nodes.handlers.accessibility") -- disabled
-appendaction("shipouts", "normalizers", "nodes.handlers.backgrounds") -- disabled
-appendaction("shipouts", "normalizers", "nodes.handlers.alignbackgrounds") -- disabled
-------------("shipouts", "normalizers", "nodes.handlers.export") -- disabled
-
-appendaction("shipouts", "finishers", "nodes.visualizers.handler") -- disabled
-appendaction("shipouts", "finishers", "attributes.colors.handler") -- disabled
-appendaction("shipouts", "finishers", "attributes.transparencies.handler") -- disabled
-appendaction("shipouts", "finishers", "attributes.colorintents.handler") -- disabled
-appendaction("shipouts", "finishers", "attributes.negatives.handler") -- disabled
-appendaction("shipouts", "finishers", "attributes.effects.handler") -- disabled
-appendaction("shipouts", "finishers", "attributes.viewerlayers.handler") -- disabled
-
---maybe integrate relocate and families
-
-appendaction("math", "normalizers", "noads.handlers.unscript", nil, "nohead") -- always on (maybe disabled)
-appendaction("math", "normalizers", "noads.handlers.variants", nil, "nohead") -- always on
-appendaction("math", "normalizers", "noads.handlers.relocate", nil, "nohead") -- always on
-appendaction("math", "normalizers", "noads.handlers.families", nil, "nohead") -- always on
-
-appendaction("math", "normalizers", "noads.handlers.render", nil, "nohead") -- always on
-appendaction("math", "normalizers", "noads.handlers.collapse", nil, "nohead") -- always on
-appendaction("math", "normalizers", "noads.handlers.resize", nil, "nohead") -- always on
-------------("math", "normalizers", "noads.handlers.respace", nil, "nohead") -- always on
-appendaction("math", "normalizers", "noads.handlers.check", nil, "nohead") -- always on
-appendaction("math", "normalizers", "noads.handlers.tags", nil, "nohead") -- disabled
-appendaction("math", "normalizers", "noads.handlers.italics", nil, "nohead") -- disabled
-
-appendaction("math", "builders", "builders.kernel.mlist_to_hlist") -- always on
-------------("math", "builders", "noads.handlers.italics", nil, "nohead") -- disabled
-
--- quite experimental (nodes.handlers.graphicvadjust might go away)
-
-appendaction("finalizers", "lists", "builders.paragraphs.keeptogether")
-appendaction("finalizers", "lists", "nodes.handlers.graphicvadjust") -- todo
-appendaction("finalizers", "fonts", "builders.paragraphs.solutions.splitters.optimize") -- experimental
-appendaction("finalizers", "lists", "builders.paragraphs.tag")
-
--- still experimental
-
-appendaction("mvlbuilders", "normalizers", "nodes.handlers.migrate") --
-appendaction("mvlbuilders", "normalizers", "builders.vspacing.pagehandler") -- last !
-
-appendaction("vboxbuilders", "normalizers", "builders.vspacing.vboxhandler") --
-
--- experimental too
-
-appendaction("mvlbuilders","normalizers","typesetters.checkers.handler")
-appendaction("vboxbuilders","normalizers","typesetters.checkers.handler")
-
--- speedup: only kick in when used
-
-disableaction("processors", "scripts.autofontfeature.handler")
-disableaction("processors", "scripts.splitters.handler")
-disableaction("processors", "scripts.injectors.handler") -- was enabled
-disableaction("processors", "fonts.collections.process")
-disableaction("processors", "fonts.checkers.missing")
-disableaction("processors", "chars.handle_breakpoints")
-disableaction("processors", "typesetters.cleaners.handler")
-disableaction("processors", "typesetters.cases.handler")
-disableaction("processors", "typesetters.digits.handler")
-disableaction("processors", "typesetters.breakpoints.handler")
-disableaction("processors", "typesetters.directions.handler")
-disableaction("processors", "languages.words.check")
-disableaction("processors", "typesetters.spacings.handler")
-disableaction("processors", "typesetters.kerns.handler")
-disableaction("processors", "typesetters.italics.handler")
-disableaction("processors", "nodes.handlers.stripping")
-disableaction("processors", "typesetters.paragraphs.handler")
-
-disableaction("shipouts", "typesetters.alignments.handler")
-disableaction("shipouts", "nodes.rules.handler")
-disableaction("shipouts", "nodes.shifts.handler")
-disableaction("shipouts", "attributes.colors.handler")
-disableaction("shipouts", "attributes.transparencies.handler")
-disableaction("shipouts", "attributes.colorintents.handler")
-disableaction("shipouts", "attributes.effects.handler")
-disableaction("shipouts", "attributes.negatives.handler")
-disableaction("shipouts", "attributes.viewerlayers.handler")
-disableaction("shipouts", "structures.tags.handler")
-disableaction("shipouts", "nodes.visualizers.handler")
-disableaction("shipouts", "nodes.handlers.accessibility")
-disableaction("shipouts", "nodes.handlers.backgrounds")
-disableaction("shipouts", "nodes.handlers.alignbackgrounds")
-disableaction("shipouts", "nodes.handlers.cleanuppage")
-
-disableaction("shipouts", "nodes.references.handler")
-disableaction("shipouts", "nodes.destinations.handler")
-
---~ disableaction("shipouts", "nodes.handlers.export")
-
-disableaction("mvlbuilders", "nodes.handlers.migrate")
-
-disableaction("processors", "builders.paragraphs.solutions.splitters.split")
-
-disableaction("finalizers", "builders.paragraphs.keeptogether")
-disableaction("finalizers", "builders.paragraphs.solutions.splitters.optimize")
-disableaction("finalizers", "nodes.handlers.graphicvadjust") -- sort of obsolete
-disableaction("finalizers", "builders.paragraphs.tag")
-
-disableaction("math", "noads.handlers.tags")
-disableaction("math", "noads.handlers.italics")
-
-disableaction("mvlbuilders", "typesetters.checkers.handler")
-disableaction("vboxbuilders","typesetters.checkers.handler")
-
-freezecallbacks("find_.*_file", "find file using resolver")
-freezecallbacks("read_.*_file", "read file at once")
-freezecallbacks("open_.*_file", "open file for reading")
-
--- experimental:
-
-freezegroup("processors", "normalizers")
-freezegroup("processors", "characters")
-freezegroup("processors", "words")
-freezegroup("processors", "fonts")
-freezegroup("processors", "lists")
-
-freezegroup("finalizers", "normalizers")
-freezegroup("finalizers", "fonts")
-freezegroup("finalizers", "lists")
-
-freezegroup("shipouts", "normalizers")
-freezegroup("shipouts", "finishers")
-
-freezegroup("mvlbuilders", "normalizers")
-freezegroup("vboxbuilders", "normalizers")
-
------------("parbuilders", "lists")
------------("pagebuilders", "lists")
-
-freezegroup("math", "normalizers")
-freezegroup("math", "builders")
+if not modules then modules = { } end modules ['task-ini'] = {
+ version = 1.001,
+ comment = "companion to task-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- this is a temporary solution, we need to isolate some modules and then
+-- the load order can determine the trickery to be applied to node lists
+--
+-- we can disable more handlers and enable them when really used (*)
+--
+-- todo: two finalizers: real shipout (can be imposed page) and page shipout (individual page)
+
+local tasks = nodes.tasks
+local appendaction = tasks.appendaction
+local disableaction = tasks.disableaction
+local freezegroup = tasks.freezegroup
+local freezecallbacks = callbacks.freeze
+
+appendaction("processors", "normalizers", "typesetters.characters.handler") -- always on
+appendaction("processors", "normalizers", "fonts.collections.process") -- disabled
+appendaction("processors", "normalizers", "fonts.checkers.missing") -- disabled
+
+appendaction("processors", "characters", "scripts.autofontfeature.handler")
+appendaction("processors", "characters", "scripts.splitters.handler") -- disabled
+appendaction("processors", "characters", "typesetters.cleaners.handler") -- disabled
+appendaction("processors", "characters", "typesetters.directions.handler") -- disabled
+appendaction("processors", "characters", "typesetters.cases.handler") -- disabled
+appendaction("processors", "characters", "typesetters.breakpoints.handler") -- disabled
+appendaction("processors", "characters", "scripts.injectors.handler") -- disabled
+
+appendaction("processors", "words", "builders.kernel.hyphenation") -- always on
+appendaction("processors", "words", "languages.words.check") -- disabled
+
+appendaction("processors", "fonts", "builders.paragraphs.solutions.splitters.split") -- experimental
+appendaction("processors", "fonts", "nodes.handlers.characters") -- maybe todo
+appendaction("processors", "fonts", "nodes.injections.handler") -- maybe todo
+appendaction("processors", "fonts", "nodes.handlers.protectglyphs", nil, "nohead") -- maybe todo
+appendaction("processors", "fonts", "builders.kernel.ligaturing") -- always on (could be selective: if only node mode)
+appendaction("processors", "fonts", "builders.kernel.kerning") -- always on (could be selective: if only node mode)
+appendaction("processors", "fonts", "nodes.handlers.stripping") -- disabled (might move)
+------------("processors", "fonts", "typesetters.italics.handler") -- disabled (after otf/kern handling)
+
+appendaction("processors", "lists", "typesetters.spacings.handler") -- disabled
+appendaction("processors", "lists", "typesetters.kerns.handler") -- disabled
+appendaction("processors", "lists", "typesetters.digits.handler") -- disabled (after otf handling)
+appendaction("processors", "lists", "typesetters.italics.handler") -- disabled (after otf/kern handling)
+appendaction("processors", "lists", "typesetters.paragraphs.handler") -- disabled
+
+appendaction("shipouts", "normalizers", "nodes.handlers.cleanuppage") -- disabled
+appendaction("shipouts", "normalizers", "typesetters.alignments.handler")
+appendaction("shipouts", "normalizers", "nodes.references.handler") -- disabled
+appendaction("shipouts", "normalizers", "nodes.destinations.handler") -- disabled
+appendaction("shipouts", "normalizers", "nodes.rules.handler") -- disabled
+appendaction("shipouts", "normalizers", "nodes.shifts.handler") -- disabled
+appendaction("shipouts", "normalizers", "structures.tags.handler") -- disabled
+appendaction("shipouts", "normalizers", "nodes.handlers.accessibility") -- disabled
+appendaction("shipouts", "normalizers", "nodes.handlers.backgrounds") -- disabled
+appendaction("shipouts", "normalizers", "nodes.handlers.alignbackgrounds") -- disabled
+------------("shipouts", "normalizers", "nodes.handlers.export") -- disabled
+
+appendaction("shipouts", "finishers", "nodes.visualizers.handler") -- disabled
+appendaction("shipouts", "finishers", "attributes.colors.handler") -- disabled
+appendaction("shipouts", "finishers", "attributes.transparencies.handler") -- disabled
+appendaction("shipouts", "finishers", "attributes.colorintents.handler") -- disabled
+appendaction("shipouts", "finishers", "attributes.negatives.handler") -- disabled
+appendaction("shipouts", "finishers", "attributes.effects.handler") -- disabled
+appendaction("shipouts", "finishers", "attributes.viewerlayers.handler") -- disabled
+
+--maybe integrate relocate and families
+
+appendaction("math", "normalizers", "noads.handlers.unscript", nil, "nohead") -- always on (maybe disabled)
+appendaction("math", "normalizers", "noads.handlers.variants", nil, "nohead") -- always on
+appendaction("math", "normalizers", "noads.handlers.relocate", nil, "nohead") -- always on
+appendaction("math", "normalizers", "noads.handlers.families", nil, "nohead") -- always on
+
+appendaction("math", "normalizers", "noads.handlers.render", nil, "nohead") -- always on
+appendaction("math", "normalizers", "noads.handlers.collapse", nil, "nohead") -- always on
+appendaction("math", "normalizers", "noads.handlers.resize", nil, "nohead") -- always on
+------------("math", "normalizers", "noads.handlers.respace", nil, "nohead") -- always on
+appendaction("math", "normalizers", "noads.handlers.check", nil, "nohead") -- always on
+appendaction("math", "normalizers", "noads.handlers.tags", nil, "nohead") -- disabled
+appendaction("math", "normalizers", "noads.handlers.italics", nil, "nohead") -- disabled
+
+appendaction("math", "builders", "builders.kernel.mlist_to_hlist") -- always on
+------------("math", "builders", "noads.handlers.italics", nil, "nohead") -- disabled
+
+-- quite experimental (nodes.handlers.graphicvadjust might go away)
+
+appendaction("finalizers", "lists", "builders.paragraphs.keeptogether")
+appendaction("finalizers", "lists", "nodes.handlers.graphicvadjust") -- todo
+appendaction("finalizers", "fonts", "builders.paragraphs.solutions.splitters.optimize") -- experimental
+appendaction("finalizers", "lists", "builders.paragraphs.tag")
+
+-- still experimental
+
+appendaction("mvlbuilders", "normalizers", "nodes.handlers.migrate") --
+appendaction("mvlbuilders", "normalizers", "builders.vspacing.pagehandler") -- last !
+
+appendaction("vboxbuilders", "normalizers", "builders.vspacing.vboxhandler") --
+
+-- experimental too
+
+appendaction("mvlbuilders","normalizers","typesetters.checkers.handler")
+appendaction("vboxbuilders","normalizers","typesetters.checkers.handler")
+
+-- speedup: only kick in when used
+
+disableaction("processors", "scripts.autofontfeature.handler")
+disableaction("processors", "scripts.splitters.handler")
+disableaction("processors", "scripts.injectors.handler") -- was enabled
+disableaction("processors", "fonts.collections.process")
+disableaction("processors", "fonts.checkers.missing")
+disableaction("processors", "chars.handle_breakpoints")
+disableaction("processors", "typesetters.cleaners.handler")
+disableaction("processors", "typesetters.cases.handler")
+disableaction("processors", "typesetters.digits.handler")
+disableaction("processors", "typesetters.breakpoints.handler")
+disableaction("processors", "typesetters.directions.handler")
+disableaction("processors", "languages.words.check")
+disableaction("processors", "typesetters.spacings.handler")
+disableaction("processors", "typesetters.kerns.handler")
+disableaction("processors", "typesetters.italics.handler")
+disableaction("processors", "nodes.handlers.stripping")
+disableaction("processors", "typesetters.paragraphs.handler")
+
+disableaction("shipouts", "typesetters.alignments.handler")
+disableaction("shipouts", "nodes.rules.handler")
+disableaction("shipouts", "nodes.shifts.handler")
+disableaction("shipouts", "attributes.colors.handler")
+disableaction("shipouts", "attributes.transparencies.handler")
+disableaction("shipouts", "attributes.colorintents.handler")
+disableaction("shipouts", "attributes.effects.handler")
+disableaction("shipouts", "attributes.negatives.handler")
+disableaction("shipouts", "attributes.viewerlayers.handler")
+disableaction("shipouts", "structures.tags.handler")
+disableaction("shipouts", "nodes.visualizers.handler")
+disableaction("shipouts", "nodes.handlers.accessibility")
+disableaction("shipouts", "nodes.handlers.backgrounds")
+disableaction("shipouts", "nodes.handlers.alignbackgrounds")
+disableaction("shipouts", "nodes.handlers.cleanuppage")
+
+disableaction("shipouts", "nodes.references.handler")
+disableaction("shipouts", "nodes.destinations.handler")
+
+--~ disableaction("shipouts", "nodes.handlers.export")
+
+disableaction("mvlbuilders", "nodes.handlers.migrate")
+
+disableaction("processors", "builders.paragraphs.solutions.splitters.split")
+
+disableaction("finalizers", "builders.paragraphs.keeptogether")
+disableaction("finalizers", "builders.paragraphs.solutions.splitters.optimize")
+disableaction("finalizers", "nodes.handlers.graphicvadjust") -- sort of obsolete
+disableaction("finalizers", "builders.paragraphs.tag")
+
+disableaction("math", "noads.handlers.tags")
+disableaction("math", "noads.handlers.italics")
+
+disableaction("mvlbuilders", "typesetters.checkers.handler")
+disableaction("vboxbuilders","typesetters.checkers.handler")
+
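+-- Editorial sketch, not part of this patch: the handlers disabled above are
+-- switched on again at runtime by the mechanism that first needs them. The
+-- usual pattern (the setter name below is hypothetical, the handler name and
+-- nodes.tasks.enableaction are taken as given) looks like this:
+--
+-- local enableaction = nodes.tasks.enableaction
+--
+-- local enabled = false
+--
+-- function typesetters.kerns.set(factor) -- called when the user enables kerning
+--     if not enabled then
+--         enableaction("processors","typesetters.kerns.handler")
+--         enabled = true
+--     end
+--     -- store the factor for the handler to pick up
+-- end
+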
+freezecallbacks("find_.*_file", "find file using resolver")
+freezecallbacks("read_.*_file", "read file at once")
+freezecallbacks("open_.*_file", "open file for reading")
+
+-- experimental:
+
+freezegroup("processors", "normalizers")
+freezegroup("processors", "characters")
+freezegroup("processors", "words")
+freezegroup("processors", "fonts")
+freezegroup("processors", "lists")
+
+freezegroup("finalizers", "normalizers")
+freezegroup("finalizers", "fonts")
+freezegroup("finalizers", "lists")
+
+freezegroup("shipouts", "normalizers")
+freezegroup("shipouts", "finishers")
+
+freezegroup("mvlbuilders", "normalizers")
+freezegroup("vboxbuilders", "normalizers")
+
+-----------("parbuilders", "lists")
+-----------("pagebuilders", "lists")
+
+freezegroup("math", "normalizers")
+freezegroup("math", "builders")
diff --git a/tex/context/base/toks-ini.lua b/tex/context/base/toks-ini.lua
index ef4b5406b..0136f274f 100644
--- a/tex/context/base/toks-ini.lua
+++ b/tex/context/base/toks-ini.lua
@@ -1,341 +1,341 @@
-if not modules then modules = { } end modules ['toks-ini'] = {
- version = 1.001,
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local utfbyte, utfchar, utfvalues = utf.byte, utf.char, utf.values
-local format, gsub = string.format, string.gsub
-
---[[ldx--
-
-This code is experimental and needs a cleanup. The visualizers will move to
-a module.
---ldx]]--
-
--- 1 = command, 2 = modifier (char), 3 = controlsequence id
---
--- callback.register('token_filter', token.get_next)
---
--- token.get_next()
--- token.expand()
--- token.create()
--- token.csname_id()
--- token.csname_name(v)
--- token.command_id()
--- token.command_name(v)
--- token.is_expandable()
--- token.is_activechar()
--- token.lookup(v)
-
--- actually, we can use token registers to store tokens
-
-local token, tex = token, tex
-
-local createtoken = token.create
-local csname_id = token.csname_id
-local command_id = token.command_id
-local command_name = token.command_name
-local get_next = token.get_next
-local expand = token.expand
-local is_activechar = token.is_activechar
-local csname_name = token.csname_name
-
-tokens = tokens or { }
-local tokens = tokens
-
-tokens.vbox = createtoken("vbox")
-tokens.hbox = createtoken("hbox")
-tokens.vtop = createtoken("vtop")
-tokens.bgroup = createtoken(utfbyte("{"), 1)
-tokens.egroup = createtoken(utfbyte("}"), 2)
-
-tokens.letter = function(chr) return createtoken(utfbyte(chr), 11) end
-tokens.other = function(chr) return createtoken(utfbyte(chr), 12) end
-
-tokens.letters = function(str)
- local t, n = { }, 0
- for chr in utfvalues(str) do
- n = n + 1
- t[n] = createtoken(chr, 11)
- end
- return t
-end
-
-tokens.collectors = tokens.collectors or { }
-local collectors = tokens.collectors
-
-collectors.data = collectors.data or { }
-local collectordata = collectors.data
-
-collectors.registered = collectors.registered or { }
-local registered = collectors.registered
-
-local function printlist(data)
- callbacks.push('token_filter', function ()
- callbacks.pop('token_filter') -- tricky but the nil assignment helps
- return data
- end)
-end
-
-tex.printlist = printlist -- will change to another namespace
-
-function collectors.flush(tag)
- printlist(collectordata[tag])
-end
-
-function collectors.test(tag)
- printlist(collectordata[tag])
-end
-
-function collectors.register(name)
- registered[csname_id(name)] = name
-end
-
-local call = command_id("call")
-local letter = command_id("letter")
-local other = command_id("other_char")
-
-function collectors.install(tag,end_cs)
- local data, d = { }, 0
- collectordata[tag] = data
- local endcs = csname_id(end_cs)
- while true do
- local t = get_next()
- local a, b = t[1], t[3]
- if b == endcs then
-            context[end_cs]()
- return
- elseif a == call and registered[b] then
- expand()
- else
- d = d + 1
- data[d] = t
- end
- end
-end
-
-function collectors.handle(tag,handle,flush)
- collectordata[tag] = handle(collectordata[tag])
- if flush then
- collectors.flush(tag)
- end
-end
-
-local show_methods = { }
-collectors.show_methods = show_methods
-
-function collectors.show(tag, method)
- if type(tag) == "table" then
- show_methods[method or 'a'](tag)
- else
- show_methods[method or 'a'](collectordata[tag])
- end
-end
-
-function collectors.defaultwords(t,str)
- local n = #t
- n = n + 1
- t[n] = tokens.bgroup
- n = n + 1
- t[n] = createtoken("red")
- for i=1,#str do
- n = n + 1
- t[n] = tokens.other('*')
- end
- n = n + 1
- t[n] = tokens.egroup
-end
-
-function collectors.dowithwords(tag,handle)
- local t, w, tn, wn = { }, { }, 0, 0
- handle = handle or collectors.defaultwords
- local tagdata = collectordata[tag]
- for k=1,#tagdata do
- local v = tagdata[k]
- if v[1] == letter then
- wn = wn + 1
- w[wn] = v[2]
- else
- if wn > 0 then
- handle(t,w)
- wn = 0
- end
- tn = tn + 1
- t[tn] = v
- end
- end
- if wn > 0 then
- handle(t,w)
- end
- collectordata[tag] = t
-end
-
-local function showtoken(t)
- if t then
- local cmd, chr, id, cs, name = t[1], t[2], t[3], nil, command_name(t) or ""
- if cmd == letter or cmd == other then
- return format("%s-> %s -> %s", name, chr, utfchar(chr))
- elseif id > 0 then
- cs = csname_name(t) or nil
- if cs then
- return format("%s-> %s", name, cs)
- elseif tonumber(chr) < 0 then
- return format("%s-> %s", name, id)
- else
- return format("%s-> (%s,%s)", name, chr, id)
- end
- else
- return format("%s", name)
- end
- else
- return "no node"
- end
-end
-
-collectors.showtoken = showtoken
-
-function collectors.trace()
- local t = get_next()
- logs.report("tokenlist",showtoken(t))
- return t
-end
-
--- these might move to a runtime module
-
-show_methods.a = function(data) -- no need to store the table, just pass directly
- local function row(one,two,three,four,five)
- context.NC() context(one)
- context.NC() context(two)
- context.NC() context(three)
- context.NC() context(four)
- context.NC() context(five)
- context.NC() context.NR()
- end
- context.starttabulate { "|T|Tr|cT|Tr|T|" }
- row("cmd","chr","","id","name")
- context.HL()
- for _,v in next, data do
- local cmd, chr, id, cs, sym = v[1], v[2], v[3], "", ""
- local name = gsub(command_name(v) or "","_","\\_")
- if id > 0 then
- cs = csname_name(v) or ""
- if cs ~= "" then cs = "\\string " .. cs end
- else
- id = ""
- end
- if cmd == letter or cmd == other then
- sym = "\\char " .. chr
- end
- if tonumber(chr) < 0 then
- row(name,"",sym,id,cs)
- else
- row(name,chr,sym,id,cs)
- end
- end
- context.stoptabulate()
-end
-
-local function show_b_c(data,swap) -- no need to store the table, just pass directly
- local function row(one,two,three)
- context.NC() context(one)
- context.NC() context(two)
- context.NC() context(three)
- context.NC() context.NR()
- end
- if swap then
- context.starttabulate { "|Tl|Tl|Tr|" }
- else
- context.starttabulate { "|Tl|Tr|Tl|" }
- end
- row("cmd","chr","name")
- context.HL()
- for _,v in next, data do
- local cmd, chr, id, cs, sym = v[1], v[2], v[3], "", ""
- local name = gsub(command_name(v) or "","_","\\_")
- if id > 0 then
- cs = csname_name(v) or ""
- end
- if cmd == letter or cmd == other then
- sym = "\\char " .. chr
- elseif cs == "" then
- -- okay
- elseif is_activechar(v) then
- sym = "\\string " .. cs
- else
- sym = "\\string\\" .. cs
- end
- if swap then
- row(name,sym,chr)
- elseif tonumber(chr) < 0 then
- row(name,"",sym)
- else
- row(name,chr,sym)
- end
- end
- context.stoptabulate()
-end
-
--- Even more experimental ...
-
-show_methods.b = function(data) show_b_c(data,false) end
-show_methods.c = function(data) show_b_c(data,true ) end
-
-local remapper = { } -- namespace
-collectors.remapper = remapper
-
-local remapperdata = { } -- user mappings
-remapper.data = remapperdata
-
-function remapper.store(tag,class,key)
- local s = remapperdata[class]
- if not s then
- s = { }
- remapperdata[class] = s
- end
- s[key] = collectordata[tag]
- collectordata[tag] = nil
-end
-
-function remapper.convert(tag,toks)
- local data = remapperdata[tag]
- local leftbracket, rightbracket = utfbyte('['), utfbyte(']')
- local skipping = 0
- -- todo: math
- if data then
- local t, n = { }, 0
- for s=1,#toks do
- local tok = toks[s]
- local one, two = tok[1], tok[2]
- if one == 11 or one == 12 then
- if two == leftbracket then
- skipping = skipping + 1
- n = n + 1 ; t[n] = tok
- elseif two == rightbracket then
- skipping = skipping - 1
- n = n + 1 ; t[n] = tok
- elseif skipping == 0 then
- local new = data[two]
- if new then
- if #new > 1 then
-                        for i=1,#new do
-                            n = n + 1 ; t[n] = new[i]
- end
- else
- n = n + 1 ; t[n] = new[1]
- end
- else
- n = n + 1 ; t[n] = tok
- end
- else
- n = n + 1 ; t[n] = tok
- end
- else
- n = n + 1 ; t[n] = tok
- end
- end
- return t
- else
- return toks
- end
-end
+if not modules then modules = { } end modules ['toks-ini'] = {
+ version = 1.001,
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local utfbyte, utfchar, utfvalues = utf.byte, utf.char, utf.values
+local format, gsub = string.format, string.gsub
+
+--[[ldx--
+
+This code is experimental and needs a cleanup. The visualizers will move to
+a module.
+--ldx]]--
+
+-- 1 = command, 2 = modifier (char), 3 = controlsequence id
+--
+-- callback.register('token_filter', token.get_next)
+--
+-- token.get_next()
+-- token.expand()
+-- token.create()
+-- token.csname_id()
+-- token.csname_name(v)
+-- token.command_id()
+-- token.command_name(v)
+-- token.is_expandable()
+-- token.is_activechar()
+-- token.lookup(v)
+
+-- actually, we can use token registers to store tokens
+
+local token, tex = token, tex
+
+local createtoken = token.create
+local csname_id = token.csname_id
+local command_id = token.command_id
+local command_name = token.command_name
+local get_next = token.get_next
+local expand = token.expand
+local is_activechar = token.is_activechar
+local csname_name = token.csname_name
+
+tokens = tokens or { }
+local tokens = tokens
+
+tokens.vbox = createtoken("vbox")
+tokens.hbox = createtoken("hbox")
+tokens.vtop = createtoken("vtop")
+tokens.bgroup = createtoken(utfbyte("{"), 1)
+tokens.egroup = createtoken(utfbyte("}"), 2)
+
+tokens.letter = function(chr) return createtoken(utfbyte(chr), 11) end
+tokens.other = function(chr) return createtoken(utfbyte(chr), 12) end
+
+tokens.letters = function(str)
+ local t, n = { }, 0
+ for chr in utfvalues(str) do
+ n = n + 1
+ t[n] = createtoken(chr, 11)
+ end
+ return t
+end
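+
+-- Editorial illustration, not part of this patch, tying the helpers above to
+-- the triplet representation documented earlier (1 = command, 2 = modifier,
+-- 3 = controlsequence id). For character tokens the first slot matches the
+-- letter/other command codes (11 and 12, as also used by the remapper below)
+-- and the controlsequence id is not positive.
+--
+-- local t = tokens.letters("tex")
+-- -- #t == 3 and t[1] is the letter token for "t":
+-- -- t[1][1] == 11, t[1][2] == utf.byte("t")
+--
+-- local star = tokens.other("*")
+-- -- star[1] == 12, star[2] == utf.byte("*")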
+
+tokens.collectors = tokens.collectors or { }
+local collectors = tokens.collectors
+
+collectors.data = collectors.data or { }
+local collectordata = collectors.data
+
+collectors.registered = collectors.registered or { }
+local registered = collectors.registered
+
+local function printlist(data)
+ callbacks.push('token_filter', function ()
+ callbacks.pop('token_filter') -- tricky but the nil assignment helps
+ return data
+ end)
+end
+
+tex.printlist = printlist -- will change to another namespace
+
+function collectors.flush(tag)
+ printlist(collectordata[tag])
+end
+
+function collectors.test(tag)
+ printlist(collectordata[tag])
+end
+
+function collectors.register(name)
+ registered[csname_id(name)] = name
+end
+
+local call = command_id("call")
+local letter = command_id("letter")
+local other = command_id("other_char")
+
+function collectors.install(tag,end_cs)
+ local data, d = { }, 0
+ collectordata[tag] = data
+ local endcs = csname_id(end_cs)
+ while true do
+ local t = get_next()
+ local a, b = t[1], t[3]
+ if b == endcs then
+            context[end_cs]()
+ return
+ elseif a == call and registered[b] then
+ expand()
+ else
+ d = d + 1
+ data[d] = t
+ end
+ end
+end
+
+function collectors.handle(tag,handle,flush)
+ collectordata[tag] = handle(collectordata[tag])
+ if flush then
+ collectors.flush(tag)
+ end
+end
+
+local show_methods = { }
+collectors.show_methods = show_methods
+
+function collectors.show(tag, method)
+ if type(tag) == "table" then
+ show_methods[method or 'a'](tag)
+ else
+ show_methods[method or 'a'](collectordata[tag])
+ end
+end
+
+function collectors.defaultwords(t,str)
+ local n = #t
+ n = n + 1
+ t[n] = tokens.bgroup
+ n = n + 1
+ t[n] = createtoken("red")
+ for i=1,#str do
+ n = n + 1
+ t[n] = tokens.other('*')
+ end
+ n = n + 1
+ t[n] = tokens.egroup
+end
+
+function collectors.dowithwords(tag,handle)
+ local t, w, tn, wn = { }, { }, 0, 0
+ handle = handle or collectors.defaultwords
+ local tagdata = collectordata[tag]
+ for k=1,#tagdata do
+ local v = tagdata[k]
+ if v[1] == letter then
+ wn = wn + 1
+ w[wn] = v[2]
+ else
+ if wn > 0 then
+ handle(t,w)
+ wn = 0
+ end
+ tn = tn + 1
+ t[tn] = v
+ end
+ end
+ if wn > 0 then
+ handle(t,w)
+ end
+ collectordata[tag] = t
+end
+
+local function showtoken(t)
+ if t then
+ local cmd, chr, id, cs, name = t[1], t[2], t[3], nil, command_name(t) or ""
+ if cmd == letter or cmd == other then
+ return format("%s-> %s -> %s", name, chr, utfchar(chr))
+ elseif id > 0 then
+ cs = csname_name(t) or nil
+ if cs then
+ return format("%s-> %s", name, cs)
+ elseif tonumber(chr) < 0 then
+ return format("%s-> %s", name, id)
+ else
+ return format("%s-> (%s,%s)", name, chr, id)
+ end
+ else
+ return format("%s", name)
+ end
+ else
+ return "no node"
+ end
+end
+
+collectors.showtoken = showtoken
+
+function collectors.trace()
+ local t = get_next()
+ logs.report("tokenlist",showtoken(t))
+ return t
+end
+
+-- these might move to a runtime module
+
+show_methods.a = function(data) -- no need to store the table, just pass directly
+ local function row(one,two,three,four,five)
+ context.NC() context(one)
+ context.NC() context(two)
+ context.NC() context(three)
+ context.NC() context(four)
+ context.NC() context(five)
+ context.NC() context.NR()
+ end
+ context.starttabulate { "|T|Tr|cT|Tr|T|" }
+ row("cmd","chr","","id","name")
+ context.HL()
+ for _,v in next, data do
+ local cmd, chr, id, cs, sym = v[1], v[2], v[3], "", ""
+ local name = gsub(command_name(v) or "","_","\\_")
+ if id > 0 then
+ cs = csname_name(v) or ""
+ if cs ~= "" then cs = "\\string " .. cs end
+ else
+ id = ""
+ end
+ if cmd == letter or cmd == other then
+ sym = "\\char " .. chr
+ end
+ if tonumber(chr) < 0 then
+ row(name,"",sym,id,cs)
+ else
+ row(name,chr,sym,id,cs)
+ end
+ end
+ context.stoptabulate()
+end
+
+local function show_b_c(data,swap) -- no need to store the table, just pass directly
+ local function row(one,two,three)
+ context.NC() context(one)
+ context.NC() context(two)
+ context.NC() context(three)
+ context.NC() context.NR()
+ end
+ if swap then
+ context.starttabulate { "|Tl|Tl|Tr|" }
+ else
+ context.starttabulate { "|Tl|Tr|Tl|" }
+ end
+ row("cmd","chr","name")
+ context.HL()
+ for _,v in next, data do
+ local cmd, chr, id, cs, sym = v[1], v[2], v[3], "", ""
+ local name = gsub(command_name(v) or "","_","\\_")
+ if id > 0 then
+ cs = csname_name(v) or ""
+ end
+ if cmd == letter or cmd == other then
+ sym = "\\char " .. chr
+ elseif cs == "" then
+ -- okay
+ elseif is_activechar(v) then
+ sym = "\\string " .. cs
+ else
+ sym = "\\string\\" .. cs
+ end
+ if swap then
+ row(name,sym,chr)
+ elseif tonumber(chr) < 0 then
+ row(name,"",sym)
+ else
+ row(name,chr,sym)
+ end
+ end
+ context.stoptabulate()
+end
+
+-- Even more experimental ...
+
+show_methods.b = function(data) show_b_c(data,false) end
+show_methods.c = function(data) show_b_c(data,true ) end
+
+local remapper = { } -- namespace
+collectors.remapper = remapper
+
+local remapperdata = { } -- user mappings
+remapper.data = remapperdata
+
+function remapper.store(tag,class,key)
+ local s = remapperdata[class]
+ if not s then
+ s = { }
+ remapperdata[class] = s
+ end
+ s[key] = collectordata[tag]
+ collectordata[tag] = nil
+end
+
+function remapper.convert(tag,toks)
+ local data = remapperdata[tag]
+ local leftbracket, rightbracket = utfbyte('['), utfbyte(']')
+ local skipping = 0
+ -- todo: math
+ if data then
+ local t, n = { }, 0
+ for s=1,#toks do
+ local tok = toks[s]
+ local one, two = tok[1], tok[2]
+ if one == 11 or one == 12 then
+ if two == leftbracket then
+ skipping = skipping + 1
+ n = n + 1 ; t[n] = tok
+ elseif two == rightbracket then
+ skipping = skipping - 1
+ n = n + 1 ; t[n] = tok
+ elseif skipping == 0 then
+ local new = data[two]
+ if new then
+ if #new > 1 then
+ for i=1,#new do -- a distinct loop variable keeps the running counter n intact
+ n = n + 1 ; t[n] = new[i]
+ end
+ else
+ n = n + 1 ; t[n] = new[1]
+ end
+ else
+ n = n + 1 ; t[n] = tok
+ end
+ else
+ n = n + 1 ; t[n] = tok
+ end
+ else
+ n = n + 1 ; t[n] = tok
+ end
+ end
+ return t
+ else
+ return toks
+ end
+end
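
-- Editor's illustration, not part of the patch: the bracket-skipping replacement
-- performed by remapper.convert above, reduced to plain { catcode, charcode }
-- tables so it runs with stock Lua outside TeX. The sample mapping and token
-- list below are invented for demonstration only.
local function convert(data,toks)
    local leftbracket, rightbracket = string.byte("["), string.byte("]")
    local skipping = 0
    local t, n = { }, 0
    for s=1,#toks do
        local tok = toks[s]
        local one, two = tok[1], tok[2]
        if one ~= 11 and one ~= 12 then
            n = n + 1 ; t[n] = tok                 -- not a letter/other token: keep
        elseif two == leftbracket then
            skipping = skipping + 1 ; n = n + 1 ; t[n] = tok
        elseif two == rightbracket then
            skipping = skipping - 1 ; n = n + 1 ; t[n] = tok
        elseif skipping == 0 and data[two] then
            local new = data[two]                  -- replace by the mapped token(s)
            for i=1,#new do n = n + 1 ; t[n] = new[i] end
        else
            n = n + 1 ; t[n] = tok
        end
    end
    return t
end

-- map 'a' onto the letters 'b' and 'c'; the 'a' inside [...] is left untouched
local map  = { [97] = { { 11, 98 }, { 11, 99 } } }
local toks = { { 11, 97 }, { 12, 91 }, { 11, 97 }, { 12, 93 } }
for _,tok in ipairs(convert(map,toks)) do
    print(tok[1],string.char(tok[2]))              -- b c [ a ]
end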
diff --git a/tex/context/base/trac-ctx.lua b/tex/context/base/trac-ctx.lua
index 706e7a244..8153d079a 100644
--- a/tex/context/base/trac-ctx.lua
+++ b/tex/context/base/trac-ctx.lua
@@ -1,48 +1,48 @@
-if not modules then modules = { } end modules ['trac-ctx'] = {
- version = 1.001,
- comment = "companion to trac-ctx.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local commands = commands
-local context = context
-local register = trackers.register
-
-local textrackers = tex.trackers or { }
-local texdirectives = tex.directives or { }
-
-tex.trackers = textrackers
-tex.directives = texdirectives
-
-storage.register("tex/trackers", textrackers, "tex.trackers")
-storage.register("tex/directives",texdirectives,"tex.directives")
-
-local function doit(category,tag,v)
- local tt = category[tag]
- if tt then
- context.unprotect()
- context(v and tt[1] or tt[2]) -- could be one call
- context.protect()
- end
-end
-
-local function initialize(category,register)
- for tag, commands in next, category do
- register(tag, function(v) doit(category,tag,v) end) -- todo: v,tag in caller
- end
-end
-
-local function install(category,register,tag,enable,disable)
- category[tag] = { enable, disable }
- register(tag, function(v) doit(category,tag,v) end) -- todo: v,tag in caller
-end
-
-function commands.initializetextrackers () initialize(textrackers ,trackers .register ) end
-function commands.initializetexdirectives() initialize(texdirectives,directives.register) end
-
--- commands.install(tag,enable,disable):
-
-function commands.installtextracker (...) install(textrackers ,trackers .register,...) end
-function commands.installtexdirective(...) install(texdirectives,directives.register,...) end
+if not modules then modules = { } end modules ['trac-ctx'] = {
+ version = 1.001,
+ comment = "companion to trac-ctx.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local commands = commands
+local context = context
+local register = trackers.register
+
+local textrackers = tex.trackers or { }
+local texdirectives = tex.directives or { }
+
+tex.trackers = textrackers
+tex.directives = texdirectives
+
+storage.register("tex/trackers", textrackers, "tex.trackers")
+storage.register("tex/directives",texdirectives,"tex.directives")
+
+local function doit(category,tag,v)
+ local tt = category[tag]
+ if tt then
+ context.unprotect()
+ context(v and tt[1] or tt[2]) -- could be one call
+ context.protect()
+ end
+end
+
+local function initialize(category,register)
+ for tag, commands in next, category do
+ register(tag, function(v) doit(category,tag,v) end) -- todo: v,tag in caller
+ end
+end
+
+local function install(category,register,tag,enable,disable)
+ category[tag] = { enable, disable }
+ register(tag, function(v) doit(category,tag,v) end) -- todo: v,tag in caller
+end
+
+function commands.initializetextrackers () initialize(textrackers ,trackers .register ) end
+function commands.initializetexdirectives() initialize(texdirectives,directives.register) end
+
+-- commands.install(tag,enable,disable):
+
+function commands.installtextracker (...) install(textrackers ,trackers .register,...) end
+function commands.installtexdirective(...) install(texdirectives,directives.register,...) end
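
-- Editor's illustration, not part of the patch: the install/dispatch pattern used
-- by trac-ctx.lua, reduced to plain Lua so it runs outside TeX. A local register
-- function and print stand in for trackers.register and the context(...) calls;
-- the tag and the two TeX snippets are invented.
local category  = { }
local callbacks = { }

local function register(tag,action)
    callbacks[tag] = action
end

local function doit(tag,v)
    local tt = category[tag]
    if tt then
        print(v and tt[1] or tt[2]) -- in the module this text is piped back to TeX
    end
end

local function install(tag,enable,disable)
    category[tag] = { enable, disable }
    register(tag,function(v) doit(tag,v) end)
end

install("demo.tracker","% enable code here","% disable code here")
callbacks["demo.tracker"](true)  -- prints the enable snippet
callbacks["demo.tracker"](false) -- prints the disable snippet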
diff --git a/tex/context/base/trac-deb.lua b/tex/context/base/trac-deb.lua
index fe167c343..b2a86df88 100644
--- a/tex/context/base/trac-deb.lua
+++ b/tex/context/base/trac-deb.lua
@@ -1,248 +1,248 @@
-if not modules then modules = { } end modules ['trac-deb'] = {
- version = 1.001,
- comment = "companion to trac-deb.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local lpeg, status = lpeg, status
-
-local lpegmatch = lpeg.match
-local format, concat, match = string.format, table.concat, string.match
-local tonumber, tostring = tonumber, tostring
-local texdimen, textoks, texcount = tex.dimen, tex.toks, tex.count
-
--- maybe tracers -> tracers.tex (and tracers.lua for current debugger)
-
-local report_system = logs.reporter("system","tex")
-
-tracers = tracers or { }
-local tracers = tracers
-
-tracers.lists = { }
-local lists = tracers.lists
-
-tracers.strings = { }
-local strings = tracers.strings
-
-strings.undefined = "undefined"
-
-lists.scratch = {
- 0, 2, 4, 6, 8
-}
-
-lists.internals = {
- 'p:hsize', 'p:parindent', 'p:leftskip','p:rightskip',
- 'p:vsize', 'p:parskip', 'p:baselineskip', 'p:lineskip', 'p:topskip'
-}
-
-lists.context = {
- 'd:lineheight',
- 'c:realpageno', 'c:userpageno', 'c:pageno', 'c:subpageno'
-}
-
-local types = {
- ['d'] = tracers.dimen,
- ['c'] = tracers.count,
- ['t'] = tracers.toks,
- ['p'] = tracers.primitive
-}
-
-local splitboth = lpeg.splitat(":")
-local splittype = lpeg.firstofsplit(":")
-local splitname = lpeg.secondofsplit(":")
-
-function tracers.type(csname)
- return lpegmatch(splittype,csname)
-end
-
-function tracers.name(csname)
- return lpegmatch(splitname,csname) or csname
-end
-
-function tracers.cs(csname)
- local tag, name = lpegmatch(splitboth,csname)
- if name and types[tag] then
- return types[tag](name)
- else
- return tracers.primitive(csname)
- end
-end
-
-function tracers.dimen(name)
- local d = texdimen[name]
- return d and number.topoints(d) or strings.undefined
-end
-
-function tracers.count(name)
- return texcount[name] or strings.undefined
-end
-
-function tracers.toks(name,limit)
- local t = textoks[name]
- return t and string.limit(t,tonumber(limit) or 40) or strings.undefined
-end
-
-function tracers.primitive(name)
- return tex[name] or strings.undefined
-end
-
-function tracers.knownlist(name)
- local l = lists[name]
- return l and #l > 0
-end
-
-function tracers.showlines(filename,linenumber,offset,errorstr)
- local data = io.loaddata(filename)
- if not data or data == "" then
- local hash = url.hashed(filename)
- if not hash.noscheme then
- local ok, d, n = resolvers.loaders.byscheme(hash.scheme,filename)
- if ok and n > 0 then
- data = d
- end
- end
- end
- local lines = data and string.splitlines(data)
- if lines and #lines > 0 then
- -- This does not work completely as we cannot access the last Lua error using
- -- table.print(status.list()). This is on the agenda. Eventually we will
- -- have a sequence of checks here (tex, lua, mp) at this end.
- --
- -- Actually, in 0.75+ the lua error message is even weirder as you can
- -- get:
- --
- -- LuaTeX error [string "\directlua "]:3: unexpected symbol near '1' ...
- --
- -- \endgroup \directlua {
- --
- -- So there is some work to be done in the LuaTeX engine.
- --
- local what, where = match(errorstr,[[LuaTeX error <main (%a+) instance>:(%d+)]])
- or match(errorstr,[[LuaTeX error %[string "\\(.-lua) "%]:(%d+)]]) -- buglet
- if where then
- -- lua error: linenumber points to last line
- local start = "\\startluacode"
- local stop = "\\stopluacode"
- local where = tonumber(where)
- if lines[linenumber] == start then
- local n = linenumber
- for i=n,1,-1 do
- if lines[i] == start then
- local n = i + where
- if n <= linenumber then
- linenumber = n
- end
- end
- end
- end
- end
- offset = tonumber(offset) or 10
- linenumber = tonumber(linenumber) or 10
- local start = math.max(linenumber - offset,1)
- local stop = math.min(linenumber + offset,#lines)
- if stop > #lines then
- return ""
- else
- local result, fmt = { }, "%" .. #tostring(stop) .. "d %s %s"
- for n=start,stop do
- result[#result+1] = format(fmt,n,n == linenumber and ">>" or " ",lines[n])
- end
- return concat(result,"\n")
- end
- else
- return ""
- end
-end
-
-function tracers.printerror(offset)
- local inputstack = resolvers.inputstack
- local filename = inputstack[#inputstack] or status.filename
- local linenumber = tonumber(status.linenumber) or 0
- if not filename then
- report_system("error not related to input file: %s ...",status.lasterrorstring)
- elseif type(filename) == "number" then
- report_system("error on line %s of filehandle %s: %s ...",linenumber,filename,status.lasterrorstring)
- else
- -- currently we still get the error message printed to the log/console so we
- -- add a bit of spacing around our variant
- texio.write_nl("\n")
- local errorstr = status.lasterrorstring or "?"
- -- inspect(status.list())
- report_system("error on line %s in file %s: %s ...\n",linenumber,filename,errorstr) -- lua error?
- texio.write_nl(tracers.showlines(filename,linenumber,offset,errorstr),"\n")
- end
-end
-
-directives.register("system.errorcontext", function(v)
- if v then
- callback.register('show_error_hook', function() tracers.printerror(v) end)
- else
- callback.register('show_error_hook', nil)
- end
-end)
-
--- this might move
-
-lmx = lmx or { }
-
-lmx.htmfile = function(name) return environment.jobname .. "-status.html" end
-lmx.lmxfile = function(name) return resolvers.findfile(name,'tex') end
-
-function lmx.showdebuginfo(lmxname)
- local variables = {
- ['title'] = 'ConTeXt Debug Information',
- ['color-background-one'] = lmx.get('color-background-green'),
- ['color-background-two'] = lmx.get('color-background-blue'),
- }
- if lmxname == false then
- return variables
- else
- lmx.show(lmxname or 'context-debug.lmx',variables)
- end
-end
-
-function lmx.showerror(lmxname)
- local filename, linenumber, errorcontext = status.filename, tonumber(status.linenumber) or 0, ""
- if not filename then
- filename, errorcontext = 'unknown', 'error in filename'
- elseif type(filename) == "number" then
- filename, errorcontext = format("<read %s>",filename), 'unknown error'
- else
- errorcontext = tracers.showlines(filename,linenumber,offset)
- end
- local variables = {
- ['title'] = 'ConTeXt Error Information',
- ['errormessage'] = status.lasterrorstring,
- ['linenumber'] = linenumber,
- ['color-background-one'] = lmx.get('color-background-yellow'),
- ['color-background-two'] = lmx.get('color-background-purple'),
- ['filename'] = filename,
- ['errorcontext'] = errorcontext,
- }
- if lmxname == false then
- return variables
- else
- lmx.show(lmxname or 'context-error.lmx',variables)
- end
-end
-
-function lmx.overloaderror()
- callback.register('show_error_hook', function() lmx.showerror() end) -- prevents arguments being passed
-end
-
-directives.register("system.showerror", lmx.overloaderror)
-
-local debugger = utilities.debugger
-
-local function trace_calls(n)
- debugger.enable()
- luatex.registerstopactions(function()
- debugger.disable()
- debugger.savestats(tex.jobname .. "-luacalls.log",tonumber(n))
- end)
- trace_calls = function() end
-end
-
-directives.register("system.tracecalls", function(n) trace_calls(n) end) -- indirect is needed for nilling
+if not modules then modules = { } end modules ['trac-deb'] = {
+ version = 1.001,
+ comment = "companion to trac-deb.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local lpeg, status = lpeg, status
+
+local lpegmatch = lpeg.match
+local format, concat, match = string.format, table.concat, string.match
+local tonumber, tostring = tonumber, tostring
+local texdimen, textoks, texcount = tex.dimen, tex.toks, tex.count
+
+-- maybe tracers -> tracers.tex (and tracers.lua for current debugger)
+
+local report_system = logs.reporter("system","tex")
+
+tracers = tracers or { }
+local tracers = tracers
+
+tracers.lists = { }
+local lists = tracers.lists
+
+tracers.strings = { }
+local strings = tracers.strings
+
+strings.undefined = "undefined"
+
+lists.scratch = {
+ 0, 2, 4, 6, 8
+}
+
+lists.internals = {
+ 'p:hsize', 'p:parindent', 'p:leftskip','p:rightskip',
+ 'p:vsize', 'p:parskip', 'p:baselineskip', 'p:lineskip', 'p:topskip'
+}
+
+lists.context = {
+ 'd:lineheight',
+ 'c:realpageno', 'c:userpageno', 'c:pageno', 'c:subpageno'
+}
+
+local types = {
+ ['d'] = tracers.dimen,
+ ['c'] = tracers.count,
+ ['t'] = tracers.toks,
+ ['p'] = tracers.primitive
+}
+
+local splitboth = lpeg.splitat(":")
+local splittype = lpeg.firstofsplit(":")
+local splitname = lpeg.secondofsplit(":")
+
+function tracers.type(csname)
+ return lpegmatch(splittype,csname)
+end
+
+function tracers.name(csname)
+ return lpegmatch(splitname,csname) or csname
+end
+
+function tracers.cs(csname)
+ local tag, name = lpegmatch(splitboth,csname)
+ if name and types[tag] then
+ return types[tag](name)
+ else
+ return tracers.primitive(csname)
+ end
+end
+
+function tracers.dimen(name)
+ local d = texdimen[name]
+ return d and number.topoints(d) or strings.undefined
+end
+
+function tracers.count(name)
+ return texcount[name] or strings.undefined
+end
+
+function tracers.toks(name,limit)
+ local t = textoks[name]
+ return t and string.limit(t,tonumber(limit) or 40) or strings.undefined
+end
+
+function tracers.primitive(name)
+ return tex[name] or strings.undefined
+end
+
+function tracers.knownlist(name)
+ local l = lists[name]
+ return l and #l > 0
+end
+
+function tracers.showlines(filename,linenumber,offset,errorstr)
+ local data = io.loaddata(filename)
+ if not data or data == "" then
+ local hash = url.hashed(filename)
+ if not hash.noscheme then
+ local ok, d, n = resolvers.loaders.byscheme(hash.scheme,filename)
+ if ok and n > 0 then
+ data = d
+ end
+ end
+ end
+ local lines = data and string.splitlines(data)
+ if lines and #lines > 0 then
+ -- This does not work completely as we cannot access the last Lua error using
+ -- table.print(status.list()). This is on the agenda. Eventually we will
+ -- have a sequence of checks here (tex, lua, mp) at this end.
+ --
+ -- Actually, in 0.75+ the lua error message is even weirder as you can
+ -- get:
+ --
+ -- LuaTeX error [string "\directlua "]:3: unexpected symbol near '1' ...
+ --
+ -- \endgroup \directlua {
+ --
+ -- So there is some work to be done in the LuaTeX engine.
+ --
+ local what, where = match(errorstr,[[LuaTeX error <main (%a+) instance>:(%d+)]])
+ or match(errorstr,[[LuaTeX error %[string "\\(.-lua) "%]:(%d+)]]) -- buglet
+ if where then
+ -- lua error: linenumber points to last line
+ local start = "\\startluacode"
+ local stop = "\\stopluacode"
+ local where = tonumber(where)
+ if lines[linenumber] == start then
+ local n = linenumber
+ for i=n,1,-1 do
+ if lines[i] == start then
+ local n = i + where
+ if n <= linenumber then
+ linenumber = n
+ end
+ end
+ end
+ end
+ end
+ offset = tonumber(offset) or 10
+ linenumber = tonumber(linenumber) or 10
+ local start = math.max(linenumber - offset,1)
+ local stop = math.min(linenumber + offset,#lines)
+ if stop > #lines then
+ return ""
+ else
+ local result, fmt = { }, "%" .. #tostring(stop) .. "d %s %s"
+ for n=start,stop do
+ result[#result+1] = format(fmt,n,n == linenumber and ">>" or " ",lines[n])
+ end
+ return concat(result,"\n")
+ end
+ else
+ return ""
+ end
+end
+
+function tracers.printerror(offset)
+ local inputstack = resolvers.inputstack
+ local filename = inputstack[#inputstack] or status.filename
+ local linenumber = tonumber(status.linenumber) or 0
+ if not filename then
+ report_system("error not related to input file: %s ...",status.lasterrorstring)
+ elseif type(filename) == "number" then
+ report_system("error on line %s of filehandle %s: %s ...",linenumber,filename,status.lasterrorstring)
+ else
+ -- currently we still get the error message printed to the log/console so we
+ -- add a bit of spacing around our variant
+ texio.write_nl("\n")
+ local errorstr = status.lasterrorstring or "?"
+ -- inspect(status.list())
+ report_system("error on line %s in file %s: %s ...\n",linenumber,filename,errorstr) -- lua error?
+ texio.write_nl(tracers.showlines(filename,linenumber,offset,errorstr),"\n")
+ end
+end
+
+directives.register("system.errorcontext", function(v)
+ if v then
+ callback.register('show_error_hook', function() tracers.printerror(v) end)
+ else
+ callback.register('show_error_hook', nil)
+ end
+end)
+
+-- this might move
+
+lmx = lmx or { }
+
+lmx.htmfile = function(name) return environment.jobname .. "-status.html" end
+lmx.lmxfile = function(name) return resolvers.findfile(name,'tex') end
+
+function lmx.showdebuginfo(lmxname)
+ local variables = {
+ ['title'] = 'ConTeXt Debug Information',
+ ['color-background-one'] = lmx.get('color-background-green'),
+ ['color-background-two'] = lmx.get('color-background-blue'),
+ }
+ if lmxname == false then
+ return variables
+ else
+ lmx.show(lmxname or 'context-debug.lmx',variables)
+ end
+end
+
+function lmx.showerror(lmxname)
+ local filename, linenumber, errorcontext = status.filename, tonumber(status.linenumber) or 0, ""
+ if not filename then
+ filename, errorcontext = 'unknown', 'error in filename'
+ elseif type(filename) == "number" then
+ filename, errorcontext = format("<read %s>",filename), 'unknown error'
+ else
+ errorcontext = tracers.showlines(filename,linenumber,offset)
+ end
+ local variables = {
+ ['title'] = 'ConTeXt Error Information',
+ ['errormessage'] = status.lasterrorstring,
+ ['linenumber'] = linenumber,
+ ['color-background-one'] = lmx.get('color-background-yellow'),
+ ['color-background-two'] = lmx.get('color-background-purple'),
+ ['filename'] = filename,
+ ['errorcontext'] = errorcontext,
+ }
+ if lmxname == false then
+ return variables
+ else
+ lmx.show(lmxname or 'context-error.lmx',variables)
+ end
+end
+
+function lmx.overloaderror()
+ callback.register('show_error_hook', function() lmx.showerror() end) -- prevents arguments being passed
+end
+
+directives.register("system.showerror", lmx.overloaderror)
+
+local debugger = utilities.debugger
+
+local function trace_calls(n)
+ debugger.enable()
+ luatex.registerstopactions(function()
+ debugger.disable()
+ debugger.savestats(tex.jobname .. "-luacalls.log",tonumber(n))
+ end)
+ trace_calls = function() end
+end
+
+directives.register("system.tracecalls", function(n) trace_calls(n) end) -- indirect is needed for nilling
diff --git a/tex/context/base/trac-exp.lua b/tex/context/base/trac-exp.lua
index 5879f1b7b..9daf86357 100644
--- a/tex/context/base/trac-exp.lua
+++ b/tex/context/base/trac-exp.lua
@@ -1,229 +1,229 @@
-if not modules then modules = { } end modules ['trac-exp'] = {
- version = 1.001,
- comment = "companion to trac-log.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local formatters = string.formatters
-local reporters = logs.reporters
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
-local xmltext = xml.text
-local xmlfirst = xml.first
-local xmlfilter = xml.filter
-
--- there is no need for a newhandlers { name = "help", parent = "string" }
-
-local function flagdata(flag)
- local name = flag.at.name or ""
- local value = flag.at.value or ""
- -- local short = xmlfirst(s,"/short")
- -- local short = xmlserialize(short,xs)
- local short = xmltext(xmlfirst(flag,"/short")) or ""
- return name, value, short
-end
-
-local function exampledata(example)
- local command = xmltext(xmlfirst(example,"/command")) or ""
- local comment = xmltext(xmlfirst(example,"/comment")) or ""
- return command, comment
-end
-
-local function categorytitle(category)
- return xmltext(xmlfirst(category,"/title")) or ""
-end
-
-local exporters = logs.exporters
-
-function exporters.man(specification,...)
- local root = xml.convert(specification.helpinfo or "")
- if not root then
- return
- end
- local xs = xml.gethandlers("string")
- xml.sethandlersfunction(xs,"short",function(e,handler) xmlserialize(e.dt,handler) end)
- xml.sethandlersfunction(xs,"ref", function(e,handler) handler.handle("--"..e.at.name) end)
- local wantedcategories = select("#",...) == 0 and true or table.tohash { ... }
- local nofcategories = xml.count(root,"/application/flags/category")
- local name = xmlfilter(root,"/application/metadata/entry[@name='name']/text()")
- local detail = xmlfilter(root,"/application/metadata/entry[@name='detail']/text()") or name
- local version = xmlfilter(root,"/application/metadata/entry[@name='version']/text()") or "0.00"
- local banner = specification.banner or detail or name
- --
- local result = { }
- --
- -- .TH "context" "1" "some date" "version" "ConTeXt" -- we use a fake date as I don't want to polute the git repos
- --
- local runner = string.match(name,"^mtx%-(.*)")
- if runner then
- runner = formatters["mtxrun --script %s"](runner)
- else
- runner = name
- end
- --
- result[#result+1] = formatters['.TH "%s" "1" "%s" "version %s" "%s"'](name,os.date("01-01-%Y"),version,detail)
- result[#result+1] = formatters[".SH NAME\n.B %s"](name)
- result[#result+1] = formatters[".SH SYNOPSIS\n.B %s [\n.I OPTIONS ...\n.B ] [\n.I FILENAMES\n.B ]"](runner)
- result[#result+1] = formatters[".SH DESCRIPTION\n.B %s"](detail)
- --
- for category in xmlcollected(root,"/application/flags/category") do
- if nofcategories > 1 then
- result[#result+1] = formatters['.SH OPTIONS: %s'](string.upper(category.at.name or "all"))
- else
- result[#result+1] = ".SH OPTIONS"
- end
- for subcategory in xmlcollected(category,"/subcategory") do
- for flag in xmlcollected(subcategory,"/flag") do
- local name, value, short = flagdata(flag)
- if value == "" then
- result[#result+1] = formatters[".TP\n.B --%s\n%s"](name,short)
- else
- result[#result+1] = formatters[".TP\n.B --%s=%s\n%s"](name,value,short)
- end
- end
- end
- end
- local moreinfo = specification.moreinfo
- if moreinfo and moreinfo ~= "" then
- moreinfo = string.gsub(moreinfo,"[\n\r]([%a]+)%s*:%s*",'\n\n.B "%1:"\n')
- result[#result+1] = formatters[".SH AUTHOR\n%s"](moreinfo)
- end
- return table.concat(result,"\n")
-end
-
-local craptemplate = [[
-
-
-
-%s
-
-
-%s
-
-]]
-
-function exporters.xml(specification,...)
- local helpinfo = specification.helpinfo
- if type(helpinfo) == "string" then
- if string.find(helpinfo,"^<%?xml") then
- return helpinfo
- end
- elseif type(helpinfo) == "table" then
- helpinfo = table.concat(helpinfo,"\n\n")
- else
- helpinfo = "no help"
- end
- return formatters[craptemplate](specification.banner or "?",helpinfo)
-end
-
--- the following template is optimized a bit for space
-
--- local bodytemplate = [[
---
Command line options
---
---
---
flag
---
value
---
description
---
--- 1 then
--- ?>
---
---
---
--
---
---
---
---
---
---
--- ]]
-
-local bodytemplate = [[
-
Command line options
-
-
flag
value
description
- 1 then ?>
-
-
-
-
-
--
-
-
-
-
-
-
-
-
-
-]]
-
-function exporters.html(specification,...)
- local root = xml.convert(specification.helpinfo or "")
- if not root then
- return
- end
- local xs = xml.gethandlers("string")
- xml.sethandlersfunction(xs,"short",function(e,handler) xmlserialize(e.dt,handler) end)
- xml.sethandlersfunction(xs,"ref", function(e,handler) handler.handle("--"..e.at.name) end)
- local wantedcategories = select("#",...) == 0 and true or table.tohash { ... }
- local nofcategories = xml.count(root,"/application/flags/category")
- local name = xmlfilter(root,"/application/metadata/entry[@name='name']/text()")
- local detail = xmlfilter(root,"/application/metadata/entry[@name='detail']/text()") or name
- local version = xmlfilter(root,"/application/metadata/entry[@name='version']/text()") or "0.00"
- local banner = specification.banner or detail or name
- --
- dofile(resolvers.findfile("trac-lmx.lua","tex"))
- --
- local htmltemplate = io.loaddata(resolvers.findfile("context-base.lmx","tex")) or "no template"
- --
- local body = lmx.convertstring(bodytemplate, {
- nofcategories = nofcategories,
- wantedcategories = wantedcategories,
- root = root,
- -- moreinfo = specification.moreinfo,
- flagdata = flagdata,
- exampledata = exampledata,
- categorytitle = categorytitle,
- })
- local html = lmx.convertstring(htmltemplate, {
- maintext = body,
- title = banner,
- bottomtext = "wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl",
- })
- --
- return html
-end
+if not modules then modules = { } end modules ['trac-exp'] = {
+ version = 1.001,
+ comment = "companion to trac-log.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local formatters = string.formatters
+local reporters = logs.reporters
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmltext = xml.text
+local xmlfirst = xml.first
+local xmlfilter = xml.filter
+
+-- there is no need for a newhandlers { name = "help", parent = "string" }
+
+local function flagdata(flag)
+ local name = flag.at.name or ""
+ local value = flag.at.value or ""
+ -- local short = xmlfirst(s,"/short")
+ -- local short = xmlserialize(short,xs)
+ local short = xmltext(xmlfirst(flag,"/short")) or ""
+ return name, value, short
+end
+
+local function exampledata(example)
+ local command = xmltext(xmlfirst(example,"/command")) or ""
+ local comment = xmltext(xmlfirst(example,"/comment")) or ""
+ return command, comment
+end
+
+local function categorytitle(category)
+ return xmltext(xmlfirst(category,"/title")) or ""
+end
+
+local exporters = logs.exporters
+
+function exporters.man(specification,...)
+ local root = xml.convert(specification.helpinfo or "")
+ if not root then
+ return
+ end
+ local xs = xml.gethandlers("string")
+ xml.sethandlersfunction(xs,"short",function(e,handler) xmlserialize(e.dt,handler) end)
+ xml.sethandlersfunction(xs,"ref", function(e,handler) handler.handle("--"..e.at.name) end)
+ local wantedcategories = select("#",...) == 0 and true or table.tohash { ... }
+ local nofcategories = xml.count(root,"/application/flags/category")
+ local name = xmlfilter(root,"/application/metadata/entry[@name='name']/text()")
+ local detail = xmlfilter(root,"/application/metadata/entry[@name='detail']/text()") or name
+ local version = xmlfilter(root,"/application/metadata/entry[@name='version']/text()") or "0.00"
+ local banner = specification.banner or detail or name
+ --
+ local result = { }
+ --
+ -- .TH "context" "1" "some date" "version" "ConTeXt" -- we use a fake date as I don't want to polute the git repos
+ --
+ local runner = string.match(name,"^mtx%-(.*)")
+ if runner then
+ runner = formatters["mtxrun --script %s"](runner)
+ else
+ runner = name
+ end
+ --
+ result[#result+1] = formatters['.TH "%s" "1" "%s" "version %s" "%s"'](name,os.date("01-01-%Y"),version,detail)
+ result[#result+1] = formatters[".SH NAME\n.B %s"](name)
+ result[#result+1] = formatters[".SH SYNOPSIS\n.B %s [\n.I OPTIONS ...\n.B ] [\n.I FILENAMES\n.B ]"](runner)
+ result[#result+1] = formatters[".SH DESCRIPTION\n.B %s"](detail)
+ --
+ for category in xmlcollected(root,"/application/flags/category") do
+ if nofcategories > 1 then
+ result[#result+1] = formatters['.SH OPTIONS: %s'](string.upper(category.at.name or "all"))
+ else
+ result[#result+1] = ".SH OPTIONS"
+ end
+ for subcategory in xmlcollected(category,"/subcategory") do
+ for flag in xmlcollected(subcategory,"/flag") do
+ local name, value, short = flagdata(flag)
+ if value == "" then
+ result[#result+1] = formatters[".TP\n.B --%s\n%s"](name,short)
+ else
+ result[#result+1] = formatters[".TP\n.B --%s=%s\n%s"](name,value,short)
+ end
+ end
+ end
+ end
+ local moreinfo = specification.moreinfo
+ if moreinfo and moreinfo ~= "" then
+ moreinfo = string.gsub(moreinfo,"[\n\r]([%a]+)%s*:%s*",'\n\n.B "%1:"\n')
+ result[#result+1] = formatters[".SH AUTHOR\n%s"](moreinfo)
+ end
+ return table.concat(result,"\n")
+end
+
+local craptemplate = [[
+
+
+
+%s
+
+
+%s
+
+]]
+
+function exporters.xml(specification,...)
+ local helpinfo = specification.helpinfo
+ if type(helpinfo) == "string" then
+ if string.find(helpinfo,"^<%?xml") then
+ return helpinfo
+ end
+ elseif type(helpinfo) == "table" then
+ helpinfo = table.concat(helpinfo,"\n\n")
+ else
+ helpinfo = "no help"
+ end
+ return formatters[craptemplate](specification.banner or "?",helpinfo)
+end
+
+-- the following template is optimized a bit for space
+
+-- local bodytemplate = [[
+--
Command line options
+--
+--
+--
flag
+--
value
+--
description
+--
+-- 1 then
+-- ?>
+--
+--
+--
--
+--
+--
+--
+--
+--
+--
+-- ]]
+
+local bodytemplate = [[
+
Command line options
+
+
flag
value
description
+ 1 then ?>
+
+
+
+
+
--
+
+
+
+
+
+
+
+
+
+]]
+
+function exporters.html(specification,...)
+ local root = xml.convert(specification.helpinfo or "")
+ if not root then
+ return
+ end
+ local xs = xml.gethandlers("string")
+ xml.sethandlersfunction(xs,"short",function(e,handler) xmlserialize(e.dt,handler) end)
+ xml.sethandlersfunction(xs,"ref", function(e,handler) handler.handle("--"..e.at.name) end)
+ local wantedcategories = select("#",...) == 0 and true or table.tohash { ... }
+ local nofcategories = xml.count(root,"/application/flags/category")
+ local name = xmlfilter(root,"/application/metadata/entry[@name='name']/text()")
+ local detail = xmlfilter(root,"/application/metadata/entry[@name='detail']/text()") or name
+ local version = xmlfilter(root,"/application/metadata/entry[@name='version']/text()") or "0.00"
+ local banner = specification.banner or detail or name
+ --
+ dofile(resolvers.findfile("trac-lmx.lua","tex"))
+ --
+ local htmltemplate = io.loaddata(resolvers.findfile("context-base.lmx","tex")) or "no template"
+ --
+ local body = lmx.convertstring(bodytemplate, {
+ nofcategories = nofcategories,
+ wantedcategories = wantedcategories,
+ root = root,
+ -- moreinfo = specification.moreinfo,
+ flagdata = flagdata,
+ exampledata = exampledata,
+ categorytitle = categorytitle,
+ })
+ local html = lmx.convertstring(htmltemplate, {
+ maintext = body,
+ title = banner,
+ bottomtext = "wiki: http://contextgarden.net | mail: ntg-context@ntg.nl | website: http://www.pragma-ade.nl",
+ })
+ --
+ return html
+end
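
-- Editor's illustration, not part of the patch: the flag-to-man-page step inside
-- exporters.man, with the XML queries replaced by a hard-coded table so that the
-- snippet runs standalone; the flag names and descriptions are invented.
local format, concat = string.format, table.concat

local flags = {
    { name = "run",    value = "",       short = "process the given file" },
    { name = "result", value = "string", short = "rename the result file" },
}

local result = { }
for i=1,#flags do
    local f = flags[i]
    if f.value == "" then
        result[#result+1] = format(".TP\n.B --%s\n%s",f.name,f.short)
    else
        result[#result+1] = format(".TP\n.B --%s=%s\n%s",f.name,f.value,f.short)
    end
end
print(concat(result,"\n"))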
diff --git a/tex/context/base/trac-fil.lua b/tex/context/base/trac-fil.lua
index 8cc903e2a..d6d40356d 100644
--- a/tex/context/base/trac-fil.lua
+++ b/tex/context/base/trac-fil.lua
@@ -1,181 +1,181 @@
-if not modules then modules = { } end modules ['trac-fil'] = {
- version = 1.001,
- comment = "for the moment for myself",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local rawset, tonumber, type, pcall = rawset, tonumber, type, pcall
-local format, concat = string.format, table.concat
-local openfile = io.open
-local date = os.date
-local sortedpairs = table.sortedpairs
-
-local P, C, Cc, Cg, Cf, Ct, Cs, Carg = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.Ct, lpeg.Cs, lpeg.Carg
-local lpegmatch = lpeg.match
-
-local patterns = lpeg.patterns
-local cardinal = patterns.cardinal
-local whitespace = patterns.whitespace^0
-
-local timestamp = Cf(Ct("") * (
- Cg (Cc("year") * (cardinal/tonumber)) * P("-")
- * Cg (Cc("month") * (cardinal/tonumber)) * P("-")
- * Cg (Cc("day") * (cardinal/tonumber)) * P(" ")
- * Cg (Cc("hour") * (cardinal/tonumber)) * P(":")
- * Cg (Cc("minute") * (cardinal/tonumber)) * P(":")
- * Cg (Cc("second") * (cardinal/tonumber)) * P("+")
- * Cg (Cc("thour") * (cardinal/tonumber)) * P(":")
- * Cg (Cc("tminute") * (cardinal/tonumber))
-)^0, rawset)
-
-local keysvalues = Cf(Ct("") * (
- Cg(C(patterns.letter^0) * whitespace * "=" * whitespace * Cs(patterns.unquoted) * whitespace)
-)^0, rawset)
-
-local statusline = Cf(Ct("") * (
- whitespace * P("[") * Cg(Cc("timestamp") * timestamp ) * P("]")
- * whitespace * Cg(Cc("status" ) * keysvalues)
-),rawset)
-
-patterns.keysvalues = keysvalues
-patterns.statusline = statusline
-patterns.timestamp = timestamp
-
-loggers = loggers or { }
-
-local timeformat = format("[%%s%s]",os.timezone(true))
-local dateformat = "!%Y-%m-%d %H:%M:%S"
-
-function loggers.makeline(t)
- local result = { } -- minimize time that file is open
- result[#result+1] = format(timeformat,date(dateformat))
- for k, v in sortedpairs(t) do
- local tv = type(v)
- if tv == "string" then
- if v ~= "password" then
- result[#result+1] = format(" %s=%q",k,v)
- end
- elseif tv == "number" or tv == "boolean" then
- result[#result+1] = format(" %s=%q",k,tostring(v))
- end
- end
- return concat(result," ")
-end
-
-local function append(filename,...)
- local f = openfile(filename,"a+")
- if not f then
- dir.mkdirs(file.dirname(filename))
- f = openfile(filename,"a+")
- end
- if f then
- f:write(...)
- f:close()
- return true
- else
- return false
- end
-end
-
-function loggers.store(filename,data) -- a log service is nicer
- if type(data) == "table"then
- data = loggers.makeline(data)
- end
- pcall(append,filename,data,"\n")
-end
-
-function loggers.collect(filename,result)
- if lfs.isfile(filename) then
- local r = lpegmatch(Ct(statusline^0),io.loaddata(filename))
- if result then -- append
- local nofresult = #result
- for i=1,#r do
- nofresult = nofresult + 1
- result[nofresult] = r[i]
- end
- return result
- else
- return r
- end
- else
- return result or { }
- end
-end
-
-function loggers.fields(results) -- returns hash of fields with counts so that we can decide on importance
- local fields = { }
- if results then
- for i=1,#results do
- local r = results[i]
- for k, v in next, r do
- local f = fields[k]
- if not f then
- fields[k] = 1
- else
- fields[k] = f + 1
- end
- end
- end
- end
- return fields
-end
-
-local template = [[
-<table>
-<tr>%s</tr>
-%s
-</table>
-
-]]
-
-function loggers.tohtml(entries,fields)
- if not fields or #fields == 0 then
- return ""
- end
- if type(entries) == "string" then
- entries = loggers.collect(entries)
- end
- local scratch, lines = { }, { }
- for i=1,#entries do
- local entry = entries[i]
- local status = entry.status
- for i=1,#fields do
- local field = fields[i]
- local v = status[field.name]
- if v ~= nil then
- v = tostring(v)
- local f = field.format
- if f then
- v = format(f,v)
- end
- scratch[i] = format("
%s
",field.align or "left",v)
- else
- scratch[i] = "
"
- end
- end
- lines[i] = format("
%s
",concat(scratch))
- end
- for i=1,#fields do
- local field = fields[i]
- scratch[i] = format("
%s
", field.label or field.name)
- end
- local result = format(template,concat(scratch),concat(lines,"\n"))
- return result, entries
-end
-
--- loggers.store("test.log", { name = "whatever", more = math.random(1,100) })
-
--- local fields = {
--- { name = "name", align = "left" },
--- { name = "more", align = "right" },
--- }
-
--- local entries = loggers.collect("test.log")
--- local html = loggers.tohtml(entries,fields)
-
--- inspect(entries)
--- inspect(fields)
--- inspect(html)
-
+if not modules then modules = { } end modules ['trac-fil'] = {
+ version = 1.001,
+ comment = "for the moment for myself",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local rawset, tonumber, type, pcall = rawset, tonumber, type, pcall
+local format, concat = string.format, table.concat
+local openfile = io.open
+local date = os.date
+local sortedpairs = table.sortedpairs
+
+local P, C, Cc, Cg, Cf, Ct, Cs, Carg = lpeg.P, lpeg.C, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.Ct, lpeg.Cs, lpeg.Carg
+local lpegmatch = lpeg.match
+
+local patterns = lpeg.patterns
+local cardinal = patterns.cardinal
+local whitespace = patterns.whitespace^0
+
+local timestamp = Cf(Ct("") * (
+ Cg (Cc("year") * (cardinal/tonumber)) * P("-")
+ * Cg (Cc("month") * (cardinal/tonumber)) * P("-")
+ * Cg (Cc("day") * (cardinal/tonumber)) * P(" ")
+ * Cg (Cc("hour") * (cardinal/tonumber)) * P(":")
+ * Cg (Cc("minute") * (cardinal/tonumber)) * P(":")
+ * Cg (Cc("second") * (cardinal/tonumber)) * P("+")
+ * Cg (Cc("thour") * (cardinal/tonumber)) * P(":")
+ * Cg (Cc("tminute") * (cardinal/tonumber))
+)^0, rawset)
+
+local keysvalues = Cf(Ct("") * (
+ Cg(C(patterns.letter^0) * whitespace * "=" * whitespace * Cs(patterns.unquoted) * whitespace)
+)^0, rawset)
+
+local statusline = Cf(Ct("") * (
+ whitespace * P("[") * Cg(Cc("timestamp") * timestamp ) * P("]")
+ * whitespace * Cg(Cc("status" ) * keysvalues)
+),rawset)
+
+patterns.keysvalues = keysvalues
+patterns.statusline = statusline
+patterns.timestamp = timestamp
+
+loggers = loggers or { }
+
+local timeformat = format("[%%s%s]",os.timezone(true))
+local dateformat = "!%Y-%m-%d %H:%M:%S"
+
+function loggers.makeline(t)
+ local result = { } -- minimize time that file is open
+ result[#result+1] = format(timeformat,date(dateformat))
+ for k, v in sortedpairs(t) do
+ local tv = type(v)
+ if tv == "string" then
+ if v ~= "password" then
+ result[#result+1] = format(" %s=%q",k,v)
+ end
+ elseif tv == "number" or tv == "boolean" then
+ result[#result+1] = format(" %s=%q",k,tostring(v))
+ end
+ end
+ return concat(result," ")
+end
+
+local function append(filename,...)
+ local f = openfile(filename,"a+")
+ if not f then
+ dir.mkdirs(file.dirname(filename))
+ f = openfile(filename,"a+")
+ end
+ if f then
+ f:write(...)
+ f:close()
+ return true
+ else
+ return false
+ end
+end
+
+function loggers.store(filename,data) -- a log service is nicer
+ if type(data) == "table"then
+ data = loggers.makeline(data)
+ end
+ pcall(append,filename,data,"\n")
+end
+
+function loggers.collect(filename,result)
+ if lfs.isfile(filename) then
+ local r = lpegmatch(Ct(statusline^0),io.loaddata(filename))
+ if result then -- append
+ local nofresult = #result
+ for i=1,#r do
+ nofresult = nofresult + 1
+ result[nofresult] = r[i]
+ end
+ return result
+ else
+ return r
+ end
+ else
+ return result or { }
+ end
+end
+
+function loggers.fields(results) -- returns hash of fields with counts so that we can decide on importance
+ local fields = { }
+ if results then
+ for i=1,#results do
+ local r = results[i]
+ for k, v in next, r do
+ local f = fields[k]
+ if not f then
+ fields[k] = 1
+ else
+ fields[k] = f + 1
+ end
+ end
+ end
+ end
+ return fields
+end
+
+local template = [[
+<table>
+<tr>%s</tr>
+%s
+</table>
+
+]]
+
+function loggers.tohtml(entries,fields)
+ if not fields or #fields == 0 then
+ return ""
+ end
+ if type(entries) == "string" then
+ entries = loggers.collect(entries)
+ end
+ local scratch, lines = { }, { }
+ for i=1,#entries do
+ local entry = entries[i]
+ local status = entry.status
+ for i=1,#fields do
+ local field = fields[i]
+ local v = status[field.name]
+ if v ~= nil then
+ v = tostring(v)
+ local f = field.format
+ if f then
+ v = format(f,v)
+ end
+ scratch[i] = format("
%s
",field.align or "left",v)
+ else
+ scratch[i] = "
"
+ end
+ end
+ lines[i] = format("
%s
",concat(scratch))
+ end
+ for i=1,#fields do
+ local field = fields[i]
+ scratch[i] = format("
%s
", field.label or field.name)
+ end
+ local result = format(template,concat(scratch),concat(lines,"\n"))
+ return result, entries
+end
+
+-- loggers.store("test.log", { name = "whatever", more = math.random(1,100) })
+
+-- local fields = {
+-- { name = "name", align = "left" },
+-- { name = "more", align = "right" },
+-- }
+
+-- local entries = loggers.collect("test.log")
+-- local html = loggers.tohtml(entries,fields)
+
+-- inspect(entries)
+-- inspect(fields)
+-- inspect(html)
+
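
-- Editor's illustration, not part of the patch: a round trip through the loggers
-- defined above, assuming the ConTeXt Lua helpers (io.loaddata, lfs, dir, ...)
-- are loaded, for instance under mtxrun. The file name and fields are invented.
-- A stored line has roughly the shape
--   [2013-01-01 12:00:00+01:00] more="42" name="whatever"
-- which is what the statusline pattern parses back into a table.
loggers.store("demo.log", { name = "whatever", more = 42 })
local entries = loggers.collect("demo.log")
for i=1,#entries do
    local status = entries[i].status
    print(status.name,status.more) -- both come back as strings
end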
diff --git a/tex/context/base/trac-inf.lua b/tex/context/base/trac-inf.lua
index eefc15a6f..aa7704d3f 100644
--- a/tex/context/base/trac-inf.lua
+++ b/tex/context/base/trac-inf.lua
@@ -1,193 +1,193 @@
-if not modules then modules = { } end modules ['trac-inf'] = {
- version = 1.001,
- comment = "companion to trac-inf.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- As we want to protect the global tables, we no longer store the timing
--- in the tables themselves but in a hidden timers table so that we don't
--- get warnings about assignments. This is more efficient than using rawset
--- and rawget.
-
-local type, tonumber = type, tonumber
-local format, lower = string.format, string.lower
-local concat = table.concat
-local clock = os.gettimeofday or os.clock -- should go in environment
-
-statistics = statistics or { }
-local statistics = statistics
-
-statistics.enable = true
-statistics.threshold = 0.01
-
-local statusinfo, n, registered, timers = { }, 0, { }, { }
-
-table.setmetatableindex(timers,function(t,k)
- local v = { timing = 0, loadtime = 0 }
- t[k] = v
- return v
-end)
-
-local function hastiming(instance)
- return instance and timers[instance]
-end
-
-local function resettiming(instance)
- timers[instance or "notimer"] = { timing = 0, loadtime = 0 }
-end
-
-local function starttiming(instance)
- local timer = timers[instance or "notimer"]
- local it = timer.timing or 0
- if it == 0 then
- timer.starttime = clock()
- if not timer.loadtime then
- timer.loadtime = 0
- end
- end
- timer.timing = it + 1
-end
-
-local function stoptiming(instance)
- local timer = timers[instance or "notimer"]
- local it = timer.timing
- if it > 1 then
- timer.timing = it - 1
- else
- local starttime = timer.starttime
- if starttime then
- local stoptime = clock()
- local loadtime = stoptime - starttime
- timer.stoptime = stoptime
- timer.loadtime = timer.loadtime + loadtime
- timer.timing = 0
- return loadtime
- end
- end
- return 0
-end
-
-local function elapsed(instance)
- if type(instance) == "number" then
- return instance or 0
- else
- local timer = timers[instance or "notimer"]
- return timer and timer.loadtime or 0
- end
-end
-
-local function elapsedtime(instance)
- return format("%0.3f",elapsed(instance))
-end
-
-local function elapsedindeed(instance)
- return elapsed(instance) > statistics.threshold
-end
-
-local function elapsedseconds(instance,rest) -- returns nil if 0 seconds
- if elapsedindeed(instance) then
- return format("%0.3f seconds %s", elapsed(instance),rest or "")
- end
-end
-
-statistics.hastiming = hastiming
-statistics.resettiming = resettiming
-statistics.starttiming = starttiming
-statistics.stoptiming = stoptiming
-statistics.elapsed = elapsed
-statistics.elapsedtime = elapsedtime
-statistics.elapsedindeed = elapsedindeed
-statistics.elapsedseconds = elapsedseconds
-
--- general function .. we might split this module
-
-function statistics.register(tag,fnc)
- if statistics.enable and type(fnc) == "function" then
- local rt = registered[tag] or (#statusinfo + 1)
- statusinfo[rt] = { tag, fnc }
- registered[tag] = rt
- if #tag > n then n = #tag end
- end
-end
-
-local report = logs.reporter("mkiv lua stats")
-
-function statistics.show()
- if statistics.enable then
- -- this code will move
- local register = statistics.register
- register("luatex banner", function()
- return lower(status.banner)
- end)
- register("control sequences", function()
- return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra)
- end)
- register("callbacks", function()
- local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
- return format("%s direct, %s indirect, %s total", total-indirect, indirect, total)
- end)
- if jit then
- local status = { jit.status() }
- if status[1] then
- register("luajit status", function()
- return concat(status," ",2)
- end)
- end
- end
- -- so far
- -- collectgarbage("collect")
- register("current memory usage",statistics.memused)
- register("runtime",statistics.runtime)
- logs.newline() -- initial newline
- for i=1,#statusinfo do
- local s = statusinfo[i]
- local r = s[2]()
- if r then
- report("%s: %s",s[1],r)
- end
- end
- -- logs.newline() -- final newline
- statistics.enable = false
- end
-end
-
-function statistics.memused() -- no math.round yet -)
- local round = math.round or math.floor
- return format("%s MB (ctx: %s MB)",round(collectgarbage("count")/1000), round(status.luastate_bytes/1000000))
-end
-
-starttiming(statistics)
-
-function statistics.formatruntime(runtime) -- indirect so it can be overloaded and
- return format("%s seconds", runtime) -- indeed that happens in cure-uti.lua
-end
-
-function statistics.runtime()
- stoptiming(statistics)
- return statistics.formatruntime(elapsedtime(statistics))
-end
-
-local report = logs.reporter("system")
-
-function statistics.timed(action)
- starttiming("run")
- action()
- stoptiming("run")
- report("total runtime: %s",elapsedtime("run"))
-end
-
--- where, not really the best spot for this:
-
-commands = commands or { }
-
-function commands.resettimer(name)
- resettiming(name or "whatever")
- starttiming(name or "whatever")
-end
-
-function commands.elapsedtime(name)
- stoptiming(name or "whatever")
- context(elapsedtime(name or "whatever"))
-end
+if not modules then modules = { } end modules ['trac-inf'] = {
+ version = 1.001,
+ comment = "companion to trac-inf.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- As we want to protect the global tables, we no longer store the timing
+-- in the tables themselves but in a hidden timers table so that we don't
+-- get warnings about assignments. This is more efficient than using rawset
+-- and rawget.
+
+local type, tonumber = type, tonumber
+local format, lower = string.format, string.lower
+local concat = table.concat
+local clock = os.gettimeofday or os.clock -- should go in environment
+
+statistics = statistics or { }
+local statistics = statistics
+
+statistics.enable = true
+statistics.threshold = 0.01
+
+local statusinfo, n, registered, timers = { }, 0, { }, { }
+
+table.setmetatableindex(timers,function(t,k)
+ local v = { timing = 0, loadtime = 0 }
+ t[k] = v
+ return v
+end)
+
+local function hastiming(instance)
+ return instance and timers[instance]
+end
+
+local function resettiming(instance)
+ timers[instance or "notimer"] = { timing = 0, loadtime = 0 }
+end
+
+local function starttiming(instance)
+ local timer = timers[instance or "notimer"]
+ local it = timer.timing or 0
+ if it == 0 then
+ timer.starttime = clock()
+ if not timer.loadtime then
+ timer.loadtime = 0
+ end
+ end
+ timer.timing = it + 1
+end
+
+local function stoptiming(instance)
+ local timer = timers[instance or "notimer"]
+ local it = timer.timing
+ if it > 1 then
+ timer.timing = it - 1
+ else
+ local starttime = timer.starttime
+ if starttime then
+ local stoptime = clock()
+ local loadtime = stoptime - starttime
+ timer.stoptime = stoptime
+ timer.loadtime = timer.loadtime + loadtime
+ timer.timing = 0
+ return loadtime
+ end
+ end
+ return 0
+end
+
+local function elapsed(instance)
+ if type(instance) == "number" then
+ return instance or 0
+ else
+ local timer = timers[instance or "notimer"]
+ return timer and timer.loadtime or 0
+ end
+end
+
+local function elapsedtime(instance)
+ return format("%0.3f",elapsed(instance))
+end
+
+local function elapsedindeed(instance)
+ return elapsed(instance) > statistics.threshold
+end
+
+local function elapsedseconds(instance,rest) -- returns nil if 0 seconds
+ if elapsedindeed(instance) then
+ return format("%0.3f seconds %s", elapsed(instance),rest or "")
+ end
+end
+
+statistics.hastiming = hastiming
+statistics.resettiming = resettiming
+statistics.starttiming = starttiming
+statistics.stoptiming = stoptiming
+statistics.elapsed = elapsed
+statistics.elapsedtime = elapsedtime
+statistics.elapsedindeed = elapsedindeed
+statistics.elapsedseconds = elapsedseconds
+
+-- general function .. we might split this module
+
+function statistics.register(tag,fnc)
+ if statistics.enable and type(fnc) == "function" then
+ local rt = registered[tag] or (#statusinfo + 1)
+ statusinfo[rt] = { tag, fnc }
+ registered[tag] = rt
+ if #tag > n then n = #tag end
+ end
+end
+
+local report = logs.reporter("mkiv lua stats")
+
+function statistics.show()
+ if statistics.enable then
+ -- this code will move
+ local register = statistics.register
+ register("luatex banner", function()
+ return lower(status.banner)
+ end)
+ register("control sequences", function()
+ return format("%s of %s + %s", status.cs_count, status.hash_size,status.hash_extra)
+ end)
+ register("callbacks", function()
+ local total, indirect = status.callbacks or 0, status.indirect_callbacks or 0
+ return format("%s direct, %s indirect, %s total", total-indirect, indirect, total)
+ end)
+ if jit then
+ local status = { jit.status() }
+ if status[1] then
+ register("luajit status", function()
+ return concat(status," ",2)
+ end)
+ end
+ end
+ -- so far
+ -- collectgarbage("collect")
+ register("current memory usage",statistics.memused)
+ register("runtime",statistics.runtime)
+ logs.newline() -- initial newline
+ for i=1,#statusinfo do
+ local s = statusinfo[i]
+ local r = s[2]()
+ if r then
+ report("%s: %s",s[1],r)
+ end
+ end
+ -- logs.newline() -- final newline
+ statistics.enable = false
+ end
+end
+
+function statistics.memused() -- no math.round yet -)
+ local round = math.round or math.floor
+ return format("%s MB (ctx: %s MB)",round(collectgarbage("count")/1000), round(status.luastate_bytes/1000000))
+end
+
+starttiming(statistics)
+
+function statistics.formatruntime(runtime) -- indirect so it can be overloaded and
+ return format("%s seconds", runtime) -- indeed that happens in cure-uti.lua
+end
+
+function statistics.runtime()
+ stoptiming(statistics)
+ return statistics.formatruntime(elapsedtime(statistics))
+end
+
+local report = logs.reporter("system")
+
+function statistics.timed(action)
+ starttiming("run")
+ action()
+ stoptiming("run")
+ report("total runtime: %s",elapsedtime("run"))
+end
+
+-- where, not really the best spot for this:
+
+commands = commands or { }
+
+function commands.resettimer(name)
+ resettiming(name or "whatever")
+ starttiming(name or "whatever")
+end
+
+function commands.elapsedtime(name)
+ stoptiming(name or "whatever")
+ context(elapsedtime(name or "whatever"))
+end
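
-- Editor's illustration, not part of the patch: the nested start/stop protocol of
-- the timers above. Only the outermost start/stop pair on an instance accumulates
-- time, so overlapping or recursive timing of the same instance is harmless; the
-- instance name "demo" is invented.
statistics.starttiming("demo")
statistics.starttiming("demo")           -- nested call: only bumps the counter
-- ... some work ...
statistics.stoptiming("demo")            -- inner stop: just decrements the counter
statistics.stoptiming("demo")            -- outer stop: records the elapsed time
print(statistics.elapsedtime("demo"))    -- e.g. "0.123"
print(statistics.elapsedseconds("demo")) -- nil when below statistics.threshold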
diff --git a/tex/context/base/trac-jus.lua b/tex/context/base/trac-jus.lua
index 9d99f059d..4be9b30f8 100644
--- a/tex/context/base/trac-jus.lua
+++ b/tex/context/base/trac-jus.lua
@@ -1,136 +1,136 @@
-if not modules then modules = { } end modules ['trac-jus'] = {
- version = 1.001,
- comment = "companion to trac-jus.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local checkers = typesetters.checkers or { }
-typesetters.checkers = checkers
-
------ report_justification = logs.reporter("visualize","justification")
-
-local a_alignstate = attributes.private("alignstate")
-local a_justification = attributes.private("justification")
-
-local tracers = nodes.tracers
-local setcolor = tracers.colors.set
-local settransparency = tracers.transparencies.set
-
-local new_rule = nodes.pool.rule
-local new_glue = nodes.pool.glue
-local new_kern = nodes.pool.kern
-local concat_nodes = nodes.concat
-local hpack_nodes = node.hpack
-local copy_node = node.copy
-local get_list_dimensions = node.dimensions
-local hlist_code = nodes.nodecodes.hlist
-
-local tex_set_attribute = tex.setattribute
-local unsetvalue = attributes.unsetvalue
-
-local min_threshold = 0
-local max_threshold = 0
-
-local function set(n)
- nodes.tasks.enableaction("mvlbuilders", "typesetters.checkers.handler")
- nodes.tasks.enableaction("vboxbuilders","typesetters.checkers.handler")
- tex_set_attribute(a_justification,n or 1)
- function typesetters.checkers.set(n)
- tex_set_attribute(a_justification,n or 1)
- end
-end
-
-local function reset()
- tex_set_attribute(a_justification,unsetvalue)
-end
-
-checkers.set = set
-checkers.reset = reset
-
-function commands.showjustification(n)
- set(n)
-end
-
-trackers.register("visualizers.justification", function(v)
- if v then
- set(1)
- else
- reset()
- end
-end)
-
-function checkers.handler(head)
- for current in node.traverse_id(hlist_code,head) do
- if current[a_justification] == 1 then
- current[a_justification] = 0
- local width = current.width
- if width > 0 then
- local list = current.list
- if list then
- local naturalwidth, naturalheight, naturaldepth = get_list_dimensions(list)
- local delta = naturalwidth - width
- if naturalwidth == 0 or delta == 0 then
- -- special box
- elseif delta >= max_threshold then
- local rule = new_rule(delta,naturalheight,naturaldepth)
- list = hpack_nodes(list,width,"exactly")
- if list.glue_set == 1 then
- setcolor(rule,"trace:dr")
- settransparency(rule,"trace:dr")
- else
- setcolor(rule,"trace:db")
- settransparency(rule,"trace:db")
- end
- rule = hpack_nodes(rule)
- rule.width = 0
- rule.height = 0
- rule.depth = 0
- current.list = concat_nodes { list, rule }
- -- current.list = concat_nodes { list, new_kern(-naturalwidth+width), rule }
- elseif delta <= min_threshold then
- local alignstate = list[a_alignstate]
- if alignstate == 1 then
- local rule = new_rule(-delta,naturalheight,naturaldepth)
- setcolor(rule,"trace:dc")
- settransparency(rule,"trace:dc")
- rule = hpack_nodes(rule)
- rule.height = 0
- rule.depth = 0
- rule.width = 0
- current.list = nodes.concat { rule, list }
- elseif alignstate == 2 then
- local rule = new_rule(-delta/2,naturalheight,naturaldepth)
- setcolor(rule,"trace:dy")
- settransparency(rule,"trace:dy")
- rule = hpack_nodes(rule)
- rule.width = 0
- rule.height = 0
- rule.depth = 0
- current.list = concat_nodes { copy_node(rule), list, new_kern(delta/2), rule }
- elseif alignstate == 3 then
- local rule = new_rule(-delta,naturalheight,naturaldepth)
- setcolor(rule,"trace:dm")
- settransparency(rule,"trace:dm")
- rule = hpack_nodes(rule)
- rule.height = 0
- rule.depth = 0
- current.list = concat_nodes { list, new_kern(delta), rule }
- else
- local rule = new_rule(-delta,naturalheight,naturaldepth)
- setcolor(rule,"trace:dg")
- settransparency(rule,"trace:dg")
- rule = hpack_nodes(rule)
- rule.height = 0
- rule.depth = 0
- rule.width = 0
- current.list = concat_nodes { list, new_kern(delta), rule }
- end
- end
- end
- end
- end
- end
- return head
-end
+if not modules then modules = { } end modules ['trac-jus'] = {
+ version = 1.001,
+ comment = "companion to trac-jus.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local checkers = typesetters.checkers or { }
+typesetters.checkers = checkers
+
+----- report_justification = logs.reporter("visualize","justification")
+
+local a_alignstate = attributes.private("alignstate")
+local a_justification = attributes.private("justification")
+
+local tracers = nodes.tracers
+local setcolor = tracers.colors.set
+local settransparency = tracers.transparencies.set
+
+local new_rule = nodes.pool.rule
+local new_glue = nodes.pool.glue
+local new_kern = nodes.pool.kern
+local concat_nodes = nodes.concat
+local hpack_nodes = node.hpack
+local copy_node = node.copy
+local get_list_dimensions = node.dimensions
+local hlist_code = nodes.nodecodes.hlist
+
+local tex_set_attribute = tex.setattribute
+local unsetvalue = attributes.unsetvalue
+
+local min_threshold = 0
+local max_threshold = 0
+
+local function set(n)
+ nodes.tasks.enableaction("mvlbuilders", "typesetters.checkers.handler")
+ nodes.tasks.enableaction("vboxbuilders","typesetters.checkers.handler")
+ tex_set_attribute(a_justification,n or 1)
+ function typesetters.checkers.set(n)
+ tex_set_attribute(a_justification,n or 1)
+ end
+end
+
+local function reset()
+ tex_set_attribute(a_justification,unsetvalue)
+end
+
+checkers.set = set
+checkers.reset = reset
+
+function commands.showjustification(n)
+ set(n)
+end
+
+trackers.register("visualizers.justification", function(v)
+ if v then
+ set(1)
+ else
+ reset()
+ end
+end)
+
+function checkers.handler(head)
+ for current in node.traverse_id(hlist_code,head) do
+ if current[a_justification] == 1 then
+ current[a_justification] = 0
+ local width = current.width
+ if width > 0 then
+ local list = current.list
+ if list then
+ local naturalwidth, naturalheight, naturaldepth = get_list_dimensions(list)
+ local delta = naturalwidth - width
+ if naturalwidth == 0 or delta == 0 then
+ -- special box
+ elseif delta >= max_threshold then
+ local rule = new_rule(delta,naturalheight,naturaldepth)
+ list = hpack_nodes(list,width,"exactly")
+ if list.glue_set == 1 then
+ setcolor(rule,"trace:dr")
+ settransparency(rule,"trace:dr")
+ else
+ setcolor(rule,"trace:db")
+ settransparency(rule,"trace:db")
+ end
+ rule = hpack_nodes(rule)
+ rule.width = 0
+ rule.height = 0
+ rule.depth = 0
+ current.list = concat_nodes { list, rule }
+ -- current.list = concat_nodes { list, new_kern(-naturalwidth+width), rule }
+ elseif delta <= min_threshold then
+ local alignstate = list[a_alignstate]
+ if alignstate == 1 then
+ local rule = new_rule(-delta,naturalheight,naturaldepth)
+ setcolor(rule,"trace:dc")
+ settransparency(rule,"trace:dc")
+ rule = hpack_nodes(rule)
+ rule.height = 0
+ rule.depth = 0
+ rule.width = 0
+ current.list = nodes.concat { rule, list }
+ elseif alignstate == 2 then
+ local rule = new_rule(-delta/2,naturalheight,naturaldepth)
+ setcolor(rule,"trace:dy")
+ settransparency(rule,"trace:dy")
+ rule = hpack_nodes(rule)
+ rule.width = 0
+ rule.height = 0
+ rule.depth = 0
+ current.list = concat_nodes { copy_node(rule), list, new_kern(delta/2), rule }
+ elseif alignstate == 3 then
+ local rule = new_rule(-delta,naturalheight,naturaldepth)
+ setcolor(rule,"trace:dm")
+ settransparency(rule,"trace:dm")
+ rule = hpack_nodes(rule)
+ rule.height = 0
+ rule.depth = 0
+ current.list = concat_nodes { list, new_kern(delta), rule }
+ else
+ local rule = new_rule(-delta,naturalheight,naturaldepth)
+ setcolor(rule,"trace:dg")
+ settransparency(rule,"trace:dg")
+ rule = hpack_nodes(rule)
+ rule.height = 0
+ rule.depth = 0
+ rule.width = 0
+ current.list = concat_nodes { list, new_kern(delta), rule }
+ end
+ end
+ end
+ end
+ end
+ end
+ return head
+end
diff --git a/tex/context/base/trac-lmx.lua b/tex/context/base/trac-lmx.lua
index 18c7f6020..1a12d2078 100644
--- a/tex/context/base/trac-lmx.lua
+++ b/tex/context/base/trac-lmx.lua
@@ -1,732 +1,732 @@
-if not modules then modules = { } end modules ['trac-lmx'] = {
- version = 1.002,
- comment = "companion to trac-lmx.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- this one will be adapted to the latest helpers
-
-local type, tostring, rawget, loadstring, pcall = type, tostring, rawget, loadstring, pcall
-local format, sub, gsub = string.format, string.sub, string.gsub
-local concat = table.concat
-local collapsespaces = string.collapsespaces
-local P, Cc, Cs, C, Carg, lpegmatch = lpeg.P, lpeg.Cc, lpeg.Cs, lpeg.C, lpeg.Carg, lpeg.match
-local joinpath, replacesuffix, pathpart, filesuffix = file.join, file.replacesuffix, file.pathpart, file.suffix
-
-local allocate = utilities.storage.allocate
-local setmetatableindex = table.setmetatableindex
-
------ trace_templates = false trackers .register("lmx.templates", function(v) trace_templates = v end)
-local trace_variables = false trackers .register("lmx.variables", function(v) trace_variables = v end)
-
-local cache_templates = true directives.register("lmx.cache.templates",function(v) cache_templates = v end)
-local cache_files = true directives.register("lmx.cache.files", function(v) cache_files = v end)
-
-local report_lmx = logs.reporter("lmx")
-local report_error = logs.reporter("lmx","error")
-
-lmx = lmx or { }
-local lmx = lmx
-
--- This will change: we will just pass the global defaults as argument, but then we need
--- to rewrite some older code or come up with an ugly trick.
-
-local lmxvariables = {
- ['title-default'] = 'ConTeXt LMX File',
- ['color-background-green'] = '#4F6F6F',
- ['color-background-blue'] = '#6F6F8F',
- ['color-background-yellow'] = '#8F8F6F',
- ['color-background-purple'] = '#8F6F8F',
- ['color-background-body'] = '#808080',
- ['color-background-main'] = '#3F3F3F',
-}
-
-local lmxinherited = {
- ['title'] = 'title-default',
- ['color-background-one'] = 'color-background-green',
- ['color-background-two'] = 'color-background-blue',
- ['color-background-three'] = 'color-background-one',
- ['color-background-four'] = 'color-background-two',
-}
-
-lmx.variables = lmxvariables
-lmx.inherited = lmxinherited
-
-setmetatableindex(lmxvariables,function(t,k)
- k = lmxinherited[k]
- while k do
- local v = rawget(lmxvariables,k)
- if v then
- return v
- end
- k = lmxinherited[k]
- end
-end)
-
-function lmx.set(key,value)
- lmxvariables[key] = value
-end
-
-function lmx.get(key)
- return lmxvariables[key] or ""
-end
-
-lmx.report = report_lmx
-
--- helpers
-
--- the variables table is an empty one that gets linked to a defaults table
--- that gets passed with a creation (first time only) and that itself links
--- to one that gets passed to the converter
-
-local variables = { } -- we assume no nesting
-local result = { } -- we assume no nesting
-
-local function do_print(one,two,...)
- if two then
- result[#result+1] = concat { one, two, ... }
- else
- result[#result+1] = one
- end
-end
-
--- Although it does not make much sense for most elements, we provide a mechanism
--- to print wrapped content, something that is more efficient when we are constructing
--- tables.
-
-local html = { }
-lmx.html = html
-
-function html.td(str)
- if type(str) == "table" then
- for i=1,#str do -- spoils t !
- str[i] = format("<td>%s</td>",str[i] or "")
- end
- result[#result+1] = concat(str)
- else
- result[#result+1] = format("<td>%s</td>",str or "")
- end
-end
-
-function html.th(str)
- if type(str) == "table" then
- for i=1,#str do -- spoils t !
- str[i] = format("<th>%s</th>",str[i] or "")
- end
- result[#result+1] = concat(str)
- else
- result[#result+1] = format("<th>%s</th>",str or "")
- end
-end
-
-function html.a(text,url)
- result[#result+1] = format("<a href=%q>%s</a>",url,text)
-end
-
-setmetatableindex(html,function(t,k)
- local f = format("<%s>%%s</%s>",k,k)
- local v = function(str) result[#result+1] = format(f,str or "") end
- t[k] = v
- return v
-end)
-
--- Loading templates:
-
-local function loadedfile(name)
- name = resolvers and resolvers.findfile and resolvers.findfile(name) or name
- local data = io.loaddata(name)
- if not data or data == "" then
- report_lmx("file %a is empty",name)
- end
- return data
-end
-
-local function loadedsubfile(name)
- return io.loaddata(resolvers and resolvers.findfile and resolvers.findfile(name) or name)
-end
-
-lmx.loadedfile = loadedfile
-
--- A few helpers (the next one could end up in l-lpeg):
-
-local usedpaths = { }
-local givenpath = nil
-
-local do_nested_include = nil
-
-local pattern = lpeg.replacer {
- ["&"] = "&",
- [">"] = ">",
- ["<"] = "<",
- ['"'] = """,
-}
-
-local function do_escape(str)
- return lpegmatch(pattern,str) or str
-end
-
-local function do_variable(str)
- local value = variables[str]
- if not trace_variables then
- -- nothing
- elseif type(value) == "string" then
- if #value > 80 then
- report_lmx("variable %a is set to: %s ...",str,collapsespaces(sub(value,1,80)))
- else
- report_lmx("variable %a is set to: %s",str,collapsespaces(value))
- end
- elseif type(value) == "nil" then
- report_lmx("variable %a is set to: %s",str,"")
- else
- report_lmx("variable %a is set to: %S",str,value)
- end
- if type(value) == "function" then -- obsolete ... will go away
- return value(str)
- else
- return value
- end
-end
-
-local function do_type(str)
- if str and str ~= "" then
- result[#result+1] = format("<tt>%s</tt>",do_escape(str))
- end
-end
-
-local function do_fprint(str,...)
- if str and str ~= "" then
- result[#result+1] = format(str,...)
- end
-end
-
-local function do_eprint(str,...)
- if str and str ~= "" then
- result[#result+1] = lpegmatch(pattern,format(str,...))
- end
-end
-
-local function do_print_variable(str)
- local str = do_variable(str) -- variables[str]
- if str and str ~= "" then
- result[#result+1] = str
- end
-end
-
-local function do_type_variable(str)
- local str = do_variable(str) -- variables[str]
- if str and str ~= "" then
- result[#result+1] = format("<tt>%s</tt>",do_escape(str))
- end
-end
-
-local function do_include(filename,option)
- local data = loadedsubfile(filename)
- if (not data or data == "") and givenpath then
- data = loadedsubfile(joinpath(givenpath,filename))
- end
- if (not data or data == "") and type(usedpaths) == "table" then
- for i=1,#usedpaths do
- data = loadedsubfile(joinpath(usedpaths[i],filename))
- if data and data ~= "" then
- break
- end
- end
- end
- if not data or data == "" then
- data = format("<!-- unknown lmx include file: %s -->",filename)
- report_lmx("include file %a is empty",filename)
- else
- -- report_lmx("included file: %s",filename)
- data = do_nested_include(data)
- end
- if filesuffix(filename,"css") and option == "strip" then -- new
- data = lmx.stripcss(data)
- end
- return data
-end
-
--- Flushers:
-
-lmx.print = do_print
-lmx.type = do_type
-lmx.eprint = do_eprint
-lmx.fprint = do_fprint
-
-lmx.escape = do_escape
-lmx.urlescape = url.escape
-lmx.variable = do_variable
-lmx.include = do_include
-
-lmx.inject = do_print
-lmx.finject = do_fprint
-lmx.einject = do_eprint
-
-lmx.pv = do_print_variable
-lmx.tv = do_type_variable
-
--- The next functions set up the closure.
-
-function lmx.initialize(d,v)
- if not v then
- setmetatableindex(d,lmxvariables)
- if variables ~= d then
- setmetatableindex(variables,d)
- if trace_variables then
- report_lmx("using chain: variables => given defaults => lmx variables")
- end
- elseif trace_variables then
- report_lmx("using chain: variables == given defaults => lmx variables")
- end
- elseif d ~= v then
- setmetatableindex(v,d)
- if d ~= lmxvariables then
- setmetatableindex(d,lmxvariables)
- if variables ~= v then
- setmetatableindex(variables,v)
- if trace_variables then
- report_lmx("using chain: variables => given variables => given defaults => lmx variables")
- end
- elseif trace_variables then
- report_lmx("using chain: variables == given variables => given defaults => lmx variables")
- end
- else
- if variables ~= v then
- setmetatableindex(variables,v)
- if trace_variables then
- report_lmx("using chain: variabes => given variables => given defaults")
- end
- elseif trace_variables then
- report_lmx("using chain: variables == given variables => given defaults")
- end
- end
- else
- setmetatableindex(v,lmxvariables)
- if variables ~= v then
- setmetatableindex(variables,v)
- if trace_variables then
- report_lmx("using chain: variables => given variables => lmx variables")
- end
- elseif trace_variables then
- report_lmx("using chain: variables == given variables => lmx variables")
- end
- end
- result = { }
-end
-
-function lmx.finalized()
- local collapsed = concat(result)
- result = { } -- free memory
- return collapsed
-end
-
-function lmx.getvariables()
- return variables
-end
-
-function lmx.reset()
- -- obsolete
-end
-
--- Creation: (todo: strip <!-- --> comments)
-
--- local template = [[
--- return function(defaults,variables)
---
--- -- initialize
---
--- lmx.initialize(defaults,variables)
---
--- -- interface
---
--- local definitions = { }
--- local variables = lmx.getvariables()
--- local html = lmx.html
--- local inject = lmx.print
--- local finject = lmx.fprint
--- local einject = lmx.eprint
--- local escape = lmx.escape
--- local verbose = lmx.type
---
--- -- shortcuts (sort of obsolete as there is no gain)
---
--- local p = lmx.print
--- local f = lmx.fprint
--- local v = lmx.variable
--- local e = lmx.escape
--- local t = lmx.type
--- local pv = lmx.pv
--- local tv = lmx.tv
---
--- -- generator
---
--- %s
---
--- -- finalize
---
--- return lmx.finalized()
---
--- end
--- ]]
-
-local template = [[
--- interface
-
-local html = lmx.html
-local inject = lmx.print
-local finject = lmx.fprint -- better use the following
-local einject = lmx.eprint -- better use the following
-local injectf = lmx.fprint
-local injecte = lmx.eprint
-local injectfmt = lmx.fprint
-local injectesc = lmx.eprint
-local escape = lmx.escape
-local verbose = lmx.type
-
-local i_n_j_e_c_t = lmx.print
-
--- shortcuts (sort of obsolete as there is no gain)
-
-local p = lmx.print
-local f = lmx.fprint
-local v = lmx.variable
-local e = lmx.escape
-local t = lmx.type
-local pv = lmx.pv
-local tv = lmx.tv
-
-local lmx_initialize = lmx.initialize
-local lmx_finalized = lmx.finalized
-local lmx_getvariables = lmx.getvariables
-
--- generator
-
-return function(defaults,variables)
-
- lmx_initialize(defaults,variables)
-
- local definitions = { }
- local variables = lmx_getvariables()
-
- %s -- the action: appends to result
-
- return lmx_finalized()
-
-end
-]]
-
-local function savedefinition(definitions,tag,content)
- definitions[tag] = content
- return ""
-end
-
-local function getdefinition(definitions,tag)
- return definitions[tag] or ""
-end
-
-local whitespace = lpeg.patterns.whitespace
-local optionalspaces = whitespace^0
-
-local dquote = P('"')
-
-local begincomment = P("<!--")
-local endcomment = P("-->")
-
-local beginembedxml = P("<?")
-local endembedxml = P("?>")
-
-local beginembedcss = P("/*")
-local endembedcss = P("*/")
-
-local gobbledendxml = (optionalspaces * endembedxml) / ""
------ argumentxml = (1-gobbledendxml)^0
-local argumentxml = (whitespace^1 + dquote * C((1-dquote)^1) * dquote + C((1-gobbledendxml-whitespace)^1))^0
-
-local gobbledendcss = (optionalspaces * endembedcss) / ""
------ argumentcss = (1-gobbledendcss)^0
-local argumentcss = (whitespace^1 + dquote * C((1-dquote)^1) * dquote + C((1-gobbledendcss-whitespace)^1))^0
-
-local commentxml = (begincomment * (1-endcomment)^0 * endcomment) / ""
-
-local beginluaxml = (beginembedxml * P("lua")) / ""
-local endluaxml = endembedxml / ""
-
-local luacodexml = beginluaxml
- * (1-endluaxml)^1
- * endluaxml
-
-local beginluacss = (beginembedcss * P("lua")) / ""
-local endluacss = endembedcss / ""
-
-local luacodecss = beginluacss
- * (1-endluacss)^1
- * endluacss
-
-local othercode = (1-beginluaxml-beginluacss)^1 / " i_n_j_e_c_t[==[%0]==] "
-
-local includexml = ((beginembedxml * P("lmx-include") * optionalspaces) / "")
- * (argumentxml / do_include)
- * gobbledendxml
-
-local includecss = ((beginembedcss * P("lmx-include") * optionalspaces) / "")
- * (argumentcss / do_include)
- * gobbledendcss
-
-local definexml_b = ((beginembedxml * P("lmx-define-begin") * optionalspaces) / "")
- * argumentxml
- * gobbledendxml
-
-local definexml_e = ((beginembedxml * P("lmx-define-end") * optionalspaces) / "")
- * argumentxml
- * gobbledendxml
-
-local definexml_c = C((1-definexml_e)^0)
-
-local definexml = (Carg(1) * C(definexml_b) * definexml_c * definexml_e) / savedefinition
-
-local resolvexml = ((beginembedxml * P("lmx-resolve") * optionalspaces) / "")
- * ((Carg(1) * C(argumentxml)) / getdefinition)
- * gobbledendxml
-
-local definecss_b = ((beginembedcss * P("lmx-define-begin") * optionalspaces) / "")
- * argumentcss
- * gobbledendcss
-
-local definecss_e = ((beginembedcss * P("lmx-define-end") * optionalspaces) / "")
- * argumentcss
- * gobbledendcss
-
-local definecss_c = C((1-definecss_e)^0)
-
-local definecss = (Carg(1) * C(definecss_b) * definecss_c * definecss_e) / savedefinition
-
-local resolvecss = ((beginembedcss * P("lmx-resolve") * optionalspaces) / "")
- * ((Carg(1) * C(argumentcss)) / getdefinition)
- * gobbledendcss
-
-local pattern_1 = Cs((commentxml + includexml + includecss + P(1))^0) -- get rid of xml comments asap
-local pattern_2 = Cs((definexml + resolvexml + definecss + resolvecss + P(1))^0)
-local pattern_3 = Cs((luacodexml + luacodecss + othercode)^0)
-
-local cache = { }
-
-local function lmxerror(str)
- report_error(str)
- return html.tt(str)
-end
-
-local function wrapper(converter,defaults,variables)
- local outcome, message = pcall(converter,defaults,variables)
- if not outcome then
- return lmxerror(format("error in conversion: %s",message))
- else
- return message
- end
-end
-
-do_nested_include = function(data) -- also used in include
- return lpegmatch(pattern_1,data)
-end
-
-function lmxnew(data,defaults,nocache,path) -- todo: use defaults in calling routines
- data = data or ""
- local known = cache[data]
- if not known then
- givenpath = path
- usedpaths = lmxvariables.includepath or { }
- if type(usedpaths) == "string" then
- usedpaths = { usedpaths }
- end
- data = lpegmatch(pattern_1,data)
- data = lpegmatch(pattern_2,data,1,{})
- data = lpegmatch(pattern_3,data)
- local converted = loadstring(format(template,data))
- if converted then
- converted = converted()
- end
- defaults = defaults or { }
- local converter
- if converted then
- converter = function(variables)
- return wrapper(converted,defaults,variables)
- end
- else
- report_error("error in:\n%s\n:",data)
- converter = function() lmxerror("error in template") end
- end
- known = {
- data = defaults.trace and data or "",
- variables = defaults,
- converter = converter,
- }
- if cache_templates and nocache ~= false then
- cache[data] = known
- end
- elseif variables then
- known.variables = variables
- end
- return known, known.variables
-end
-
-local function lmxresult(self,variables)
- if self then
- local converter = self.converter
- if converter then
- local converted = converter(variables)
- if trace_variables then -- will become templates
- report_lmx("converted size: %s",#converted)
- end
- return converted or lmxerror("no result from converter")
- else
- return lmxerror("invalid converter")
- end
- else
- return lmxerror("invalid specification")
- end
-end
-
-lmx.new = lmxnew
-lmx.result = lmxresult
-
-local loadedfiles = { }
-
-function lmx.convertstring(templatestring,variables,nocache,path)
- return lmxresult(lmxnew(templatestring,nil,nocache,path),variables)
-end
-
-function lmx.convertfile(templatefile,variables,nocache)
- if trace_variables then -- will become templates
- report_lmx("converting file %a",templatefile)
- end
- local converter = loadedfiles[templatefile]
- if not converter then
- converter = lmxnew(loadedfile(templatefile),nil,nocache,pathpart(templatefile))
- loadedfiles[templatefile] = converter
- end
- return lmxresult(converter,variables)
-end
-
-function lmxconvert(templatefile,resultfile,variables,nocache) -- or (templatefile,variables)
- if trace_variables then -- will become templates
- report_lmx("converting file %a",templatefile)
- end
- if not variables and type(resultfile) == "table" then
- variables = resultfile
- end
- local converter = loadedfiles[templatefile]
- if not converter then
- converter = lmxnew(loadedfile(templatefile),nil,nocache,pathpart(templatefile))
- if cache_files then
- loadedfiles[templatefile] = converter
- end
- end
- local result = lmxresult(converter,variables)
- if resultfile then
- io.savedata(resultfile,result)
- else
- return result
- end
-end
-
-lmx.convert = lmxconvert
-
--- helpers
-
-local nocomment = (beginembedcss * (1 - endembedcss)^1 * endembedcss) / ""
-local nowhitespace = whitespace^1 / " " -- ""
-local semistripped = whitespace^1 / "" * P(";")
-local stripper = Cs((nocomment + semistripped + nowhitespace + 1)^1)
-
-function lmx.stripcss(str)
- return lpegmatch(stripper,str)
-end
-
-function lmx.color(r,g,b,a)
- if r > 1 then
- r = 1
- end
- if g > 1 then
- g = 1
- end
- if b > 1 then
- b = 1
- end
- if not a then
- a= 0
- elseif a > 1 then
- a = 1
- end
- if a > 0 then
- return format("rgba(%s%%,%s%%,%s%%,%s)",r*100,g*100,b*100,a)
- else
- return format("rgb(%s%%,%s%%,%s%%)",r*100,g*100,b*100)
- end
-end
-
-
--- these can be overloaded
-
-lmx.lmxfile = string.itself
-lmx.htmfile = string.itself
-lmx.popupfile = os.launch
-
-function lmxmake(name,variables)
- local lmxfile = lmx.lmxfile(name)
- local htmfile = lmx.htmfile(name)
- if lmxfile == htmfile then
- htmfile = replacesuffix(lmxfile,"html")
- end
- lmxconvert(lmxfile,htmfile,variables)
- return htmfile
-end
-
-lmx.make = lmxmake
-
-function lmx.show(name,variables)
- local htmfile = lmxmake(name,variables)
- lmx.popupfile(htmfile)
- return htmfile
-end
-
--- Command line (will become mtx-lmx):
-
-if arg then
- if arg[1] == "--show" then if arg[2] then lmx.show (arg[2]) end
- elseif arg[1] == "--convert" then if arg[2] then lmx.convert(arg[2], arg[3] or "temp.html") end
- end
-end
-
--- Test 1:
-
--- inspect(lmx.result(lmx.new(io.loaddata("t:/sources/context-timing.lmx"))))
-
--- Test 2:
-
--- local str = [[
---
---
--- some content a
--- some content b
---
---
---
---
---
---
---
---
---
---
---
--- ]]
-
--- local defaults = { trace = true, a = 3, b = 3 }
--- local result = lmx.new(str,defaults)
--- inspect(result.data)
--- inspect(result.converter(defaults))
--- inspect(result.converter { a = 1 })
--- inspect(lmx.result(result, { b = 2 }))
--- inspect(lmx.result(result, { a = 20000, b = 40000 }))
+if not modules then modules = { } end modules ['trac-lmx'] = {
+ version = 1.002,
+ comment = "companion to trac-lmx.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- this one will be adapted to the latest helpers
+
+local type, tostring, rawget, loadstring, pcall = type, tostring, rawget, loadstring, pcall
+local format, sub, gsub = string.format, string.sub, string.gsub
+local concat = table.concat
+local collapsespaces = string.collapsespaces
+local P, Cc, Cs, C, Carg, lpegmatch = lpeg.P, lpeg.Cc, lpeg.Cs, lpeg.C, lpeg.Carg, lpeg.match
+local joinpath, replacesuffix, pathpart, filesuffix = file.join, file.replacesuffix, file.pathpart, file.suffix
+
+local allocate = utilities.storage.allocate
+local setmetatableindex = table.setmetatableindex
+
+----- trace_templates = false trackers .register("lmx.templates", function(v) trace_templates = v end)
+local trace_variables = false trackers .register("lmx.variables", function(v) trace_variables = v end)
+
+local cache_templates = true directives.register("lmx.cache.templates",function(v) cache_templates = v end)
+local cache_files = true directives.register("lmx.cache.files", function(v) cache_files = v end)
+
+local report_lmx = logs.reporter("lmx")
+local report_error = logs.reporter("lmx","error")
+
+lmx = lmx or { }
+local lmx = lmx
+
+-- This will change: we will just pass the global defaults as argument, but then we need
+-- to rewrite some older code or come up with an ugly trick.
+
+local lmxvariables = {
+ ['title-default'] = 'ConTeXt LMX File',
+ ['color-background-green'] = '#4F6F6F',
+ ['color-background-blue'] = '#6F6F8F',
+ ['color-background-yellow'] = '#8F8F6F',
+ ['color-background-purple'] = '#8F6F8F',
+ ['color-background-body'] = '#808080',
+ ['color-background-main'] = '#3F3F3F',
+}
+
+local lmxinherited = {
+ ['title'] = 'title-default',
+ ['color-background-one'] = 'color-background-green',
+ ['color-background-two'] = 'color-background-blue',
+ ['color-background-three'] = 'color-background-one',
+ ['color-background-four'] = 'color-background-two',
+}
+
+lmx.variables = lmxvariables
+lmx.inherited = lmxinherited
+
+setmetatableindex(lmxvariables,function(t,k)
+ k = lmxinherited[k]
+ while k do
+ local v = rawget(lmxvariables,k)
+ if v then
+ return v
+ end
+ k = lmxinherited[k]
+ end
+end)
+
+function lmx.set(key,value)
+ lmxvariables[key] = value
+end
+
+function lmx.get(key)
+ return lmxvariables[key] or ""
+end
+
+lmx.report = report_lmx
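+
+-- Usage sketch (not part of the module, shown commented out like the tests at
+-- the end of this file): lookups fall back through the lmxinherited table, so
+-- asking for an inherited key resolves to the value of its (transitive) parent.
+--
+-- print(lmx.get("color-background-one"))   -- "#4F6F6F" (via color-background-green)
+-- print(lmx.get("color-background-three")) -- "#4F6F6F" (three -> one -> green)
+-- lmx.set("title","status page")
+-- print(lmx.get("title"))                  -- "status page" instead of the inherited default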
+
+-- helpers
+
+-- the variables table is an empty one that gets linked to a defaults table
+-- that gets passed with a creation (first time only) and that itself links
+-- to one that gets passed to the converter
+
+local variables = { } -- we assume no nesting
+local result = { } -- we assume no nesting
+
+local function do_print(one,two,...)
+ if two then
+ result[#result+1] = concat { one, two, ... }
+ else
+ result[#result+1] = one
+ end
+end
+
+-- Although it does not make much sense for most elements, we provide a mechanism
+-- to print wrapped content, something that is more efficient when we are constructing
+-- tables.
+
+local html = { }
+lmx.html = html
+
+function html.td(str)
+ if type(str) == "table" then
+ for i=1,#str do -- spoils t !
+ str[i] = format("<td>%s</td>",str[i] or "")
+ end
+ result[#result+1] = concat(str)
+ else
+ result[#result+1] = format("<td>%s</td>",str or "")
+ end
+end
+
+function html.th(str)
+ if type(str) == "table" then
+ for i=1,#str do -- spoils t !
+ str[i] = format("<th>%s</th>",str[i] or "")
+ end
+ result[#result+1] = concat(str)
+ else
+ result[#result+1] = format("<th>%s</th>",str or "")
+ end
+end
+
+function html.a(text,url)
+ result[#result+1] = format("<a href=%q>%s</a>",url,text)
+end
+
+setmetatableindex(html,function(t,k)
+ local f = format("<%s>%%s</%s>",k,k)
+ local v = function(str) result[#result+1] = format(f,str or "") end
+ t[k] = v
+ return v
+end)
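+
+-- Illustration (not part of the module): undefined element names produce a
+-- generated wrapper through the metatable above that appends "<tag>...</tag>"
+-- to the result buffer, so a template can simply call:
+--
+-- html.tt("some message") -- appends "<tt>some message</tt>"
+-- html.div("some block")  -- appends "<div>some block</div>"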
+
+-- Loading templates:
+
+local function loadedfile(name)
+ name = resolvers and resolvers.findfile and resolvers.findfile(name) or name
+ local data = io.loaddata(name)
+ if not data or data == "" then
+ report_lmx("file %a is empty",name)
+ end
+ return data
+end
+
+local function loadedsubfile(name)
+ return io.loaddata(resolvers and resolvers.findfile and resolvers.findfile(name) or name)
+end
+
+lmx.loadedfile = loadedfile
+
+-- A few helpers (the next one could end up in l-lpeg):
+
+local usedpaths = { }
+local givenpath = nil
+
+local do_nested_include = nil
+
+local pattern = lpeg.replacer {
+ ["&"] = "&",
+ [">"] = ">",
+ ["<"] = "<",
+ ['"'] = """,
+}
+
+local function do_escape(str)
+ return lpegmatch(pattern,str) or str
+end
+
+local function do_variable(str)
+ local value = variables[str]
+ if not trace_variables then
+ -- nothing
+ elseif type(value) == "string" then
+ if #value > 80 then
+ report_lmx("variable %a is set to: %s ...",str,collapsespaces(sub(value,1,80)))
+ else
+ report_lmx("variable %a is set to: %s",str,collapsespaces(value))
+ end
+ elseif type(value) == "nil" then
+ report_lmx("variable %a is set to: %s",str,"")
+ else
+ report_lmx("variable %a is set to: %S",str,value)
+ end
+ if type(value) == "function" then -- obsolete ... will go away
+ return value(str)
+ else
+ return value
+ end
+end
+
+local function do_type(str)
+ if str and str ~= "" then
+ result[#result+1] = format("<tt>%s</tt>",do_escape(str))
+ end
+end
+
+local function do_fprint(str,...)
+ if str and str ~= "" then
+ result[#result+1] = format(str,...)
+ end
+end
+
+local function do_eprint(str,...)
+ if str and str ~= "" then
+ result[#result+1] = lpegmatch(pattern,format(str,...))
+ end
+end
+
+local function do_print_variable(str)
+ local str = do_variable(str) -- variables[str]
+ if str and str ~= "" then
+ result[#result+1] = str
+ end
+end
+
+local function do_type_variable(str)
+ local str = do_variable(str) -- variables[str]
+ if str and str ~= "" then
+ result[#result+1] = format("<tt>%s</tt>",do_escape(str))
+ end
+end
+
+local function do_include(filename,option)
+ local data = loadedsubfile(filename)
+ if (not data or data == "") and givenpath then
+ data = loadedsubfile(joinpath(givenpath,filename))
+ end
+ if (not data or data == "") and type(usedpaths) == "table" then
+ for i=1,#usedpaths do
+ data = loadedsubfile(joinpath(usedpaths[i],filename))
+ if data and data ~= "" then
+ break
+ end
+ end
+ end
+ if not data or data == "" then
+ data = format("<!-- unknown lmx include file: %s -->",filename)
+ report_lmx("include file %a is empty",filename)
+ else
+ -- report_lmx("included file: %s",filename)
+ data = do_nested_include(data)
+ end
+ if filesuffix(filename,"css") and option == "strip" then -- new
+ data = lmx.stripcss(data)
+ end
+ return data
+end
+
+-- Flushers:
+
+lmx.print = do_print
+lmx.type = do_type
+lmx.eprint = do_eprint
+lmx.fprint = do_fprint
+
+lmx.escape = do_escape
+lmx.urlescape = url.escape
+lmx.variable = do_variable
+lmx.include = do_include
+
+lmx.inject = do_print
+lmx.finject = do_fprint
+lmx.einject = do_eprint
+
+lmx.pv = do_print_variable
+lmx.tv = do_type_variable
+
+-- The next functions set up the closure.
+
+function lmx.initialize(d,v)
+ if not v then
+ setmetatableindex(d,lmxvariables)
+ if variables ~= d then
+ setmetatableindex(variables,d)
+ if trace_variables then
+ report_lmx("using chain: variables => given defaults => lmx variables")
+ end
+ elseif trace_variables then
+ report_lmx("using chain: variables == given defaults => lmx variables")
+ end
+ elseif d ~= v then
+ setmetatableindex(v,d)
+ if d ~= lmxvariables then
+ setmetatableindex(d,lmxvariables)
+ if variables ~= v then
+ setmetatableindex(variables,v)
+ if trace_variables then
+ report_lmx("using chain: variables => given variables => given defaults => lmx variables")
+ end
+ elseif trace_variables then
+ report_lmx("using chain: variables == given variables => given defaults => lmx variables")
+ end
+ else
+ if variables ~= v then
+ setmetatableindex(variables,v)
+ if trace_variables then
+ report_lmx("using chain: variabes => given variables => given defaults")
+ end
+ elseif trace_variables then
+ report_lmx("using chain: variables == given variables => given defaults")
+ end
+ end
+ else
+ setmetatableindex(v,lmxvariables)
+ if variables ~= v then
+ setmetatableindex(variables,v)
+ if trace_variables then
+ report_lmx("using chain: variables => given variables => lmx variables")
+ end
+ elseif trace_variables then
+ report_lmx("using chain: variables == given variables => lmx variables")
+ end
+ end
+ result = { }
+end
+
+function lmx.finalized()
+ local collapsed = concat(result)
+ result = { } -- free memory
+ return collapsed
+end
+
+function lmx.getvariables()
+ return variables
+end
+
+function lmx.reset()
+ -- obsolete
+end
+
+-- Creation: (todo: strip <!-- --> comments)
+
+-- local template = [[
+-- return function(defaults,variables)
+--
+-- -- initialize
+--
+-- lmx.initialize(defaults,variables)
+--
+-- -- interface
+--
+-- local definitions = { }
+-- local variables = lmx.getvariables()
+-- local html = lmx.html
+-- local inject = lmx.print
+-- local finject = lmx.fprint
+-- local einject = lmx.eprint
+-- local escape = lmx.escape
+-- local verbose = lmx.type
+--
+-- -- shortcuts (sort of obsolete as there is no gain)
+--
+-- local p = lmx.print
+-- local f = lmx.fprint
+-- local v = lmx.variable
+-- local e = lmx.escape
+-- local t = lmx.type
+-- local pv = lmx.pv
+-- local tv = lmx.tv
+--
+-- -- generator
+--
+-- %s
+--
+-- -- finalize
+--
+-- return lmx.finalized()
+--
+-- end
+-- ]]
+
+local template = [[
+-- interface
+
+local html = lmx.html
+local inject = lmx.print
+local finject = lmx.fprint -- better use the following
+local einject = lmx.eprint -- better use the following
+local injectf = lmx.fprint
+local injecte = lmx.eprint
+local injectfmt = lmx.fprint
+local injectesc = lmx.eprint
+local escape = lmx.escape
+local verbose = lmx.type
+
+local i_n_j_e_c_t = lmx.print
+
+-- shortcuts (sort of obsolete as there is no gain)
+
+local p = lmx.print
+local f = lmx.fprint
+local v = lmx.variable
+local e = lmx.escape
+local t = lmx.type
+local pv = lmx.pv
+local tv = lmx.tv
+
+local lmx_initialize = lmx.initialize
+local lmx_finalized = lmx.finalized
+local lmx_getvariables = lmx.getvariables
+
+-- generator
+
+return function(defaults,variables)
+
+ lmx_initialize(defaults,variables)
+
+ local definitions = { }
+ local variables = lmx_getvariables()
+
+ %s -- the action: appends to result
+
+ return lmx_finalized()
+
+end
+]]
+
+local function savedefinition(definitions,tag,content)
+ definitions[tag] = content
+ return ""
+end
+
+local function getdefinition(definitions,tag)
+ return definitions[tag] or ""
+end
+
+local whitespace = lpeg.patterns.whitespace
+local optionalspaces = whitespace^0
+
+local dquote = P('"')
+
+local begincomment = P("<!--")
+local endcomment = P("-->")
+
+local beginembedxml = P("<?")
+local endembedxml = P("?>")
+
+local beginembedcss = P("/*")
+local endembedcss = P("*/")
+
+local gobbledendxml = (optionalspaces * endembedxml) / ""
+----- argumentxml = (1-gobbledendxml)^0
+local argumentxml = (whitespace^1 + dquote * C((1-dquote)^1) * dquote + C((1-gobbledendxml-whitespace)^1))^0
+
+local gobbledendcss = (optionalspaces * endembedcss) / ""
+----- argumentcss = (1-gobbledendcss)^0
+local argumentcss = (whitespace^1 + dquote * C((1-dquote)^1) * dquote + C((1-gobbledendcss-whitespace)^1))^0
+
+local commentxml = (begincomment * (1-endcomment)^0 * endcomment) / ""
+
+local beginluaxml = (beginembedxml * P("lua")) / ""
+local endluaxml = endembedxml / ""
+
+local luacodexml = beginluaxml
+ * (1-endluaxml)^1
+ * endluaxml
+
+local beginluacss = (beginembedcss * P("lua")) / ""
+local endluacss = endembedcss / ""
+
+local luacodecss = beginluacss
+ * (1-endluacss)^1
+ * endluacss
+
+local othercode = (1-beginluaxml-beginluacss)^1 / " i_n_j_e_c_t[==[%0]==] "
+
+local includexml = ((beginembedxml * P("lmx-include") * optionalspaces) / "")
+ * (argumentxml / do_include)
+ * gobbledendxml
+
+local includecss = ((beginembedcss * P("lmx-include") * optionalspaces) / "")
+ * (argumentcss / do_include)
+ * gobbledendcss
+
+local definexml_b = ((beginembedxml * P("lmx-define-begin") * optionalspaces) / "")
+ * argumentxml
+ * gobbledendxml
+
+local definexml_e = ((beginembedxml * P("lmx-define-end") * optionalspaces) / "")
+ * argumentxml
+ * gobbledendxml
+
+local definexml_c = C((1-definexml_e)^0)
+
+local definexml = (Carg(1) * C(definexml_b) * definexml_c * definexml_e) / savedefinition
+
+local resolvexml = ((beginembedxml * P("lmx-resolve") * optionalspaces) / "")
+ * ((Carg(1) * C(argumentxml)) / getdefinition)
+ * gobbledendxml
+
+local definecss_b = ((beginembedcss * P("lmx-define-begin") * optionalspaces) / "")
+ * argumentcss
+ * gobbledendcss
+
+local definecss_e = ((beginembedcss * P("lmx-define-end") * optionalspaces) / "")
+ * argumentcss
+ * gobbledendcss
+
+local definecss_c = C((1-definecss_e)^0)
+
+local definecss = (Carg(1) * C(definecss_b) * definecss_c * definecss_e) / savedefinition
+
+local resolvecss = ((beginembedcss * P("lmx-resolve") * optionalspaces) / "")
+ * ((Carg(1) * C(argumentcss)) / getdefinition)
+ * gobbledendcss
+
+local pattern_1 = Cs((commentxml + includexml + includecss + P(1))^0) -- get rid of xml comments asap
+local pattern_2 = Cs((definexml + resolvexml + definecss + resolvecss + P(1))^0)
+local pattern_3 = Cs((luacodexml + luacodecss + othercode)^0)
+
+local cache = { }
+
+local function lmxerror(str)
+ report_error(str)
+ return html.tt(str)
+end
+
+local function wrapper(converter,defaults,variables)
+ local outcome, message = pcall(converter,defaults,variables)
+ if not outcome then
+ return lmxerror(format("error in conversion: %s",message))
+ else
+ return message
+ end
+end
+
+do_nested_include = function(data) -- also used in include
+ return lpegmatch(pattern_1,data)
+end
+
+function lmxnew(data,defaults,nocache,path) -- todo: use defaults in calling routines
+ data = data or ""
+ local known = cache[data]
+ if not known then
+ givenpath = path
+ usedpaths = lmxvariables.includepath or { }
+ if type(usedpaths) == "string" then
+ usedpaths = { usedpaths }
+ end
+ data = lpegmatch(pattern_1,data)
+ data = lpegmatch(pattern_2,data,1,{})
+ data = lpegmatch(pattern_3,data)
+ local converted = loadstring(format(template,data))
+ if converted then
+ converted = converted()
+ end
+ defaults = defaults or { }
+ local converter
+ if converted then
+ converter = function(variables)
+ return wrapper(converted,defaults,variables)
+ end
+ else
+ report_error("error in:\n%s\n:",data)
+ converter = function() lmxerror("error in template") end
+ end
+ known = {
+ data = defaults.trace and data or "",
+ variables = defaults,
+ converter = converter,
+ }
+ if cache_templates and nocache ~= false then
+ cache[data] = known
+ end
+ elseif variables then
+ known.variables = variables
+ end
+ return known, known.variables
+end
+
+local function lmxresult(self,variables)
+ if self then
+ local converter = self.converter
+ if converter then
+ local converted = converter(variables)
+ if trace_variables then -- will become templates
+ report_lmx("converted size: %s",#converted)
+ end
+ return converted or lmxerror("no result from converter")
+ else
+ return lmxerror("invalid converter")
+ end
+ else
+ return lmxerror("invalid specification")
+ end
+end
+
+lmx.new = lmxnew
+lmx.result = lmxresult
+
+local loadedfiles = { }
+
+function lmx.convertstring(templatestring,variables,nocache,path)
+ return lmxresult(lmxnew(templatestring,nil,nocache,path),variables)
+end
+
+function lmx.convertfile(templatefile,variables,nocache)
+ if trace_variables then -- will become templates
+ report_lmx("converting file %a",templatefile)
+ end
+ local converter = loadedfiles[templatefile]
+ if not converter then
+ converter = lmxnew(loadedfile(templatefile),nil,nocache,pathpart(templatefile))
+ loadedfiles[templatefile] = converter
+ end
+ return lmxresult(converter,variables)
+end
+
+function lmxconvert(templatefile,resultfile,variables,nocache) -- or (templatefile,variables)
+ if trace_variables then -- will become templates
+ report_lmx("converting file %a",templatefile)
+ end
+ if not variables and type(resultfile) == "table" then
+ variables = resultfile
+ end
+ local converter = loadedfiles[templatefile]
+ if not converter then
+ converter = lmxnew(loadedfile(templatefile),nil,nocache,pathpart(templatefile))
+ if cache_files then
+ loadedfiles[templatefile] = converter
+ end
+ end
+ local result = lmxresult(converter,variables)
+ if resultfile then
+ io.savedata(resultfile,result)
+ else
+ return result
+ end
+end
+
+lmx.convert = lmxconvert
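+
+-- Usage sketch (not part of the module; the filenames are made up): embedded
+-- <?lua ... ?> blocks in a template become Lua code that appends to the
+-- result, everything else is injected verbatim.
+--
+-- local s = lmx.convertstring("<b><?lua inject(variables.title) ?></b>", { title = "hi" })
+-- -- s == "<b>hi</b>"
+-- lmx.convert("t:/templates/status.lmx","t:/temp/status.html", { title = "hi" })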
+
+-- helpers
+
+local nocomment = (beginembedcss * (1 - endembedcss)^1 * endembedcss) / ""
+local nowhitespace = whitespace^1 / " " -- ""
+local semistripped = whitespace^1 / "" * P(";")
+local stripper = Cs((nocomment + semistripped + nowhitespace + 1)^1)
+
+function lmx.stripcss(str)
+ return lpegmatch(stripper,str)
+end
+
+function lmx.color(r,g,b,a)
+ if r > 1 then
+ r = 1
+ end
+ if g > 1 then
+ g = 1
+ end
+ if b > 1 then
+ b = 1
+ end
+ if not a then
+ a= 0
+ elseif a > 1 then
+ a = 1
+ end
+ if a > 0 then
+ return format("rgba(%s%%,%s%%,%s%%,%s)",r*100,g*100,b*100,a)
+ else
+ return format("rgb(%s%%,%s%%,%s%%)",r*100,g*100,b*100)
+ end
+end
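+
+-- For instance (sketch, not part of the module): channels are fractions in
+-- the range 0..1, clamped at 1 and reported as percentages.
+--
+-- lmx.color(0.5,0,0)   -- "rgb(50%,0%,0%)"
+-- lmx.color(2,0,0,0.5) -- "rgba(100%,0%,0%,0.5)" (red clamped)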
+
+
+-- these can be overloaded
+
+lmx.lmxfile = string.itself
+lmx.htmfile = string.itself
+lmx.popupfile = os.launch
+
+function lmxmake(name,variables)
+ local lmxfile = lmx.lmxfile(name)
+ local htmfile = lmx.htmfile(name)
+ if lmxfile == htmfile then
+ htmfile = replacesuffix(lmxfile,"html")
+ end
+ lmxconvert(lmxfile,htmfile,variables)
+ return htmfile
+end
+
+lmx.make = lmxmake
+
+function lmx.show(name,variables)
+ local htmfile = lmxmake(name,variables)
+ lmx.popupfile(htmfile)
+ return htmfile
+end
+
+-- Command line (will become mtx-lmx):
+
+if arg then
+ if arg[1] == "--show" then if arg[2] then lmx.show (arg[2]) end
+ elseif arg[1] == "--convert" then if arg[2] then lmx.convert(arg[2], arg[3] or "temp.html") end
+ end
+end
+
+-- Test 1:
+
+-- inspect(lmx.result(lmx.new(io.loaddata("t:/sources/context-timing.lmx"))))
+
+-- Test 2:
+
+-- local str = [[
+--
+--
+-- some content a
+-- some content b
+--
+--
+--
+--
+--
+--
+--
+--
+--
+--
+--
+-- ]]
+
+-- local defaults = { trace = true, a = 3, b = 3 }
+-- local result = lmx.new(str,defaults)
+-- inspect(result.data)
+-- inspect(result.converter(defaults))
+-- inspect(result.converter { a = 1 })
+-- inspect(lmx.result(result, { b = 2 }))
+-- inspect(lmx.result(result, { a = 20000, b = 40000 }))
diff --git a/tex/context/base/trac-log.lua b/tex/context/base/trac-log.lua
index 1f2520130..73e302e26 100644
--- a/tex/context/base/trac-log.lua
+++ b/tex/context/base/trac-log.lua
@@ -1,816 +1,816 @@
-if not modules then modules = { } end modules ['trac-log'] = {
- version = 1.001,
- comment = "companion to trac-log.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- if tex and (tex.jobname or tex.formatname) then
---
--- -- quick hack, awaiting speedup in engine (8 -> 6.4 sec for --make with console2)
--- -- still needed for luajittex
---
--- local texio_write_nl = texio.write_nl
--- local texio_write = texio.write
--- local io_write = io.write
-
--- local write_nl = function(target,...)
--- if not io_write then
--- io_write = io.write
--- end
--- if target == "term and log" then
--- texio_write_nl("log",...)
--- texio_write_nl("term","")
--- io_write(...)
--- elseif target == "log" then
--- texio_write_nl("log",...)
--- elseif target == "term" then
--- texio_write_nl("term","")
--- io_write(...)
--- else
--- texio_write_nl("log",target,...)
--- texio_write_nl("term","")
--- io_write(target,...)
--- end
--- end
-
--- local write = function(target,...)
--- if not io_write then
--- io_write = io.write
--- end
--- if target == "term and log" then
--- texio_write("log",...)
--- io_write(...)
--- elseif target == "log" then
--- texio_write("log",...)
--- elseif target == "term" then
--- io_write(...)
--- else
--- texio_write("log",target,...)
--- io_write(target,...)
--- end
--- end
-
--- texio.write = write
--- texio.write_nl = write_nl
---
--- else
---
--- -- texlua or just lua
---
--- end
-
--- todo: less categories, more subcategories (e.g. nodes)
--- todo: split into basics and ctx specific
-
-local write_nl, write = texio and texio.write_nl or print, texio and texio.write or io.write
-local format, gmatch, find = string.format, string.gmatch, string.find
-local concat, insert, remove = table.concat, table.insert, table.remove
-local topattern = string.topattern
-local texcount = tex and tex.count
-local next, type, select = next, type, select
-local utfchar = utf.char
-
-local setmetatableindex = table.setmetatableindex
-local formatters = string.formatters
-
---[[ldx--
-<p>This is a prelude to a more extensive logging module. We no longer
-provide <l n='xml'/> based logging as parsing is relatively easy anyway.</p>
---ldx]]--
-
-logs = logs or { }
-local logs = logs
-
-local moreinfo = [[
-More information about ConTeXt and the tools that come with it can be found at:
-]] .. "\n" .. [[
-maillist : ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
-webpage : http://www.pragma-ade.nl / http://tex.aanhet.net
-wiki : http://contextgarden.net
-]]
-
--- -- we extend the formatters:
---
--- function utilities.strings.unichr(s) return "U+" .. format("%05X",s) .. " (" .. utfchar(s) .. ")" end
--- function utilities.strings.chruni(s) return utfchar(s) .. " (U+" .. format("%05X",s) .. ")" end
---
--- utilities.strings.formatters.add (
--- string.formatters, "uni",
--- [[unichr(%s)]],
--- [[local unichr = utilities.strings.unichr]]
--- )
---
--- utilities.strings.formatters.add (
--- string.formatters, "chr",
--- [[chruni(%s)]],
--- [[local chruni = utilities.strings.chruni]]
--- )
-
-utilities.strings.formatters.add (
- formatters, "unichr",
- [["U+" .. format("%%05X",%s) .. " (" .. utfchar(%s) .. ")"]]
-)
-
-utilities.strings.formatters.add (
- formatters, "chruni",
- [[utfchar(%s) .. " (U+" .. format("%%05X",%s) .. ")"]]
-)
-
--- print(formatters["Missing character %!chruni! in font."](234))
--- print(formatters["Missing character %!unichr! in font."](234))
-
--- basic loggers
-
-local function ignore() end
-
-setmetatableindex(logs, function(t,k) t[k] = ignore ; return ignore end)
-
-local report, subreport, status, settarget, setformats, settranslations
-
-local direct, subdirect, writer, pushtarget, poptarget
-
-if tex and (tex.jobname or tex.formatname) then
-
- -- local format = string.formatter
-
- local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
-
- local target = "term and log"
-
- logs.flush = io.flush
-
- local formats = { } setmetatable(formats, valueiskey)
- local translations = { } setmetatable(translations,valueiskey)
-
- writer = function(...)
- write_nl(target,...)
- end
-
- newline = function()
- write_nl(target,"\n")
- end
-
- local f_one = formatters["%-15s > %s\n"]
- local f_two = formatters["%-15s >\n"]
-
- -- we can use formatters but best check for % first because for simple
- -- messages we don't want this overhead (not that there are that many;
- -- we could have a special weak table)
-
- report = function(a,b,c,...)
- if c then
- write_nl(target,f_one(translations[a],formatters[formats[b]](c,...)))
- elseif b then
- write_nl(target,f_one(translations[a],formats[b]))
- elseif a then
- write_nl(target,f_two(translations[a]))
- else
- write_nl(target,"\n")
- end
- end
-
- local f_one = formatters["%-15s > %s"]
- local f_two = formatters["%-15s >"]
-
- direct = function(a,b,c,...)
- if c then
- return f_one(translations[a],formatters[formats[b]](c,...))
- elseif b then
- return f_one(translations[a],formats[b])
- elseif a then
- return f_two(translations[a])
- else
- return ""
- end
- end
-
- local f_one = formatters["%-15s > %s > %s\n"]
- local f_two = formatters["%-15s > %s >\n"]
-
- subreport = function(a,s,b,c,...)
- if c then
- write_nl(target,f_one(translations[a],translations[s],formatters[formats[b]](c,...)))
- elseif b then
- write_nl(target,f_one(translations[a],translations[s],formats[b]))
- elseif a then
- write_nl(target,f_two(translations[a],translations[s]))
- else
- write_nl(target,"\n")
- end
- end
-
- local f_one = formatters["%-15s > %s > %s"]
- local f_two = formatters["%-15s > %s >"]
-
- subdirect = function(a,s,b,c,...)
- if c then
- return f_one(translations[a],translations[s],formatters[formats[b]](c,...))
- elseif b then
- return f_one(translations[a],translations[s],formats[b])
- elseif a then
- return f_two(translations[a],translations[s])
- else
- return ""
- end
- end
-
- local f_one = formatters["%-15s : %s\n"]
- local f_two = formatters["%-15s :\n"]
-
- status = function(a,b,c,...)
- if c then
- write_nl(target,f_one(translations[a],formatters[formats[b]](c,...)))
- elseif b then
- write_nl(target,f_one(translations[a],formats[b]))
- elseif a then
- write_nl(target,f_two(translations[a]))
- else
- write_nl(target,"\n")
- end
- end
-
- local targets = {
- logfile = "log",
- log = "log",
- file = "log",
- console = "term",
- terminal = "term",
- both = "term and log",
- }
-
- settarget = function(whereto)
- target = targets[whereto or "both"] or targets.both
- if target == "term" or target == "term and log" then
- logs.flush = io.flush
- else
- logs.flush = ignore
- end
- end
-
- local stack = { }
-
- pushtarget = function(newtarget)
- insert(stack,target)
- settarget(newtarget)
- end
-
- poptarget = function()
- if #stack > 0 then
- settarget(remove(stack))
- end
- end
-
- setformats = function(f)
- formats = f
- end
-
- settranslations = function(t)
- translations = t
- end
-
-else
-
- logs.flush = ignore
-
- writer = write_nl
-
- newline = function()
- write_nl("\n")
- end
-
- local f_one = formatters["%-15s | %s"]
- local f_two = formatters["%-15s |"]
-
- report = function(a,b,c,...)
- if c then
- write_nl(f_one(a,formatters[b](c,...)))
- elseif b then
- write_nl(f_one(a,b))
- elseif a then
- write_nl(f_two(a))
- else
- write_nl("")
- end
- end
-
- local f_one = formatters["%-15s | %s | %s"]
- local f_two = formatters["%-15s | %s |"]
-
- subreport = function(a,sub,b,c,...)
- if c then
- write_nl(f_one(a,sub,formatters[b](c,...)))
- elseif b then
- write_nl(f_one(a,sub,b))
- elseif a then
- write_nl(f_two(a,sub))
- else
- write_nl("")
- end
- end
-
- local f_one = formatters["%-15s : %s\n"]
- local f_two = formatters["%-15s :\n"]
-
- status = function(a,b,c,...) -- not to be used in lua anyway
- if c then
- write_nl(f_one(a,formatters[b](c,...)))
- elseif b then
- write_nl(f_one(a,b)) -- b can have %'s
- elseif a then
- write_nl(f_two(a))
- else
- write_nl("\n")
- end
- end
-
- direct = ignore
- subdirect = ignore
-
- settarget = ignore
- pushtarget = ignore
- poptarget = ignore
- setformats = ignore
- settranslations = ignore
-
-end
-
-logs.report = report
-logs.subreport = subreport
-logs.status = status
-logs.settarget = settarget
-logs.pushtarget = pushtarget
-logs.poptarget = poptarget
-logs.setformats = setformats
-logs.settranslations = settranslations
-
-logs.direct = direct
-logs.subdirect = subdirect
-logs.writer = writer
-logs.newline = newline
-
--- installer
-
--- todo: renew (un) locks when a new one is added and wildcard
-
-local data, states = { }, nil
-
-function logs.reporter(category,subcategory)
- local logger = data[category]
- if not logger then
- local state = false
- if states == true then
- state = true
- elseif type(states) == "table" then
- for c, _ in next, states do
- if find(category,c) then
- state = true
- break
- end
- end
- end
- logger = {
- reporters = { },
- state = state,
- }
- data[category] = logger
- end
- local reporter = logger.reporters[subcategory or "default"]
- if not reporter then
- if subcategory then
- reporter = function(...)
- if not logger.state then
- subreport(category,subcategory,...)
- end
- end
- logger.reporters[subcategory] = reporter
- else
- local tag = category
- reporter = function(...)
- if not logger.state then
- report(category,...)
- end
- end
- logger.reporters.default = reporter
- end
- end
- return reporter
-end
-
-logs.new = logs.reporter -- for old times sake
-
--- context specific: this ends up in the macro stream
-
-local ctxreport = logs.writer
-
-function logs.setmessenger(m)
- ctxreport = m
-end
-
-function logs.messenger(category,subcategory)
- -- we need to avoid catcode mess (todo: fast context)
- if subcategory then
- return function(...)
- ctxreport(subdirect(category,subcategory,...))
- end
- else
- return function(...)
- ctxreport(direct(category,...))
- end
- end
-end
-
--- so far
-
-local function setblocked(category,value)
- if category == true then
- -- lock all
- category, value = "*", true
- elseif category == false then
- -- unlock all
- category, value = "*", false
- elseif value == nil then
- -- lock selective
- value = true
- end
- if category == "*" then
- states = value
- for k, v in next, data do
- v.state = value
- end
- else
- states = utilities.parsers.settings_to_hash(category)
- for c, _ in next, states do
- if data[c] then
- data[c].state = value
- else
- c = topattern(c,true,true)
- for k, v in next, data do
- if find(k,c) then
- v.state = value
- end
- end
- end
- end
- end
-end
-
-function logs.disable(category,value)
- setblocked(category,value == nil and true or value)
-end
-
-function logs.enable(category)
- setblocked(category,false)
-end
-
-function logs.categories()
- return table.sortedkeys(data)
-end
-
-function logs.show()
- local n, c, s, max = 0, 0, 0, 0
- for category, v in table.sortedpairs(data) do
- n = n + 1
- local state = v.state
- local reporters = v.reporters
- local nc = #category
- if nc > c then
- c = nc
- end
- for subcategory, _ in next, reporters do
- local ns = #subcategory
- if ns > s then
- s = ns
- end
- local m = nc + ns
- if m > max then
- max = m
- end
- end
- local subcategories = concat(table.sortedkeys(reporters),", ")
- if state == true then
- state = "disabled"
- elseif state == false then
- state = "enabled"
- else
- state = "unknown"
- end
- -- no new here
- report("logging","category %a, subcategories %a, state %a",category,subcategories,state)
- end
- report("logging","categories: %s, max category: %s, max subcategory: %s, max combined: %s",n,c,s,max)
-end
-
-local delayed_reporters = { }
-
-setmetatableindex(delayed_reporters,function(t,k)
- local v = logs.reporter(k.name)
- t[k] = v
- return v
-end)
-
-function utilities.setters.report(setter,...)
- delayed_reporters[setter](...)
-end
-
-directives.register("logs.blocked", function(v)
- setblocked(v,true)
-end)
-
-directives.register("logs.target", function(v)
- settarget(v)
-end)
-
--- tex specific loggers (might move elsewhere)
-
-local report_pages = logs.reporter("pages") -- not needed but saves checking when we grep for it
-
-local real, user, sub
-
-function logs.start_page_number()
- real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
--- real, user, sub = 0, 0, 0
-end
-
-local timing = false
-local starttime = nil
-local lasttime = nil
-
-trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
- starttime = os.clock()
- timing = true
-end)
-
-function logs.stop_page_number() -- the first page can include the initialization so we omit it from the average
- if timing then
- local elapsed, average
- local stoptime = os.clock()
- if not lasttime or real < 2 then
- elapsed = stoptime
- average = stoptime
- starttime = stoptime
- else
- elapsed = stoptime - lasttime
- average = (stoptime - starttime) / (real - 1)
- end
- lasttime = stoptime
- if real <= 0 then
- report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
- elseif user <= 0 then
- report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
- elseif sub <= 0 then
- report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
- else
- report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
- end
- else
- if real <= 0 then
- report_pages("flushing page")
- elseif user <= 0 then
- report_pages("flushing realpage %s",real)
- elseif sub <= 0 then
- report_pages("flushing realpage %s, userpage %s",real,user)
- else
- report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
- end
- end
- logs.flush()
-end
-
--- we don't have show_open and show_close callbacks yet
-
-local report_files = logs.reporter("files")
-local nesting = 0
-local verbose = false
-local hasscheme = url.hasscheme
-
-function logs.show_open(name)
- -- if hasscheme(name) ~= "virtual" then
- -- if verbose then
- -- nesting = nesting + 1
- -- report_files("level %s, opening %s",nesting,name)
- -- else
- -- write(formatters["(%s"](name)) -- tex adds a space
- -- end
- -- end
-end
-
-function logs.show_close(name)
- -- if hasscheme(name) ~= "virtual" then
- -- if verbose then
- -- report_files("level %s, closing %s",nesting,name)
- -- nesting = nesting - 1
- -- else
- -- write(")") -- tex adds a space
- -- end
- -- end
-end
-
-function logs.show_load(name)
- -- if hasscheme(name) ~= "virtual" then
- -- if verbose then
- -- report_files("level %s, loading %s",nesting+1,name)
- -- else
- -- write(formatters["(%s)"](name))
- -- end
- -- end
-end
-
--- there may be scripts out there using this:
-
-local simple = logs.reporter("comment")
-
-logs.simple = simple
-logs.simpleline = simple
-
--- obsolete
-
-function logs.setprogram () end -- obsolete
-function logs.extendbanner() end -- obsolete
-function logs.reportlines () end -- obsolete
-function logs.reportbanner() end -- obsolete
-function logs.reportline () end -- obsolete
-function logs.simplelines () end -- obsolete
-function logs.help () end -- obsolete
-
--- applications
-
--- local function reportlines(t,str)
--- if str then
--- for line in gmatch(str,"([^\n\r]*)[\n\r]") do
--- t.report(line)
--- end
--- end
--- end
-
-local Carg, C, lpegmatch = lpeg.Carg, lpeg.C, lpeg.match
-local p_newline = lpeg.patterns.newline
-
-local linewise = (
- Carg(1) * C((1-p_newline)^1) / function(t,s) t.report(s) end
- + Carg(1) * p_newline^2 / function(t) t.report() end
- + p_newline
-)^1
-
-local function reportlines(t,str)
- if str then
- lpegmatch(linewise,str,1,t)
- end
-end
-
-local function reportbanner(t)
- local banner = t.banner
- if banner then
- t.report(banner)
- t.report()
- end
-end
-
-local function reportversion(t)
- local banner = t.banner
- if banner then
- t.report(banner)
- end
-end
-
-local function reporthelp(t,...)
- local helpinfo = t.helpinfo
- if type(helpinfo) == "string" then
- reportlines(t,helpinfo)
- elseif type(helpinfo) == "table" then
- local n = select("#",...)
- for i=1,n do
- reportlines(t,t.helpinfo[select(i,...)])
- if i < n then
- t.report()
- end
- end
- end
-end
-
-local function reportinfo(t)
- t.report()
- reportlines(t,t.moreinfo)
-end
-
-local function reportexport(t,method)
- report(t.helpinfo)
-end
-
-local reporters = {
- lines = reportlines, -- not to be overloaded
- banner = reportbanner,
- version = reportversion,
- help = reporthelp,
- info = reportinfo,
- export = reportexport,
-}
-
-local exporters = {
- -- empty
-}
-
-logs.reporters = reporters
-logs.exporters = exporters
-
-function logs.application(t)
- t.name = t.name or "unknown"
- t.banner = t.banner
- t.moreinfo = moreinfo
- t.report = logs.reporter(t.name)
- t.help = function(...)
- reporters.banner(t)
- reporters.help(t,...)
- reporters.info(t)
- end
- t.export = function(...)
- reporters.export(t,...)
- end
- t.identify = function()
- reporters.banner(t)
- end
- t.version = function()
- reporters.version(t)
- end
- return t
-end
-
--- somewhat special .. will be redone (already a better solution in place in lmx)
-
--- logging to a file
-
--- local syslogname = "oeps.xxx"
---
--- for i=1,10 do
--- logs.system(syslogname,"context","test","fonts","font %s recached due to newer version (%s)","blabla","123")
--- end
-
-function logs.system(whereto,process,jobname,category,...)
- local message = formatters["%s %s => %s => %s => %s\r"](os.date("%d/%m/%y %H:%M:%S"),process,jobname,category,format(...))
- for i=1,10 do
- local f = io.open(whereto,"a") -- we can consider keeping the file open
- if f then
- f:write(message)
- f:close()
- break
- else
- os.sleep(0.1)
- end
- end
-end
-
-local report_system = logs.reporter("system","logs")
-
-function logs.obsolete(old,new)
- local o = loadstring("return " .. new)()
- if type(o) == "function" then
- return function(...)
- report_system("function %a is obsolete, use %a",old,new)
- loadstring(old .. "=" .. new .. " return ".. old)()(...)
- end
- elseif type(o) == "table" then
- local t, m = { }, { }
- m.__index = function(t,k)
- report_system("table %a is obsolete, use %a",old,new)
- m.__index, m.__newindex = o, o
- return o[k]
- end
- m.__newindex = function(t,k,v)
- report_system("table %a is obsolete, use %a",old,new)
- m.__index, m.__newindex = o, o
- o[k] = v
- end
- if libraries then
- libraries.obsolete[old] = t -- true
- end
- setmetatable(t,m)
- return t
- end
-end
-
-if utilities then
- utilities.report = report_system
-end
-
-if tex and tex.error then
- function logs.texerrormessage(...) -- for the moment we put this function here
- tex.error(format(...), { })
- end
-else
- function logs.texerrormessage(...)
- print(format(...))
- end
-end
-
--- this is somewhat slower but prevents out-of-order messages when print is mixed
--- with texio.write
-
-io.stdout:setvbuf('no')
-io.stderr:setvbuf('no')
-
--- windows: > nul 2>&1
--- unix : > null 2>&1
-
-if package.helpers.report then
- package.helpers.report = logs.reporter("package loader") -- when used outside mtxrun
-end
+if not modules then modules = { } end modules ['trac-log'] = {
+ version = 1.001,
+ comment = "companion to trac-log.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- if tex and (tex.jobname or tex.formatname) then
+--
+-- -- quick hack, awaiting speedup in engine (8 -> 6.4 sec for --make with console2)
+-- -- still needed for luajittex
+--
+-- local texio_write_nl = texio.write_nl
+-- local texio_write = texio.write
+-- local io_write = io.write
+
+-- local write_nl = function(target,...)
+-- if not io_write then
+-- io_write = io.write
+-- end
+-- if target == "term and log" then
+-- texio_write_nl("log",...)
+-- texio_write_nl("term","")
+-- io_write(...)
+-- elseif target == "log" then
+-- texio_write_nl("log",...)
+-- elseif target == "term" then
+-- texio_write_nl("term","")
+-- io_write(...)
+-- else
+-- texio_write_nl("log",target,...)
+-- texio_write_nl("term","")
+-- io_write(target,...)
+-- end
+-- end
+
+-- local write = function(target,...)
+-- if not io_write then
+-- io_write = io.write
+-- end
+-- if target == "term and log" then
+-- texio_write("log",...)
+-- io_write(...)
+-- elseif target == "log" then
+-- texio_write("log",...)
+-- elseif target == "term" then
+-- io_write(...)
+-- else
+-- texio_write("log",target,...)
+-- io_write(target,...)
+-- end
+-- end
+
+-- texio.write = write
+-- texio.write_nl = write_nl
+--
+-- else
+--
+-- -- texlua or just lua
+--
+-- end
+
+-- todo: less categories, more subcategories (e.g. nodes)
+-- todo: split into basics and ctx specific
+
+local write_nl, write = texio and texio.write_nl or print, texio and texio.write or io.write
+local format, gmatch, find = string.format, string.gmatch, string.find
+local concat, insert, remove = table.concat, table.insert, table.remove
+local topattern = string.topattern
+local texcount = tex and tex.count
+local next, type, select = next, type, select
+local utfchar = utf.char
+
+local setmetatableindex = table.setmetatableindex
+local formatters = string.formatters
+
+--[[ldx--
+This is a prelude to a more extensive logging module. We no longer
+provide xml based logging as parsing is relatively easy anyway.
+--ldx]]--
+
+logs = logs or { }
+local logs = logs
+
+local moreinfo = [[
+More information about ConTeXt and the tools that come with it can be found at:
+]] .. "\n" .. [[
+maillist : ntg-context@ntg.nl / http://www.ntg.nl/mailman/listinfo/ntg-context
+webpage : http://www.pragma-ade.nl / http://tex.aanhet.net
+wiki : http://contextgarden.net
+]]
+
+-- -- we extend the formatters:
+--
+-- function utilities.strings.unichr(s) return "U+" .. format("%05X",s) .. " (" .. utfchar(s) .. ")" end
+-- function utilities.strings.chruni(s) return utfchar(s) .. " (U+" .. format("%05X",s) .. ")" end
+--
+-- utilities.strings.formatters.add (
+-- string.formatters, "uni",
+-- [[unichr(%s)]],
+-- [[local unichr = utilities.strings.unichr]]
+-- )
+--
+-- utilities.strings.formatters.add (
+-- string.formatters, "chr",
+-- [[chruni(%s)]],
+-- [[local chruni = utilities.strings.chruni]]
+-- )
+
+utilities.strings.formatters.add (
+ formatters, "unichr",
+ [["U+" .. format("%%05X",%s) .. " (" .. utfchar(%s) .. ")"]]
+)
+
+utilities.strings.formatters.add (
+ formatters, "chruni",
+ [[utfchar(%s) .. " (U+" .. format("%%05X",%s) .. ")"]]
+)
+
+-- print(formatters["Missing character %!chruni! in font."](234))
+-- print(formatters["Missing character %!unichr! in font."](234))
+
+-- basic loggers
+
+local function ignore() end
+
+setmetatableindex(logs, function(t,k) t[k] = ignore ; return ignore end)
+
+local report, subreport, status, settarget, setformats, settranslations
+
+local direct, subdirect, writer, newline, pushtarget, poptarget
+
+if tex and (tex.jobname or tex.formatname) then
+
+ -- local format = string.formatter
+
+ local valueiskey = { __index = function(t,k) t[k] = k return k end } -- will be helper
+
+ local target = "term and log"
+
+ logs.flush = io.flush
+
+ local formats = { } setmetatable(formats, valueiskey)
+ local translations = { } setmetatable(translations,valueiskey)
+
+ writer = function(...)
+ write_nl(target,...)
+ end
+
+ newline = function()
+ write_nl(target,"\n")
+ end
+
+ local f_one = formatters["%-15s > %s\n"]
+ local f_two = formatters["%-15s >\n"]
+
+    -- we can use formatters but best check for % first because for simple
+    -- messages we don't want this overhead (not that there are that many;
+    -- we could have a special weak table)
+
+ report = function(a,b,c,...)
+ if c then
+ write_nl(target,f_one(translations[a],formatters[formats[b]](c,...)))
+ elseif b then
+ write_nl(target,f_one(translations[a],formats[b]))
+ elseif a then
+ write_nl(target,f_two(translations[a]))
+ else
+ write_nl(target,"\n")
+ end
+ end
+
+ local f_one = formatters["%-15s > %s"]
+ local f_two = formatters["%-15s >"]
+
+ direct = function(a,b,c,...)
+ if c then
+ return f_one(translations[a],formatters[formats[b]](c,...))
+ elseif b then
+ return f_one(translations[a],formats[b])
+ elseif a then
+ return f_two(translations[a])
+ else
+ return ""
+ end
+ end
+
+ local f_one = formatters["%-15s > %s > %s\n"]
+ local f_two = formatters["%-15s > %s >\n"]
+
+ subreport = function(a,s,b,c,...)
+ if c then
+ write_nl(target,f_one(translations[a],translations[s],formatters[formats[b]](c,...)))
+ elseif b then
+ write_nl(target,f_one(translations[a],translations[s],formats[b]))
+ elseif a then
+ write_nl(target,f_two(translations[a],translations[s]))
+ else
+ write_nl(target,"\n")
+ end
+ end
+
+ local f_one = formatters["%-15s > %s > %s"]
+ local f_two = formatters["%-15s > %s >"]
+
+ subdirect = function(a,s,b,c,...)
+ if c then
+ return f_one(translations[a],translations[s],formatters[formats[b]](c,...))
+ elseif b then
+ return f_one(translations[a],translations[s],formats[b])
+ elseif a then
+ return f_two(translations[a],translations[s])
+ else
+ return ""
+ end
+ end
+
+ local f_one = formatters["%-15s : %s\n"]
+ local f_two = formatters["%-15s :\n"]
+
+ status = function(a,b,c,...)
+ if c then
+ write_nl(target,f_one(translations[a],formatters[formats[b]](c,...)))
+ elseif b then
+ write_nl(target,f_one(translations[a],formats[b]))
+ elseif a then
+ write_nl(target,f_two(translations[a]))
+ else
+ write_nl(target,"\n")
+ end
+ end
+
+ local targets = {
+ logfile = "log",
+ log = "log",
+ file = "log",
+ console = "term",
+ terminal = "term",
+ both = "term and log",
+ }
+
+ settarget = function(whereto)
+ target = targets[whereto or "both"] or targets.both
+ if target == "term" or target == "term and log" then
+ logs.flush = io.flush
+ else
+ logs.flush = ignore
+ end
+ end
+
+ local stack = { }
+
+ pushtarget = function(newtarget)
+ insert(stack,target)
+ settarget(newtarget)
+ end
+
+ poptarget = function()
+ if #stack > 0 then
+ settarget(remove(stack))
+ end
+ end
+
+ setformats = function(f)
+ formats = f
+ end
+
+ settranslations = function(t)
+ translations = t
+ end
+
+else
+
+ logs.flush = ignore
+
+ writer = write_nl
+
+ newline = function()
+ write_nl("\n")
+ end
+
+ local f_one = formatters["%-15s | %s"]
+ local f_two = formatters["%-15s |"]
+
+ report = function(a,b,c,...)
+ if c then
+ write_nl(f_one(a,formatters[b](c,...)))
+ elseif b then
+ write_nl(f_one(a,b))
+ elseif a then
+ write_nl(f_two(a))
+ else
+ write_nl("")
+ end
+ end
+
+ local f_one = formatters["%-15s | %s | %s"]
+ local f_two = formatters["%-15s | %s |"]
+
+ subreport = function(a,sub,b,c,...)
+ if c then
+ write_nl(f_one(a,sub,formatters[b](c,...)))
+ elseif b then
+ write_nl(f_one(a,sub,b))
+ elseif a then
+ write_nl(f_two(a,sub))
+ else
+ write_nl("")
+ end
+ end
+
+ local f_one = formatters["%-15s : %s\n"]
+ local f_two = formatters["%-15s :\n"]
+
+ status = function(a,b,c,...) -- not to be used in lua anyway
+ if c then
+ write_nl(f_one(a,formatters[b](c,...)))
+ elseif b then
+ write_nl(f_one(a,b)) -- b can have %'s
+ elseif a then
+ write_nl(f_two(a))
+ else
+ write_nl("\n")
+ end
+ end
+
+ direct = ignore
+ subdirect = ignore
+
+ settarget = ignore
+ pushtarget = ignore
+ poptarget = ignore
+ setformats = ignore
+ settranslations = ignore
+
+end
+
+logs.report = report
+logs.subreport = subreport
+logs.status = status
+logs.settarget = settarget
+logs.pushtarget = pushtarget
+logs.poptarget = poptarget
+logs.setformats = setformats
+logs.settranslations = settranslations
+
+logs.direct = direct
+logs.subdirect = subdirect
+logs.writer = writer
+logs.newline = newline
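+
+-- a minimal usage sketch (not part of the module, names are made up): switch the
+-- output target around a noisy stretch of code
+--
+-- logs.pushtarget("log")                         -- write to the log file only
+-- logs.report("fonts","defining %a","demofont")
+-- logs.poptarget()                               -- restore the previous target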
+
+-- installer
+
+-- todo: renew (un) locks when a new one is added and wildcard
+
+local data, states = { }, nil
+
+function logs.reporter(category,subcategory)
+ local logger = data[category]
+ if not logger then
+ local state = false
+ if states == true then
+ state = true
+ elseif type(states) == "table" then
+ for c, _ in next, states do
+ if find(category,c) then
+ state = true
+ break
+ end
+ end
+ end
+ logger = {
+ reporters = { },
+ state = state,
+ }
+ data[category] = logger
+ end
+ local reporter = logger.reporters[subcategory or "default"]
+ if not reporter then
+ if subcategory then
+ reporter = function(...)
+ if not logger.state then
+ subreport(category,subcategory,...)
+ end
+ end
+ logger.reporters[subcategory] = reporter
+ else
+ local tag = category
+ reporter = function(...)
+ if not logger.state then
+ report(category,...)
+ end
+ end
+ logger.reporters.default = reporter
+ end
+ end
+ return reporter
+end
+
+logs.new = logs.reporter -- for old times sake
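+
+-- a typical usage sketch (the category names are made up):
+--
+-- local report_demo  = logs.reporter("demo")           -- demo            > ...
+-- local report_cache = logs.reporter("demo","cache")   -- demo            > cache > ...
+--
+-- report_demo ("initializing")
+-- report_cache("reusing %s entries",123)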
+
+-- context specific: this ends up in the macro stream
+
+local ctxreport = logs.writer
+
+function logs.setmessenger(m)
+ ctxreport = m
+end
+
+function logs.messenger(category,subcategory)
+ -- we need to avoid catcode mess (todo: fast context)
+ if subcategory then
+ return function(...)
+ ctxreport(subdirect(category,subcategory,...))
+ end
+ else
+ return function(...)
+ ctxreport(direct(category,...))
+ end
+ end
+end
+
+-- so far
+
+local function setblocked(category,value)
+ if category == true then
+ -- lock all
+ category, value = "*", true
+ elseif category == false then
+ -- unlock all
+ category, value = "*", false
+ elseif value == nil then
+ -- lock selective
+ value = true
+ end
+ if category == "*" then
+ states = value
+ for k, v in next, data do
+ v.state = value
+ end
+ else
+ states = utilities.parsers.settings_to_hash(category)
+ for c, _ in next, states do
+ if data[c] then
+                data[c].state = value
+ else
+ c = topattern(c,true,true)
+ for k, v in next, data do
+ if find(k,c) then
+ v.state = value
+ end
+ end
+ end
+ end
+ end
+end
+
+function logs.disable(category,value)
+ setblocked(category,value == nil and true or value)
+end
+
+function logs.enable(category)
+ setblocked(category,false)
+end
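+
+-- for example (sketch, made up categories):
+--
+-- logs.disable("demo")          -- block one category
+-- logs.disable("font*,page*")   -- comma separated lists and wildcards work
+-- logs.enable ("*")             -- unblock everything again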
+
+function logs.categories()
+ return table.sortedkeys(data)
+end
+
+function logs.show()
+ local n, c, s, max = 0, 0, 0, 0
+ for category, v in table.sortedpairs(data) do
+ n = n + 1
+ local state = v.state
+ local reporters = v.reporters
+ local nc = #category
+ if nc > c then
+ c = nc
+ end
+ for subcategory, _ in next, reporters do
+ local ns = #subcategory
+            if ns > s then
+ s = ns
+ end
+ local m = nc + ns
+ if m > max then
+ max = m
+ end
+ end
+ local subcategories = concat(table.sortedkeys(reporters),", ")
+ if state == true then
+ state = "disabled"
+ elseif state == false then
+ state = "enabled"
+ else
+ state = "unknown"
+ end
+ -- no new here
+ report("logging","category %a, subcategories %a, state %a",category,subcategories,state)
+ end
+ report("logging","categories: %s, max category: %s, max subcategory: %s, max combined: %s",n,c,s,max)
+end
+
+local delayed_reporters = { }
+
+setmetatableindex(delayed_reporters,function(t,k)
+ local v = logs.reporter(k.name)
+ t[k] = v
+ return v
+end)
+
+function utilities.setters.report(setter,...)
+ delayed_reporters[setter](...)
+end
+
+directives.register("logs.blocked", function(v)
+ setblocked(v,true)
+end)
+
+directives.register("logs.target", function(v)
+ settarget(v)
+end)
+
+-- tex specific loggers (might move elsewhere)
+
+local report_pages = logs.reporter("pages") -- not needed but saves checking when we grep for it
+
+local real, user, sub
+
+function logs.start_page_number()
+ real, user, sub = texcount.realpageno, texcount.userpageno, texcount.subpageno
+-- real, user, sub = 0, 0, 0
+end
+
+local timing = false
+local starttime = nil
+local lasttime = nil
+
+trackers.register("pages.timing", function(v) -- only for myself (diagnostics)
+ starttime = os.clock()
+ timing = true
+end)
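+
+-- enabling it (sketch):
+--
+-- trackers.enable("pages.timing")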
+
+function logs.stop_page_number() -- the first page can include the initialization so we omit it from the average
+ if timing then
+ local elapsed, average
+ local stoptime = os.clock()
+ if not lasttime or real < 2 then
+ elapsed = stoptime
+ average = stoptime
+ starttime = stoptime
+ else
+ elapsed = stoptime - lasttime
+ average = (stoptime - starttime) / (real - 1)
+ end
+ lasttime = stoptime
+ if real <= 0 then
+ report_pages("flushing page, time %0.04f / %0.04f",elapsed,average)
+ elseif user <= 0 then
+ report_pages("flushing realpage %s, time %0.04f / %0.04f",real,elapsed,average)
+ elseif sub <= 0 then
+ report_pages("flushing realpage %s, userpage %s, time %0.04f / %0.04f",real,user,elapsed,average)
+ else
+ report_pages("flushing realpage %s, userpage %s, subpage %s, time %0.04f / %0.04f",real,user,sub,elapsed,average)
+ end
+ else
+ if real <= 0 then
+ report_pages("flushing page")
+ elseif user <= 0 then
+ report_pages("flushing realpage %s",real)
+ elseif sub <= 0 then
+ report_pages("flushing realpage %s, userpage %s",real,user)
+ else
+ report_pages("flushing realpage %s, userpage %s, subpage %s",real,user,sub)
+ end
+ end
+ logs.flush()
+end
+
+-- we don't have show_open and show_close callbacks yet
+
+local report_files = logs.reporter("files")
+local nesting = 0
+local verbose = false
+local hasscheme = url.hasscheme
+
+function logs.show_open(name)
+ -- if hasscheme(name) ~= "virtual" then
+ -- if verbose then
+ -- nesting = nesting + 1
+ -- report_files("level %s, opening %s",nesting,name)
+ -- else
+ -- write(formatters["(%s"](name)) -- tex adds a space
+ -- end
+ -- end
+end
+
+function logs.show_close(name)
+ -- if hasscheme(name) ~= "virtual" then
+ -- if verbose then
+ -- report_files("level %s, closing %s",nesting,name)
+ -- nesting = nesting - 1
+ -- else
+ -- write(")") -- tex adds a space
+ -- end
+ -- end
+end
+
+function logs.show_load(name)
+ -- if hasscheme(name) ~= "virtual" then
+ -- if verbose then
+ -- report_files("level %s, loading %s",nesting+1,name)
+ -- else
+ -- write(formatters["(%s)"](name))
+ -- end
+ -- end
+end
+
+-- there may be scripts out there using this:
+
+local simple = logs.reporter("comment")
+
+logs.simple = simple
+logs.simpleline = simple
+
+-- obsolete
+
+function logs.setprogram () end -- obsolete
+function logs.extendbanner() end -- obsolete
+function logs.reportlines () end -- obsolete
+function logs.reportbanner() end -- obsolete
+function logs.reportline () end -- obsolete
+function logs.simplelines () end -- obsolete
+function logs.help () end -- obsolete
+
+-- applications
+
+-- local function reportlines(t,str)
+-- if str then
+-- for line in gmatch(str,"([^\n\r]*)[\n\r]") do
+-- t.report(line)
+-- end
+-- end
+-- end
+
+local Carg, C, lpegmatch = lpeg.Carg, lpeg.C, lpeg.match
+local p_newline = lpeg.patterns.newline
+
+local linewise = (
+ Carg(1) * C((1-p_newline)^1) / function(t,s) t.report(s) end
+ + Carg(1) * p_newline^2 / function(t) t.report() end
+ + p_newline
+)^1
+
+local function reportlines(t,str)
+ if str then
+ lpegmatch(linewise,str,1,t)
+ end
+end
+
+local function reportbanner(t)
+ local banner = t.banner
+ if banner then
+ t.report(banner)
+ t.report()
+ end
+end
+
+local function reportversion(t)
+ local banner = t.banner
+ if banner then
+ t.report(banner)
+ end
+end
+
+local function reporthelp(t,...)
+ local helpinfo = t.helpinfo
+ if type(helpinfo) == "string" then
+ reportlines(t,helpinfo)
+ elseif type(helpinfo) == "table" then
+        local n = select("#",...)
+        for i=1,n do
+            reportlines(t,t.helpinfo[select(i,...)])
+            if i < n then
+ t.report()
+ end
+ end
+ end
+end
+
+local function reportinfo(t)
+ t.report()
+ reportlines(t,t.moreinfo)
+end
+
+local function reportexport(t,method)
+ report(t.helpinfo)
+end
+
+local reporters = {
+ lines = reportlines, -- not to be overloaded
+ banner = reportbanner,
+ version = reportversion,
+ help = reporthelp,
+ info = reportinfo,
+ export = reportexport,
+}
+
+local exporters = {
+ -- empty
+}
+
+logs.reporters = reporters
+logs.exporters = exporters
+
+function logs.application(t)
+ t.name = t.name or "unknown"
+ t.banner = t.banner
+ t.moreinfo = moreinfo
+ t.report = logs.reporter(t.name)
+ t.help = function(...)
+ reporters.banner(t)
+ reporters.help(t,...)
+ reporters.info(t)
+ end
+ t.export = function(...)
+ reporters.export(t,...)
+ end
+ t.identify = function()
+ reporters.banner(t)
+ end
+ t.version = function()
+ reporters.version(t)
+ end
+ return t
+end
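+
+-- a usage sketch (the values are made up; name, banner and helpinfo are the
+-- fields used above):
+--
+-- local application = logs.application {
+--     name     = "mtx-demo",
+--     banner   = "demo tool 1.00",
+--     helpinfo = [[
+-- --run        process the given file
+-- --verbose    report more details
+-- ]],
+-- }
+--
+-- application.help()      -- banner, helpinfo and the moreinfo blurb
+-- application.identify()  -- just the banner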
+
+-- somewhat special .. will be redone (already a better solution in place in lmx)
+
+-- logging to a file
+
+-- local syslogname = "oeps.xxx"
+--
+-- for i=1,10 do
+-- logs.system(syslogname,"context","test","fonts","font %s recached due to newer version (%s)","blabla","123")
+-- end
+
+function logs.system(whereto,process,jobname,category,...)
+    local message = formatters["%s %s => %s => %s => %s\r"](os.date("%d/%m/%y %H:%M:%S"),process,jobname,category,format(...))
+ for i=1,10 do
+ local f = io.open(whereto,"a") -- we can consider keeping the file open
+ if f then
+ f:write(message)
+ f:close()
+ break
+ else
+ sleep(0.1)
+ end
+ end
+end
+
+local report_system = logs.reporter("system","logs")
+
+function logs.obsolete(old,new)
+ local o = loadstring("return " .. new)()
+ if type(o) == "function" then
+ return function(...)
+ report_system("function %a is obsolete, use %a",old,new)
+ loadstring(old .. "=" .. new .. " return ".. old)()(...)
+ end
+ elseif type(o) == "table" then
+ local t, m = { }, { }
+ m.__index = function(t,k)
+ report_system("table %a is obsolete, use %a",old,new)
+ m.__index, m.__newindex = o, o
+ return o[k]
+ end
+ m.__newindex = function(t,k,v)
+ report_system("table %a is obsolete, use %a",old,new)
+ m.__index, m.__newindex = o, o
+ o[k] = v
+ end
+ if libraries then
+ libraries.obsolete[old] = t -- true
+ end
+ setmetatable(t,m)
+ return t
+ end
+end
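+
+-- a sketch with made up names (the new name must resolve to an existing
+-- function or table at call time):
+--
+-- fonts.oldloader = logs.obsolete("fonts.oldloader","fonts.newloader")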
+
+if utilities then
+ utilities.report = report_system
+end
+
+if tex and tex.error then
+ function logs.texerrormessage(...) -- for the moment we put this function here
+ tex.error(format(...), { })
+ end
+else
+ function logs.texerrormessage(...)
+ print(format(...))
+ end
+end
+
+-- this is somewhat slower but prevents out-of-order messages when print is mixed
+-- with texio.write
+
+io.stdout:setvbuf('no')
+io.stderr:setvbuf('no')
+
+-- windows: > nul 2>&1
+-- unix    : > /dev/null 2>&1
+
+if package.helpers.report then
+ package.helpers.report = logs.reporter("package loader") -- when used outside mtxrun
+end
diff --git a/tex/context/base/trac-pro.lua b/tex/context/base/trac-pro.lua
index d6e0d0339..401fa9275 100644
--- a/tex/context/base/trac-pro.lua
+++ b/tex/context/base/trac-pro.lua
@@ -1,208 +1,208 @@
-if not modules then modules = { } end modules ['trac-pro'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local getmetatable, setmetatable, rawset, type = getmetatable, setmetatable, rawset, type
-
--- The protection implemented here is probably not that tight but good enough to catch
--- problems due to naive usage.
---
--- There's a more extensive version (trac-xxx.lua) that supports nesting.
---
--- This will change when we have _ENV in lua 5.2+
-
-local trace_namespaces = false trackers.register("system.namespaces", function(v) trace_namespaces = v end)
-
-local report_system = logs.reporter("system","protection")
-
-namespaces = namespaces or { }
-local namespaces = namespaces
-
-local registered = { }
-
-local function report_index(k,name)
- if trace_namespaces then
- report_system("reference to %a in protected namespace %a: %s",k,name,debug.traceback())
- else
- report_system("reference to %a in protected namespace %a",k,name)
- end
-end
-
-local function report_newindex(k,name)
- if trace_namespaces then
- report_system("assignment to %a in protected namespace %a: %s",k,name,debug.traceback())
- else
- report_system("assignment to %a in protected namespace %a",k,name)
- end
-end
-
-local function register(name)
- local data = name == "global" and _G or _G[name]
- if not data then
- return -- error
- end
- registered[name] = data
- local m = getmetatable(data)
- if not m then
- m = { }
- setmetatable(data,m)
- end
- local index, newindex = { }, { }
- m.__saved__index = m.__index
- m.__no__index = function(t,k)
- if not index[k] then
- index[k] = true
- report_index(k,name)
- end
- return nil
- end
- m.__saved__newindex = m.__newindex
- m.__no__newindex = function(t,k,v)
- if not newindex[k] then
- newindex[k] = true
- report_newindex(k,name)
- end
- rawset(t,k,v)
- end
- m.__protection__depth = 0
-end
-
-local function private(name) -- maybe save name
- local data = registered[name]
- if not data then
- data = _G[name]
- if not data then
- data = { }
- _G[name] = data
- end
- register(name)
- end
- return data
-end
-
-local function protect(name)
- local data = registered[name]
- if not data then
- return
- end
- local m = getmetatable(data)
- local pd = m.__protection__depth
- if pd > 0 then
- m.__protection__depth = pd + 1
- else
- m.__save_d_index, m.__saved__newindex = m.__index, m.__newindex
- m.__index, m.__newindex = m.__no__index, m.__no__newindex
- m.__protection__depth = 1
- end
-end
-
-local function unprotect(name)
- local data = registered[name]
- if not data then
- return
- end
- local m = getmetatable(data)
- local pd = m.__protection__depth
- if pd > 1 then
- m.__protection__depth = pd - 1
- else
- m.__index, m.__newindex = m.__saved__index, m.__saved__newindex
- m.__protection__depth = 0
- end
-end
-
-local function protectall()
- for name, _ in next, registered do
- if name ~= "global" then
- protect(name)
- end
- end
-end
-
-local function unprotectall()
- for name, _ in next, registered do
- if name ~= "global" then
- unprotect(name)
- end
- end
-end
-
-namespaces.register = register -- register when defined
-namespaces.private = private -- allocate and register if needed
-namespaces.protect = protect
-namespaces.unprotect = unprotect
-namespaces.protectall = protectall
-namespaces.unprotectall = unprotectall
-
-namespaces.private("namespaces") registered = { } register("global") -- unreachable
-
-directives.register("system.protect", function(v)
- if v then
- protectall()
- else
- unprotectall()
- end
-end)
-
-directives.register("system.checkglobals", function(v)
- if v then
- report_system("enabling global namespace guard")
- protect("global")
- else
- report_system("disabling global namespace guard")
- unprotect("global")
- end
-end)
-
--- dummy section (will go to luat-dum.lua)
-
---~ if not namespaces.private then
---~ -- somewhat protected
---~ local registered = { }
---~ function namespaces.private(name)
---~ local data = registered[name]
---~ if data then
---~ return data
---~ end
---~ local data = _G[name]
---~ if not data then
---~ data = { }
---~ _G[name] = data
---~ end
---~ registered[name] = data
---~ return data
---~ end
---~ function namespaces.protectall(list)
---~ for name, data in next, list or registered do
---~ setmetatable(data, { __newindex = function() print(string.format("table %s is protected",name)) end })
---~ end
---~ end
---~ namespaces.protectall { namespaces = namespaces }
---~ end
-
---~ directives.enable("system.checkglobals")
-
---~ namespaces.register("resolvers","trackers")
---~ namespaces.protect("resolvers")
---~ namespaces.protect("resolvers")
---~ namespaces.protect("resolvers")
---~ namespaces.unprotect("resolvers")
---~ namespaces.unprotect("resolvers")
---~ namespaces.unprotect("resolvers")
---~ namespaces.protect("trackers")
-
---~ resolvers.x = true
---~ resolvers.y = true
---~ trackers.a = ""
---~ resolvers.z = true
---~ oeps = { }
-
---~ resolvers = namespaces.private("resolvers")
---~ fonts = namespaces.private("fonts")
---~ directives.enable("system.protect")
---~ namespaces.protectall()
---~ resolvers.xx = { }
+if not modules then modules = { } end modules ['trac-pro'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local getmetatable, setmetatable, rawset, type = getmetatable, setmetatable, rawset, type
+
+-- The protection implemented here is probably not that tight but good enough to catch
+-- problems due to naive usage.
+--
+-- There's a more extensive version (trac-xxx.lua) that supports nesting.
+--
+-- This will change when we have _ENV in lua 5.2+
+
+local trace_namespaces = false trackers.register("system.namespaces", function(v) trace_namespaces = v end)
+
+local report_system = logs.reporter("system","protection")
+
+namespaces = namespaces or { }
+local namespaces = namespaces
+
+local registered = { }
+
+local function report_index(k,name)
+ if trace_namespaces then
+ report_system("reference to %a in protected namespace %a: %s",k,name,debug.traceback())
+ else
+ report_system("reference to %a in protected namespace %a",k,name)
+ end
+end
+
+local function report_newindex(k,name)
+ if trace_namespaces then
+ report_system("assignment to %a in protected namespace %a: %s",k,name,debug.traceback())
+ else
+ report_system("assignment to %a in protected namespace %a",k,name)
+ end
+end
+
+local function register(name)
+ local data = name == "global" and _G or _G[name]
+ if not data then
+ return -- error
+ end
+ registered[name] = data
+ local m = getmetatable(data)
+ if not m then
+ m = { }
+ setmetatable(data,m)
+ end
+ local index, newindex = { }, { }
+ m.__saved__index = m.__index
+ m.__no__index = function(t,k)
+ if not index[k] then
+ index[k] = true
+ report_index(k,name)
+ end
+ return nil
+ end
+ m.__saved__newindex = m.__newindex
+ m.__no__newindex = function(t,k,v)
+ if not newindex[k] then
+ newindex[k] = true
+ report_newindex(k,name)
+ end
+ rawset(t,k,v)
+ end
+ m.__protection__depth = 0
+end
+
+local function private(name) -- maybe save name
+ local data = registered[name]
+ if not data then
+ data = _G[name]
+ if not data then
+ data = { }
+ _G[name] = data
+ end
+ register(name)
+ end
+ return data
+end
+
+local function protect(name)
+ local data = registered[name]
+ if not data then
+ return
+ end
+ local m = getmetatable(data)
+ local pd = m.__protection__depth
+ if pd > 0 then
+ m.__protection__depth = pd + 1
+ else
+        m.__saved__index, m.__saved__newindex = m.__index, m.__newindex
+ m.__index, m.__newindex = m.__no__index, m.__no__newindex
+ m.__protection__depth = 1
+ end
+end
+
+local function unprotect(name)
+ local data = registered[name]
+ if not data then
+ return
+ end
+ local m = getmetatable(data)
+ local pd = m.__protection__depth
+ if pd > 1 then
+ m.__protection__depth = pd - 1
+ else
+ m.__index, m.__newindex = m.__saved__index, m.__saved__newindex
+ m.__protection__depth = 0
+ end
+end
+
+local function protectall()
+ for name, _ in next, registered do
+ if name ~= "global" then
+ protect(name)
+ end
+ end
+end
+
+local function unprotectall()
+ for name, _ in next, registered do
+ if name ~= "global" then
+ unprotect(name)
+ end
+ end
+end
+
+namespaces.register = register -- register when defined
+namespaces.private = private -- allocate and register if needed
+namespaces.protect = protect
+namespaces.unprotect = unprotect
+namespaces.protectall = protectall
+namespaces.unprotectall = unprotectall
+
+namespaces.private("namespaces") registered = { } register("global") -- unreachable
+
+directives.register("system.protect", function(v)
+ if v then
+ protectall()
+ else
+ unprotectall()
+ end
+end)
+
+directives.register("system.checkglobals", function(v)
+ if v then
+ report_system("enabling global namespace guard")
+ protect("global")
+ else
+ report_system("disabling global namespace guard")
+ unprotect("global")
+ end
+end)
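+
+--~ a usage sketch:
+--~
+--~ directives.enable("system.checkglobals")
+--~ oeps = true    -- reported as an assignment in protected namespace "global"
+--~ directives.disable("system.checkglobals")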
+
+-- dummy section (will go to luat-dum.lua)
+
+--~ if not namespaces.private then
+--~ -- somewhat protected
+--~ local registered = { }
+--~ function namespaces.private(name)
+--~ local data = registered[name]
+--~ if data then
+--~ return data
+--~ end
+--~ local data = _G[name]
+--~ if not data then
+--~ data = { }
+--~ _G[name] = data
+--~ end
+--~ registered[name] = data
+--~ return data
+--~ end
+--~ function namespaces.protectall(list)
+--~ for name, data in next, list or registered do
+--~ setmetatable(data, { __newindex = function() print(string.format("table %s is protected",name)) end })
+--~ end
+--~ end
+--~ namespaces.protectall { namespaces = namespaces }
+--~ end
+
+--~ directives.enable("system.checkglobals")
+
+--~ namespaces.register("resolvers","trackers")
+--~ namespaces.protect("resolvers")
+--~ namespaces.protect("resolvers")
+--~ namespaces.protect("resolvers")
+--~ namespaces.unprotect("resolvers")
+--~ namespaces.unprotect("resolvers")
+--~ namespaces.unprotect("resolvers")
+--~ namespaces.protect("trackers")
+
+--~ resolvers.x = true
+--~ resolvers.y = true
+--~ trackers.a = ""
+--~ resolvers.z = true
+--~ oeps = { }
+
+--~ resolvers = namespaces.private("resolvers")
+--~ fonts = namespaces.private("fonts")
+--~ directives.enable("system.protect")
+--~ namespaces.protectall()
+--~ resolvers.xx = { }
diff --git a/tex/context/base/trac-set.lua b/tex/context/base/trac-set.lua
index 95fdc43b3..5ab189f55 100644
--- a/tex/context/base/trac-set.lua
+++ b/tex/context/base/trac-set.lua
@@ -1,379 +1,379 @@
-if not modules then modules = { } end modules ['trac-set'] = { -- might become util-set.lua
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- maybe this should be util-set.lua
-
-local type, next, tostring = type, next, tostring
-local concat = table.concat
-local format, find, lower, gsub, topattern = string.format, string.find, string.lower, string.gsub, string.topattern
-local is_boolean = string.is_boolean
-local settings_to_hash = utilities.parsers.settings_to_hash
-local allocate = utilities.storage.allocate
-
-utilities = utilities or { }
-local utilities = utilities
-
-local setters = utilities.setters or { }
-utilities.setters = setters
-
-local data = { }
-
--- We can initialize from the cnf file. This is sort of tricky as
--- later defined setters also need to be initialized then. If set
--- this way, we need to ensure that they are not reset later on.
-
-local trace_initialize = false -- only for testing during development
-
-function setters.initialize(filename,name,values) -- filename only for diagnostics
- local setter = data[name]
- if setter then
- frozen = true -- don't permitoverload
- -- trace_initialize = true
- local data = setter.data
- if data then
- for key, newvalue in next, values do
- local newvalue = is_boolean(newvalue,newvalue)
- local functions = data[key]
- if functions then
- local oldvalue = functions.value
- if functions.frozen then
- if trace_initialize then
- setter.report("%s: %a is %s to %a",filename,key,"frozen",oldvalue)
- end
- elseif #functions > 0 and not oldvalue then
--- elseif #functions > 0 and oldvalue == nil then
- if trace_initialize then
- setter.report("%s: %a is %s to %a",filename,key,"set",newvalue)
- end
- for i=1,#functions do
- functions[i](newvalue)
- end
- functions.value = newvalue
- functions.frozen = functions.frozen or frozen
- else
- if trace_initialize then
- setter.report("%s: %a is %s as %a",filename,key,"kept",oldvalue)
- end
- end
- else
- -- we do a simple preregistration i.e. not in the
- -- list as it might be an obsolete entry
- functions = { default = newvalue, frozen = frozen }
- data[key] = functions
- if trace_initialize then
- setter.report("%s: %a is %s to %a",filename,key,"defaulted",newvalue)
- end
- end
- end
- return true
- end
- end
-end
-
--- user interface code
-
-local function set(t,what,newvalue)
- local data = t.data
- if not data.frozen then
- local done = t.done
- if type(what) == "string" then
- what = settings_to_hash(what) -- inefficient but ok
- end
- if type(what) ~= "table" then
- return
- end
- if not done then -- catch ... why not set?
- done = { }
- t.done = done
- end
- for w, value in next, what do
- if value == "" then
- value = newvalue
- elseif not value then
- value = false -- catch nil
- else
- value = is_boolean(value,value)
- end
- w = topattern(w,true,true)
- for name, functions in next, data do
- if done[name] then
- -- prevent recursion due to wildcards
- elseif find(name,w) then
- done[name] = true
- for i=1,#functions do
- functions[i](value)
- end
- functions.value = value
- end
- end
- end
- end
-end
-
-local function reset(t)
- local data = t.data
- if not data.frozen then
- for name, functions in next, data do
- for i=1,#functions do
- functions[i](false)
- end
- functions.value = false
- end
- end
-end
-
-local function enable(t,what)
- set(t,what,true)
-end
-
-local function disable(t,what)
- local data = t.data
- if not what or what == "" then
- t.done = { }
- reset(t)
- else
- set(t,what,false)
- end
-end
-
-function setters.register(t,what,...)
- local data = t.data
- what = lower(what)
- local functions = data[what]
- if not functions then
- functions = { }
- data[what] = functions
- if trace_initialize then
- t.report("defining %a",what)
- end
- end
- local default = functions.default -- can be set from cnf file
- for i=1,select("#",...) do
- local fnc = select(i,...)
- local typ = type(fnc)
- if typ == "string" then
- if trace_initialize then
- t.report("coupling %a to %a",what,fnc)
- end
- local s = fnc -- else wrong reference
- fnc = function(value) set(t,s,value) end
- elseif typ ~= "function" then
- fnc = nil
- end
- if fnc then
- functions[#functions+1] = fnc
- -- default: set at command line or in cnf file
- -- value : set in tex run (needed when loading runtime)
- local value = functions.value or default
- if value ~= nil then
- fnc(value)
- functions.value = value
- end
- end
- end
- return false -- so we can use it in an assignment
-end
-
-function setters.enable(t,what)
- local e = t.enable
- t.enable, t.done = enable, { }
- enable(t,what)
- t.enable, t.done = e, { }
-end
-
-function setters.disable(t,what)
- local e = t.disable
- t.disable, t.done = disable, { }
- disable(t,what)
- t.disable, t.done = e, { }
-end
-
-function setters.reset(t)
- t.done = { }
- reset(t)
-end
-
-function setters.list(t) -- pattern
- local list = table.sortedkeys(t.data)
- local user, system = { }, { }
- for l=1,#list do
- local what = list[l]
- if find(what,"^%*") then
- system[#system+1] = what
- else
- user[#user+1] = what
- end
- end
- return user, system
-end
-
-function setters.show(t)
- local category = t.name
- local list = setters.list(t)
- t.report()
- for k=1,#list do
- local name = list[k]
- local functions = t.data[name]
- if functions then
- local value, default, modules = functions.value, functions.default, #functions
- value = value == nil and "unset" or tostring(value)
- default = default == nil and "unset" or tostring(default)
- t.report("%-50s modules: %2i default: %-12s value: %-12s",name,modules,default,value)
- end
- end
- t.report()
-end
-
--- we could have used a bit of oo and the trackers:enable syntax but
--- there is already a lot of code around using the singular tracker
-
--- we could make this into a module but we also want the rest avaliable
-
-local enable, disable, register, list, show = setters.enable, setters.disable, setters.register, setters.list, setters.show
-
-function setters.report(setter,...)
- print(format("%-15s : %s\n",setter.name,format(...)))
-end
-
-local function default(setter,name)
- local d = setter.data[name]
- return d and d.default
-end
-
-local function value(setter,name)
- local d = setter.data[name]
- return d and (d.value or d.default)
-end
-
-function setters.new(name) -- we could use foo:bar syntax (but not used that often)
- local setter -- we need to access it in setter itself
- setter = {
- data = allocate(), -- indexed, but also default and value fields
- name = name,
- report = function(...) setters.report (setter,...) end,
- enable = function(...) enable (setter,...) end,
- disable = function(...) disable (setter,...) end,
- register = function(...) register(setter,...) end,
- list = function(...) list (setter,...) end,
- show = function(...) show (setter,...) end,
- default = function(...) return default (setter,...) end,
- value = function(...) return value (setter,...) end,
- }
- data[name] = setter
- return setter
-end
-
-trackers = setters.new("trackers")
-directives = setters.new("directives")
-experiments = setters.new("experiments")
-
-local t_enable, t_disable = trackers .enable, trackers .disable
-local d_enable, d_disable = directives .enable, directives .disable
-local e_enable, e_disable = experiments.enable, experiments.disable
-
--- nice trick: we overload two of the directives related functions with variants that
--- do tracing (itself using a tracker) .. proof of concept
-
-local trace_directives = false local trace_directives = false trackers.register("system.directives", function(v) trace_directives = v end)
-local trace_experiments = false local trace_experiments = false trackers.register("system.experiments", function(v) trace_experiments = v end)
-
-function directives.enable(...)
- if trace_directives then
- directives.report("enabling: % t",{...})
- end
- d_enable(...)
-end
-
-function directives.disable(...)
- if trace_directives then
- directives.report("disabling: % t",{...})
- end
- d_disable(...)
-end
-
-function experiments.enable(...)
- if trace_experiments then
- experiments.report("enabling: % t",{...})
- end
- e_enable(...)
-end
-
-function experiments.disable(...)
- if trace_experiments then
- experiments.report("disabling: % t",{...})
- end
- e_disable(...)
-end
-
--- a useful example
-
-directives.register("system.nostatistics", function(v)
- if statistics then
- statistics.enable = not v
- else
- -- forget about it
- end
-end)
-
-directives.register("system.nolibraries", function(v)
- if libraries then
- libraries = nil -- we discard this tracing for security
- else
- -- no libraries defined
- end
-end)
-
--- experiment
-
-if environment then
-
- -- The engineflags are known earlier than environment.arguments but maybe we
- -- need to handle them both as the later are parsed differently. The c: prefix
- -- is used by mtx-context to isolate the flags from those that concern luatex.
-
- local engineflags = environment.engineflags
-
- if engineflags then
- local list = engineflags["c:trackers"] or engineflags["trackers"]
- if type(list) == "string" then
- setters.initialize("commandline flags","trackers",settings_to_hash(list))
- -- t_enable(list)
- end
- local list = engineflags["c:directives"] or engineflags["directives"]
- if type(list) == "string" then
- setters.initialize("commandline flags","directives", settings_to_hash(list))
- -- d_enable(list)
- end
- end
-
-end
-
--- here
-
-if texconfig then
-
- -- this happens too late in ini mode but that is no problem
-
- local function set(k,v)
- v = tonumber(v)
- if v then
- texconfig[k] = v
- end
- end
-
- directives.register("luatex.expanddepth", function(v) set("expand_depth",v) end)
- directives.register("luatex.hashextra", function(v) set("hash_extra",v) end)
- directives.register("luatex.nestsize", function(v) set("nest_size",v) end)
- directives.register("luatex.maxinopen", function(v) set("max_in_open",v) end)
- directives.register("luatex.maxprintline", function(v) set("max_print_line",v) end)
- directives.register("luatex.maxstrings", function(v) set("max_strings",v) end)
- directives.register("luatex.paramsize", function(v) set("param_size",v) end)
- directives.register("luatex.savesize", function(v) set("save_size",v) end)
- directives.register("luatex.stacksize", function(v) set("stack_size",v) end)
-
-end
+if not modules then modules = { } end modules ['trac-set'] = { -- might become util-set.lua
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- maybe this should be util-set.lua
+
+local type, next, tostring = type, next, tostring
+local concat = table.concat
+local format, find, lower, gsub, topattern = string.format, string.find, string.lower, string.gsub, string.topattern
+local is_boolean = string.is_boolean
+local settings_to_hash = utilities.parsers.settings_to_hash
+local allocate = utilities.storage.allocate
+
+utilities = utilities or { }
+local utilities = utilities
+
+local setters = utilities.setters or { }
+utilities.setters = setters
+
+local data = { }
+
+-- We can initialize from the cnf file. This is sort of tricky as
+-- later defined setters also need to be initialized then. If set
+-- this way, we need to ensure that they are not reset later on.
+
+local trace_initialize = false -- only for testing during development
+
+function setters.initialize(filename,name,values) -- filename only for diagnostics
+ local setter = data[name]
+ if setter then
+        local frozen = true -- don't permit overload
+ -- trace_initialize = true
+ local data = setter.data
+ if data then
+ for key, newvalue in next, values do
+ local newvalue = is_boolean(newvalue,newvalue)
+ local functions = data[key]
+ if functions then
+ local oldvalue = functions.value
+ if functions.frozen then
+ if trace_initialize then
+ setter.report("%s: %a is %s to %a",filename,key,"frozen",oldvalue)
+ end
+ elseif #functions > 0 and not oldvalue then
+-- elseif #functions > 0 and oldvalue == nil then
+ if trace_initialize then
+ setter.report("%s: %a is %s to %a",filename,key,"set",newvalue)
+ end
+ for i=1,#functions do
+ functions[i](newvalue)
+ end
+ functions.value = newvalue
+ functions.frozen = functions.frozen or frozen
+ else
+ if trace_initialize then
+ setter.report("%s: %a is %s as %a",filename,key,"kept",oldvalue)
+ end
+ end
+ else
+ -- we do a simple preregistration i.e. not in the
+ -- list as it might be an obsolete entry
+ functions = { default = newvalue, frozen = frozen }
+ data[key] = functions
+ if trace_initialize then
+ setter.report("%s: %a is %s to %a",filename,key,"defaulted",newvalue)
+ end
+ end
+ end
+ return true
+ end
+ end
+end
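+
+-- a sketch of a call (normally this is done by the configuration loader; the
+-- keys and values are made up):
+--
+-- setters.initialize("texmf.cnf","directives", {
+--     ["system.nostatistics"] = "true",
+--     ["luatex.hashextra"]    = "200000",
+-- })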
+
+-- user interface code
+
+local function set(t,what,newvalue)
+ local data = t.data
+ if not data.frozen then
+ local done = t.done
+ if type(what) == "string" then
+ what = settings_to_hash(what) -- inefficient but ok
+ end
+ if type(what) ~= "table" then
+ return
+ end
+ if not done then -- catch ... why not set?
+ done = { }
+ t.done = done
+ end
+ for w, value in next, what do
+ if value == "" then
+ value = newvalue
+ elseif not value then
+ value = false -- catch nil
+ else
+ value = is_boolean(value,value)
+ end
+ w = topattern(w,true,true)
+ for name, functions in next, data do
+ if done[name] then
+ -- prevent recursion due to wildcards
+ elseif find(name,w) then
+ done[name] = true
+ for i=1,#functions do
+ functions[i](value)
+ end
+ functions.value = value
+ end
+ end
+ end
+ end
+end
+
+local function reset(t)
+ local data = t.data
+ if not data.frozen then
+ for name, functions in next, data do
+ for i=1,#functions do
+ functions[i](false)
+ end
+ functions.value = false
+ end
+ end
+end
+
+local function enable(t,what)
+ set(t,what,true)
+end
+
+local function disable(t,what)
+ local data = t.data
+ if not what or what == "" then
+ t.done = { }
+ reset(t)
+ else
+ set(t,what,false)
+ end
+end
+
+function setters.register(t,what,...)
+ local data = t.data
+ what = lower(what)
+ local functions = data[what]
+ if not functions then
+ functions = { }
+ data[what] = functions
+ if trace_initialize then
+ t.report("defining %a",what)
+ end
+ end
+ local default = functions.default -- can be set from cnf file
+ for i=1,select("#",...) do
+ local fnc = select(i,...)
+ local typ = type(fnc)
+ if typ == "string" then
+ if trace_initialize then
+ t.report("coupling %a to %a",what,fnc)
+ end
+ local s = fnc -- else wrong reference
+ fnc = function(value) set(t,s,value) end
+ elseif typ ~= "function" then
+ fnc = nil
+ end
+ if fnc then
+ functions[#functions+1] = fnc
+ -- default: set at command line or in cnf file
+ -- value : set in tex run (needed when loading runtime)
+ local value = functions.value or default
+ if value ~= nil then
+ fnc(value)
+ functions.value = value
+ end
+ end
+ end
+ return false -- so we can use it in an assignment
+end
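+
+-- the usual idiom (sketch, made up keys): keep a local flag and register a
+-- function that sets it; a string couples another key by name
+--
+-- local trace_demo = false
+--
+-- trackers.register("demo.tracing", function(v) trace_demo = v end)
+-- trackers.register("demo.all", "demo.tracing")   -- demo.all also toggles demo.tracing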
+
+function setters.enable(t,what)
+ local e = t.enable
+ t.enable, t.done = enable, { }
+ enable(t,what)
+ t.enable, t.done = e, { }
+end
+
+function setters.disable(t,what)
+ local e = t.disable
+ t.disable, t.done = disable, { }
+ disable(t,what)
+ t.disable, t.done = e, { }
+end
+
+function setters.reset(t)
+ t.done = { }
+ reset(t)
+end
+
+function setters.list(t) -- pattern
+ local list = table.sortedkeys(t.data)
+ local user, system = { }, { }
+ for l=1,#list do
+ local what = list[l]
+ if find(what,"^%*") then
+ system[#system+1] = what
+ else
+ user[#user+1] = what
+ end
+ end
+ return user, system
+end
+
+function setters.show(t)
+ local category = t.name
+ local list = setters.list(t)
+ t.report()
+ for k=1,#list do
+ local name = list[k]
+ local functions = t.data[name]
+ if functions then
+ local value, default, modules = functions.value, functions.default, #functions
+ value = value == nil and "unset" or tostring(value)
+ default = default == nil and "unset" or tostring(default)
+ t.report("%-50s modules: %2i default: %-12s value: %-12s",name,modules,default,value)
+ end
+ end
+ t.report()
+end
+
+-- we could have used a bit of oo and the trackers:enable syntax but
+-- there is already a lot of code around using the singular tracker
+
+-- we could make this into a module but we also want the rest available
+
+local enable, disable, register, list, show = setters.enable, setters.disable, setters.register, setters.list, setters.show
+
+function setters.report(setter,...)
+ print(format("%-15s : %s\n",setter.name,format(...)))
+end
+
+local function default(setter,name)
+ local d = setter.data[name]
+ return d and d.default
+end
+
+local function value(setter,name)
+ local d = setter.data[name]
+ return d and (d.value or d.default)
+end
+
+function setters.new(name) -- we could use foo:bar syntax (but not used that often)
+ local setter -- we need to access it in setter itself
+ setter = {
+ data = allocate(), -- indexed, but also default and value fields
+ name = name,
+ report = function(...) setters.report (setter,...) end,
+ enable = function(...) enable (setter,...) end,
+ disable = function(...) disable (setter,...) end,
+ register = function(...) register(setter,...) end,
+ list = function(...) list (setter,...) end,
+ show = function(...) show (setter,...) end,
+ default = function(...) return default (setter,...) end,
+ value = function(...) return value (setter,...) end,
+ }
+ data[name] = setter
+ return setter
+end
+
+trackers = setters.new("trackers")
+directives = setters.new("directives")
+experiments = setters.new("experiments")
+
+local t_enable, t_disable = trackers .enable, trackers .disable
+local d_enable, d_disable = directives .enable, directives .disable
+local e_enable, e_disable = experiments.enable, experiments.disable
+
+-- nice trick: we overload two of the directives related functions with variants that
+-- do tracing (itself using a tracker) .. proof of concept
+
+local trace_directives  = false  trackers.register("system.directives",  function(v) trace_directives  = v end)
+local trace_experiments = false  trackers.register("system.experiments", function(v) trace_experiments = v end)
+
+function directives.enable(...)
+ if trace_directives then
+ directives.report("enabling: % t",{...})
+ end
+ d_enable(...)
+end
+
+function directives.disable(...)
+ if trace_directives then
+ directives.report("disabling: % t",{...})
+ end
+ d_disable(...)
+end
+
+function experiments.enable(...)
+ if trace_experiments then
+ experiments.report("enabling: % t",{...})
+ end
+ e_enable(...)
+end
+
+function experiments.disable(...)
+ if trace_experiments then
+ experiments.report("disabling: % t",{...})
+ end
+ e_disable(...)
+end
+
+-- a useful example
+
+directives.register("system.nostatistics", function(v)
+ if statistics then
+ statistics.enable = not v
+ else
+ -- forget about it
+ end
+end)
+
+directives.register("system.nolibraries", function(v)
+ if libraries then
+ libraries = nil -- we discard this tracing for security
+ else
+ -- no libraries defined
+ end
+end)
+
+-- experiment
+
+if environment then
+
+ -- The engineflags are known earlier than environment.arguments but maybe we
+    -- need to handle them both as the latter are parsed differently. The c: prefix
+ -- is used by mtx-context to isolate the flags from those that concern luatex.
+
+ local engineflags = environment.engineflags
+
+ if engineflags then
+ local list = engineflags["c:trackers"] or engineflags["trackers"]
+ if type(list) == "string" then
+ setters.initialize("commandline flags","trackers",settings_to_hash(list))
+ -- t_enable(list)
+ end
+ local list = engineflags["c:directives"] or engineflags["directives"]
+ if type(list) == "string" then
+ setters.initialize("commandline flags","directives", settings_to_hash(list))
+ -- d_enable(list)
+ end
+ end
+
+end
+
+-- here
+
+if texconfig then
+
+ -- this happens too late in ini mode but that is no problem
+
+ local function set(k,v)
+ v = tonumber(v)
+ if v then
+ texconfig[k] = v
+ end
+ end
+
+ directives.register("luatex.expanddepth", function(v) set("expand_depth",v) end)
+ directives.register("luatex.hashextra", function(v) set("hash_extra",v) end)
+ directives.register("luatex.nestsize", function(v) set("nest_size",v) end)
+ directives.register("luatex.maxinopen", function(v) set("max_in_open",v) end)
+ directives.register("luatex.maxprintline", function(v) set("max_print_line",v) end)
+ directives.register("luatex.maxstrings", function(v) set("max_strings",v) end)
+ directives.register("luatex.paramsize", function(v) set("param_size",v) end)
+ directives.register("luatex.savesize", function(v) set("save_size",v) end)
+ directives.register("luatex.stacksize", function(v) set("stack_size",v) end)
+
+end
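+
+-- typically set from the command line (sketch):
+--
+--   context --directives="luatex.hashextra=200000" myfile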
diff --git a/tex/context/base/trac-tex.lua b/tex/context/base/trac-tex.lua
index 7e3406073..aecf1799b 100644
--- a/tex/context/base/trac-tex.lua
+++ b/tex/context/base/trac-tex.lua
@@ -1,75 +1,75 @@
-if not modules then modules = { } end modules ['trac-tex'] = {
- version = 1.001,
- comment = "companion to trac-deb.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- moved from trac-deb.lua
-
-local format = string.format
-
-local texhashtokens = tex.hashtokens
-
-local trackers = trackers
-
-local saved = { }
-
-function trackers.savehash()
- saved = texhashtokens()
-end
-
-function trackers.dumphashtofile(filename,delta)
- local list, hash, command_name = { }, texhashtokens(), token.command_name
- for name, token in next, hash do
- if not delta or not saved[name] then
- -- token: cmd, chr, csid -- combination cmd,chr determines name
- local category = command_name(token)
- local dk = list[category]
- if not dk then
- -- a bit funny names but this sorts better (easier to study)
- dk = { names = { }, found = 0, code = token[1] }
- list[category] = dk
- end
- dk.names[name] = { token[2], token[3] }
- dk.found = dk.found + 1
- end
- end
- io.savedata(filename or tex.jobname .. "-hash.log",table.serialize(list,true))
-end
-
-local delta = nil
-
-local function dump_hash(wanteddelta)
- if delta == nil then
- saved = saved or texhashtokens() -- no need for trackers.dump_hash
- luatex.registerstopactions(1,function() dump_hash(nil,wanteddelta) end) -- at front
- end
- delta = wanteddelta
-end
-
-directives.register("system.dumphash", function() dump_hash(false) end)
-directives.register("system.dumpdelta", function() dump_hash(true ) end)
-
-local report_dump = logs.reporter("resolvers","dump")
-
-local function saveusedfilesintrees(format)
- local data = {
- jobname = environment.jobname or "?",
- version = environment.version or "?",
- kind = environment.kind or "?",
- files = resolvers.instance.foundintrees
- }
- local filename = file.replacesuffix(environment.jobname or "context-job",'jlg')
- if format == "lua" then
- io.savedata(filename,table.serialize(data,true))
- else
- io.savedata(filename,table.toxml(data,"job"))
- end
-end
-
-directives.register("system.dumpfiles", function(v)
- luatex.registerstopactions(function() saveusedfilesintrees(v) end)
-end)
-
+if not modules then modules = { } end modules ['trac-tex'] = {
+ version = 1.001,
+ comment = "companion to trac-deb.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- moved from trac-deb.lua
+
+local format = string.format
+
+local texhashtokens = tex.hashtokens
+
+local trackers = trackers
+
+local saved = { }
+
+function trackers.savehash()
+ saved = texhashtokens()
+end
+
+function trackers.dumphashtofile(filename,delta)
+ local list, hash, command_name = { }, texhashtokens(), token.command_name
+ for name, token in next, hash do
+ if not delta or not saved[name] then
+ -- token: cmd, chr, csid -- combination cmd,chr determines name
+ local category = command_name(token)
+ local dk = list[category]
+ if not dk then
+                -- the names are a bit funny but this sorts better (easier to study)
+ dk = { names = { }, found = 0, code = token[1] }
+ list[category] = dk
+ end
+ dk.names[name] = { token[2], token[3] }
+ dk.found = dk.found + 1
+ end
+ end
+ io.savedata(filename or tex.jobname .. "-hash.log",table.serialize(list,true))
+end
+
+local delta = nil
+
+local function dump_hash(wanteddelta)
+ if delta == nil then
+ saved = saved or texhashtokens() -- no need for trackers.dump_hash
+ luatex.registerstopactions(1,function() dump_hash(nil,wanteddelta) end) -- at front
+ end
+ delta = wanteddelta
+end
+
+directives.register("system.dumphash", function() dump_hash(false) end)
+directives.register("system.dumpdelta", function() dump_hash(true ) end)
+
+local report_dump = logs.reporter("resolvers","dump")
+
+local function saveusedfilesintrees(format)
+ local data = {
+ jobname = environment.jobname or "?",
+ version = environment.version or "?",
+ kind = environment.kind or "?",
+ files = resolvers.instance.foundintrees
+ }
+ local filename = file.replacesuffix(environment.jobname or "context-job",'jlg')
+ if format == "lua" then
+ io.savedata(filename,table.serialize(data,true))
+ else
+ io.savedata(filename,table.toxml(data,"job"))
+ end
+end
+
+directives.register("system.dumpfiles", function(v)
+ luatex.registerstopactions(function() saveusedfilesintrees(v) end)
+end)
+
diff --git a/tex/context/base/trac-tim.lua b/tex/context/base/trac-tim.lua
index 15ac9bf1b..e62e7e149 100644
--- a/tex/context/base/trac-tim.lua
+++ b/tex/context/base/trac-tim.lua
@@ -1,138 +1,138 @@
-if not modules then modules = { } end modules ['trac-tim'] = {
- version = 1.001,
- comment = "companion to m-timing.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format, gsub = string.format, string.gsub
-local concat, sort = table.concat, table.sort
-local next, tonumber = next, tonumber
-
-moduledata = moduledata or { }
-local progress = moduledata.progress or { }
-moduledata.progress = progress
-
-local report_timing = logs.reporter("timing")
-
-if not nodes then nodes = { } end -- when loaded in mtxrun
-
-progress.parameters = nodes and nodes.snapshots.getparameters
-progress.defaultfilename = ((tex and tex.jobname) or "whatever") .. "-luatex-progress"
-
--- storage
-
-function progress.store()
- nodes.snapshots.takesample()
-end
-
-function progress.save(name)
- local filename = (name or progress.defaultfilename) .. ".lut"
- report_timing("saving data in %a",filename)
- table.save(filename,nodes.snapshots.getsamples())
- nodes.snapshots.resetsamples()
-end
-
--- conversion
-
-local processed = { }
-local parameters = progress.parameters()
-
-local function convert(name)
- name = name ~= "" and name or progress.defaultfilename
- if not processed[name] then
- local names, top, bot, pages, paths, keys = { }, { }, { }, 0, { }, { }
- local data = table.load(name .. ".lut")
- if data then
- pages = #data
- if pages > 1 then
- local factor = 100
- for k=1,#data do
- for k, v in next, data[k].node_memory do
- keys[k] = true
- end
- end
- for k=1,#data do
- local m = data[k].node_memory
- for k, v in next, keys do
- if not m[k] then m[k] = 0 end
- end
- end
- local function path(tag,subtag)
- local b, t, s = nil, nil, { }
- for k=1,#data do
- local v = data[k][tag]
- v = v and (subtag and v[subtag]) or v
- if v then
- v = tonumber(v)
- if b then
- if v > t then t = v end
- if v < b then b = v end
- else
- t = v
- b = v
- end
- s[k] = v
- else
- s[k] = 0
- end
- end
- local tagname = subtag or tag
- top[tagname] = gsub(format("%.3f",t),"%.000$","")
- bot[tagname] = gsub(format("%.3f",b),"%.000$","")
- local delta = t-b
- if delta == 0 then
- delta = 1
- else
- delta = factor/delta
- end
- for k=1,#s do
- s[k] = format("(%s,%s)",k,(s[k]-b)*delta)
- end
- paths[tagname] = concat(s,"--")
- end
- for i=1,#parameters do
- path(parameters[i])
- end
- for tag, _ in next, keys do
- path("node_memory",tag)
- names[#names+1] = tag
- end
- pages = pages - 1
- end
- end
- sort(names)
- processed[name] = {
- names = names,
- top = top,
- bot = bot,
- pages = pages,
- paths = paths,
- }
- end
- return processed[name]
-end
-
-progress.convert = convert
-
-function progress.bot(name,tag)
- return convert(name).bot[tag] or 0
-end
-
-function progress.top(name,tag)
- return convert(name).top[tag] or 0
-end
-
-function progress.pages(name,tag)
- return convert(name).pages or 0
-end
-
-function progress.path(name,tag)
- return convert(name).paths[tag] or "origin"
-end
-
-function progress.nodes(name)
- return convert(name).names or { }
-end
-
+if not modules then modules = { } end modules ['trac-tim'] = {
+ version = 1.001,
+ comment = "companion to m-timing.tex",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format, gsub = string.format, string.gsub
+local concat, sort = table.concat, table.sort
+local next, tonumber = next, tonumber
+
+moduledata = moduledata or { }
+local progress = moduledata.progress or { }
+moduledata.progress = progress
+
+local report_timing = logs.reporter("timing")
+
+if not nodes then nodes = { } end -- when loaded in mtxrun
+
+progress.parameters = nodes and nodes.snapshots.getparameters
+progress.defaultfilename = ((tex and tex.jobname) or "whatever") .. "-luatex-progress"
+
+-- storage
+
+function progress.store()
+ nodes.snapshots.takesample()
+end
+
+function progress.save(name)
+ local filename = (name or progress.defaultfilename) .. ".lut"
+ report_timing("saving data in %a",filename)
+ table.save(filename,nodes.snapshots.getsamples())
+ nodes.snapshots.resetsamples()
+end
+
+-- conversion
+
+local processed = { }
+local parameters = progress.parameters()
+
+local function convert(name)
+ name = name ~= "" and name or progress.defaultfilename
+ if not processed[name] then
+ local names, top, bot, pages, paths, keys = { }, { }, { }, 0, { }, { }
+ local data = table.load(name .. ".lut")
+ if data then
+ pages = #data
+ if pages > 1 then
+ local factor = 100
+ for k=1,#data do
+ for k, v in next, data[k].node_memory do
+ keys[k] = true
+ end
+ end
+ for k=1,#data do
+ local m = data[k].node_memory
+ for k, v in next, keys do
+ if not m[k] then m[k] = 0 end
+ end
+ end
+ local function path(tag,subtag)
+ local b, t, s = nil, nil, { }
+ for k=1,#data do
+ local v = data[k][tag]
+ v = v and (subtag and v[subtag]) or v
+ if v then
+ v = tonumber(v)
+ if b then
+ if v > t then t = v end
+ if v < b then b = v end
+ else
+ t = v
+ b = v
+ end
+ s[k] = v
+ else
+ s[k] = 0
+ end
+ end
+ local tagname = subtag or tag
+ top[tagname] = gsub(format("%.3f",t),"%.000$","")
+ bot[tagname] = gsub(format("%.3f",b),"%.000$","")
+ local delta = t-b
+ if delta == 0 then
+ delta = 1
+ else
+ delta = factor/delta
+ end
+ for k=1,#s do
+ s[k] = format("(%s,%s)",k,(s[k]-b)*delta)
+ end
+ paths[tagname] = concat(s,"--")
+ end
+ for i=1,#parameters do
+ path(parameters[i])
+ end
+ for tag, _ in next, keys do
+ path("node_memory",tag)
+ names[#names+1] = tag
+ end
+ pages = pages - 1
+ end
+ end
+ sort(names)
+ processed[name] = {
+ names = names,
+ top = top,
+ bot = bot,
+ pages = pages,
+ paths = paths,
+ }
+ end
+ return processed[name]
+end
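+
+-- convert normalizes each tracked value to the 0..100 range and turns the
+-- samples into a path string (tag and values made up for illustration):
+--
+--   paths["elapsed_time"] -- "(1,0)--(2,37.5)--(3,100)"
+--
+-- while top and bot keep the formatted extremes and pages holds the number
+-- of snapshots minus one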
+
+progress.convert = convert
+
+function progress.bot(name,tag)
+ return convert(name).bot[tag] or 0
+end
+
+function progress.top(name,tag)
+ return convert(name).top[tag] or 0
+end
+
+function progress.pages(name,tag)
+ return convert(name).pages or 0
+end
+
+function progress.path(name,tag)
+ return convert(name).paths[tag] or "origin"
+end
+
+function progress.nodes(name)
+ return convert(name).names or { }
+end
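+
+-- typical queries (the tag is just an example; valid tags are the snapshot
+-- parameters plus the node_memory categories):
+--
+--   moduledata.progress.pages("")               -- number of snapshots minus one
+--   moduledata.progress.path("","elapsed_time") -- "(1,0)--..." or "origin"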
+
diff --git a/tex/context/base/trac-vis.lua b/tex/context/base/trac-vis.lua
index df4909c3e..3dc7aa9d2 100644
--- a/tex/context/base/trac-vis.lua
+++ b/tex/context/base/trac-vis.lua
@@ -1,926 +1,926 @@
-if not modules then modules = { } end modules ['trac-vis'] = {
- version = 1.001,
- comment = "companion to trac-vis.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local string, number, table = string, number, table
-local node, nodes, attributes, fonts, tex = node, nodes, attributes, fonts, tex
-local type = type
-local format = string.format
-local formatters = string.formatters
-
--- This module started out in the early days of mkiv and luatex with
--- visualizing kerns related to fonts. In the process of cleaning up the
--- visual debugger code it made sense to integrate some other code that
--- I had lying around and to replace the old supp-vis debugging code. As
--- only a subset of the old visual debugger makes sense, it has become a
--- different implementation. Some of the m-visual functionality will also
--- be ported. The code is rather trivial. The caching is not really needed
--- but saves up to 50% of the time needed to add the visualization. Of course
--- the overall runtime is larger because of the color and layer processing in
--- the backend (it can be several times as much), so a run with full
--- visualization enabled is noticeably slower. In practice that will seldom
--- happen, except when demoing.
-
--- We could use pdf literals and re stream codes but it's not worth the
--- trouble because we would end up in color etc mess. Maybe one day I'll
--- make a nodeinjection variant.
-
--- todo: global switch (so no attributes)
--- todo: maybe also xoffset, yoffset of glyph
--- todo: inline concat (more efficient)
-
-local nodecodes = nodes.nodecodes
-local disc_code = nodecodes.disc
-local kern_code = nodecodes.kern
-local glyph_code = nodecodes.glyph
-local disc_code = nodecodes.disc
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local glue_code = nodecodes.glue
-local penalty_code = nodecodes.penalty
-local whatsit_code = nodecodes.whatsit
-local user_code = nodecodes.user
-local gluespec_code = nodecodes.gluespec
-
-local kerncodes = nodes.kerncodes
-local font_kern_code = kerncodes.fontkern
-local user_kern_code = kerncodes.userkern
-
-local gluecodes = nodes.gluecodes
-local cleaders_code = gluecodes.cleaders
-local userskip_code = gluecodes.userskip
-local space_code = gluecodes.space
-local xspace_code = gluecodes.xspace
-local leftskip_code = gluecodes.leftskip
-local rightskip_code = gluecodes.rightskip
-
-local whatsitcodes = nodes.whatsitcodes
-
-local concat_nodes = nodes.concat
-local hpack_nodes = node.hpack
-local vpack_nodes = node.vpack
-local hpack_string = typesetters.hpack
-local fast_hpack_string = typesetters.fast_hpack
-local copy_node = node.copy
-local copy_list = node.copy_list
-local free_node = node.free
-local free_node_list = node.flush_list
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local fast_hpack = nodes.fasthpack
-local traverse_nodes = node.traverse
-
-local tex_attribute = tex.attribute
-local tex_box = tex.box
-local unsetvalue = attributes.unsetvalue
-
-local current_font = font.current
-
-local exheights = fonts.hashes.exheights
-local emwidths = fonts.hashes.emwidths
-local pt_factor = number.dimenfactors.pt
-
-local nodepool = nodes.pool
-local new_rule = nodepool.rule
-local new_kern = nodepool.kern
-local new_glue = nodepool.glue
-local new_penalty = nodepool.penalty
-
-local tracers = nodes.tracers
-local visualizers = nodes.visualizers
-
-local setcolor = tracers.colors.set
-local setlistcolor = tracers.colors.setlist
-local settransparency = tracers.transparencies.set
-local setlisttransparency = tracers.transparencies.setlist
-
-local starttiming = statistics.starttiming
-local stoptiming = statistics.stoptiming
-
-local a_visual = attributes.private("visual")
-local a_fontkern = attributes.private("fontkern")
-local a_layer = attributes.private("viewerlayer")
-
-local hasbit = number.hasbit
-local bit = number.bit
-local setbit = number.setbit
-local clearbit = number.clearbit
-
-local trace_hbox
-local trace_vbox
-local trace_vtop
-local trace_kern
-local trace_glue
-local trace_penalty
-local trace_fontkern
-local trace_strut
-local trace_whatsit
-local trace_user
-
-local report_visualize = logs.reporter("visualize")
-
-local modes = {
- hbox = 1,
- vbox = 2,
- vtop = 4,
- kern = 8,
- glue = 16,
- penalty = 32,
- fontkern = 64,
- strut = 128,
- whatsit = 256,
- glyph = 512,
- simple = 1024,
- simplehbox = 1024 + 1,
- simplevbox = 1024 + 2,
- simplevtop = 1024 + 4,
- user = 2048,
-}
-
-local modes_makeup = { "hbox", "vbox", "kern", "glue", "penalty" }
-local modes_boxes = { "hbox", "vbox" }
-local modes_all = { "hbox", "vbox", "kern", "glue", "penalty", "fontkern", "whatsit", "glyph", "user" }
-
-local usedfont, exheight, emwidth
-local l_penalty, l_glue, l_kern, l_fontkern, l_hbox, l_vbox, l_vtop, l_strut, l_whatsit, l_glyph, l_user
-
-local enabled = false
-local layers = { }
-
-local preset_boxes = modes.hbox + modes.vbox
-local preset_makeup = preset_boxes + modes.kern + modes.glue + modes.penalty
-local preset_all = preset_makeup + modes.fontkern + modes.whatsit + modes.glyph + modes.user
-
-function visualizers.setfont(id)
- usedfont = id or current_font()
- exheight = exheights[usedfont]
- emwidth = emwidths[usedfont]
-end
-
--- we can preset a bunch of bits
-
-local function enable()
- if not usedfont then
- -- we use a narrow monospaced font
- visualizers.setfont(fonts.definers.define { name = "lmmonoltcond10regular", size = tex.sp("4pt") })
- end
- for mode, value in next, modes do
- local tag = formatters["v_%s"](mode)
- attributes.viewerlayers.define {
- tag = tag,
- title = formatters["visualizer %s"](mode),
- visible = "start",
- editable = "yes",
- printable = "yes"
- }
- layers[mode] = attributes.viewerlayers.register(tag,true)
- end
- l_hbox = layers.hbox
- l_vbox = layers.vbox
- l_vtop = layers.vtop
- l_glue = layers.glue
- l_kern = layers.kern
- l_penalty = layers.penalty
- l_fontkern = layers.fontkern
- l_strut = layers.strut
- l_whatsit = layers.whatsit
- l_glyph = layers.glyph
- l_user = layers.user
- nodes.tasks.enableaction("shipouts","nodes.visualizers.handler")
- report_visualize("enabled")
- enabled = true
- tex.setcount("global","c_syst_visualizers_state",1) -- so that we can optimize at the tex end
-end
-
-local function setvisual(n,a,what) -- this will become more efficient when we have the bit lib linked in
- if not n or n == "reset" then
- return unsetvalue
- elseif n == "makeup" then
- if not a or a == 0 or a == unsetvalue then
- a = preset_makeup
- else
- a = setbit(a,preset_makeup)
- -- for i=1,#modes_makeup do
- -- a = setvisual(modes_makeup[i],a)
- -- end
- end
- elseif n == "boxes" then
- if not a or a == 0 or a == unsetvalue then
- a = preset_boxes
- else
- a = setbit(a,preset_boxes)
- -- for i=1,#modes_boxes do
- -- a = setvisual(modes_boxes[i],a)
- -- end
- end
- elseif n == "all" then
- if what == false then
- return unsetvalue
- elseif not a or a == 0 or a == unsetvalue then
- a = preset_all
- else
- a = setbit(a,preset_all)
- -- for i=1,#modes_all do
- -- a = setvisual(modes_all[i],a)
- -- end
- end
- else
- local m = modes[n]
- if not m then
- -- go on
- elseif a == unsetvalue then
- if what == false then
- return unsetvalue
- else
- -- a = setbit(0,m)
- a = m
- end
- elseif what == false then
- a = clearbit(a,m)
- elseif not a or a == 0 then
- a = m
- else
- a = setbit(a,m)
- end
- end
- if not a or a == 0 or a == unsetvalue then
- return unsetvalue
- elseif not enabled then -- must happen at runtime (as we don't store layers yet)
- enable()
- end
- return a
-end
-
-function visualizers.setvisual(n)
- tex_attribute[a_visual] = setvisual(n,tex_attribute[a_visual])
-end
-
-function visualizers.setlayer(n)
- tex_attribute[a_layer] = layers[n] or unsetvalue
-end
-
-commands.setvisual = visualizers.setvisual
-commands.setlayer = visualizers.setlayer
-
-function commands.visual(n)
- context(setvisual(n))
-end
-
-local function set(mode,v)
- tex_attribute[a_visual] = setvisual(mode,tex_attribute[a_visual],v)
-end
-
-for mode, value in next, modes do
- trackers.register(formatters["visualizers.%s"](mode), function(v) set(mode,v) end)
-end
-
-trackers.register("visualizers.reset", function(v) set("reset", v) end)
-trackers.register("visualizers.all", function(v) set("all", v) end)
-trackers.register("visualizers.makeup",function(v) set("makeup",v) end)
-trackers.register("visualizers.boxes", function(v) set("boxes", v) end)
-
-local c_positive = "trace:b"
-local c_negative = "trace:r"
-local c_zero = "trace:g"
-local c_text = "trace:s"
-local c_space = "trace:y"
-local c_skip_a = "trace:c"
-local c_skip_b = "trace:m"
-local c_glyph = "trace:o"
-local c_white = "trace:w"
-
-local c_positive_d = "trace:db"
-local c_negative_d = "trace:dr"
-local c_zero_d = "trace:dg"
-local c_text_d = "trace:ds"
-local c_space_d = "trace:dy"
-local c_skip_a_d = "trace:dc"
-local c_skip_b_d = "trace:dm"
-local c_glyph_d = "trace:do"
-local c_white_d = "trace:dw"
-
-local function sometext(str,layer,color,textcolor) -- we can just paste this together verbatim .. no typesetting needed
- local text = fast_hpack_string(str,usedfont)
- local size = text.width
- local rule = new_rule(size,2*exheight,exheight/2)
- local kern = new_kern(-size)
- if color then
- setcolor(rule,color)
- end
- if textcolor then
- setlistcolor(text.list,textcolor)
- end
- local info = concat_nodes {
- rule,
- kern,
- text,
- }
- setlisttransparency(info,c_zero)
- info = fast_hpack(info)
- if layer then
- info[a_layer] = layer
- end
- local width = info.width
- info.width = 0
- info.height = 0
- info.depth = 0
- return info, width
-end
-
-local f_cache = { }
-
-local function fontkern(head,current)
- local kern = current.kern
- local info = f_cache[kern]
- if info then
- -- print("hit fontkern")
- else
- local text = fast_hpack_string(formatters[" %0.3f"](kern*pt_factor),usedfont)
- local rule = new_rule(emwidth/10,6*exheight,2*exheight)
- local list = text.list
- if kern > 0 then
- setlistcolor(list,c_positive_d)
- elseif kern < 0 then
- setlistcolor(list,c_negative_d)
- else
- setlistcolor(list,c_zero_d)
- end
- setlisttransparency(list,c_text_d)
- settransparency(rule,c_text_d)
- text.shift = -5 * exheight
- info = concat_nodes {
- rule,
- text,
- }
- info = fast_hpack(info)
- info[a_layer] = l_fontkern
- info.width = 0
- info.height = 0
- info.depth = 0
- f_cache[kern] = info
- end
- head = insert_node_before(head,current,copy_list(info))
- return head, current
-end
-
-local w_cache = { }
-
-local tags = {
- open = "FIC",
- write = "FIW",
- close = "FIC",
- special = "SPE",
- localpar = "PAR",
- dir = "DIR",
- pdfliteral = "PDF",
- pdfrefobj = "PDF",
- pdfrefxform = "PDF",
- pdfrefximage = "PDF",
- pdfannot = "PDF",
- pdfstartlink = "PDF",
- pdfendlink = "PDF",
- pdfdest = "PDF",
- pdfthread = "PDF",
- pdfstartthread = "PDF",
- pdfendthread = "PDF",
- pdfsavepos = "PDF",
- pdfthreaddata = "PDF",
- pdflinkdata = "PDF",
- pdfcolorstack = "PDF",
- pdfsetmatrix = "PDF",
- pdfsave = "PDF",
- pdfrestore = "PDF",
- latelua = "LUA",
- closelua = "LUA",
- cancelboundary = "CBD",
- userdefined = "USR",
-}
-
-local function whatsit(head,current)
- local what = current.subtype
- local info = w_cache[what]
- if info then
- -- print("hit whatsit")
- else
- local tag = whatsitcodes[what]
- -- maybe different text colors per tag
- info = sometext(formatters["W:%s"](tag and tags[tag] or what),usedfont,nil,c_white)
- info[a_layer] = l_whatsit
- w_cache[what] = info
- end
- head, current = insert_node_after(head,current,copy_list(info))
- return head, current
-end
-
-local function user(head,current)
- local what = current.subtype
- local info = w_cache[what]
- if info then
- -- print("hit user")
- else
- info = sometext(formatters["U:%s"](what),usedfont)
- info[a_layer] = l_user
- w_cache[what] = info
- end
- head, current = insert_node_after(head,current,copy_list(info))
- return head, current
-end
-
-local b_cache = { }
-
-local function ruledbox(head,current,vertical,layer,what,simple)
- local wd = current.width
- if wd ~= 0 then
- local ht, dp = current.height, current.depth
- local next, prev = current.next, current.prev
- current.next, current.prev = nil, nil
- local linewidth = emwidth/10
- local baseline, baseskip
- if dp ~= 0 and ht ~= 0 then
- if wd > 20*linewidth then
- baseline = b_cache.baseline
- if not baseline then
- -- due to an optimized leader color/transparency we need to set the glue node in order
- -- to trigger this mechanism
- local leader = concat_nodes {
- new_glue(2*linewidth), -- 2.5
- new_rule(6*linewidth,linewidth,0), -- 5.0
- new_glue(2*linewidth), -- 2.5
- }
- -- setlisttransparency(leader,c_text)
- leader = fast_hpack(leader)
- -- setlisttransparency(leader,c_text)
- baseline = new_glue(0)
- baseline.leader = leader
- baseline.subtype = cleaders_code
- baseline.spec.stretch = 65536
- baseline.spec.stretch_order = 2
- setlisttransparency(baseline,c_text)
- b_cache.baseline = baseline
- end
- baseline = copy_list(baseline)
- baseline = fast_hpack(baseline,wd-2*linewidth)
- -- or new hpack node, set head and also:
- -- baseline.width = wd
- -- baseline.glue_set = wd/65536
- -- baseline.glue_order = 2
- -- baseline.glue_sign = 1
- baseskip = new_kern(-wd+linewidth)
- else
- baseline = new_rule(wd-2*linewidth,linewidth,0)
- baseskip = new_kern(-wd+2*linewidth)
- end
- end
- local this
- if not simple then
- this = b_cache[what]
- if not this then
- local text = fast_hpack_string(what,usedfont)
- this = concat_nodes {
- new_kern(-text.width),
- text,
- }
- setlisttransparency(this,c_text)
- this = fast_hpack(this)
- this.width = 0
- this.height = 0
- this.depth = 0
- b_cache[what] = this
- end
- end
- local info = concat_nodes {
- this and copy_list(this) or nil, -- this also triggers the right mode (else sometimes no whatsits)
- new_rule(linewidth,ht,dp),
- new_rule(wd-2*linewidth,-dp+linewidth,dp),
- new_rule(linewidth,ht,dp),
- new_kern(-wd+linewidth),
- new_rule(wd-2*linewidth,ht,-ht+linewidth),
- baseskip,
- baseline,
- }
- setlisttransparency(info,c_text)
- info = fast_hpack(info)
- info.width = 0
- info.height = 0
- info.depth = 0
- info[a_layer] = layer
- local info = concat_nodes {
- current,
- new_kern(-wd),
- info,
- }
- info = fast_hpack(info,wd)
- if vertical then
- info = vpack_nodes(info)
- end
- if next then
- info.next = next
- next.prev = info
- end
- if prev then
-if prev.id == gluespec_code then
- -- weird, how can this happen, an inline glue-spec
-else
- info.prev = prev
- prev.next = info
-end
- end
- if head == current then
- return info, info
- else
- return head, info
- end
- else
- return head, current
- end
-end
-
-local function ruledglyph(head,current)
- local wd = current.width
- if wd ~= 0 then
- local ht, dp = current.height, current.depth
- local next, prev = current.next, current.prev
- current.next, current.prev = nil, nil
- local linewidth = emwidth/20
- local baseline
- if dp ~= 0 and ht ~= 0 then
- baseline = new_rule(wd-2*linewidth,linewidth,0)
- end
- local doublelinewidth = 2*linewidth
- local info = concat_nodes {
- new_rule(linewidth,ht,dp),
- new_rule(wd-doublelinewidth,-dp+linewidth,dp),
- new_rule(linewidth,ht,dp),
- new_kern(-wd+linewidth),
- new_rule(wd-doublelinewidth,ht,-ht+linewidth),
- new_kern(-wd+doublelinewidth),
- baseline,
- }
- setlistcolor(info,c_glyph)
- setlisttransparency(info,c_glyph_d)
- info = fast_hpack(info)
- info.width = 0
- info.height = 0
- info.depth = 0
- info[a_layer] = l_glyph
- local info = concat_nodes {
- current,
- new_kern(-wd),
- info,
- }
- info = fast_hpack(info)
- info.width = wd
- if next then
- info.next = next
- next.prev = info
- end
- if prev then
- info.prev = prev
- prev.next = info
- end
- if head == current then
- return info, info
- else
- return head, info
- end
- else
- return head, current
- end
-end
-
-local g_cache = { }
-
-local tags = {
- -- userskip = "US",
- lineskip = "LS",
- baselineskip = "BS",
- parskip = "PS",
- abovedisplayskip = "DA",
- belowdisplayskip = "DB",
- abovedisplayshortskip = "SA",
- belowdisplayshortskip = "SB",
- leftskip = "LS",
- rightskip = "RS",
- topskip = "TS",
- splittopskip = "ST",
- tabskip = "AS",
- spaceskip = "SS",
- xspaceskip = "XS",
- parfillskip = "PF",
- thinmuskip = "MS",
- medmuskip = "MM",
- thickmuskip = "ML",
- leaders = "NL",
- cleaders = "CL",
- xleaders = "XL",
- gleaders = "GL",
- -- true = "VS",
- -- false = "HS",
-}
-
-local function ruledglue(head,current,vertical)
- local spec = current.spec
- local width = spec.width
- local subtype = current.subtype
- local amount = formatters["%s:%0.3f"](tags[subtype] or (vertical and "VS") or "HS",width*pt_factor)
- local info = g_cache[amount]
- if info then
- -- print("glue hit")
- else
- if subtype == space_code or subtype == xspace_code then -- not yet all space
- info = sometext(amount,l_glue,c_space)
- elseif subtype == leftskip_code or subtype == rightskip_code then
- info = sometext(amount,l_glue,c_skip_a)
- elseif subtype == userskip_code then
- if width > 0 then
- info = sometext(amount,l_glue,c_positive)
- elseif width < 0 then
- info = sometext(amount,l_glue,c_negative)
- else
- info = sometext(amount,l_glue,c_zero)
- end
- else
- info = sometext(amount,l_glue,c_skip_b)
- end
- g_cache[amount] = info
- end
- info = copy_list(info)
- if vertical then
- info = vpack_nodes(info)
- end
- head, current = insert_node_before(head,current,info)
- return head, current.next
-end
-
-local k_cache = { }
-
-local function ruledkern(head,current,vertical)
- local kern = current.kern
- local info = k_cache[kern]
- if info then
- -- print("kern hit")
- else
- local amount = formatters["%s:%0.3f"](vertical and "VK" or "HK",kern*pt_factor)
- if kern > 0 then
- info = sometext(amount,l_kern,c_positive)
- elseif kern < 0 then
- info = sometext(amount,l_kern,c_negative)
- else
- info = sometext(amount,l_kern,c_zero)
- end
- k_cache[kern] = info
- end
- info = copy_list(info)
- if vertical then
- info = vpack_nodes(info)
- end
- head, current = insert_node_before(head,current,info)
- return head, current.next
-end
-
-local p_cache = { }
-
-local function ruledpenalty(head,current,vertical)
- local penalty = current.penalty
- local info = p_cache[penalty]
- if info then
- -- print("penalty hit")
- else
- local amount = formatters["%s:%s"](vertical and "VP" or "HP",penalty)
- if penalty > 0 then
- info = sometext(amount,l_penalty,c_positive)
- elseif penalty < 0 then
- info = sometext(amount,l_penalty,c_negative)
- else
- info = sometext(amount,l_penalty,c_zero)
- end
- p_cache[penalty] = info
- end
- info = copy_list(info)
- if vertical then
- info = vpack_nodes(info)
- end
- head, current = insert_node_before(head,current,info)
- return head, current.next
-end
-
-local function visualize(head,vertical)
- local trace_hbox = false
- local trace_vbox = false
- local trace_vtop = false
- local trace_kern = false
- local trace_glue = false
- local trace_penalty = false
- local trace_fontkern = false
- local trace_strut = false
- local trace_whatsit = false
- local trace_glyph = false
- local trace_simple = false
- local trace_user = false
- local current = head
- local prev_trace_fontkern = nil
- local attr = unsetvalue
- while current do
- local id = current.id
- local a = current[a_visual] or unsetvalue
- if a ~= attr then
- prev_trace_fontkern = trace_fontkern
- if a == unsetvalue then
- trace_hbox = false
- trace_vbox = false
- trace_vtop = false
- trace_kern = false
- trace_glue = false
- trace_penalty = false
- trace_fontkern = false
- trace_strut = false
- trace_whatsit = false
- trace_glyph = false
- trace_simple = false
- trace_user = false
- else -- dead slow:
- trace_hbox = hasbit(a, 1)
- trace_vbox = hasbit(a, 2)
- trace_vtop = hasbit(a, 4)
- trace_kern = hasbit(a, 8)
- trace_glue = hasbit(a, 16)
- trace_penalty = hasbit(a, 32)
- trace_fontkern = hasbit(a, 64)
- trace_strut = hasbit(a, 128)
- trace_whatsit = hasbit(a, 256)
- trace_glyph = hasbit(a, 512)
- trace_simple = hasbit(a,1024)
- trace_user = hasbit(a,2048)
- end
- attr = a
- end
- if trace_strut then
- current[a_layer] = l_strut
- elseif id == glyph_code then
- if trace_glyph then
- head, current = ruledglyph(head,current)
- end
- elseif id == disc_code then
- if trace_glyph then
- local pre = current.pre
- if pre then
- current.pre = ruledglyph(pre,pre)
- end
- local post = current.post
- if post then
- current.post = ruledglyph(post,post)
- end
- local replace = current.replace
- if replace then
- current.replace = ruledglyph(replace,replace)
- end
- end
- elseif id == kern_code then
- local subtype = current.subtype
- -- tricky ... we don't copy the trace attribute in node-inj (yet)
- if subtype == font_kern_code or current[a_fontkern] then
- if trace_fontkern or prev_trace_fontkern then
- head, current = fontkern(head,current)
- end
- elseif subtype == user_kern_code then
- if trace_kern then
- head, current = ruledkern(head,current,vertical)
- end
- end
- elseif id == glue_code then
- local content = current.leader
- if content then
- current.leader = visualize(content,false)
- elseif trace_glue then
- head, current = ruledglue(head,current,vertical)
- end
- elseif id == penalty_code then
- if trace_penalty then
- head, current = ruledpenalty(head,current,vertical)
- end
- elseif id == disc_code then
- current.pre = visualize(current.pre)
- current.post = visualize(current.post)
- current.replace = visualize(current.replace)
- elseif id == hlist_code then
- local content = current.list
- if content then
- current.list = visualize(content,false)
- end
- if trace_hbox then
- head, current = ruledbox(head,current,false,l_hbox,"H__",trace_simple)
- end
- elseif id == vlist_code then
- local content = current.list
- if content then
- current.list = visualize(content,true)
- end
- if trace_vtop then
- head, current = ruledbox(head,current,true,l_vtop,"_T_",trace_simple)
- elseif trace_vbox then
- head, current = ruledbox(head,current,true,l_vbox,"__V",trace_simple)
- end
- elseif id == whatsit_code then
- if trace_whatsit then
- head, current = whatsit(head,current)
- end
- elseif id == user_code then
- if trace_whatsit then
- head, current = user(head,current)
- end
- end
- current = current.next
- end
- return head
-end
-
-local function freed(cache)
- local n = 0
- for k, v in next, cache do
- free_node_list(v)
- n = n + 1
- end
- if n == 0 then
- return 0, cache
- else
- return n, { }
- end
-end
-
-local function cleanup()
- local nf, ng, np, nk, nw, nb
- nf, f_cache = freed(f_cache)
- ng, g_cache = freed(g_cache)
- np, p_cache = freed(p_cache)
- nk, k_cache = freed(k_cache)
- nw, w_cache = freed(w_cache)
- nb, b_cache = freed(b_cache)
- -- report_visualize("cache: %s fontkerns, %s skips, %s penalties, %s kerns, %s whatsits, %s boxes",nf,ng,np,nk,nw,nb)
-end
-
-function visualizers.handler(head)
- if usedfont then
- starttiming(visualizers)
- -- local l = tex_attribute[a_layer]
- -- local v = tex_attribute[a_visual]
- -- tex_attribute[a_layer] = unsetvalue
- -- tex_attribute[a_visual] = unsetvalue
- head = visualize(head)
- -- tex_attribute[a_layer] = l
- -- tex_attribute[a_visual] = v
- -- -- cleanup()
- stoptiming(visualizers)
- end
- return head, false
-end
-
-function visualizers.box(n)
- tex_box[n].list = visualizers.handler(tex_box[n].list)
-end
-
-local last = nil
-local used = nil
-
-local mark = {
- "trace:1", "trace:2", "trace:3",
- "trace:4", "trace:5", "trace:6",
- "trace:7",
-}
-
-local function markfonts(list)
- for n in traverse_nodes(list) do
- local id = n.id
- if id == glyph_code then
- local font = n.font
- local okay = used[font]
- if not okay then
- last = last + 1
- okay = mark[last]
- used[font] = okay
- end
- setcolor(n,okay)
- elseif id == hlist_code or id == vlist_code then
- markfonts(n.list)
- end
- end
-end
-
-function visualizers.markfonts(list)
- last, used = 0, { }
- markfonts(type(list) == "number" and tex_box[list].list or list)
-end
-
-function commands.markfonts(n)
- visualizers.markfonts(n)
-end
-
-statistics.register("visualization time",function()
- if enabled then
- cleanup() -- in case we don't do it each time
- return format("%s seconds",statistics.elapsedtime(visualizers))
- end
-end)
+if not modules then modules = { } end modules ['trac-vis'] = {
+ version = 1.001,
+ comment = "companion to trac-vis.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local string, number, table = string, number, table
+local node, nodes, attributes, fonts, tex = node, nodes, attributes, fonts, tex
+local type = type
+local format = string.format
+local formatters = string.formatters
+
+-- This module started out in the early days of mkiv and luatex with
+-- visualizing kerns related to fonts. In the process of cleaning up the
+-- visual debugger code it made sense to integrate some other code that
+-- I had lying around and to replace the old supp-vis debugging code. As
+-- only a subset of the old visual debugger makes sense, it has become a
+-- different implementation. Some of the m-visual functionality will also
+-- be ported. The code is rather trivial. The caching is not really needed
+-- but saves up to 50% of the time needed to add the visualization. Of course
+-- the overall runtime is larger because of the color and layer processing in
+-- the backend (it can be several times as much), so a run with full
+-- visualization enabled is noticeably slower. In practice that will seldom
+-- happen, except when demoing.
+
+-- We could use pdf literals and re stream codes but it's not worth the
+-- trouble because we would end up in color etc mess. Maybe one day I'll
+-- make a nodeinjection variant.
+
+-- todo: global switch (so no attributes)
+-- todo: maybe also xoffset, yoffset of glyph
+-- todo: inline concat (more efficient)
+
+local nodecodes = nodes.nodecodes
+local disc_code = nodecodes.disc
+local kern_code = nodecodes.kern
+local glyph_code = nodecodes.glyph
+local disc_code = nodecodes.disc
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local glue_code = nodecodes.glue
+local penalty_code = nodecodes.penalty
+local whatsit_code = nodecodes.whatsit
+local user_code = nodecodes.user
+local gluespec_code = nodecodes.gluespec
+
+local kerncodes = nodes.kerncodes
+local font_kern_code = kerncodes.fontkern
+local user_kern_code = kerncodes.userkern
+
+local gluecodes = nodes.gluecodes
+local cleaders_code = gluecodes.cleaders
+local userskip_code = gluecodes.userskip
+local space_code = gluecodes.space
+local xspace_code = gluecodes.xspace
+local leftskip_code = gluecodes.leftskip
+local rightskip_code = gluecodes.rightskip
+
+local whatsitcodes = nodes.whatsitcodes
+
+local concat_nodes = nodes.concat
+local hpack_nodes = node.hpack
+local vpack_nodes = node.vpack
+local hpack_string = typesetters.hpack
+local fast_hpack_string = typesetters.fast_hpack
+local copy_node = node.copy
+local copy_list = node.copy_list
+local free_node = node.free
+local free_node_list = node.flush_list
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local fast_hpack = nodes.fasthpack
+local traverse_nodes = node.traverse
+
+local tex_attribute = tex.attribute
+local tex_box = tex.box
+local unsetvalue = attributes.unsetvalue
+
+local current_font = font.current
+
+local exheights = fonts.hashes.exheights
+local emwidths = fonts.hashes.emwidths
+local pt_factor = number.dimenfactors.pt
+
+local nodepool = nodes.pool
+local new_rule = nodepool.rule
+local new_kern = nodepool.kern
+local new_glue = nodepool.glue
+local new_penalty = nodepool.penalty
+
+local tracers = nodes.tracers
+local visualizers = nodes.visualizers
+
+local setcolor = tracers.colors.set
+local setlistcolor = tracers.colors.setlist
+local settransparency = tracers.transparencies.set
+local setlisttransparency = tracers.transparencies.setlist
+
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+
+local a_visual = attributes.private("visual")
+local a_fontkern = attributes.private("fontkern")
+local a_layer = attributes.private("viewerlayer")
+
+local hasbit = number.hasbit
+local bit = number.bit
+local setbit = number.setbit
+local clearbit = number.clearbit
+
+local trace_hbox
+local trace_vbox
+local trace_vtop
+local trace_kern
+local trace_glue
+local trace_penalty
+local trace_fontkern
+local trace_strut
+local trace_whatsit
+local trace_user
+
+local report_visualize = logs.reporter("visualize")
+
+local modes = {
+ hbox = 1,
+ vbox = 2,
+ vtop = 4,
+ kern = 8,
+ glue = 16,
+ penalty = 32,
+ fontkern = 64,
+ strut = 128,
+ whatsit = 256,
+ glyph = 512,
+ simple = 1024,
+ simplehbox = 1024 + 1,
+ simplevbox = 1024 + 2,
+ simplevtop = 1024 + 4,
+ user = 2048,
+}
+
+local modes_makeup = { "hbox", "vbox", "kern", "glue", "penalty" }
+local modes_boxes = { "hbox", "vbox" }
+local modes_all = { "hbox", "vbox", "kern", "glue", "penalty", "fontkern", "whatsit", "glyph", "user" }
+
+local usedfont, exheight, emwidth
+local l_penalty, l_glue, l_kern, l_fontkern, l_hbox, l_vbox, l_vtop, l_strut, l_whatsit, l_glyph, l_user
+
+local enabled = false
+local layers = { }
+
+local preset_boxes = modes.hbox + modes.vbox
+local preset_makeup = preset_boxes + modes.kern + modes.glue + modes.penalty
+local preset_all = preset_makeup + modes.fontkern + modes.whatsit + modes.glyph + modes.user
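+
+-- an illustration of how the bits combine (the values follow from the modes
+-- table above):
+--
+--   preset_boxes  = 1 + 2                -- hbox and vbox
+--   preset_makeup = 1 + 2 + 8 + 16 + 32  -- plus kern, glue, penalty
+--   hasbit(preset_makeup,modes.glue)     -- true
+--   hasbit(preset_makeup,modes.fontkern) -- false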
+
+function visualizers.setfont(id)
+ usedfont = id or current_font()
+ exheight = exheights[usedfont]
+ emwidth = emwidths[usedfont]
+end
+
+-- we can preset a bunch of bits
+
+local function enable()
+ if not usedfont then
+ -- we use a narrow monospaced font
+ visualizers.setfont(fonts.definers.define { name = "lmmonoltcond10regular", size = tex.sp("4pt") })
+ end
+ for mode, value in next, modes do
+ local tag = formatters["v_%s"](mode)
+ attributes.viewerlayers.define {
+ tag = tag,
+ title = formatters["visualizer %s"](mode),
+ visible = "start",
+ editable = "yes",
+ printable = "yes"
+ }
+ layers[mode] = attributes.viewerlayers.register(tag,true)
+ end
+ l_hbox = layers.hbox
+ l_vbox = layers.vbox
+ l_vtop = layers.vtop
+ l_glue = layers.glue
+ l_kern = layers.kern
+ l_penalty = layers.penalty
+ l_fontkern = layers.fontkern
+ l_strut = layers.strut
+ l_whatsit = layers.whatsit
+ l_glyph = layers.glyph
+ l_user = layers.user
+ nodes.tasks.enableaction("shipouts","nodes.visualizers.handler")
+ report_visualize("enabled")
+ enabled = true
+ tex.setcount("global","c_syst_visualizers_state",1) -- so that we can optimize at the tex end
+end
+
+local function setvisual(n,a,what) -- this will become more efficient when we have the bit lib linked in
+ if not n or n == "reset" then
+ return unsetvalue
+ elseif n == "makeup" then
+ if not a or a == 0 or a == unsetvalue then
+ a = preset_makeup
+ else
+ a = setbit(a,preset_makeup)
+ -- for i=1,#modes_makeup do
+ -- a = setvisual(modes_makeup[i],a)
+ -- end
+ end
+ elseif n == "boxes" then
+ if not a or a == 0 or a == unsetvalue then
+ a = preset_boxes
+ else
+ a = setbit(a,preset_boxes)
+ -- for i=1,#modes_boxes do
+ -- a = setvisual(modes_boxes[i],a)
+ -- end
+ end
+ elseif n == "all" then
+ if what == false then
+ return unsetvalue
+ elseif not a or a == 0 or a == unsetvalue then
+ a = preset_all
+ else
+ a = setbit(a,preset_all)
+ -- for i=1,#modes_all do
+ -- a = setvisual(modes_all[i],a)
+ -- end
+ end
+ else
+ local m = modes[n]
+ if not m then
+ -- go on
+ elseif a == unsetvalue then
+ if what == false then
+ return unsetvalue
+ else
+ -- a = setbit(0,m)
+ a = m
+ end
+ elseif what == false then
+ a = clearbit(a,m)
+ elseif not a or a == 0 then
+ a = m
+ else
+ a = setbit(a,m)
+ end
+ end
+ if not a or a == 0 or a == unsetvalue then
+ return unsetvalue
+ elseif not enabled then -- must happen at runtime (as we don't store layers yet)
+ enable()
+ end
+ return a
+end
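+
+-- a few illustrative cases (the numbers follow from the modes table; the
+-- first effective call also triggers enable):
+--
+--   setvisual("glue")          -- 16
+--   setvisual("kern",16)       -- 24
+--   setvisual("kern",24,false) -- 16 again
+--   setvisual("reset")         -- unsetvalue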
+
+function visualizers.setvisual(n)
+ tex_attribute[a_visual] = setvisual(n,tex_attribute[a_visual])
+end
+
+function visualizers.setlayer(n)
+ tex_attribute[a_layer] = layers[n] or unsetvalue
+end
+
+commands.setvisual = visualizers.setvisual
+commands.setlayer = visualizers.setlayer
+
+function commands.visual(n)
+ context(setvisual(n))
+end
+
+local function set(mode,v)
+ tex_attribute[a_visual] = setvisual(mode,tex_attribute[a_visual],v)
+end
+
+for mode, value in next, modes do
+ trackers.register(formatters["visualizers.%s"](mode), function(v) set(mode,v) end)
+end
+
+trackers.register("visualizers.reset", function(v) set("reset", v) end)
+trackers.register("visualizers.all", function(v) set("all", v) end)
+trackers.register("visualizers.makeup",function(v) set("makeup",v) end)
+trackers.register("visualizers.boxes", function(v) set("boxes", v) end)
+
+local c_positive = "trace:b"
+local c_negative = "trace:r"
+local c_zero = "trace:g"
+local c_text = "trace:s"
+local c_space = "trace:y"
+local c_skip_a = "trace:c"
+local c_skip_b = "trace:m"
+local c_glyph = "trace:o"
+local c_white = "trace:w"
+
+local c_positive_d = "trace:db"
+local c_negative_d = "trace:dr"
+local c_zero_d = "trace:dg"
+local c_text_d = "trace:ds"
+local c_space_d = "trace:dy"
+local c_skip_a_d = "trace:dc"
+local c_skip_b_d = "trace:dm"
+local c_glyph_d = "trace:do"
+local c_white_d = "trace:dw"
+
+local function sometext(str,layer,color,textcolor) -- we can just paste this together verbatim .. no typesetting needed
+ local text = fast_hpack_string(str,usedfont)
+ local size = text.width
+ local rule = new_rule(size,2*exheight,exheight/2)
+ local kern = new_kern(-size)
+ if color then
+ setcolor(rule,color)
+ end
+ if textcolor then
+ setlistcolor(text.list,textcolor)
+ end
+ local info = concat_nodes {
+ rule,
+ kern,
+ text,
+ }
+ setlisttransparency(info,c_zero)
+ info = fast_hpack(info)
+ if layer then
+ info[a_layer] = layer
+ end
+ local width = info.width
+ info.width = 0
+ info.height = 0
+ info.depth = 0
+ return info, width
+end
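+
+-- a typical call (the values are made up):
+--
+--   local info, width = sometext("HK:2.500",l_kern,c_positive)
+--
+-- returns a box with zero dimensions (a colored rule with the text overlaid
+-- on it) plus the natural width, so inserting the box doesn't disturb the
+-- typeset result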
+
+local f_cache = { }
+
+local function fontkern(head,current)
+ local kern = current.kern
+ local info = f_cache[kern]
+ if info then
+ -- print("hit fontkern")
+ else
+ local text = fast_hpack_string(formatters[" %0.3f"](kern*pt_factor),usedfont)
+ local rule = new_rule(emwidth/10,6*exheight,2*exheight)
+ local list = text.list
+ if kern > 0 then
+ setlistcolor(list,c_positive_d)
+ elseif kern < 0 then
+ setlistcolor(list,c_negative_d)
+ else
+ setlistcolor(list,c_zero_d)
+ end
+ setlisttransparency(list,c_text_d)
+ settransparency(rule,c_text_d)
+ text.shift = -5 * exheight
+ info = concat_nodes {
+ rule,
+ text,
+ }
+ info = fast_hpack(info)
+ info[a_layer] = l_fontkern
+ info.width = 0
+ info.height = 0
+ info.depth = 0
+ f_cache[kern] = info
+ end
+ head = insert_node_before(head,current,copy_list(info))
+ return head, current
+end
+
+local w_cache = { }
+
+local tags = {
+ open = "FIC",
+ write = "FIW",
+ close = "FIC",
+ special = "SPE",
+ localpar = "PAR",
+ dir = "DIR",
+ pdfliteral = "PDF",
+ pdfrefobj = "PDF",
+ pdfrefxform = "PDF",
+ pdfrefximage = "PDF",
+ pdfannot = "PDF",
+ pdfstartlink = "PDF",
+ pdfendlink = "PDF",
+ pdfdest = "PDF",
+ pdfthread = "PDF",
+ pdfstartthread = "PDF",
+ pdfendthread = "PDF",
+ pdfsavepos = "PDF",
+ pdfthreaddata = "PDF",
+ pdflinkdata = "PDF",
+ pdfcolorstack = "PDF",
+ pdfsetmatrix = "PDF",
+ pdfsave = "PDF",
+ pdfrestore = "PDF",
+ latelua = "LUA",
+ closelua = "LUA",
+ cancelboundary = "CBD",
+ userdefined = "USR",
+}
+
+local function whatsit(head,current)
+ local what = current.subtype
+ local info = w_cache[what]
+ if info then
+ -- print("hit whatsit")
+ else
+ local tag = whatsitcodes[what]
+ -- maybe different text colors per tag
+ info = sometext(formatters["W:%s"](tag and tags[tag] or what),usedfont,nil,c_white)
+ info[a_layer] = l_whatsit
+ w_cache[what] = info
+ end
+ head, current = insert_node_after(head,current,copy_list(info))
+ return head, current
+end
+
+local function user(head,current)
+ local what = current.subtype
+ local info = w_cache[what]
+ if info then
+ -- print("hit user")
+ else
+ info = sometext(formatters["U:%s"](what),usedfont)
+ info[a_layer] = l_user
+ w_cache[what] = info
+ end
+ head, current = insert_node_after(head,current,copy_list(info))
+ return head, current
+end
+
+local b_cache = { }
+
+local function ruledbox(head,current,vertical,layer,what,simple)
+ local wd = current.width
+ if wd ~= 0 then
+ local ht, dp = current.height, current.depth
+ local next, prev = current.next, current.prev
+ current.next, current.prev = nil, nil
+ local linewidth = emwidth/10
+ local baseline, baseskip
+ if dp ~= 0 and ht ~= 0 then
+ if wd > 20*linewidth then
+ baseline = b_cache.baseline
+ if not baseline then
+ -- due to an optimized leader color/transparency we need to set the glue node in order
+ -- to trigger this mechanism
+ local leader = concat_nodes {
+ new_glue(2*linewidth), -- 2.5
+ new_rule(6*linewidth,linewidth,0), -- 5.0
+ new_glue(2*linewidth), -- 2.5
+ }
+ -- setlisttransparency(leader,c_text)
+ leader = fast_hpack(leader)
+ -- setlisttransparency(leader,c_text)
+ baseline = new_glue(0)
+ baseline.leader = leader
+ baseline.subtype = cleaders_code
+ baseline.spec.stretch = 65536
+ baseline.spec.stretch_order = 2
+ setlisttransparency(baseline,c_text)
+ b_cache.baseline = baseline
+ end
+ baseline = copy_list(baseline)
+ baseline = fast_hpack(baseline,wd-2*linewidth)
+ -- or new hpack node, set head and also:
+ -- baseline.width = wd
+ -- baseline.glue_set = wd/65536
+ -- baseline.glue_order = 2
+ -- baseline.glue_sign = 1
+ baseskip = new_kern(-wd+linewidth)
+ else
+ baseline = new_rule(wd-2*linewidth,linewidth,0)
+ baseskip = new_kern(-wd+2*linewidth)
+ end
+ end
+ local this
+ if not simple then
+ this = b_cache[what]
+ if not this then
+ local text = fast_hpack_string(what,usedfont)
+ this = concat_nodes {
+ new_kern(-text.width),
+ text,
+ }
+ setlisttransparency(this,c_text)
+ this = fast_hpack(this)
+ this.width = 0
+ this.height = 0
+ this.depth = 0
+ b_cache[what] = this
+ end
+ end
+ local info = concat_nodes {
+ this and copy_list(this) or nil, -- this also triggers the right mode (else sometimes no whatsits)
+ new_rule(linewidth,ht,dp),
+ new_rule(wd-2*linewidth,-dp+linewidth,dp),
+ new_rule(linewidth,ht,dp),
+ new_kern(-wd+linewidth),
+ new_rule(wd-2*linewidth,ht,-ht+linewidth),
+ baseskip,
+ baseline,
+ }
+ setlisttransparency(info,c_text)
+ info = fast_hpack(info)
+ info.width = 0
+ info.height = 0
+ info.depth = 0
+ info[a_layer] = layer
+ local info = concat_nodes {
+ current,
+ new_kern(-wd),
+ info,
+ }
+ info = fast_hpack(info,wd)
+ if vertical then
+ info = vpack_nodes(info)
+ end
+ if next then
+ info.next = next
+ next.prev = info
+ end
+ if prev then
+if prev.id == gluespec_code then
+ -- weird, how can this happen, an inline glue-spec
+else
+ info.prev = prev
+ prev.next = info
+end
+ end
+ if head == current then
+ return info, info
+ else
+ return head, info
+ end
+ else
+ return head, current
+ end
+end
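+
+-- in short: the box gets framed by four thin rules, a baseline rule (leadered
+-- for wider boxes) is added when there is both height and depth, and unless
+-- simple is set a small "H__", "__V" or "_T_" label is overlaid; the whole
+-- construct is packed to zero dimensions and then repacked to the original
+-- width, so the typeset dimensions stay the same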
+
+local function ruledglyph(head,current)
+ local wd = current.width
+ if wd ~= 0 then
+ local ht, dp = current.height, current.depth
+ local next, prev = current.next, current.prev
+ current.next, current.prev = nil, nil
+ local linewidth = emwidth/20
+ local baseline
+ if dp ~= 0 and ht ~= 0 then
+ baseline = new_rule(wd-2*linewidth,linewidth,0)
+ end
+ local doublelinewidth = 2*linewidth
+ local info = concat_nodes {
+ new_rule(linewidth,ht,dp),
+ new_rule(wd-doublelinewidth,-dp+linewidth,dp),
+ new_rule(linewidth,ht,dp),
+ new_kern(-wd+linewidth),
+ new_rule(wd-doublelinewidth,ht,-ht+linewidth),
+ new_kern(-wd+doublelinewidth),
+ baseline,
+ }
+ setlistcolor(info,c_glyph)
+ setlisttransparency(info,c_glyph_d)
+ info = fast_hpack(info)
+ info.width = 0
+ info.height = 0
+ info.depth = 0
+ info[a_layer] = l_glyph
+ local info = concat_nodes {
+ current,
+ new_kern(-wd),
+ info,
+ }
+ info = fast_hpack(info)
+ info.width = wd
+ if next then
+ info.next = next
+ next.prev = info
+ end
+ if prev then
+ info.prev = prev
+ prev.next = info
+ end
+ if head == current then
+ return info, info
+ else
+ return head, info
+ end
+ else
+ return head, current
+ end
+end
+
+local g_cache = { }
+
+local tags = {
+ -- userskip = "US",
+ lineskip = "LS",
+ baselineskip = "BS",
+ parskip = "PS",
+ abovedisplayskip = "DA",
+ belowdisplayskip = "DB",
+ abovedisplayshortskip = "SA",
+ belowdisplayshortskip = "SB",
+ leftskip = "LS",
+ rightskip = "RS",
+ topskip = "TS",
+ splittopskip = "ST",
+ tabskip = "AS",
+ spaceskip = "SS",
+ xspaceskip = "XS",
+ parfillskip = "PF",
+ thinmuskip = "MS",
+ medmuskip = "MM",
+ thickmuskip = "ML",
+ leaders = "NL",
+ cleaders = "CL",
+ xleaders = "XL",
+ gleaders = "GL",
+ -- true = "VS",
+ -- false = "HS",
+}
+
+local function ruledglue(head,current,vertical)
+ local spec = current.spec
+ local width = spec.width
+ local subtype = current.subtype
+ local amount = formatters["%s:%0.3f"](tags[subtype] or (vertical and "VS") or "HS",width*pt_factor)
+ local info = g_cache[amount]
+ if info then
+ -- print("glue hit")
+ else
+ if subtype == space_code or subtype == xspace_code then -- not yet all space
+ info = sometext(amount,l_glue,c_space)
+ elseif subtype == leftskip_code or subtype == rightskip_code then
+ info = sometext(amount,l_glue,c_skip_a)
+ elseif subtype == userskip_code then
+ if width > 0 then
+ info = sometext(amount,l_glue,c_positive)
+ elseif width < 0 then
+ info = sometext(amount,l_glue,c_negative)
+ else
+ info = sometext(amount,l_glue,c_zero)
+ end
+ else
+ info = sometext(amount,l_glue,c_skip_b)
+ end
+ g_cache[amount] = info
+ end
+ info = copy_list(info)
+ if vertical then
+ info = vpack_nodes(info)
+ end
+ head, current = insert_node_before(head,current,info)
+ return head, current.next
+end
+
+local k_cache = { }
+
+local function ruledkern(head,current,vertical)
+ local kern = current.kern
+ local info = k_cache[kern]
+ if info then
+ -- print("kern hit")
+ else
+ local amount = formatters["%s:%0.3f"](vertical and "VK" or "HK",kern*pt_factor)
+ if kern > 0 then
+ info = sometext(amount,l_kern,c_positive)
+ elseif kern < 0 then
+ info = sometext(amount,l_kern,c_negative)
+ else
+ info = sometext(amount,l_kern,c_zero)
+ end
+ k_cache[kern] = info
+ end
+ info = copy_list(info)
+ if vertical then
+ info = vpack_nodes(info)
+ end
+ head, current = insert_node_before(head,current,info)
+ return head, current.next
+end
+
+local p_cache = { }
+
+local function ruledpenalty(head,current,vertical)
+ local penalty = current.penalty
+ local info = p_cache[penalty]
+ if info then
+ -- print("penalty hit")
+ else
+ local amount = formatters["%s:%s"](vertical and "VP" or "HP",penalty)
+ if penalty > 0 then
+ info = sometext(amount,l_penalty,c_positive)
+ elseif penalty < 0 then
+ info = sometext(amount,l_penalty,c_negative)
+ else
+ info = sometext(amount,l_penalty,c_zero)
+ end
+ p_cache[penalty] = info
+ end
+ info = copy_list(info)
+ if vertical then
+ info = vpack_nodes(info)
+ end
+ head, current = insert_node_before(head,current,info)
+ return head, current.next
+end
+
+local function visualize(head,vertical)
+ local trace_hbox = false
+ local trace_vbox = false
+ local trace_vtop = false
+ local trace_kern = false
+ local trace_glue = false
+ local trace_penalty = false
+ local trace_fontkern = false
+ local trace_strut = false
+ local trace_whatsit = false
+ local trace_glyph = false
+ local trace_simple = false
+ local trace_user = false
+ local current = head
+ local prev_trace_fontkern = nil
+ local attr = unsetvalue
+ while current do
+ local id = current.id
+ local a = current[a_visual] or unsetvalue
+ if a ~= attr then
+ prev_trace_fontkern = trace_fontkern
+ if a == unsetvalue then
+ trace_hbox = false
+ trace_vbox = false
+ trace_vtop = false
+ trace_kern = false
+ trace_glue = false
+ trace_penalty = false
+ trace_fontkern = false
+ trace_strut = false
+ trace_whatsit = false
+ trace_glyph = false
+ trace_simple = false
+ trace_user = false
+ else -- dead slow:
+ trace_hbox = hasbit(a, 1)
+ trace_vbox = hasbit(a, 2)
+ trace_vtop = hasbit(a, 4)
+ trace_kern = hasbit(a, 8)
+ trace_glue = hasbit(a, 16)
+ trace_penalty = hasbit(a, 32)
+ trace_fontkern = hasbit(a, 64)
+ trace_strut = hasbit(a, 128)
+ trace_whatsit = hasbit(a, 256)
+ trace_glyph = hasbit(a, 512)
+ trace_simple = hasbit(a,1024)
+ trace_user = hasbit(a,2048)
+ end
+ attr = a
+ end
+ if trace_strut then
+ current[a_layer] = l_strut
+ elseif id == glyph_code then
+ if trace_glyph then
+ head, current = ruledglyph(head,current)
+ end
+ elseif id == disc_code then
+ if trace_glyph then
+ local pre = current.pre
+ if pre then
+ current.pre = ruledglyph(pre,pre)
+ end
+ local post = current.post
+ if post then
+ current.post = ruledglyph(post,post)
+ end
+ local replace = current.replace
+ if replace then
+ current.replace = ruledglyph(replace,replace)
+ end
+ end
+ elseif id == kern_code then
+ local subtype = current.subtype
+ -- tricky ... we don't copy the trace attribute in node-inj (yet)
+ if subtype == font_kern_code or current[a_fontkern] then
+ if trace_fontkern or prev_trace_fontkern then
+ head, current = fontkern(head,current)
+ end
+ elseif subtype == user_kern_code then
+ if trace_kern then
+ head, current = ruledkern(head,current,vertical)
+ end
+ end
+ elseif id == glue_code then
+ local content = current.leader
+ if content then
+ current.leader = visualize(content,false)
+ elseif trace_glue then
+ head, current = ruledglue(head,current,vertical)
+ end
+ elseif id == penalty_code then
+ if trace_penalty then
+ head, current = ruledpenalty(head,current,vertical)
+ end
+ elseif id == disc_code then
+ current.pre = visualize(current.pre)
+ current.post = visualize(current.post)
+ current.replace = visualize(current.replace)
+ elseif id == hlist_code then
+ local content = current.list
+ if content then
+ current.list = visualize(content,false)
+ end
+ if trace_hbox then
+ head, current = ruledbox(head,current,false,l_hbox,"H__",trace_simple)
+ end
+ elseif id == vlist_code then
+ local content = current.list
+ if content then
+ current.list = visualize(content,true)
+ end
+ if trace_vtop then
+ head, current = ruledbox(head,current,true,l_vtop,"_T_",trace_simple)
+ elseif trace_vbox then
+ head, current = ruledbox(head,current,true,l_vbox,"__V",trace_simple)
+ end
+ elseif id == whatsit_code then
+ if trace_whatsit then
+ head, current = whatsit(head,current)
+ end
+ elseif id == user_code then
+ if trace_whatsit then
+ head, current = user(head,current)
+ end
+ end
+ current = current.next
+ end
+ return head
+end
+
+local function freed(cache)
+ local n = 0
+ for k, v in next, cache do
+ free_node_list(v)
+ n = n + 1
+ end
+ if n == 0 then
+ return 0, cache
+ else
+ return n, { }
+ end
+end
+
+local function cleanup()
+ local nf, ng, np, nk, nw, nb
+ nf, f_cache = freed(f_cache)
+ ng, g_cache = freed(g_cache)
+ np, p_cache = freed(p_cache)
+ nk, k_cache = freed(k_cache)
+ nw, w_cache = freed(w_cache)
+ nb, b_cache = freed(b_cache)
+ -- report_visualize("cache: %s fontkerns, %s skips, %s penalties, %s kerns, %s whatsits, %s boxes",nf,ng,np,nk,nw,nb)
+end
+
+function visualizers.handler(head)
+ if usedfont then
+ starttiming(visualizers)
+ -- local l = tex_attribute[a_layer]
+ -- local v = tex_attribute[a_visual]
+ -- tex_attribute[a_layer] = unsetvalue
+ -- tex_attribute[a_visual] = unsetvalue
+ head = visualize(head)
+ -- tex_attribute[a_layer] = l
+ -- tex_attribute[a_visual] = v
+ -- -- cleanup()
+ stoptiming(visualizers)
+ end
+ return head, false
+end
+
+function visualizers.box(n)
+ tex_box[n].list = visualizers.handler(tex_box[n].list)
+end
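+
+-- when the visual attribute was set while a box got filled, its content can
+-- also be postprocessed from the lua end (the register number is made up):
+--
+--   nodes.visualizers.box(0) -- adds rules and labels to the content of \box0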
+
+local last = nil
+local used = nil
+
+local mark = {
+ "trace:1", "trace:2", "trace:3",
+ "trace:4", "trace:5", "trace:6",
+ "trace:7",
+}
+
+local function markfonts(list)
+ for n in traverse_nodes(list) do
+ local id = n.id
+ if id == glyph_code then
+ local font = n.font
+ local okay = used[font]
+ if not okay then
+ last = last + 1
+ okay = mark[last]
+ used[font] = okay
+ end
+ setcolor(n,okay)
+ elseif id == hlist_code or id == vlist_code then
+ markfonts(n.list)
+ end
+ end
+end
+
+function visualizers.markfonts(list)
+ last, used = 0, { }
+ markfonts(type(list) == "number" and tex_box[list].list or list)
+end
+
+function commands.markfonts(n)
+ visualizers.markfonts(n)
+end
+
+statistics.register("visualization time",function()
+ if enabled then
+ cleanup() -- in case we don't do it each time
+ return format("%s seconds",statistics.elapsedtime(visualizers))
+ end
+end)
diff --git a/tex/context/base/trac-xml.lua b/tex/context/base/trac-xml.lua
index cd8b8c0a5..aba82ef52 100644
--- a/tex/context/base/trac-xml.lua
+++ b/tex/context/base/trac-xml.lua
@@ -1,183 +1,183 @@
-if not modules then modules = { } end modules ['trac-xml'] = {
- version = 1.001,
- comment = "companion to trac-log.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- Application helpinfo can be defined in several ways:
---
--- helpinfo = "big blob of help"
---
--- helpinfo = { basic = "blob of basic help", extra = "blob of extra help" }
---
--- helpinfo = "..."
---
--- helpinfo = "somefile.xml"
---
--- In the case of an xml file, the file should either be present on the same path
--- as the script, or we should be able to locate it using the resolver.
-
-local formatters = string.formatters
-local reporters = logs.reporters
-local xmlserialize = xml.serialize
-local xmlcollected = xml.collected
-local xmltext = xml.text
-local xmlfirst = xml.first
-
--- there is no need for a newhandlers { name = "help", parent = "string" }
-
-local function showhelp(specification,...)
- local root = xml.convert(specification.helpinfo or "")
- if not root then
- return
- end
- local xs = xml.gethandlers("string")
- xml.sethandlersfunction(xs,"short",function(e,handler) xmlserialize(e.dt,handler) end)
- xml.sethandlersfunction(xs,"ref", function(e,handler) handler.handle("--"..e.at.name) end)
- local wantedcategories = select("#",...) == 0 and true or table.tohash { ... }
- local nofcategories = xml.count(root,"/application/flags/category")
- local report = specification.report
- for category in xmlcollected(root,"/application/flags/category") do
- local categoryname = category.at.name or ""
- if wantedcategories == true or wantedcategories[categoryname] then
- if nofcategories > 1 then
- report("%s options:",categoryname)
- report()
- end
- for subcategory in xmlcollected(category,"/subcategory") do
- for flag in xmlcollected(subcategory,"/flag") do
- local name = flag.at.name
- local value = flag.at.value
- -- local short = xmlfirst(s,"/short")
- -- local short = xmlserialize(short,xs)
- local short = xmltext(xmlfirst(flag,"/short"))
- if value then
- report("--%-20s %s",formatters["%s=%s"](name,value),short)
- else
- report("--%-20s %s",name,short)
- end
- end
- report()
- end
- end
- end
- for category in xmlcollected(root,"/application/examples/category") do
- local title = xmltext(xmlfirst(category,"/title"))
- if title and title ~= "" then
- report()
- report(title)
- report()
- end
- for subcategory in xmlcollected(category,"/subcategory") do
- for example in xmlcollected(subcategory,"/example") do
- local command = xmltext(xmlfirst(example,"/command"))
- local comment = xmltext(xmlfirst(example,"/comment"))
- report(command)
- end
- report()
- end
- end
- for comment in xmlcollected(root,"/application/comments/comment") do
- local comment = xmltext(comment)
- report()
- report(comment)
- report()
- end
-end
-
-local reporthelp = reporters.help
-local exporthelp = reporters.export
-
-local function xmlfound(t)
- local helpinfo = t.helpinfo
- if type(helpinfo) == "table" then
- return false
- end
- if type(helpinfo) ~= "string" then
- helpinfo = "Warning: no helpinfo found."
- t.helpinfo = helpinfo
- return false
- end
- if string.find(helpinfo,".xml$") then
- local ownscript = environment.ownscript
- local helpdata = false
- if ownscript then
- local helpfile = file.join(file.pathpart(ownscript),helpinfo)
- helpdata = io.loaddata(helpfile)
- if helpdata == "" then
- helpdata = false
- end
- end
- if not helpdata then
- local helpfile = resolvers.findfile(helpinfo,"tex")
- helpdata = helpfile and io.loaddata(helpfile)
- end
- if helpdata and helpdata ~= "" then
- helpinfo = helpdata
- else
- helpinfo = formatters["Warning: help file %a is not found."](helpinfo)
- end
- end
- t.helpinfo = helpinfo
- return string.find(t.helpinfo,"^<%?xml") and true or false
-end
-
-function reporters.help(t,...)
- if xmlfound(t) then
- showhelp(t,...)
- else
- reporthelp(t,...)
- end
-end
-
-function reporters.export(t,methods,filename)
- if not xmlfound(t) then
- return exporthelp(t)
- end
- if not methods or methods == "" then
- methods = environment.arguments["exporthelp"]
- end
- if not filename or filename == "" then
- filename = environment.files[1]
- end
- dofile(resolvers.findfile("trac-exp.lua","tex"))
- local exporters = logs.exporters
- if not exporters or not methods then
- return exporthelp(t)
- end
- if methods == "all" then
- methods = table.keys(exporters)
- elseif type(methods) == "string" then
- methods = utilities.parsers.settings_to_array(methods)
- else
- return exporthelp(t)
- end
- if type(filename) ~= "string" or filename == "" then
- filename = false
- elseif file.pathpart(filename) == "" then
- t.report("export file %a will not be saved on the current path (safeguard)",filename)
- return
- end
- for i=1,#methods do
- local method = methods[i]
- local exporter = exporters[method]
- if exporter then
- local result = exporter(t,method)
- if result and result ~= "" then
- if filename then
- local fullname = file.replacesuffix(filename,method)
- t.report("saving export in %a",fullname)
- io.savedata(fullname,result)
- else
- reporters.lines(t,result)
- end
- else
- t.report("no output from exporter %a",method)
- end
- else
- t.report("unknown exporter %a",method)
- end
- end
-end
+if not modules then modules = { } end modules ['trac-xml'] = {
+ version = 1.001,
+ comment = "companion to trac-log.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- Application helpinfo can be defined in several ways:
+--
+-- helpinfo = "big blob of help"
+--
+-- helpinfo = { basic = "blob of basic help", extra = "blob of extra help" }
+--
+-- helpinfo = "..."
+--
+-- helpinfo = "somefile.xml"
+--
+-- In the case of an xml file, the file should be either present on the same path
+-- as the script, or we should be able to locate it using the resolver.
+
+local formatters = string.formatters
+local reporters = logs.reporters
+local xmlserialize = xml.serialize
+local xmlcollected = xml.collected
+local xmltext = xml.text
+local xmlfirst = xml.first
+
+-- there is no need for a newhandlers { name = "help", parent = "string" }
+
+local function showhelp(specification,...)
+ local root = xml.convert(specification.helpinfo or "")
+ if not root then
+ return
+ end
+ local xs = xml.gethandlers("string")
+ xml.sethandlersfunction(xs,"short",function(e,handler) xmlserialize(e.dt,handler) end)
+ xml.sethandlersfunction(xs,"ref", function(e,handler) handler.handle("--"..e.at.name) end)
+ local wantedcategories = select("#",...) == 0 and true or table.tohash { ... }
+ local nofcategories = xml.count(root,"/application/flags/category")
+ local report = specification.report
+ for category in xmlcollected(root,"/application/flags/category") do
+ local categoryname = category.at.name or ""
+ if wantedcategories == true or wantedcategories[categoryname] then
+ if nofcategories > 1 then
+ report("%s options:",categoryname)
+ report()
+ end
+ for subcategory in xmlcollected(category,"/subcategory") do
+ for flag in xmlcollected(subcategory,"/flag") do
+ local name = flag.at.name
+ local value = flag.at.value
+ -- local short = xmlfirst(s,"/short")
+ -- local short = xmlserialize(short,xs)
+ local short = xmltext(xmlfirst(flag,"/short"))
+ if value then
+ report("--%-20s %s",formatters["%s=%s"](name,value),short)
+ else
+ report("--%-20s %s",name,short)
+ end
+ end
+ report()
+ end
+ end
+ end
+ for category in xmlcollected(root,"/application/examples/category") do
+ local title = xmltext(xmlfirst(category,"/title"))
+ if title and title ~= "" then
+ report()
+ report(title)
+ report()
+ end
+ for subcategory in xmlcollected(category,"/subcategory") do
+ for example in xmlcollected(subcategory,"/example") do
+ local command = xmltext(xmlfirst(example,"/command"))
+ local comment = xmltext(xmlfirst(example,"/comment"))
+ report(command)
+ end
+ report()
+ end
+ end
+ for comment in xmlcollected(root,"/application/comments/comment") do
+ local comment = xmltext(comment)
+ report()
+ report(comment)
+ report()
+ end
+end
+
+local reporthelp = reporters.help
+local exporthelp = reporters.export
+
+local function xmlfound(t)
+ local helpinfo = t.helpinfo
+ if type(helpinfo) == "table" then
+ return false
+ end
+ if type(helpinfo) ~= "string" then
+ helpinfo = "Warning: no helpinfo found."
+ t.helpinfo = helpinfo
+ return false
+ end
+ if string.find(helpinfo,".xml$") then
+ local ownscript = environment.ownscript
+ local helpdata = false
+ if ownscript then
+ local helpfile = file.join(file.pathpart(ownscript),helpinfo)
+ helpdata = io.loaddata(helpfile)
+ if helpdata == "" then
+ helpdata = false
+ end
+ end
+ if not helpdata then
+ local helpfile = resolvers.findfile(helpinfo,"tex")
+ helpdata = helpfile and io.loaddata(helpfile)
+ end
+ if helpdata and helpdata ~= "" then
+ helpinfo = helpdata
+ else
+ helpinfo = formatters["Warning: help file %a is not found."](helpinfo)
+ end
+ end
+ t.helpinfo = helpinfo
+ return string.find(t.helpinfo,"^<%?xml") and true or false
+end
+
+function reporters.help(t,...)
+ if xmlfound(t) then
+ showhelp(t,...)
+ else
+ reporthelp(t,...)
+ end
+end
+
+function reporters.export(t,methods,filename)
+ if not xmlfound(t) then
+ return exporthelp(t)
+ end
+ if not methods or methods == "" then
+ methods = environment.arguments["exporthelp"]
+ end
+ if not filename or filename == "" then
+ filename = environment.files[1]
+ end
+ dofile(resolvers.findfile("trac-exp.lua","tex"))
+ local exporters = logs.exporters
+ if not exporters or not methods then
+ return exporthelp(t)
+ end
+ if methods == "all" then
+ methods = table.keys(exporters)
+ elseif type(methods) == "string" then
+ methods = utilities.parsers.settings_to_array(methods)
+ else
+ return exporthelp(t)
+ end
+ if type(filename) ~= "string" or filename == "" then
+ filename = false
+ elseif file.pathpart(filename) == "" then
+ t.report("export file %a will not be saved on the current path (safeguard)",filename)
+ return
+ end
+ for i=1,#methods do
+ local method = methods[i]
+ local exporter = exporters[method]
+ if exporter then
+ local result = exporter(t,method)
+ if result and result ~= "" then
+ if filename then
+ local fullname = file.replacesuffix(filename,method)
+ t.report("saving export in %a",fullname)
+ io.savedata(fullname,result)
+ else
+ reporters.lines(t,result)
+ end
+ else
+ t.report("no output from exporter %a",method)
+ end
+ else
+ t.report("unknown exporter %a",method)
+ end
+ end
+end
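
Not part of the patch, just an annotation: a small sketch of how the xml-aware help path above gets exercised. The script name, the reporter and the xml snippet are made up for illustration; the table fields mirror exactly what xmlfound and showhelp read (helpinfo and report), and the element layout matches the xpath queries in showhelp.

local t = {
    helpinfo = [[
<?xml version="1.0"?>
<application>
    <flags>
        <category name="basic">
            <subcategory>
                <flag name="convert" value="string"><short>convert the given file</short></flag>
                <flag name="verbose"><short>report a bit more than usual</short></flag>
            </subcategory>
        </category>
    </flags>
</application>
]],
    report = logs.reporter("mtx-demo"), -- hypothetical script reporter
}

logs.reporters.help(t,"basic") -- falls back to the plain help reporter when no xml is detected
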
diff --git a/tex/context/base/type-ini.lua b/tex/context/base/type-ini.lua
index 9ee97acae..fd9aa1e6d 100644
--- a/tex/context/base/type-ini.lua
+++ b/tex/context/base/type-ini.lua
@@ -1,76 +1,76 @@
-if not modules then modules = { } end modules ['type-ini'] = {
- version = 1.001,
- comment = "companion to type-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- more code will move here
-
-local commands, context = commands, context
-
-local gsub = string.gsub
-
-local report_typescripts = logs.reporter("fonts","typescripts")
-
-local patterns = { "type-imp-%s.mkiv", "type-imp-%s.tex", "type-%s.mkiv", "type-%s.tex" } -- this will be imp only
-
-local function action(name,foundname)
- -- context.startreadingfile()
- -- context.unprotect()
- -- context.pushendofline()
- -- context.input(foundname)
- -- context.popendofline()
- -- context.protect()
- -- context.stopreadingfile()
- context.loadfoundtypescriptfile(foundname)
-end
-
-local name_one, name_two
-
-local function failure_two(name)
- report_typescripts("unknown library %a or %a",name_one,name_two)
-end
-
-local function failure_one(name)
- name_two = gsub(name,"%-.*$","")
- if name_two == name then
- report_typescripts("unknown library %a",name_one)
- else
- commands.uselibrary {
- name = name_two,
- patterns = patterns,
- action = action,
- failure = failure_two,
- onlyonce = false, -- will become true
- }
- end
-end
-
-function commands.doprocesstypescriptfile(name)
- name_one = gsub(name,"^type%-","")
- commands.uselibrary {
- name = name_one,
- patterns = patterns,
- action = action,
- failure = failure_one,
- onlyonce = false, -- will become true
- }
-end
-
-local patterns = { "type-imp-%s.mkiv", "type-imp-%s.tex" }
-
-local function failure(name)
- report_typescripts("unknown library %a",name)
-end
-
-function commands.loadtypescriptfile(name) -- a more specific name
- commands.uselibrary {
- name = gsub(name,"^type%-",""),
- patterns = patterns,
- action = action,
- failure = failure,
- onlyonce = false, -- will become true
- }
-end
+if not modules then modules = { } end modules ['type-ini'] = {
+ version = 1.001,
+ comment = "companion to type-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- more code will move here
+
+local commands, context = commands, context
+
+local gsub = string.gsub
+
+local report_typescripts = logs.reporter("fonts","typescripts")
+
+local patterns = { "type-imp-%s.mkiv", "type-imp-%s.tex", "type-%s.mkiv", "type-%s.tex" } -- this will be imp only
+
+local function action(name,foundname)
+ -- context.startreadingfile()
+ -- context.unprotect()
+ -- context.pushendofline()
+ -- context.input(foundname)
+ -- context.popendofline()
+ -- context.protect()
+ -- context.stopreadingfile()
+ context.loadfoundtypescriptfile(foundname)
+end
+
+local name_one, name_two
+
+local function failure_two(name)
+ report_typescripts("unknown library %a or %a",name_one,name_two)
+end
+
+local function failure_one(name)
+ name_two = gsub(name,"%-.*$","")
+ if name_two == name then
+ report_typescripts("unknown library %a",name_one)
+ else
+ commands.uselibrary {
+ name = name_two,
+ patterns = patterns,
+ action = action,
+ failure = failure_two,
+ onlyonce = false, -- will become true
+ }
+ end
+end
+
+function commands.doprocesstypescriptfile(name)
+ name_one = gsub(name,"^type%-","")
+ commands.uselibrary {
+ name = name_one,
+ patterns = patterns,
+ action = action,
+ failure = failure_one,
+ onlyonce = false, -- will become true
+ }
+end
+
+local patterns = { "type-imp-%s.mkiv", "type-imp-%s.tex" }
+
+local function failure(name)
+ report_typescripts("unknown library %a",name)
+end
+
+function commands.loadtypescriptfile(name) -- a more specific name
+ commands.uselibrary {
+ name = gsub(name,"^type%-",""),
+ patterns = patterns,
+ action = action,
+ failure = failure,
+ onlyonce = false, -- will become true
+ }
+end
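
Not part of the patch, just an annotation: the lookup that commands.loadtypescriptfile performs, spelled out as a direct commands.uselibrary call. The library name "demo" is made up; the patterns, action and failure handler mirror the definitions above.

local report_typescripts = logs.reporter("fonts","typescripts")

commands.uselibrary {
    name     = "demo", -- hypothetical: looks for type-imp-demo.mkiv, then type-imp-demo.tex
    patterns = { "type-imp-%s.mkiv", "type-imp-%s.tex" },
    action   = function(name,foundname)
        context.loadfoundtypescriptfile(foundname)
    end,
    failure  = function(name)
        report_typescripts("unknown library %a",name)
    end,
    onlyonce = false, -- the module notes this will become true
}
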
diff --git a/tex/context/base/typo-bld.lua b/tex/context/base/typo-bld.lua
index ed700add7..125b9946c 100644
--- a/tex/context/base/typo-bld.lua
+++ b/tex/context/base/typo-bld.lua
@@ -1,185 +1,185 @@
-if not modules then modules = { } end modules ['typo-bld'] = { -- was node-par
- version = 1.001,
- comment = "companion to typo-bld.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local insert, remove = table.insert, table.remove
-
-local builders, nodes, node = builders, nodes, node
-
-builders.paragraphs = builders.paragraphs or { }
-local parbuilders = builders.paragraphs
-
-parbuilders.constructors = parbuilders.constructors or { }
-local constructors = parbuilders.constructors
-
-constructors.names = constructors.names or { }
-local names = constructors.names
-
-constructors.numbers = constructors.numbers or { }
-local numbers = constructors.numbers
-
-constructors.methods = constructors.methods or { }
-local methods = constructors.methods
-
-local a_parbuilder = attributes.numbers['parbuilder'] or 999 -- why 999
-constructors.attribute = a_parbuilder
-
-local unsetvalue = attributes.unsetvalue
-local texsetattribute = tex.setattribute
-local texnest = tex.nest
-
-local nodepool = nodes.pool
-local new_baselineskip = nodepool.baselineskip
-local new_lineskip = nodepool.lineskip
-local insert_node_before = node.insert_before
-local hpack_node = node.hpack
-
-local starttiming = statistics.starttiming
-local stoptiming = statistics.stoptiming
-
-storage.register("builders/paragraphs/constructors/names", names, "builders.paragraphs.constructors.names")
-storage.register("builders/paragraphs/constructors/numbers", numbers, "builders.paragraphs.constructors.numbers")
-
-local report_parbuilders = logs.reporter("parbuilders")
-
-local mainconstructor = nil -- not stored in format
-local nofconstructors = 0
-local stack = { }
-
-function constructors.define(name)
- nofconstructors = nofconstructors + 1
- names[nofconstructors] = name
- numbers[name] = nofconstructors
-end
-
-function constructors.set(name) --- will go
- if name then
- mainconstructor = numbers[name] or unsetvalue
- else
- mainconstructor = stack[#stack] or unsetvalue
- end
- texsetattribute(a_parbuilder,mainconstructor)
- if mainconstructor ~= unsetvalue then
- constructors.enable()
- end
-end
-
-function constructors.start(name)
- local number = numbers[name]
- insert(stack,number)
- mainconstructor = number or unsetvalue
- texsetattribute(a_parbuilder,mainconstructor)
- if mainconstructor ~= unsetvalue then
- constructors.enable()
- end
- -- report_parbuilders("start %a",name)
-end
-
-function constructors.stop()
- remove(stack)
- mainconstructor = stack[#stack] or unsetvalue
- texsetattribute(a_parbuilder,mainconstructor)
- if mainconstructor == unsetvalue then
- constructors.disable()
- end
- -- report_parbuilders("stop")
-end
-
--- return values:
---
--- true : tex will break itself
--- false : idem but dangerous
--- head : list of valid vmode nodes with last being hlist
-
-function constructors.handler(head,followed_by_display)
- if type(head) == "boolean" then
- return head
- else
- local attribute = head[a_parbuilder] -- or mainconstructor
- if attribute then
- local method = names[attribute]
- if method then
- local handler = methods[method]
- if handler then
- return handler(head,followed_by_display)
- else
- report_parbuilders("contructor method %a is not defined",tostring(method))
- return true -- let tex break
- end
- end
- end
- return true -- let tex break
- end
-end
-
--- just for testing
-
-function constructors.methods.default(head,followed_by_display)
- return true -- let tex break
-end
-
--- also for testing (now also surrounding spacing done)
-
-function builders.paragraphs.constructors.methods.oneline(head,followed_by_display)
- -- when needed we will turn this into a helper
- local t = texnest[texnest.ptr]
- local h = hpack_node(head)
- local d = tex.baselineskip.width - t.prevdepth - h.height
- t.prevdepth = h.depth
- t.prevgraf = 1
- if d < tex.lineskiplimit then
- return insert_node_before(h,h,new_lineskip(tex.lineskip))
- else
- return insert_node_before(h,h,new_baselineskip(d))
- end
-end
-
--- It makes no sense to have a sequence here as we already have
--- pre and post hooks and only one parbuilder makes sense, so no:
---
--- local actions = nodes.tasks.actions("parbuilders")
---
--- yet ... maybe some day.
-
-local actions = constructors.handler
-local enabled = false
-
-local function processor(head,followed_by_display)
- -- todo: not again in otr so we need to flag
- if enabled then
- starttiming(parbuilders)
- local head = actions(head,followed_by_display)
- stoptiming(parbuilders)
- return head
- else
- return true -- let tex do the work
- end
-end
-
-function constructors.enable()
- enabled = true
-end
-
-function constructors.disable()
- enabled = false
-end
-
-
-callbacks.register('linebreak_filter', processor, "breaking paragraps into lines")
-
-statistics.register("linebreak processing time", function()
- return statistics.elapsedseconds(parbuilders)
-end)
-
--- interface
-
-commands.defineparbuilder = constructors.define
-commands.startparbuilder = constructors.start
-commands.stopparbuilder = constructors.stop
-commands.setparbuilder = constructors.set
-commands.enableparbuilder = constructors.enable
-commands.disableparbuilder = constructors.disable
+if not modules then modules = { } end modules ['typo-bld'] = { -- was node-par
+ version = 1.001,
+ comment = "companion to typo-bld.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local insert, remove = table.insert, table.remove
+
+local builders, nodes, node = builders, nodes, node
+
+builders.paragraphs = builders.paragraphs or { }
+local parbuilders = builders.paragraphs
+
+parbuilders.constructors = parbuilders.constructors or { }
+local constructors = parbuilders.constructors
+
+constructors.names = constructors.names or { }
+local names = constructors.names
+
+constructors.numbers = constructors.numbers or { }
+local numbers = constructors.numbers
+
+constructors.methods = constructors.methods or { }
+local methods = constructors.methods
+
+local a_parbuilder = attributes.numbers['parbuilder'] or 999 -- why 999
+constructors.attribute = a_parbuilder
+
+local unsetvalue = attributes.unsetvalue
+local texsetattribute = tex.setattribute
+local texnest = tex.nest
+
+local nodepool = nodes.pool
+local new_baselineskip = nodepool.baselineskip
+local new_lineskip = nodepool.lineskip
+local insert_node_before = node.insert_before
+local hpack_node = node.hpack
+
+local starttiming = statistics.starttiming
+local stoptiming = statistics.stoptiming
+
+storage.register("builders/paragraphs/constructors/names", names, "builders.paragraphs.constructors.names")
+storage.register("builders/paragraphs/constructors/numbers", numbers, "builders.paragraphs.constructors.numbers")
+
+local report_parbuilders = logs.reporter("parbuilders")
+
+local mainconstructor = nil -- not stored in format
+local nofconstructors = 0
+local stack = { }
+
+function constructors.define(name)
+ nofconstructors = nofconstructors + 1
+ names[nofconstructors] = name
+ numbers[name] = nofconstructors
+end
+
+function constructors.set(name) --- will go
+ if name then
+ mainconstructor = numbers[name] or unsetvalue
+ else
+ mainconstructor = stack[#stack] or unsetvalue
+ end
+ texsetattribute(a_parbuilder,mainconstructor)
+ if mainconstructor ~= unsetvalue then
+ constructors.enable()
+ end
+end
+
+function constructors.start(name)
+ local number = numbers[name]
+ insert(stack,number)
+ mainconstructor = number or unsetvalue
+ texsetattribute(a_parbuilder,mainconstructor)
+ if mainconstructor ~= unsetvalue then
+ constructors.enable()
+ end
+ -- report_parbuilders("start %a",name)
+end
+
+function constructors.stop()
+ remove(stack)
+ mainconstructor = stack[#stack] or unsetvalue
+ texsetattribute(a_parbuilder,mainconstructor)
+ if mainconstructor == unsetvalue then
+ constructors.disable()
+ end
+ -- report_parbuilders("stop")
+end
+
+-- return values:
+--
+-- true : tex will break itself
+-- false : idem but dangerous
+-- head : list of valid vmode nodes with last being hlist
+
+function constructors.handler(head,followed_by_display)
+ if type(head) == "boolean" then
+ return head
+ else
+ local attribute = head[a_parbuilder] -- or mainconstructor
+ if attribute then
+ local method = names[attribute]
+ if method then
+ local handler = methods[method]
+ if handler then
+ return handler(head,followed_by_display)
+ else
+ report_parbuilders("contructor method %a is not defined",tostring(method))
+ return true -- let tex break
+ end
+ end
+ end
+ return true -- let tex break
+ end
+end
+
+-- just for testing
+
+function constructors.methods.default(head,followed_by_display)
+ return true -- let tex break
+end
+
+-- also for testing (now also surrounding spacing done)
+
+function builders.paragraphs.constructors.methods.oneline(head,followed_by_display)
+ -- when needed we will turn this into a helper
+ local t = texnest[texnest.ptr]
+ local h = hpack_node(head)
+ local d = tex.baselineskip.width - t.prevdepth - h.height
+ t.prevdepth = h.depth
+ t.prevgraf = 1
+ if d < tex.lineskiplimit then
+ return insert_node_before(h,h,new_lineskip(tex.lineskip))
+ else
+ return insert_node_before(h,h,new_baselineskip(d))
+ end
+end
+
+-- It makes no sense to have a sequence here as we already have
+-- pre and post hooks and only one parbuilder makes sense, so no:
+--
+-- local actions = nodes.tasks.actions("parbuilders")
+--
+-- yet ... maybe some day.
+
+local actions = constructors.handler
+local enabled = false
+
+local function processor(head,followed_by_display)
+ -- todo: not again in otr so we need to flag
+ if enabled then
+ starttiming(parbuilders)
+ local head = actions(head,followed_by_display)
+ stoptiming(parbuilders)
+ return head
+ else
+ return true -- let tex do the work
+ end
+end
+
+function constructors.enable()
+ enabled = true
+end
+
+function constructors.disable()
+ enabled = false
+end
+
+
+callbacks.register('linebreak_filter', processor, "breaking paragraphs into lines")
+
+statistics.register("linebreak processing time", function()
+ return statistics.elapsedseconds(parbuilders)
+end)
+
+-- interface
+
+commands.defineparbuilder = constructors.define
+commands.startparbuilder = constructors.start
+commands.stopparbuilder = constructors.stop
+commands.setparbuilder = constructors.set
+commands.enableparbuilder = constructors.enable
+commands.disableparbuilder = constructors.disable
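
Not part of the patch, just an annotation: registering a paragraph builder through the constructor interface above. The builder name "demo" is made up; returning true hands the paragraph back to tex, exactly as the "return values" comment in this file documents.

local constructors = builders.paragraphs.constructors

constructors.define("demo") -- hypothetical builder name

constructors.methods.demo = function(head,followed_by_display)
    -- inspect or rebuild the node list here; true means: let tex break the paragraph
    return true
end

-- normally driven from the TeX end (the typo-bld.mkiv macros that map onto
-- commands.defineparbuilder and friends), but the Lua side boils down to:
constructors.start("demo")
-- ... paragraph material ...
constructors.stop()
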
diff --git a/tex/context/base/typo-brk.lua b/tex/context/base/typo-brk.lua
index d6326ebeb..532909a30 100644
--- a/tex/context/base/typo-brk.lua
+++ b/tex/context/base/typo-brk.lua
@@ -1,302 +1,302 @@
-if not modules then modules = { } end modules ['typo-brk'] = {
- version = 1.001,
- comment = "companion to typo-brk.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- this code dates from the beginning and is kind of experimental; it
--- will be optimized and improved soon
-
-local next, type, tonumber = next, type, tonumber
-local utfbyte, utfchar = utf.byte, utf.char
-local format = string.format
-
-local trace_breakpoints = false trackers.register("typesetters.breakpoints", function(v) trace_breakpoints = v end)
-
-local report_breakpoints = logs.reporter("typesetting","breakpoints")
-
-local nodes, node = nodes, node
-
-local settings_to_array = utilities.parsers.settings_to_array
-local copy_node = node.copy
-local copy_nodelist = node.copy_list
-local free_node = node.free
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local remove_node = nodes.remove -- ! nodes
-
-local tonodes = nodes.tonodes
-
-local texattribute = tex.attribute
-local unsetvalue = attributes.unsetvalue
-
-local nodepool = nodes.pool
-local tasks = nodes.tasks
-
-local v_reset = interfaces.variables.reset
-
-local new_penalty = nodepool.penalty
-local new_glue = nodepool.glue
-local new_disc = nodepool.disc
-
-local nodecodes = nodes.nodecodes
-local kerncodes = nodes.kerncodes
-
-local glyph_code = nodecodes.glyph
-local kern_code = nodecodes.kern
-
-local kerning_code = kerncodes.kerning
-
-local typesetters = typesetters
-
-typesetters.breakpoints = typesetters.breakpoints or {}
-local breakpoints = typesetters.breakpoints
-
-breakpoints.mapping = breakpoints.mapping or { }
-breakpoints.numbers = breakpoints.numbers or { }
-
-breakpoints.methods = breakpoints.methods or { }
-local methods = breakpoints.methods
-
-local a_breakpoints = attributes.private("breakpoint")
-breakpoints.attribute = a_breakpoints
-
-storage.register("typesetters/breakpoints/mapping", breakpoints.mapping, "typesetters.breakpoints.mapping")
-
-local mapping = breakpoints.mapping
-local numbers = breakpoints.mapping
-
-for i=1,#mapping do
- local m = mapping[i]
- numbers[m.name] = m
-end
-
-local function insert_break(head,start,before,after)
- insert_node_before(head,start,new_penalty(before))
- insert_node_before(head,start,new_glue(0))
- insert_node_after(head,start,new_glue(0))
- insert_node_after(head,start,new_penalty(after))
-end
-
-methods[1] = function(head,start)
- if start.prev and start.next then
- insert_break(head,start,10000,0)
- end
- return head, start
-end
-
-methods[2] = function(head,start) -- ( => (-
- if start.prev and start.next then
- local tmp
- head, start, tmp = remove_node(head,start)
- head, start = insert_node_before(head,start,new_disc())
- start.attr = copy_nodelist(tmp.attr) -- todo: critical only
- start.replace = tmp
- local tmp, hyphen = copy_node(tmp), copy_node(tmp)
- hyphen.char = languages.prehyphenchar(tmp.lang)
- tmp.next, hyphen.prev = hyphen, tmp
- start.post = tmp
- insert_break(head,start,10000,10000)
- end
- return head, start
-end
-
-methods[3] = function(head,start) -- ) => -)
- if start.prev and start.next then
- local tmp
- head, start, tmp = remove_node(head,start)
- head, start = insert_node_before(head,start,new_disc())
- start.attr = copy_nodelist(tmp.attr) -- todo: critical only
- start.replace = tmp
- local tmp, hyphen = copy_node(tmp), copy_node(tmp)
- hyphen.char = languages.prehyphenchar(tmp.lang)
- tmp.prev, hyphen.next = hyphen, tmp
- start.pre = hyphen
- insert_break(head,start,10000,10000)
- end
- return head, start
-end
-
-methods[4] = function(head,start) -- - => - - -
- if start.prev and start.next then
- local tmp
- head, start, tmp = remove_node(head,start)
- head, start = insert_node_before(head,start,new_disc())
- start.attr = copy_nodelist(tmp.attr) -- todo: critical only
- start.pre, start.post, start.replace = copy_node(tmp), copy_node(tmp), tmp
- insert_break(head,start,10000,10000)
- end
- return head, start
-end
-
-methods[5] = function(head,start,settings) -- x => p q r
- if start.prev and start.next then
- local tmp
- head, start, tmp = remove_node(head,start)
- head, start = insert_node_before(head,start,new_disc())
- local attr = tmp.attr
- local font = tmp.font
- start.attr = copy_nodelist(attr) -- todo: critical only
- local left, right, middle = settings.left, settings.right, settings.middle
- if left then
- start.pre = tonodes(tostring(left),font,attr) -- was right
- end
- if right then
- start.post = tonodes(tostring(right),font,attr) -- was left
- end
- if middle then
- start.replace = tonodes(tostring(middle),font,attr)
- end
- free_node(tmp)
- insert_break(head,start,10000,10000)
- end
- return head, start
-end
-
-local function process(namespace,attribute,head)
- local done, numbers = false, languages.numbers
- local start, n = head, 0
- while start do
- local id = start.id
- if id == glyph_code then
- local attr = start[a_breakpoints]
- if attr and attr > 0 then
- start[a_breakpoints] = unsetvalue -- maybe test for subtype > 256 (faster)
- -- look ahead and back n chars
- local data = mapping[attr]
- if data then
- local map = data.characters
- local cmap = map[start.char]
- if cmap then
- local lang = start.lang
- -- we do a sanity check for language
- local smap = lang and lang >= 0 and lang < 0x7FFF and (cmap[numbers[lang]] or cmap[""])
- if smap then
- if n >= smap.nleft then
- local m = smap.nright
- local next = start.next
- while next do -- gamble on same attribute (not that important actually)
- local id = next.id
- if id == glyph_code then -- gamble on same attribute (not that important actually)
- if map[next.char] then
- break
- elseif m == 1 then
- local method = methods[smap.type]
- if method then
- head, start = method(head,start,smap)
- done = true
- end
- break
- else
- m = m - 1
- next = next.next
- end
- elseif id == kern_code and next.subtype == kerning_code then
- next = next.next
- -- ignore intercharacter kerning, will go way
- else
- -- we can do clever and set n and jump ahead but ... not now
- break
- end
- end
- end
- n = 0
- else
- n = n + 1
- end
- else
- n = n + 1
- end
- else
- n = 0
- end
- else
- -- n = n + 1 -- if we want single char handling (|-|) then we will use grouping and then we need this
- end
- elseif id == kern_code and start.subtype == kerning_code then
- -- ignore intercharacter kerning, will go way
- else
- n = 0
- end
- start = start.next
- end
- return head, done
-end
-
-local enabled = false
-
-function breakpoints.define(name)
- local data = numbers[name]
- if data then
- -- error
- else
- local number = #mapping + 1
- local data = {
- name = name,
- number = number,
- characters = { },
- }
- mapping[number] = data
- numbers[name] = data
- end
-end
-
-function breakpoints.setreplacement(name,char,language,settings)
- char = utfbyte(char)
- local data = numbers[name]
- if data then
- local characters = data.characters
- local cmap = characters[char]
- if not cmap then
- cmap = { }
- characters[char] = cmap
- end
- local left, right, middle = settings.left, settings.right, settings.middle
- cmap[language or ""] = {
- type = tonumber(settings.type) or 1,
- nleft = tonumber(settings.nleft) or 1,
- nright = tonumber(settings.nright) or 1,
- left = left ~= "" and left or nil,
- right = right ~= "" and right or nil,
- middle = middle ~= "" and middle or nil,
- } -- was { type or 1, before or 1, after or 1 }
- end
-end
-
-function breakpoints.set(n)
- if n == v_reset then
- n = unsetvalue
- else
- n = mapping[n]
- if not n then
- n = unsetvalue
- else
- if not enabled then
- if trace_breakpoints then
- report_breakpoints("enabling breakpoints handler")
- end
- tasks.enableaction("processors","typesetters.breakpoints.handler")
- end
- n = n.number
- end
- end
- texattribute[a_breakpoints] = n
-end
-
-breakpoints.handler = nodes.installattributehandler {
- name = "breakpoint",
- namespace = breakpoints,
- processor = process,
-}
-
--- function breakpoints.enable()
--- tasks.enableaction("processors","typesetters.breakpoints.handler")
--- end
-
--- interface
-
-commands.definebreakpoints = breakpoints.define
-commands.definebreakpoint = breakpoints.setreplacement
-commands.setbreakpoints = breakpoints.set
+if not modules then modules = { } end modules ['typo-brk'] = {
+ version = 1.001,
+ comment = "companion to typo-brk.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- this code dates from the beginning and is kind of experimental; it
+-- will be optimized and improved soon
+
+local next, type, tonumber = next, type, tonumber
+local utfbyte, utfchar = utf.byte, utf.char
+local format = string.format
+
+local trace_breakpoints = false trackers.register("typesetters.breakpoints", function(v) trace_breakpoints = v end)
+
+local report_breakpoints = logs.reporter("typesetting","breakpoints")
+
+local nodes, node = nodes, node
+
+local settings_to_array = utilities.parsers.settings_to_array
+local copy_node = node.copy
+local copy_nodelist = node.copy_list
+local free_node = node.free
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local remove_node = nodes.remove -- ! nodes
+
+local tonodes = nodes.tonodes
+
+local texattribute = tex.attribute
+local unsetvalue = attributes.unsetvalue
+
+local nodepool = nodes.pool
+local tasks = nodes.tasks
+
+local v_reset = interfaces.variables.reset
+
+local new_penalty = nodepool.penalty
+local new_glue = nodepool.glue
+local new_disc = nodepool.disc
+
+local nodecodes = nodes.nodecodes
+local kerncodes = nodes.kerncodes
+
+local glyph_code = nodecodes.glyph
+local kern_code = nodecodes.kern
+
+local kerning_code = kerncodes.kerning
+
+local typesetters = typesetters
+
+typesetters.breakpoints = typesetters.breakpoints or {}
+local breakpoints = typesetters.breakpoints
+
+breakpoints.mapping = breakpoints.mapping or { }
+breakpoints.numbers = breakpoints.numbers or { }
+
+breakpoints.methods = breakpoints.methods or { }
+local methods = breakpoints.methods
+
+local a_breakpoints = attributes.private("breakpoint")
+breakpoints.attribute = a_breakpoints
+
+storage.register("typesetters/breakpoints/mapping", breakpoints.mapping, "typesetters.breakpoints.mapping")
+
+local mapping = breakpoints.mapping
+local numbers = breakpoints.mapping
+
+for i=1,#mapping do
+ local m = mapping[i]
+ numbers[m.name] = m
+end
+
+local function insert_break(head,start,before,after)
+ insert_node_before(head,start,new_penalty(before))
+ insert_node_before(head,start,new_glue(0))
+ insert_node_after(head,start,new_glue(0))
+ insert_node_after(head,start,new_penalty(after))
+end
+
+methods[1] = function(head,start)
+ if start.prev and start.next then
+ insert_break(head,start,10000,0)
+ end
+ return head, start
+end
+
+methods[2] = function(head,start) -- ( => (-
+ if start.prev and start.next then
+ local tmp
+ head, start, tmp = remove_node(head,start)
+ head, start = insert_node_before(head,start,new_disc())
+ start.attr = copy_nodelist(tmp.attr) -- todo: critical only
+ start.replace = tmp
+ local tmp, hyphen = copy_node(tmp), copy_node(tmp)
+ hyphen.char = languages.prehyphenchar(tmp.lang)
+ tmp.next, hyphen.prev = hyphen, tmp
+ start.post = tmp
+ insert_break(head,start,10000,10000)
+ end
+ return head, start
+end
+
+methods[3] = function(head,start) -- ) => -)
+ if start.prev and start.next then
+ local tmp
+ head, start, tmp = remove_node(head,start)
+ head, start = insert_node_before(head,start,new_disc())
+ start.attr = copy_nodelist(tmp.attr) -- todo: critical only
+ start.replace = tmp
+ local tmp, hyphen = copy_node(tmp), copy_node(tmp)
+ hyphen.char = languages.prehyphenchar(tmp.lang)
+ tmp.prev, hyphen.next = hyphen, tmp
+ start.pre = hyphen
+ insert_break(head,start,10000,10000)
+ end
+ return head, start
+end
+
+methods[4] = function(head,start) -- - => - - -
+ if start.prev and start.next then
+ local tmp
+ head, start, tmp = remove_node(head,start)
+ head, start = insert_node_before(head,start,new_disc())
+ start.attr = copy_nodelist(tmp.attr) -- todo: critical only
+ start.pre, start.post, start.replace = copy_node(tmp), copy_node(tmp), tmp
+ insert_break(head,start,10000,10000)
+ end
+ return head, start
+end
+
+methods[5] = function(head,start,settings) -- x => p q r
+ if start.prev and start.next then
+ local tmp
+ head, start, tmp = remove_node(head,start)
+ head, start = insert_node_before(head,start,new_disc())
+ local attr = tmp.attr
+ local font = tmp.font
+ start.attr = copy_nodelist(attr) -- todo: critical only
+ local left, right, middle = settings.left, settings.right, settings.middle
+ if left then
+ start.pre = tonodes(tostring(left),font,attr) -- was right
+ end
+ if right then
+ start.post = tonodes(tostring(right),font,attr) -- was left
+ end
+ if middle then
+ start.replace = tonodes(tostring(middle),font,attr)
+ end
+ free_node(tmp)
+ insert_break(head,start,10000,10000)
+ end
+ return head, start
+end
+
+local function process(namespace,attribute,head)
+ local done, numbers = false, languages.numbers
+ local start, n = head, 0
+ while start do
+ local id = start.id
+ if id == glyph_code then
+ local attr = start[a_breakpoints]
+ if attr and attr > 0 then
+ start[a_breakpoints] = unsetvalue -- maybe test for subtype > 256 (faster)
+ -- look ahead and back n chars
+ local data = mapping[attr]
+ if data then
+ local map = data.characters
+ local cmap = map[start.char]
+ if cmap then
+ local lang = start.lang
+ -- we do a sanity check for language
+ local smap = lang and lang >= 0 and lang < 0x7FFF and (cmap[numbers[lang]] or cmap[""])
+ if smap then
+ if n >= smap.nleft then
+ local m = smap.nright
+ local next = start.next
+ while next do -- gamble on same attribute (not that important actually)
+ local id = next.id
+ if id == glyph_code then -- gamble on same attribute (not that important actually)
+ if map[next.char] then
+ break
+ elseif m == 1 then
+ local method = methods[smap.type]
+ if method then
+ head, start = method(head,start,smap)
+ done = true
+ end
+ break
+ else
+ m = m - 1
+ next = next.next
+ end
+ elseif id == kern_code and next.subtype == kerning_code then
+ next = next.next
+ -- ignore intercharacter kerning, will go away
+ else
+ -- we can do clever and set n and jump ahead but ... not now
+ break
+ end
+ end
+ end
+ n = 0
+ else
+ n = n + 1
+ end
+ else
+ n = n + 1
+ end
+ else
+ n = 0
+ end
+ else
+ -- n = n + 1 -- if we want single char handling (|-|) then we will use grouping and then we need this
+ end
+ elseif id == kern_code and start.subtype == kerning_code then
+ -- ignore intercharacter kerning, will go away
+ else
+ n = 0
+ end
+ start = start.next
+ end
+ return head, done
+end
+
+local enabled = false
+
+function breakpoints.define(name)
+ local data = numbers[name]
+ if data then
+ -- error
+ else
+ local number = #mapping + 1
+ local data = {
+ name = name,
+ number = number,
+ characters = { },
+ }
+ mapping[number] = data
+ numbers[name] = data
+ end
+end
+
+function breakpoints.setreplacement(name,char,language,settings)
+ char = utfbyte(char)
+ local data = numbers[name]
+ if data then
+ local characters = data.characters
+ local cmap = characters[char]
+ if not cmap then
+ cmap = { }
+ characters[char] = cmap
+ end
+ local left, right, middle = settings.left, settings.right, settings.middle
+ cmap[language or ""] = {
+ type = tonumber(settings.type) or 1,
+ nleft = tonumber(settings.nleft) or 1,
+ nright = tonumber(settings.nright) or 1,
+ left = left ~= "" and left or nil,
+ right = right ~= "" and right or nil,
+ middle = middle ~= "" and middle or nil,
+ } -- was { type or 1, before or 1, after or 1 }
+ end
+end
+
+function breakpoints.set(n)
+ if n == v_reset then
+ n = unsetvalue
+ else
+ n = mapping[n]
+ if not n then
+ n = unsetvalue
+ else
+ if not enabled then
+ if trace_breakpoints then
+ report_breakpoints("enabling breakpoints handler")
+ end
+ tasks.enableaction("processors","typesetters.breakpoints.handler")
+ end
+ n = n.number
+ end
+ end
+ texattribute[a_breakpoints] = n
+end
+
+breakpoints.handler = nodes.installattributehandler {
+ name = "breakpoint",
+ namespace = breakpoints,
+ processor = process,
+}
+
+-- function breakpoints.enable()
+-- tasks.enableaction("processors","typesetters.breakpoints.handler")
+-- end
+
+-- interface
+
+commands.definebreakpoints = breakpoints.define
+commands.definebreakpoint = breakpoints.setreplacement
+commands.setbreakpoints = breakpoints.set
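
Not part of the patch, just an annotation: defining a breakpoint class from the Lua end with the functions above. The class name and the settings are illustrative; type 4 is the method that keeps the character in the pre, post and replace fields of the injected discretionary.

local breakpoints = typesetters.breakpoints

breakpoints.define("compound") -- hypothetical class name

breakpoints.setreplacement("compound","-","",{ -- "" means: any language
    type   = 4, -- - => - - - (see methods[4] above)
    nleft  = 3, -- at least 3 characters before the breakpoint
    nright = 3, -- and at least 3 after it
})

breakpoints.set("compound") -- enables the processor pass on first use
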
diff --git a/tex/context/base/typo-cap.lua b/tex/context/base/typo-cap.lua
index fdbf2e353..304d133c9 100644
--- a/tex/context/base/typo-cap.lua
+++ b/tex/context/base/typo-cap.lua
@@ -1,331 +1,331 @@
-if not modules then modules = { } end modules ['typo-cap'] = {
- version = 1.001,
- comment = "companion to typo-cap.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
- }
-
-local next, type = next, type
-local format, insert = string.format, table.insert
-local div = math.div
-
-local trace_casing = false trackers.register("typesetters.casing", function(v) trace_casing = v end)
-
-local report_casing = logs.reporter("typesetting","casing")
-
-local nodes, node = nodes, node
-
-local traverse_id = node.traverse_id
-local copy_node = node.copy
-local end_of_math = node.end_of_math
-
-local texattribute = tex.attribute
-local unsetvalue = attributes.unsetvalue
-
-local nodecodes = nodes.nodecodes
-local skipcodes = nodes.skipcodes
-local kerncodes = nodes.kerncodes
-
-local glyph_code = nodecodes.glyph
-local kern_code = nodecodes.kern
-local math_code = nodecodes.math
-
-local kerning_code = kerncodes.kerning
-local userskip_code = skipcodes.userskip
-
-local tasks = nodes.tasks
-
-local fonthashes = fonts.hashes
-local fontdata = fonthashes.identifiers
-local fontchar = fonthashes.characters
-
-local variables = interfaces.variables
-local v_reset = variables.reset
-
-local chardata = characters.data
-
-typesetters = typesetters or { }
-local typesetters = typesetters
-
-typesetters.cases = typesetters.cases or { }
-local cases = typesetters.cases
-
-cases.actions = { }
-local actions = cases.actions
-cases.attribute = c_cases -- no longer needed
-local a_cases = attributes.private("case")
-
-local lastfont = nil
-
--- we use char(0) as placeholder for the larger font, so we need to remove it
--- before it can do further harm
---
--- we could do the whole glyph run here (till no more attributes match) but
--- then we end up with more code .. maybe i will clean this up anyway as the
--- lastfont hack is somewhat ugly .. on the other hand, we need to deal with
--- cases like:
---
--- \WORD {far too \Word{many \WORD{more \word{pushed} in between} useless} words}
-
-local uccodes = characters.uccodes
-local lccodes = characters.lccodes
-
-local function helper(start, codes, special, attribute, once)
- local char = start.char
- local dc = codes[char]
- if dc then
- local fnt = start.font
- if special then
- -- will become function
- if start.char == 0 then
- lastfont = fnt
- local prev, next = start.prev, start.next
- prev.next = next
- if next then
- next.prev = prev
- end
- return prev, true
- elseif lastfont and start.prev.id ~= glyph_code then
- fnt = lastfont
- start.font = lastfont
- end
- end
- local ifc = fontchar[fnt]
- if type(dc) == "table" then
- local ok = true
- for i=1,#dc do
- ok = ok and ifc[dc[i]]
- end
- if ok then
- -- tood; use generic injector
- local prev, original = start, start
- for i=1,#dc do
- local chr = dc[i]
- prev = start
- if i == 1 then
- start.char = chr
- else
- local g = copy_node(original)
- g.char = chr
- local next = start.next
- g.prev = start
- if next then
- g.next = next
- start.next = g
- next.prev = g
- end
- start = g
- end
- end
- if once then lastfont = nil end
- return prev, true
- end
- if once then lastfont = nil end
- return start, false
- elseif ifc[dc] then
- start.char = dc
- if once then lastfont = nil end
- return start, true
- end
- end
- if once then lastfont = nil end
- return start, false
-end
-
-local registered, n = { }, 0
-
-local function register(name,f)
- if type(f) == "function" then
- n = n + 1
- actions[n] = f
- registered[name] = n
- return n
- else
- local n = registered[f]
- registered[name] = n
- return n
- end
-end
-
-cases.register = register
-
-local function WORD(start,attribute)
- lastfont = nil
- return helper(start,uccodes)
-end
-
-local function word(start,attribute)
- lastfont = nil
- return helper(start,lccodes)
-end
-
-local function Word(start,attribute,attr)
- lastfont = nil
- local prev = start.prev
- if prev and prev.id == kern_code and prev.subtype == kerning_code then
- prev = prev.prev
- end
- if not prev or prev.id ~= glyph_code then
- --- only the first character is treated
- for n in traverse_id(glyph_code,start.next) do
- if n[attribute] == attr then
- n[attribute] = unsetvalue
- else
- -- break -- we can have nested mess
- end
- end
- -- we could return the last in the range and save some scanning
- -- but why bother
- return helper(start,uccodes)
- else
- return start, false
- end
-end
-
-local function Words(start,attribute)
- lastfont = nil
- local prev = start.prev
- if prev and prev.id == kern_code and prev.subtype == kerning_code then
- prev = prev.prev
- end
- if not prev or prev.id ~= glyph_code then
- return helper(start,uccodes)
- else
- return start, false
- end
-end
-
-local function capital(start,attribute) -- 3
- return helper(start,uccodes,true,attribute,true)
-end
-
-local function Capital(start,attribute) -- 4
- return helper(start,uccodes,true,attribute,false)
-end
-
-local function none(start)
- return start, false
-end
-
-local function random(start)
- lastfont = nil
- local ch = start.char
- local mr = math.random
- -- local tfm = fontdata[start.font].characters
- local tfm = fontchar[start.font]
- if lccodes[ch] then
- while true do
- local d = chardata[mr(1,0xFFFF)]
- if d then
- local uc = uccodes[d]
- if uc and tfm[uc] then -- this also intercepts tables
- start.char = uc
- return start, true
- end
- end
- end
- elseif uccodes[ch] then
- while true do
- local d = chardata[mr(1,0xFFFF)]
- if d then
- local lc = lccodes[d]
- if lc and tfm[lc] then -- this also intercepts tables
- start.char = lc
- return start, true
- end
- end
- end
- end
- return start, false
-end
-
-register(variables.WORD, WORD) -- 1
-register(variables.word, word) -- 2
-register(variables.Word, Word) -- 3
-register(variables.Words, Words) -- 4
-register(variables.capital, capital) -- 5
-register(variables.Capital, Capital) -- 6
-register(variables.none, none) -- 7 (dummy)
-register(variables.random, random) -- 8
-
-register(variables.cap, variables.capital) -- clone
-register(variables.Cap, variables.Capital) -- clone
-
--- node.traverse_id_attr
-
-local function process(namespace,attribute,head) -- not real fast but also not used on much data
- lastfont = nil
- local lastattr = nil
- local done = false
- local start = head
- while start do -- while because start can jump ahead
- local id = start.id
- if id == glyph_code then
- local attr = start[attribute]
- if attr and attr > 0 then
- if attr ~= lastattr then
- lastfont = nil
- lastattr = attr
- end
- start[attribute] = unsetvalue
- local action = actions[attr%100] -- map back to low number
- if action then
- start, ok = action(start,attribute,attr)
- done = done and ok
- if trace_casing then
- report_casing("case trigger %a, instance %a, result %a",attr%100,div(attr,100),ok)
- end
- elseif trace_casing then
- report_casing("unknown case trigger %a",attr)
- end
- end
- elseif id == math_code then
- start = end_of_math(start)
- end
- if start then -- why test
- start = start.next
- end
- end
- lastfont = nil
- return head, done
-end
-
-local m, enabled = 0, false -- a trick to make neighbouring ranges work
-
-function cases.set(n)
- if n == v_reset then
- n = unsetvalue
- else
- n = registered[n] or tonumber(n)
- if n then
- if not enabled then
- tasks.enableaction("processors","typesetters.cases.handler")
- if trace_casing then
- report_casing("enabling case handler")
- end
- enabled = true
- end
- if m == 100 then
- m = 1
- else
- m = m + 1
- end
- n = m * 100 + n
- else
- n = unsetvalue
- end
- end
- texattribute[a_cases] = n
- -- return n -- bonus
-end
-
-cases.handler = nodes.installattributehandler {
- name = "case",
- namespace = cases,
- processor = process,
-}
-
--- interface
-
-commands.setcharactercasing = cases.set
+if not modules then modules = { } end modules ['typo-cap'] = {
+ version = 1.001,
+ comment = "companion to typo-cap.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+ }
+
+local next, type = next, type
+local format, insert = string.format, table.insert
+local div = math.div
+
+local trace_casing = false trackers.register("typesetters.casing", function(v) trace_casing = v end)
+
+local report_casing = logs.reporter("typesetting","casing")
+
+local nodes, node = nodes, node
+
+local traverse_id = node.traverse_id
+local copy_node = node.copy
+local end_of_math = node.end_of_math
+
+local texattribute = tex.attribute
+local unsetvalue = attributes.unsetvalue
+
+local nodecodes = nodes.nodecodes
+local skipcodes = nodes.skipcodes
+local kerncodes = nodes.kerncodes
+
+local glyph_code = nodecodes.glyph
+local kern_code = nodecodes.kern
+local math_code = nodecodes.math
+
+local kerning_code = kerncodes.kerning
+local userskip_code = skipcodes.userskip
+
+local tasks = nodes.tasks
+
+local fonthashes = fonts.hashes
+local fontdata = fonthashes.identifiers
+local fontchar = fonthashes.characters
+
+local variables = interfaces.variables
+local v_reset = variables.reset
+
+local chardata = characters.data
+
+typesetters = typesetters or { }
+local typesetters = typesetters
+
+typesetters.cases = typesetters.cases or { }
+local cases = typesetters.cases
+
+cases.actions = { }
+local actions = cases.actions
+cases.attribute = c_cases -- no longer needed
+local a_cases = attributes.private("case")
+
+local lastfont = nil
+
+-- we use char(0) as placeholder for the larger font, so we need to remove it
+-- before it can do further harm
+--
+-- we could do the whole glyph run here (till no more attributes match) but
+-- then we end up with more code .. maybe i will clean this up anyway as the
+-- lastfont hack is somewhat ugly .. on the other hand, we need to deal with
+-- cases like:
+--
+-- \WORD {far too \Word{many \WORD{more \word{pushed} in between} useless} words}
+
+local uccodes = characters.uccodes
+local lccodes = characters.lccodes
+
+local function helper(start, codes, special, attribute, once)
+ local char = start.char
+ local dc = codes[char]
+ if dc then
+ local fnt = start.font
+ if special then
+ -- will become function
+ if start.char == 0 then
+ lastfont = fnt
+ local prev, next = start.prev, start.next
+ prev.next = next
+ if next then
+ next.prev = prev
+ end
+ return prev, true
+ elseif lastfont and start.prev.id ~= glyph_code then
+ fnt = lastfont
+ start.font = lastfont
+ end
+ end
+ local ifc = fontchar[fnt]
+ if type(dc) == "table" then
+ local ok = true
+ for i=1,#dc do
+ ok = ok and ifc[dc[i]]
+ end
+ if ok then
+ -- todo: use generic injector
+ local prev, original = start, start
+ for i=1,#dc do
+ local chr = dc[i]
+ prev = start
+ if i == 1 then
+ start.char = chr
+ else
+ local g = copy_node(original)
+ g.char = chr
+ local next = start.next
+ g.prev = start
+ if next then
+ g.next = next
+ start.next = g
+ next.prev = g
+ end
+ start = g
+ end
+ end
+ if once then lastfont = nil end
+ return prev, true
+ end
+ if once then lastfont = nil end
+ return start, false
+ elseif ifc[dc] then
+ start.char = dc
+ if once then lastfont = nil end
+ return start, true
+ end
+ end
+ if once then lastfont = nil end
+ return start, false
+end
+
+local registered, n = { }, 0
+
+local function register(name,f)
+ if type(f) == "function" then
+ n = n + 1
+ actions[n] = f
+ registered[name] = n
+ return n
+ else
+ local n = registered[f]
+ registered[name] = n
+ return n
+ end
+end
+
+cases.register = register
+
+local function WORD(start,attribute)
+ lastfont = nil
+ return helper(start,uccodes)
+end
+
+local function word(start,attribute)
+ lastfont = nil
+ return helper(start,lccodes)
+end
+
+local function Word(start,attribute,attr)
+ lastfont = nil
+ local prev = start.prev
+ if prev and prev.id == kern_code and prev.subtype == kerning_code then
+ prev = prev.prev
+ end
+ if not prev or prev.id ~= glyph_code then
+ --- only the first character is treated
+ for n in traverse_id(glyph_code,start.next) do
+ if n[attribute] == attr then
+ n[attribute] = unsetvalue
+ else
+ -- break -- we can have nested mess
+ end
+ end
+ -- we could return the last in the range and save some scanning
+ -- but why bother
+ return helper(start,uccodes)
+ else
+ return start, false
+ end
+end
+
+local function Words(start,attribute)
+ lastfont = nil
+ local prev = start.prev
+ if prev and prev.id == kern_code and prev.subtype == kerning_code then
+ prev = prev.prev
+ end
+ if not prev or prev.id ~= glyph_code then
+ return helper(start,uccodes)
+ else
+ return start, false
+ end
+end
+
+local function capital(start,attribute) -- 3
+ return helper(start,uccodes,true,attribute,true)
+end
+
+local function Capital(start,attribute) -- 4
+ return helper(start,uccodes,true,attribute,false)
+end
+
+local function none(start)
+ return start, false
+end
+
+local function random(start)
+ lastfont = nil
+ local ch = start.char
+ local mr = math.random
+ -- local tfm = fontdata[start.font].characters
+ local tfm = fontchar[start.font]
+ if lccodes[ch] then
+ while true do
+ local d = chardata[mr(1,0xFFFF)]
+ if d then
+ local uc = uccodes[d]
+ if uc and tfm[uc] then -- this also intercepts tables
+ start.char = uc
+ return start, true
+ end
+ end
+ end
+ elseif uccodes[ch] then
+ while true do
+ local d = chardata[mr(1,0xFFFF)]
+ if d then
+ local lc = lccodes[d]
+ if lc and tfm[lc] then -- this also intercepts tables
+ start.char = lc
+ return start, true
+ end
+ end
+ end
+ end
+ return start, false
+end
+
+register(variables.WORD, WORD) -- 1
+register(variables.word, word) -- 2
+register(variables.Word, Word) -- 3
+register(variables.Words, Words) -- 4
+register(variables.capital, capital) -- 5
+register(variables.Capital, Capital) -- 6
+register(variables.none, none) -- 7 (dummy)
+register(variables.random, random) -- 8
+
+register(variables.cap, variables.capital) -- clone
+register(variables.Cap, variables.Capital) -- clone
+
+-- node.traverse_id_attr
+
+local function process(namespace,attribute,head) -- not real fast but also not used on much data
+ lastfont = nil
+ local lastattr = nil
+ local done = false
+ local start = head
+ while start do -- while because start can jump ahead
+ local id = start.id
+ if id == glyph_code then
+ local attr = start[attribute]
+ if attr and attr > 0 then
+ if attr ~= lastattr then
+ lastfont = nil
+ lastattr = attr
+ end
+ start[attribute] = unsetvalue
+ local action = actions[attr%100] -- map back to low number
+ if action then
+ start, ok = action(start,attribute,attr)
+ done = done or ok -- "and" would keep done false, as done starts out false
+ if trace_casing then
+ report_casing("case trigger %a, instance %a, result %a",attr%100,div(attr,100),ok)
+ end
+ elseif trace_casing then
+ report_casing("unknown case trigger %a",attr)
+ end
+ end
+ elseif id == math_code then
+ start = end_of_math(start)
+ end
+ if start then -- why test
+ start = start.next
+ end
+ end
+ lastfont = nil
+ return head, done
+end
+
+local m, enabled = 0, false -- a trick to make neighbouring ranges work
+
+function cases.set(n)
+ if n == v_reset then
+ n = unsetvalue
+ else
+ n = registered[n] or tonumber(n)
+ if n then
+ if not enabled then
+ tasks.enableaction("processors","typesetters.cases.handler")
+ if trace_casing then
+ report_casing("enabling case handler")
+ end
+ enabled = true
+ end
+ if m == 100 then
+ m = 1
+ else
+ m = m + 1
+ end
+ n = m * 100 + n
+ else
+ n = unsetvalue
+ end
+ end
+ texattribute[a_cases] = n
+ -- return n -- bonus
+end
+
+cases.handler = nodes.installattributehandler {
+ name = "case",
+ namespace = cases,
+ processor = process,
+}
+
+-- interface
+
+commands.setcharactercasing = cases.set
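
Not part of the patch, just an annotation: the hook that cases.register exposes. The variant names are made up; an action receives the glyph node, the attribute and its value, and must return the (possibly replaced) node plus a boolean that tells whether anything changed.

local cases = typesetters.cases

cases.register("shout", function(start,attribute,attr) -- hypothetical variant
    -- same contract as the built-in WORD/word/Word/... actions
    return start, false
end)

-- the second form registers an alias for an already registered action
cases.register("screaming","shout")

-- cases.set("shout") then selects it and enables the handler; the TeX end
-- goes through commands.setcharactercasing
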
diff --git a/tex/context/base/typo-cln.lua b/tex/context/base/typo-cln.lua
index be00ac10d..70d2f7b60 100644
--- a/tex/context/base/typo-cln.lua
+++ b/tex/context/base/typo-cln.lua
@@ -1,102 +1,102 @@
-if not modules then modules = { } end modules ['typo-cln'] = {
- version = 1.001,
- comment = "companion to typo-cln.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This quick and dirty hack took less time than listening to a CD (In
--- this case Dream Theaters' Octavium. Of course extensions will take
--- more time.
-
-local utfbyte = utf.byte
-
-local trace_cleaners = false trackers.register("typesetters.cleaners", function(v) trace_cleaners = v end)
-local trace_autocase = false trackers.register("typesetters.cleaners.autocase",function(v) trace_autocase = v end)
-
-local report_cleaners = logs.reporter("nodes","cleaners")
-local report_autocase = logs.reporter("nodes","autocase")
-
-typesetters.cleaners = typesetters.cleaners or { }
-local cleaners = typesetters.cleaners
-
-local variables = interfaces.variables
-
-local nodecodes = nodes.nodecodes
-local tasks = nodes.tasks
-
-local texattribute = tex.attribute
-
-local traverse_id = node.traverse_id
-
-local unsetvalue = attributes.unsetvalue
-
-local glyph_code = nodecodes.glyph
-local uccodes = characters.uccodes
-
-local a_cleaner = attributes.private("cleaner")
-
-local resetter = { -- this will become an entry in char-def
- [utfbyte(".")] = true
-}
-
--- Contrary to the casing code we need to keep track of a state.
--- We could extend the casing code with a status tracker but on
--- the other hand we might want to apply casing afterwards. So,
--- cleaning comes first.
-
-local function process(namespace,attribute,head)
- local inline, done = false, false
- for n in traverse_id(glyph_code,head) do
- local char = n.char
- if resetter[char] then
- inline = false
- elseif not inline then
- local a = n[attribute]
- if a == 1 then -- currently only one cleaner so no need to be fancy
- local upper = uccodes[char]
- if type(upper) == "table" then
- -- some day, not much change that \SS ends up here
- else
- n.char = upper
- done = true
- if trace_autocase then
- report_autocase("")
- end
- end
- end
- inline = true
- end
- end
- return head, done
-end
-
--- see typo-cap for a more advanced settings handler .. not needed now
-
-local enabled = false
-
-function cleaners.set(n)
- if n == variables.reset or not tonumber(n) or n == 0 then
- texattribute[a_cleaner] = unsetvalue
- else
- if not enabled then
- tasks.enableaction("processors","typesetters.cleaners.handler")
- if trace_cleaners then
- report_cleaners("enabling cleaners")
- end
- enabled = true
- end
- texattribute[a_cleaner] = n
- end
-end
-
-cleaners.handler = nodes.installattributehandler {
- name = "cleaner",
- namespace = cleaners,
- processor = process,
-}
-
--- interface
-
-commands.setcharactercleaning = cleaners.set
+if not modules then modules = { } end modules ['typo-cln'] = {
+ version = 1.001,
+ comment = "companion to typo-cln.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This quick and dirty hack took less time than listening to a CD (in
+-- this case Dream Theater's Octavarium). Of course extensions will take
+-- more time.
+
+local utfbyte = utf.byte
+
+local trace_cleaners = false trackers.register("typesetters.cleaners", function(v) trace_cleaners = v end)
+local trace_autocase = false trackers.register("typesetters.cleaners.autocase",function(v) trace_autocase = v end)
+
+local report_cleaners = logs.reporter("nodes","cleaners")
+local report_autocase = logs.reporter("nodes","autocase")
+
+typesetters.cleaners = typesetters.cleaners or { }
+local cleaners = typesetters.cleaners
+
+local variables = interfaces.variables
+
+local nodecodes = nodes.nodecodes
+local tasks = nodes.tasks
+
+local texattribute = tex.attribute
+
+local traverse_id = node.traverse_id
+
+local unsetvalue = attributes.unsetvalue
+
+local glyph_code = nodecodes.glyph
+local uccodes = characters.uccodes
+
+local a_cleaner = attributes.private("cleaner")
+
+local resetter = { -- this will become an entry in char-def
+ [utfbyte(".")] = true
+}
+
+-- Contrary to the casing code we need to keep track of a state.
+-- We could extend the casing code with a status tracker but on
+-- the other hand we might want to apply casing afterwards. So,
+-- cleaning comes first.
+
+local function process(namespace,attribute,head)
+ local inline, done = false, false
+ for n in traverse_id(glyph_code,head) do
+ local char = n.char
+ if resetter[char] then
+ inline = false
+ elseif not inline then
+ local a = n[attribute]
+ if a == 1 then -- currently only one cleaner so no need to be fancy
+ local upper = uccodes[char]
+ if type(upper) == "table" then
+ -- some day, not much chance that \SS ends up here
+ else
+ n.char = upper
+ done = true
+ if trace_autocase then
+ report_autocase("")
+ end
+ end
+ end
+ inline = true
+ end
+ end
+ return head, done
+end
+
+-- see typo-cap for a more advanced settings handler .. not needed now
+
+local enabled = false
+
+function cleaners.set(n)
+ if n == variables.reset or not tonumber(n) or n == 0 then
+ texattribute[a_cleaner] = unsetvalue
+ else
+ if not enabled then
+ tasks.enableaction("processors","typesetters.cleaners.handler")
+ if trace_cleaners then
+ report_cleaners("enabling cleaners")
+ end
+ enabled = true
+ end
+ texattribute[a_cleaner] = n
+ end
+end
+
+cleaners.handler = nodes.installattributehandler {
+ name = "cleaner",
+ namespace = cleaners,
+ processor = process,
+}
+
+-- interface
+
+commands.setcharactercleaning = cleaners.set
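
The cleaner above only needs a single bit of state while it walks the glyphs: inline is false at the start of a sentence, so the first attributed glyph gets its uppercase code, and any character listed in resetter (currently just the period) flips the state back. The sketch below mimics that state machine on a plain string, which is only an approximation: letters stand in for glyph nodes and string.upper stands in for the characters.uccodes lookup.

    -- standalone approximation of the typo-cln state machine on a string
    -- (the real code walks glyph nodes and consults characters.uccodes)

    local resetter = { ["."] = true }         -- sentence enders reset the state

    local function autocase(str)
        local inline, out = false, { }
        for ch in str:gmatch(".") do
            if resetter[ch] then
                inline = false                -- the next letter starts a sentence
            elseif ch:match("%a") then        -- letters play the role of glyphs
                if not inline then
                    ch = ch:upper()           -- capitalize the sentence start
                end
                inline = true
            end
            out[#out+1] = ch
        end
        return table.concat(out)
    end

    print(autocase("one sentence. another one."))
    --> One sentence. Another one.
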
diff --git a/tex/context/base/typo-dig.lua b/tex/context/base/typo-dig.lua
index 62d17fa3b..9cf8417b8 100644
--- a/tex/context/base/typo-dig.lua
+++ b/tex/context/base/typo-dig.lua
@@ -1,162 +1,162 @@
-if not modules then modules = { } end modules ['typo-dig'] = {
- version = 1.001,
- comment = "companion to typo-dig.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- we might consider doing this after the otf pass because now osf do not work
--- out well in node mode.
-
-local next, type = next, type
-local format, insert = string.format, table.insert
-local round, div = math.round, math.div
-
-local trace_digits = false trackers.register("typesetters.digits", function(v) trace_digits = v end)
-
-local report_digits = logs.reporter("typesetting","digits")
-
-local nodes, node = nodes, node
-
-local hpack_node = node.hpack
-local traverse_id = node.traverse_id
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-
-local texattribute = tex.attribute
-local unsetvalue = attributes.unsetvalue
-
-local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
-
-local nodepool = nodes.pool
-local tasks = nodes.tasks
-
-local new_glue = nodepool.glue
-
-local fonthashes = fonts.hashes
-local fontdata = fonthashes.identifiers
-local chardata = fonthashes.characters
-local quaddata = fonthashes.quads
-
-local v_reset = interfaces.variables.reset
-
-local charbase = characters.data
-local getdigitwidth = fonts.helpers.getdigitwidth
-
-typesetters = typesetters or { }
-local typesetters = typesetters
-
-typesetters.digits = typesetters.digits or { }
-local digits = typesetters.digits
-
-digits.actions = { }
-local actions = digits.actions
-
-local a_digits = attributes.private("digits")
-digits.attribute = a_digits
-
--- at some point we can manipulate the glyph node so then i need
--- to rewrite this then
-
-function nodes.aligned(head,start,stop,width,how)
- if how == "flushright" or how == "middle" then
- head, start = insert_node_before(head,start,new_glue(0,65536,65536))
- end
- if how == "flushleft" or how == "middle" then
- head, stop = insert_node_after(head,stop,new_glue(0,65536,65536))
- end
- local prv, nxt = start.prev, stop.next
- start.prev, stop.next = nil, nil
- local packed = hpack_node(start,width,"exactly") -- no directional mess here, just lr
- if prv then
- prv.next, packed.prev = packed, prv
- end
- if nxt then
- nxt.prev, packed.next = packed, nxt
- end
- if packed.prev then
- return head, packed
- else
- return packed, packed
- end
-end
-
-actions[1] = function(head,start,attribute,attr)
- local font = start.font
- local char = start.char
- local unic = chardata[font][char].tounicode
- local what = unic and tonumber(unic,16) or char
- if charbase[what].category == "nd" then
- local oldwidth, newwidth = start.width, getdigitwidth(font)
- if newwidth ~= oldwidth then
- if trace_digits then
- report_digits("digit trigger %a, instance %a, char %C, unicode %U, delta %s",
- attr%100,div(attr,100),char,what,newwidth-oldwidth)
- end
- head, start = nodes.aligned(head,start,start,newwidth,"middle")
- return head, start, true
- end
- end
- return head, start, false
-end
-
-local function process(namespace,attribute,head)
- local done, current, ok = false, head, false
- while current do
- if current.id == glyph_code then
- local attr = current[attribute]
- if attr and attr > 0 then
- current[attribute] = unsetvalue
- local action = actions[attr%100] -- map back to low number
- if action then
- head, current, ok = action(head,current,attribute,attr)
- done = done and ok
- elseif trace_digits then
- report_digits("unknown digit trigger %a",attr)
- end
- end
- end
- current = current and current.next
- end
- return head, done
-end
-
-local m, enabled = 0, false -- a trick to make neighbouring ranges work
-
-function digits.set(n) -- number or 'reset'
- if n == v_reset then
- n = unsetvalue
- else
- n = tonumber(n)
- if n then
- if not enabled then
- tasks.enableaction("processors","typesetters.digits.handler")
- if trace_digits then
- report_digits("enabling digit handler")
- end
- enabled = true
- end
- if m == 100 then
- m = 1
- else
- m = m + 1
- end
- n = m * 100 + n
- else
- n = unsetvalue
- end
- end
- texattribute[a_digits] = n
-end
-
-digits.handler = nodes.installattributehandler { -- we could avoid this wrapper
- name = "digits",
- namespace = digits,
- processor = process,
-}
-
--- interface
-
-commands.setdigitsmanipulation = digits.set
+if not modules then modules = { } end modules ['typo-dig'] = {
+ version = 1.001,
+ comment = "companion to typo-dig.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- we might consider doing this after the otf pass because now osf do not work
+-- out well in node mode.
+
+local next, type = next, type
+local format, insert = string.format, table.insert
+local round, div = math.round, math.div
+
+local trace_digits = false trackers.register("typesetters.digits", function(v) trace_digits = v end)
+
+local report_digits = logs.reporter("typesetting","digits")
+
+local nodes, node = nodes, node
+
+local hpack_node = node.hpack
+local traverse_id = node.traverse_id
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+
+local texattribute = tex.attribute
+local unsetvalue = attributes.unsetvalue
+
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
+
+local nodepool = nodes.pool
+local tasks = nodes.tasks
+
+local new_glue = nodepool.glue
+
+local fonthashes = fonts.hashes
+local fontdata = fonthashes.identifiers
+local chardata = fonthashes.characters
+local quaddata = fonthashes.quads
+
+local v_reset = interfaces.variables.reset
+
+local charbase = characters.data
+local getdigitwidth = fonts.helpers.getdigitwidth
+
+typesetters = typesetters or { }
+local typesetters = typesetters
+
+typesetters.digits = typesetters.digits or { }
+local digits = typesetters.digits
+
+digits.actions = { }
+local actions = digits.actions
+
+local a_digits = attributes.private("digits")
+digits.attribute = a_digits
+
+-- at some point we will be able to manipulate the glyph node directly,
+-- and then this code needs to be rewritten
+
+function nodes.aligned(head,start,stop,width,how)
+ if how == "flushright" or how == "middle" then
+ head, start = insert_node_before(head,start,new_glue(0,65536,65536))
+ end
+ if how == "flushleft" or how == "middle" then
+ head, stop = insert_node_after(head,stop,new_glue(0,65536,65536))
+ end
+ local prv, nxt = start.prev, stop.next
+ start.prev, stop.next = nil, nil
+ local packed = hpack_node(start,width,"exactly") -- no directional mess here, just lr
+ if prv then
+ prv.next, packed.prev = packed, prv
+ end
+ if nxt then
+ nxt.prev, packed.next = packed, nxt
+ end
+ if packed.prev then
+ return head, packed
+ else
+ return packed, packed
+ end
+end
+
+actions[1] = function(head,start,attribute,attr)
+ local font = start.font
+ local char = start.char
+ local unic = chardata[font][char].tounicode
+ local what = unic and tonumber(unic,16) or char
+ if charbase[what].category == "nd" then
+ local oldwidth, newwidth = start.width, getdigitwidth(font)
+ if newwidth ~= oldwidth then
+ if trace_digits then
+ report_digits("digit trigger %a, instance %a, char %C, unicode %U, delta %s",
+ attr%100,div(attr,100),char,what,newwidth-oldwidth)
+ end
+ head, start = nodes.aligned(head,start,start,newwidth,"middle")
+ return head, start, true
+ end
+ end
+ return head, start, false
+end
+
+local function process(namespace,attribute,head)
+ local done, current, ok = false, head, false
+ while current do
+ if current.id == glyph_code then
+ local attr = current[attribute]
+ if attr and attr > 0 then
+ current[attribute] = unsetvalue
+ local action = actions[attr%100] -- map back to low number
+ if action then
+ head, current, ok = action(head,current,attribute,attr)
+ done = done or ok
+ elseif trace_digits then
+ report_digits("unknown digit trigger %a",attr)
+ end
+ end
+ end
+ current = current and current.next
+ end
+ return head, done
+end
+
+local m, enabled = 0, false -- a trick to make neighbouring ranges work
+
+function digits.set(n) -- number or 'reset'
+ if n == v_reset then
+ n = unsetvalue
+ else
+ n = tonumber(n)
+ if n then
+ if not enabled then
+ tasks.enableaction("processors","typesetters.digits.handler")
+ if trace_digits then
+ report_digits("enabling digit handler")
+ end
+ enabled = true
+ end
+ if m == 100 then
+ m = 1
+ else
+ m = m + 1
+ end
+ n = m * 100 + n
+ else
+ n = unsetvalue
+ end
+ end
+ texattribute[a_digits] = n
+end
+
+digits.handler = nodes.installattributehandler { -- we could avoid this wrapper
+ name = "digits",
+ namespace = digits,
+ processor = process,
+}
+
+-- interface
+
+commands.setdigitsmanipulation = digits.set
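
Two things happen in the digit handler above: actions[1] decides whether a glyph needs equalizing (it must be a decimal digit, category "nd", whose natural width differs from the font's digit width), and nodes.aligned then repackages it in an hbox of exactly that width, with infinitely stretchable glue on one or both sides depending on the alignment. The sketch below only reproduces the decision and the width delta with invented numbers; digitwidth does not come from a real font.

    -- arithmetic sketch of the equal-width digit decision in typo-dig
    -- (the widths, in scaled points, are invented for the example)

    local digitwidth = 327680                -- pretend fonts.helpers.getdigitwidth(font)

    local function equalize(char, naturalwidth)
        local isdigit = char >= 0x30 and char <= 0x39   -- stands in for category "nd"
        if isdigit and naturalwidth ~= digitwidth then
            -- the real code hpacks the glyph to digitwidth with fil glue
            -- on both sides ("middle"), so this delta is split evenly
            return true, digitwidth - naturalwidth
        end
        return false, 0
    end

    print(equalize(0x31, 196608))            --> true   131072   (digit '1', too narrow)
    print(equalize(0x41, 196608))            --> false  0        (letter 'A', untouched)
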
diff --git a/tex/context/base/typo-dir.lua b/tex/context/base/typo-dir.lua
index 7e5f8c2d3..f02395475 100644
--- a/tex/context/base/typo-dir.lua
+++ b/tex/context/base/typo-dir.lua
@@ -1,463 +1,463 @@
-if not modules then modules = { } end modules ['typo-dir'] = {
- version = 1.001,
- comment = "companion to typo-dir.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- todo: also use end_of_math here?
-
-local next, type = next, type
-local format, insert, sub, find, match = string.format, table.insert, string.sub, string.find, string.match
-local utfchar = utf.char
-
--- vertical space handler
-
-local nodes, node = nodes, node
-
-local trace_directions = false trackers.register("typesetters.directions", function(v) trace_directions = v end)
-
-local report_directions = logs.reporter("typesetting","directions")
-
-local traverse_id = node.traverse_id
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local remove_node = nodes.remove
-
-local texattribute = tex.attribute
-local unsetvalue = attributes.unsetvalue
-
-local nodecodes = nodes.nodecodes
-local whatcodes = nodes.whatcodes
-local mathcodes = nodes.mathcodes
-
-local tasks = nodes.tasks
-
-local glyph_code = nodecodes.glyph
-local whatsit_code = nodecodes.whatsit
-local math_code = nodecodes.math
-
-local localpar_code = whatcodes.localpar
-local dir_code = whatcodes.dir
-
-local nodepool = nodes.pool
-
-local new_textdir = nodepool.textdir
-
-local beginmath_code = mathcodes.beginmath
-local endmath_code = mathcodes.endmath
-
-local fonthashes = fonts.hashes
-local fontdata = fonthashes.identifiers
-local fontchar = fonthashes.characters
-
-local chardata = characters.data
-local chardirs = characters.directions -- maybe make a special mirror table
-
---~ Analysis by Idris:
---~
---~ 1. Assuming the reading- vs word-order distinction (bidi-char types) is governing;
---~ 2. Assuming that 'ARAB' represents an actual arabic string in raw input order, not word-order;
---~ 3. Assuming that 'BARA' represent the correct RL word order;
---~
---~ Then we have, with input: LATIN ARAB
---~
---~ \textdir TLT LATIN ARAB => LATIN BARA
---~ \textdir TRT LATIN ARAB => LATIN BARA
---~ \textdir TRT LRO LATIN ARAB => LATIN ARAB
---~ \textdir TLT LRO LATIN ARAB => LATIN ARAB
---~ \textdir TLT RLO LATIN ARAB => NITAL ARAB
---~ \textdir TRT RLO LATIN ARAB => NITAL ARAB
-
--- elseif d == "es" then -- European Number Separator
--- elseif d == "et" then -- European Number Terminator
--- elseif d == "cs" then -- Common Number Separator
--- elseif d == "nsm" then -- Non-Spacing Mark
--- elseif d == "bn" then -- Boundary Neutral
--- elseif d == "b" then -- Paragraph Separator
--- elseif d == "s" then -- Segment Separator
--- elseif d == "ws" then -- Whitespace
--- elseif d == "on" then -- Other Neutrals
-
-typesetters.directions = typesetters.directions or { }
-local directions = typesetters.directions
-
-local a_state = attributes.private('state')
-local a_directions = attributes.private('directions')
-
-local skipmath = true
-local strip = false
-
--- todo: delayed inserts here
--- todo: get rid of local functions here
-
--- beware, math adds whatsits afterwards so that will mess things up
-
-local finish, autodir, embedded, override, done = nil, 0, 0, 0, false
-local list, glyphs = nil, false
-local finished, finidir, finipos = nil, nil, 1
-local head, current, inserted = nil, nil, nil
-
-local function finish_auto_before()
- head, inserted = insert_node_before(head,current,new_textdir("-"..finish))
- finished, finidir = inserted, finish
- if trace_directions then
- insert(list,#list,format("auto finish inserted before: %s",finish))
- finipos = #list-1
- end
- finish, autodir, done = nil, 0, true
-end
-
-local function finish_auto_after()
- head, current = insert_node_after(head,current,new_textdir("-"..finish))
- finished, finidir = current, finish
- if trace_directions then
- list[#list+1] = format("auto finish inserted after: %s",finish)
- finipos = #list
- end
- finish, autodir, done = nil, 0, true
-end
-
-local function force_auto_left_before()
- if finish then
- finish_auto_before()
- end
- if embedded >= 0 then
- finish, autodir, done = "TLT", 1, true
- else
- finish, autodir, done = "TRT", -1, true
- end
- if finidir == finish then
- head = remove_node(head,finished,true)
- if trace_directions then
- list[finipos] = list[finipos] .. " (deleted afterwards)"
- insert(list,#list,format("start text dir %s (embedded: %s)",finish,embedded))
- end
- else
- head, inserted = insert_node_before(head,current,new_textdir("+"..finish))
- if trace_directions then
- insert(list,#list,format("start text dir %s (embedded: %s)",finish,embedded))
- end
- end
-end
-
-local function force_auto_right_before()
- if finish then
- finish_auto_before()
- end
- if embedded <= 0 then
- finish, autodir, done = "TRT", -1, true
- else
- finish, autodir, done = "TLT", 1, true
- end
- if finidir == finish then
- head = remove_node(head,finished,true)
- if trace_directions then
- list[finipos] = list[finipos] .. " (deleted afterwards)"
- insert(list,#list,format("start text dir %s (embedded: %s)",finish,embedded))
- end
- else
- head, inserted = insert_node_before(head,current,new_textdir("+"..finish))
- if trace_directions then
- insert(list,#list,format("start text dir %s (embedded: %s)",finish,embedded))
- end
- end
-end
-
--- todo: use new dir functions
-
-local s_isol = fonts.analyzers.states.isol
-
-function directions.process(namespace,attribute,start) -- todo: make faster
- if not start.next then
- return start, false
- end
- head, current, inserted = start, start, nil
- finish, autodir, embedded, override, done = nil, 0, 0, 0, false
- list, glyphs = trace_directions and { }, false
- finished, finidir, finipos = nil, nil, 1
- local stack, top, obsolete = { }, 0, { }
- local lro, rlo, prevattr, inmath = false, false, 0, false
- while current do
- local id = current.id
- if skipmath and id == math_code then
- local subtype = current.subtype
- if subtype == beginmath_code then
- inmath = true
- elseif subtype == endmath_code then
- inmath = false
- else
- -- todo
- end
- current = current.next
- elseif inmath then
- current = current.next
- else
- local attr = current[attribute]
- if attr and attr > 0 then
- -- current[attribute] = unsetvalue -- slow, needed?
- if attr == 1 then
- -- bidi parsing mode
- elseif attr ~= prevattr then
- -- no pop, grouped driven (2=normal,3=lro,4=rlo)
- if attr == 3 then
- if trace_directions then
- list[#list+1] = format("override right -> left (lro) (bidi=%s)",attr)
- end
- lro, rlo = true, false
- elseif attr == 4 then
- if trace_directions then
- list[#list+1] = format("override left -> right (rlo) (bidi=%s)",attr)
- end
- lro, rlo = false, true
- else
- if trace_directions and
- current ~= head then list[#list+1] = format("override reset (bidi=%s)",attr)
- end
- lro, rlo = false, false
- end
- prevattr = attr
- end
- end
- if id == glyph_code then
- glyphs = true
- if attr and attr > 0 then
- local char = current.char
- local d = chardirs[char]
- if rlo or override > 0 then
- if d == "l" then
- if trace_directions then
- list[#list+1] = format("char %s (%s / U+%04X) of class %s overidden to r (bidi=%s)",utfchar(char),char,char,d,attr)
- end
- d = "r"
- elseif trace_directions then
- if d == "lro" or d == "rlo" or d == "pdf" then -- else side effects on terminal
- list[#list+1] = format("override char of class %s (bidi=%s)",d,attr)
- else -- todo: rle lre
- list[#list+1] = format("char %s (%s / U+%04X) of class %s (bidi=%s)",utfchar(char),char,char,d,attr)
- end
- end
- elseif lro or override < 0 then
- if d == "r" or d == "al" then
- current[a_state] = s_isol -- maybe better have a special bidi attr value -> override (9) -> todo
- if trace_directions then
- list[#list+1] = format("char %s (%s / U+%04X) of class %s overidden to l (bidi=%s) (state=isol)",utfchar(char),char,char,d,attr)
- end
- d = "l"
- elseif trace_directions then
- if d == "lro" or d == "rlo" or d == "pdf" then -- else side effects on terminal
- list[#list+1] = format("override char of class %s (bidi=%s)",d,attr)
- else -- todo: rle lre
- list[#list+1] = format("char %s (%s / U+%04X) of class %s (bidi=%s)",utfchar(char),char,char,d,attr)
- end
- end
- elseif trace_directions then
- if d == "lro" or d == "rlo" or d == "pdf" then -- else side effects on terminal
- list[#list+1] = format("override char of class %s (bidi=%s)",d,attr)
- else -- todo: rle lre
- list[#list+1] = format("char %s (%s / U+%04X) of class %s (bidi=%s)",utfchar(char),char,char,d,attr)
- end
- end
- if d == "on" then
- local mirror = chardata[char].mirror -- maybe make a special mirror table
- if mirror and fontchar[current.font][mirror] then
- -- todo: set attribute
- if autodir < 0 then
- current.char = mirror
- done = true
- --~ elseif left or autodir > 0 then
- --~ if not is_right(current.prev) then
- --~ current.char = mirror
- --~ done = true
- --~ end
- end
- end
- elseif d == "l" or d == "en" then -- european number
- if autodir <= 0 then -- could be option
- force_auto_left_before()
- end
- elseif d == "r" or d == "al" then -- arabic number
- if autodir >= 0 then
- force_auto_right_before()
- end
- elseif d == "an" then -- arabic number
- -- actually this is language dependent ...
--- if autodir <= 0 then
--- force_auto_left_before()
--- end
- if autodir >= 0 then
- force_auto_right_before()
- end
- elseif d == "lro" then -- Left-to-Right Override -> right becomes left
- if trace_directions then
- list[#list+1] = "override right -> left"
- end
- top = top + 1
- stack[top] = { override, embedded }
- override = -1
- obsolete[#obsolete+1] = current
- elseif d == "rlo" then -- Right-to-Left Override -> left becomes right
- if trace_directions then
- list[#list+1] = "override left -> right"
- end
- top = top + 1
- stack[top] = { override, embedded }
- override = 1
- obsolete[#obsolete+1] = current
- elseif d == "lre" then -- Left-to-Right Embedding -> TLT
- if trace_directions then
- list[#list+1] = "embedding left -> right"
- end
- top = top + 1
- stack[top] = { override, embedded }
- embedded = 1
- obsolete[#obsolete+1] = current
- elseif d == "rle" then -- Right-to-Left Embedding -> TRT
- if trace_directions then
- list[#list+1] = "embedding right -> left"
- end
- top = top + 1
- stack[top] = { override, embedded }
- embedded = -1 -- was 1
- obsolete[#obsolete+1] = current
- elseif d == "pdf" then -- Pop Directional Format
- -- override = 0
- if top > 0 then
- local s = stack[top]
- override, embedded = s[1], s[2]
- top = top - 1
- if trace_directions then
- list[#list+1] = format("state: override: %s, embedded: %s, autodir: %s",override,embedded,autodir)
- end
- else
- if trace_directions then
- list[#list+1] = "pop (error, too many pops)"
- end
- end
- obsolete[#obsolete+1] = current
- end
- elseif trace_directions then
- local char = current.char
- local d = chardirs[char]
- list[#list+1] = format("char %s (%s / U+%04X) of class %s (no bidi)",utfchar(char),char,char,d or "?")
- end
- elseif id == whatsit_code then
- if finish then
- finish_auto_before()
- end
- local subtype = current.subtype
- if subtype == localpar_code then
- local dir = current.dir
- local d = sub(dir,2,2)
- if d == 'R' then -- find(dir,".R.") / dir == "TRT"
- autodir = -1
- else
- autodir = 1
- end
- -- embedded = autodir
- if trace_directions then
- list[#list+1] = format("pardir %s",dir)
- end
- elseif subtype == dir_code then
- local dir = current.dir
- -- local sign = sub(dir,1,1)
- -- local dire = sub(dir,3,3)
- local sign, dire = match(dir,"^(.).(.)")
- if dire == "R" then
- if sign == "+" then
- finish, autodir = "TRT", -1
- else
- finish, autodir = nil, 0
- end
- else
- if sign == "+" then
- finish, autodir = "TLT", 1
- else
- finish, autodir = nil, 0
- end
- end
- if trace_directions then
- list[#list+1] = format("textdir %s",dir)
- end
- end
- else
- if trace_directions then
- list[#list+1] = format("node %s (subtype %s)",nodecodes[id],current.subtype)
- end
- if finish then
- finish_auto_before()
- end
- end
- local cn = current.next
- if not cn then
- if finish then
- finish_auto_after()
- end
- end
- current = cn
- end
- end
- if trace_directions and glyphs then
- report_directions("start log")
- for i=1,#list do
- report_directions("%02i: %s",i,list[i])
- end
- report_directions("stop log")
- end
- if done and strip then
- local n = #obsolete
- if n > 0 then
- for i=1,n do
- remove_node(head,obsolete[i],true)
- end
- report_directions("%s character nodes removed",n)
- end
- end
- return head, done
-end
-
---~ local function is_right(n) -- keep !
---~ if n then
---~ local id = n.id
---~ if id == glyph_code then
---~ local attr = n[attribute]
---~ if attr and attr > 0 then
---~ local d = chardirs[n.char]
---~ if d == "r" or d == "al" then -- override
---~ return true
---~ end
---~ end
---~ end
---~ end
---~ return false
---~ end
-
---~ function directions.enable()
---~ tasks.enableaction("processors","directions.handler")
---~ end
-
-local enabled = false
-
-function directions.set(n) -- todo: names and numbers
- if not enabled then
- if trace_directions then
- report_breakpoints("enabling directions handler")
- end
- tasks.enableaction("processors","typesetters.directions.handler")
- enabled = true
- end
- if not n or n == 0 then
- n = unsetvalue
- -- maybe tracing
- end
- texattribute[a_directions] = n
-end
-
-commands.setdirection = directions.set
-
-directions.handler = nodes.installattributehandler {
- name = "directions",
- namespace = directions,
- processor = directions.process,
-}
+if not modules then modules = { } end modules ['typo-dir'] = {
+ version = 1.001,
+ comment = "companion to typo-dir.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- todo: also use end_of_math here?
+
+local next, type = next, type
+local format, insert, sub, find, match = string.format, table.insert, string.sub, string.find, string.match
+local utfchar = utf.char
+
+-- vertical space handler
+
+local nodes, node = nodes, node
+
+local trace_directions = false trackers.register("typesetters.directions", function(v) trace_directions = v end)
+
+local report_directions = logs.reporter("typesetting","directions")
+
+local traverse_id = node.traverse_id
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local remove_node = nodes.remove
+
+local texattribute = tex.attribute
+local unsetvalue = attributes.unsetvalue
+
+local nodecodes = nodes.nodecodes
+local whatcodes = nodes.whatcodes
+local mathcodes = nodes.mathcodes
+
+local tasks = nodes.tasks
+
+local glyph_code = nodecodes.glyph
+local whatsit_code = nodecodes.whatsit
+local math_code = nodecodes.math
+
+local localpar_code = whatcodes.localpar
+local dir_code = whatcodes.dir
+
+local nodepool = nodes.pool
+
+local new_textdir = nodepool.textdir
+
+local beginmath_code = mathcodes.beginmath
+local endmath_code = mathcodes.endmath
+
+local fonthashes = fonts.hashes
+local fontdata = fonthashes.identifiers
+local fontchar = fonthashes.characters
+
+local chardata = characters.data
+local chardirs = characters.directions -- maybe make a special mirror table
+
+--~ Analysis by Idris:
+--~
+--~ 1. Assuming the reading- vs word-order distinction (bidi-char types) is governing;
+--~ 2. Assuming that 'ARAB' represents an actual arabic string in raw input order, not word-order;
+--~ 3. Assuming that 'BARA' represents the correct RL word order;
+--~
+--~ Then we have, with input: LATIN ARAB
+--~
+--~ \textdir TLT LATIN ARAB => LATIN BARA
+--~ \textdir TRT LATIN ARAB => LATIN BARA
+--~ \textdir TRT LRO LATIN ARAB => LATIN ARAB
+--~ \textdir TLT LRO LATIN ARAB => LATIN ARAB
+--~ \textdir TLT RLO LATIN ARAB => NITAL ARAB
+--~ \textdir TRT RLO LATIN ARAB => NITAL ARAB
+
+-- elseif d == "es" then -- European Number Separator
+-- elseif d == "et" then -- European Number Terminator
+-- elseif d == "cs" then -- Common Number Separator
+-- elseif d == "nsm" then -- Non-Spacing Mark
+-- elseif d == "bn" then -- Boundary Neutral
+-- elseif d == "b" then -- Paragraph Separator
+-- elseif d == "s" then -- Segment Separator
+-- elseif d == "ws" then -- Whitespace
+-- elseif d == "on" then -- Other Neutrals
+
+typesetters.directions = typesetters.directions or { }
+local directions = typesetters.directions
+
+local a_state = attributes.private('state')
+local a_directions = attributes.private('directions')
+
+local skipmath = true
+local strip = false
+
+-- todo: delayed inserts here
+-- todo: get rid of local functions here
+
+-- beware, math adds whatsits afterwards so that will mess things up
+
+local finish, autodir, embedded, override, done = nil, 0, 0, 0, false
+local list, glyphs = nil, false
+local finished, finidir, finipos = nil, nil, 1
+local head, current, inserted = nil, nil, nil
+
+local function finish_auto_before()
+ head, inserted = insert_node_before(head,current,new_textdir("-"..finish))
+ finished, finidir = inserted, finish
+ if trace_directions then
+ insert(list,#list,format("auto finish inserted before: %s",finish))
+ finipos = #list-1
+ end
+ finish, autodir, done = nil, 0, true
+end
+
+local function finish_auto_after()
+ head, current = insert_node_after(head,current,new_textdir("-"..finish))
+ finished, finidir = current, finish
+ if trace_directions then
+ list[#list+1] = format("auto finish inserted after: %s",finish)
+ finipos = #list
+ end
+ finish, autodir, done = nil, 0, true
+end
+
+local function force_auto_left_before()
+ if finish then
+ finish_auto_before()
+ end
+ if embedded >= 0 then
+ finish, autodir, done = "TLT", 1, true
+ else
+ finish, autodir, done = "TRT", -1, true
+ end
+ if finidir == finish then
+ head = remove_node(head,finished,true)
+ if trace_directions then
+ list[finipos] = list[finipos] .. " (deleted afterwards)"
+ insert(list,#list,format("start text dir %s (embedded: %s)",finish,embedded))
+ end
+ else
+ head, inserted = insert_node_before(head,current,new_textdir("+"..finish))
+ if trace_directions then
+ insert(list,#list,format("start text dir %s (embedded: %s)",finish,embedded))
+ end
+ end
+end
+
+local function force_auto_right_before()
+ if finish then
+ finish_auto_before()
+ end
+ if embedded <= 0 then
+ finish, autodir, done = "TRT", -1, true
+ else
+ finish, autodir, done = "TLT", 1, true
+ end
+ if finidir == finish then
+ head = remove_node(head,finished,true)
+ if trace_directions then
+ list[finipos] = list[finipos] .. " (deleted afterwards)"
+ insert(list,#list,format("start text dir %s (embedded: %s)",finish,embedded))
+ end
+ else
+ head, inserted = insert_node_before(head,current,new_textdir("+"..finish))
+ if trace_directions then
+ insert(list,#list,format("start text dir %s (embedded: %s)",finish,embedded))
+ end
+ end
+end
+
+-- todo: use new dir functions
+
+local s_isol = fonts.analyzers.states.isol
+
+function directions.process(namespace,attribute,start) -- todo: make faster
+ if not start.next then
+ return start, false
+ end
+ head, current, inserted = start, start, nil
+ finish, autodir, embedded, override, done = nil, 0, 0, 0, false
+ list, glyphs = trace_directions and { }, false
+ finished, finidir, finipos = nil, nil, 1
+ local stack, top, obsolete = { }, 0, { }
+ local lro, rlo, prevattr, inmath = false, false, 0, false
+ while current do
+ local id = current.id
+ if skipmath and id == math_code then
+ local subtype = current.subtype
+ if subtype == beginmath_code then
+ inmath = true
+ elseif subtype == endmath_code then
+ inmath = false
+ else
+ -- todo
+ end
+ current = current.next
+ elseif inmath then
+ current = current.next
+ else
+ local attr = current[attribute]
+ if attr and attr > 0 then
+ -- current[attribute] = unsetvalue -- slow, needed?
+ if attr == 1 then
+ -- bidi parsing mode
+ elseif attr ~= prevattr then
+ -- no pop, grouped driven (2=normal,3=lro,4=rlo)
+ if attr == 3 then
+ if trace_directions then
+ list[#list+1] = format("override right -> left (lro) (bidi=%s)",attr)
+ end
+ lro, rlo = true, false
+ elseif attr == 4 then
+ if trace_directions then
+ list[#list+1] = format("override left -> right (rlo) (bidi=%s)",attr)
+ end
+ lro, rlo = false, true
+ else
+ if trace_directions and current ~= head then
+ list[#list+1] = format("override reset (bidi=%s)",attr)
+ end
+ lro, rlo = false, false
+ end
+ prevattr = attr
+ end
+ end
+ if id == glyph_code then
+ glyphs = true
+ if attr and attr > 0 then
+ local char = current.char
+ local d = chardirs[char]
+ if rlo or override > 0 then
+ if d == "l" then
+ if trace_directions then
+ list[#list+1] = format("char %s (%s / U+%04X) of class %s overidden to r (bidi=%s)",utfchar(char),char,char,d,attr)
+ end
+ d = "r"
+ elseif trace_directions then
+ if d == "lro" or d == "rlo" or d == "pdf" then -- else side effects on terminal
+ list[#list+1] = format("override char of class %s (bidi=%s)",d,attr)
+ else -- todo: rle lre
+ list[#list+1] = format("char %s (%s / U+%04X) of class %s (bidi=%s)",utfchar(char),char,char,d,attr)
+ end
+ end
+ elseif lro or override < 0 then
+ if d == "r" or d == "al" then
+ current[a_state] = s_isol -- maybe better have a special bidi attr value -> override (9) -> todo
+ if trace_directions then
+ list[#list+1] = format("char %s (%s / U+%04X) of class %s overidden to l (bidi=%s) (state=isol)",utfchar(char),char,char,d,attr)
+ end
+ d = "l"
+ elseif trace_directions then
+ if d == "lro" or d == "rlo" or d == "pdf" then -- else side effects on terminal
+ list[#list+1] = format("override char of class %s (bidi=%s)",d,attr)
+ else -- todo: rle lre
+ list[#list+1] = format("char %s (%s / U+%04X) of class %s (bidi=%s)",utfchar(char),char,char,d,attr)
+ end
+ end
+ elseif trace_directions then
+ if d == "lro" or d == "rlo" or d == "pdf" then -- else side effects on terminal
+ list[#list+1] = format("override char of class %s (bidi=%s)",d,attr)
+ else -- todo: rle lre
+ list[#list+1] = format("char %s (%s / U+%04X) of class %s (bidi=%s)",utfchar(char),char,char,d,attr)
+ end
+ end
+ if d == "on" then
+ local mirror = chardata[char].mirror -- maybe make a special mirror table
+ if mirror and fontchar[current.font][mirror] then
+ -- todo: set attribute
+ if autodir < 0 then
+ current.char = mirror
+ done = true
+ --~ elseif left or autodir > 0 then
+ --~ if not is_right(current.prev) then
+ --~ current.char = mirror
+ --~ done = true
+ --~ end
+ end
+ end
+ elseif d == "l" or d == "en" then -- european number
+ if autodir <= 0 then -- could be option
+ force_auto_left_before()
+ end
+ elseif d == "r" or d == "al" then -- arabic number
+ if autodir >= 0 then
+ force_auto_right_before()
+ end
+ elseif d == "an" then -- arabic number
+ -- actually this is language dependent ...
+-- if autodir <= 0 then
+-- force_auto_left_before()
+-- end
+ if autodir >= 0 then
+ force_auto_right_before()
+ end
+ elseif d == "lro" then -- Left-to-Right Override -> right becomes left
+ if trace_directions then
+ list[#list+1] = "override right -> left"
+ end
+ top = top + 1
+ stack[top] = { override, embedded }
+ override = -1
+ obsolete[#obsolete+1] = current
+ elseif d == "rlo" then -- Right-to-Left Override -> left becomes right
+ if trace_directions then
+ list[#list+1] = "override left -> right"
+ end
+ top = top + 1
+ stack[top] = { override, embedded }
+ override = 1
+ obsolete[#obsolete+1] = current
+ elseif d == "lre" then -- Left-to-Right Embedding -> TLT
+ if trace_directions then
+ list[#list+1] = "embedding left -> right"
+ end
+ top = top + 1
+ stack[top] = { override, embedded }
+ embedded = 1
+ obsolete[#obsolete+1] = current
+ elseif d == "rle" then -- Right-to-Left Embedding -> TRT
+ if trace_directions then
+ list[#list+1] = "embedding right -> left"
+ end
+ top = top + 1
+ stack[top] = { override, embedded }
+ embedded = -1 -- was 1
+ obsolete[#obsolete+1] = current
+ elseif d == "pdf" then -- Pop Directional Format
+ -- override = 0
+ if top > 0 then
+ local s = stack[top]
+ override, embedded = s[1], s[2]
+ top = top - 1
+ if trace_directions then
+ list[#list+1] = format("state: override: %s, embedded: %s, autodir: %s",override,embedded,autodir)
+ end
+ else
+ if trace_directions then
+ list[#list+1] = "pop (error, too many pops)"
+ end
+ end
+ obsolete[#obsolete+1] = current
+ end
+ elseif trace_directions then
+ local char = current.char
+ local d = chardirs[char]
+ list[#list+1] = format("char %s (%s / U+%04X) of class %s (no bidi)",utfchar(char),char,char,d or "?")
+ end
+ elseif id == whatsit_code then
+ if finish then
+ finish_auto_before()
+ end
+ local subtype = current.subtype
+ if subtype == localpar_code then
+ local dir = current.dir
+ local d = sub(dir,2,2)
+ if d == 'R' then -- find(dir,".R.") / dir == "TRT"
+ autodir = -1
+ else
+ autodir = 1
+ end
+ -- embedded = autodir
+ if trace_directions then
+ list[#list+1] = format("pardir %s",dir)
+ end
+ elseif subtype == dir_code then
+ local dir = current.dir
+ -- local sign = sub(dir,1,1)
+ -- local dire = sub(dir,3,3)
+ local sign, dire = match(dir,"^(.).(.)")
+ if dire == "R" then
+ if sign == "+" then
+ finish, autodir = "TRT", -1
+ else
+ finish, autodir = nil, 0
+ end
+ else
+ if sign == "+" then
+ finish, autodir = "TLT", 1
+ else
+ finish, autodir = nil, 0
+ end
+ end
+ if trace_directions then
+ list[#list+1] = format("textdir %s",dir)
+ end
+ end
+ else
+ if trace_directions then
+ list[#list+1] = format("node %s (subtype %s)",nodecodes[id],current.subtype)
+ end
+ if finish then
+ finish_auto_before()
+ end
+ end
+ local cn = current.next
+ if not cn then
+ if finish then
+ finish_auto_after()
+ end
+ end
+ current = cn
+ end
+ end
+ if trace_directions and glyphs then
+ report_directions("start log")
+ for i=1,#list do
+ report_directions("%02i: %s",i,list[i])
+ end
+ report_directions("stop log")
+ end
+ if done and strip then
+ local n = #obsolete
+ if n > 0 then
+ for i=1,n do
+ remove_node(head,obsolete[i],true)
+ end
+ report_directions("%s character nodes removed",n)
+ end
+ end
+ return head, done
+end
+
+--~ local function is_right(n) -- keep !
+--~ if n then
+--~ local id = n.id
+--~ if id == glyph_code then
+--~ local attr = n[attribute]
+--~ if attr and attr > 0 then
+--~ local d = chardirs[n.char]
+--~ if d == "r" or d == "al" then -- override
+--~ return true
+--~ end
+--~ end
+--~ end
+--~ end
+--~ return false
+--~ end
+
+--~ function directions.enable()
+--~ tasks.enableaction("processors","directions.handler")
+--~ end
+
+local enabled = false
+
+function directions.set(n) -- todo: names and numbers
+ if not enabled then
+ if trace_directions then
+ report_directions("enabling directions handler")
+ end
+ tasks.enableaction("processors","typesetters.directions.handler")
+ enabled = true
+ end
+ if not n or n == 0 then
+ n = unsetvalue
+ -- maybe tracing
+ end
+ texattribute[a_directions] = n
+end
+
+commands.setdirection = directions.set
+
+directions.handler = nodes.installattributehandler {
+ name = "directions",
+ namespace = directions,
+ processor = directions.process,
+}
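
The explicit bidi controls in the processor above (lro, rlo, lre, rle, pdf) are handled with a small stack: every opening control saves the current override/embedded pair and installs a new one, and pdf pops the saved pair back, with a guard against popping an empty stack. The standalone sketch below reproduces just that bookkeeping; the character classes are passed in directly instead of being looked up per glyph in characters.directions.

    -- standalone sketch of the override/embedding stack in typo-dir
    -- (classes are fed in directly; the real code resolves them per glyph)

    local stack, top = { }, 0
    local override, embedded = 0, 0

    local function control(d)
        if d == "lro" or d == "rlo" or d == "lre" or d == "rle" then
            top = top + 1
            stack[top] = { override, embedded }      -- save the current state
            if     d == "lro" then override = -1
            elseif d == "rlo" then override =  1
            elseif d == "lre" then embedded =  1
            else                   embedded = -1     -- rle
            end
        elseif d == "pdf" then
            if top > 0 then
                override, embedded = stack[top][1], stack[top][2]
                top = top - 1
            else
                print("pop (error, too many pops)")  -- same guard as above
            end
        end
    end

    control("rlo") print(override, embedded)         --> 1   0
    control("lre") print(override, embedded)         --> 1   1
    control("pdf") print(override, embedded)         --> 1   0
    control("pdf") print(override, embedded)         --> 0   0
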
diff --git a/tex/context/base/typo-ini.lua b/tex/context/base/typo-ini.lua
index c45d29664..42c752c31 100644
--- a/tex/context/base/typo-ini.lua
+++ b/tex/context/base/typo-ini.lua
@@ -1,11 +1,11 @@
-if not modules then modules = { } end modules ['typo-ini'] = {
- version = 1.001,
- comment = "companion to typo-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- nothing yet
-
-typesetters = typesetters or { }
+if not modules then modules = { } end modules ['typo-ini'] = {
+ version = 1.001,
+ comment = "companion to typo-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- nothing yet
+
+typesetters = typesetters or { }
diff --git a/tex/context/base/typo-itc.lua b/tex/context/base/typo-itc.lua
index b39ea2f23..bee2cf41e 100644
--- a/tex/context/base/typo-itc.lua
+++ b/tex/context/base/typo-itc.lua
@@ -1,256 +1,256 @@
-if not modules then modules = { } end modules ['typo-itc'] = {
- version = 1.001,
- comment = "companion to typo-itc.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local utfchar = utf.char
-
-local trace_italics = false trackers.register("typesetters.italics", function(v) trace_italics = v end)
-
-local report_italics = logs.reporter("nodes","italics")
-
-typesetters.italics = typesetters.italics or { }
-local italics = typesetters.italics
-
-local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
-local kern_code = nodecodes.kern
-local glue_code = nodecodes.glue
-local disc_code = nodecodes.disc
-local math_code = nodecodes.math
-
-local tasks = nodes.tasks
-
-local insert_node_after = node.insert_after
-local delete_node = nodes.delete
-local end_of_math = node.end_of_math
-
-local texattribute = tex.attribute
-local a_italics = attributes.private("italics")
-local unsetvalue = attributes.unsetvalue
-
-local new_correction_kern = nodes.pool.fontkern
-local new_correction_glue = nodes.pool.glue
-
-local fonthashes = fonts.hashes
-local fontdata = fonthashes.identifiers
-local italicsdata = fonthashes.italics
-
-local forcedvariant = false
-
-function typesetters.italics.forcevariant(variant)
- forcedvariant = variant
-end
-
-local function setitalicinfont(font,char)
- local tfmdata = fontdata[font]
- local character = tfmdata.characters[char]
- if character then
- local italic = character.italic_correction
- if not italic then
- local autoitalicamount = tfmdata.properties.autoitalicamount or 0
- if autoitalicamount ~= 0 then
- local description = tfmdata.descriptions[char]
- if description then
- italic = description.italic
- if not italic then
- local boundingbox = description.boundingbox
- italic = boundingbox[3] - description.width + autoitalicamount
- if italic < 0 then -- < 0 indicates no overshoot or a very small auto italic
- italic = 0
- end
- end
- if italic ~= 0 then
- italic = italic * tfmdata.parameters.hfactor
- end
- end
- end
- if trace_italics then
- report_italics("setting italic correction of %C of font %a to %p",char,font,italic)
- end
- character.italic_correction = italic or 0
- end
- return italic
- else
- return 0
- end
-end
-
--- todo: clear attribute
-
-local function process(namespace,attribute,head)
- local done = false
- local italic = 0
- local lastfont = nil
- local lastattr = nil
- local previous = nil
- local prevchar = nil
- local current = head
- local inserted = nil
- while current do
- local id = current.id
- if id == glyph_code then
- local font = current.font
- local char = current.char
- local data = italicsdata[font]
- if font ~= lastfont then
- if italic ~= 0 then
- if data then
- if trace_italics then
- report_italics("ignoring %p between italic %C and italic %C",italic,prevchar,char)
- end
- else
- if trace_italics then
- report_italics("inserting %p between italic %C and regular %C",italic,prevchar,char)
- end
- insert_node_after(head,previous,new_correction_kern(italic))
- done = true
- end
- elseif inserted and data then
- if trace_italics then
- report_italics("deleting last correction before %C",char)
- end
- delete_node(head,inserted)
- else
- -- nothing
- end
- lastfont = font
- end
- if data then
- local attr = forcedvariant or current[attribute]
- if attr and attr > 0 then
- local cd = data[char]
- if not cd then
- -- this really can happen
- italic = 0
- else
- italic = cd.italic or cd.italic_correction
- if not italic then
- italic = setitalicinfont(font,char) -- calculated once
- -- italic = 0
- end
- if italic ~= 0 then
- lastfont = font
- lastattr = attr
- previous = current
- prevchar = char
- end
- end
- else
- italic = 0
- end
- else
- italic = 0
- end
- inserted = nil
- elseif id == disc_code then
- -- skip
- elseif id == kern_code then
- inserted = nil
- italic = 0
- elseif id == glue_code then
- if italic ~= 0 then
- if trace_italics then
- report_italics("inserting %p between italic %C and glue",italic,prevchar)
- end
- inserted = new_correction_glue(italic) -- maybe just add ? else problem with penalties
- insert_node_after(head,previous,inserted)
- italic = 0
- done = true
- end
- elseif id == math_code then
- current = end_of_math(current)
- elseif italic ~= 0 then
- if trace_italics then
- report_italics("inserting %p between italic %C and whatever",italic,prevchar)
- end
- inserted = nil
- insert_node_after(head,previous,new_correction_kern(italic))
- italic = 0
- done = true
- end
- current = current.next
- end
- if italic ~= 0 and lastattr > 1 then -- more control is needed here
- if trace_italics then
- report_italics("inserting %p between italic %C and end of list",italic,prevchar)
- end
- insert_node_after(head,previous,new_correction_kern(italic))
- done = true
- end
- return head, done
-end
-
-local enable
-
-enable = function()
- tasks.enableaction("processors","typesetters.italics.handler")
- if trace_italics then
- report_italics("enabling text italics")
- end
- enable = false
-end
-
-function italics.set(n)
- if enable then
- enable()
- end
- if n == variables.reset then
- texattribute[a_italics] = unsetvalue
- else
- texattribute[a_italics] = tonumber(n) or unsetvalue
- end
-end
-
-function italics.reset()
- texattribute[a_italics] = unsetvalue
-end
-
-italics.handler = nodes.installattributehandler {
- name = "italics",
- namespace = italics,
- processor = process,
-}
-
-local variables = interfaces.variables
-local settings_to_hash = utilities.parsers.settings_to_hash
-
-function commands.setupitaliccorrection(option) -- no grouping !
- if enable then
- enable()
- end
- local options = settings_to_hash(option)
- local variant = unsetvalue
- if options[variables.text] then
- variant = 1
- elseif options[variables.always] then
- variant = 2
- end
- if options[variables.global] then
- forcedvariant = variant
- texattribute[a_italics] = unsetvalue
- else
- forcedvariant = false
- texattribute[a_italics] = variant
- end
- if trace_italics then
- report_italics("forcing %a, variant %a",forcedvariant,variant ~= unsetvalue and variant)
- end
-end
-
--- for manuals:
-
-local stack = { }
-
-function commands.pushitaliccorrection()
- table.insert(stack,{forcedvariant, texattribute[a_italics] })
-end
-
-function commands.popitaliccorrection()
- local top = table.remove(stack)
- forcedvariant = top[1]
- texattribute[a_italics] = top[2]
-end
+if not modules then modules = { } end modules ['typo-itc'] = {
+ version = 1.001,
+ comment = "companion to typo-itc.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local utfchar = utf.char
+
+local trace_italics = false trackers.register("typesetters.italics", function(v) trace_italics = v end)
+
+local report_italics = logs.reporter("nodes","italics")
+
+typesetters.italics = typesetters.italics or { }
+local italics = typesetters.italics
+
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
+local kern_code = nodecodes.kern
+local glue_code = nodecodes.glue
+local disc_code = nodecodes.disc
+local math_code = nodecodes.math
+
+local tasks = nodes.tasks
+
+local insert_node_after = node.insert_after
+local delete_node = nodes.delete
+local end_of_math = node.end_of_math
+
+local texattribute = tex.attribute
+local a_italics = attributes.private("italics")
+local unsetvalue = attributes.unsetvalue
+
+local new_correction_kern = nodes.pool.fontkern
+local new_correction_glue = nodes.pool.glue
+
+local fonthashes = fonts.hashes
+local fontdata = fonthashes.identifiers
+local italicsdata = fonthashes.italics
+
+local forcedvariant = false
+
+function typesetters.italics.forcevariant(variant)
+ forcedvariant = variant
+end
+
+local function setitalicinfont(font,char)
+ local tfmdata = fontdata[font]
+ local character = tfmdata.characters[char]
+ if character then
+ local italic = character.italic_correction
+ if not italic then
+ local autoitalicamount = tfmdata.properties.autoitalicamount or 0
+ if autoitalicamount ~= 0 then
+ local description = tfmdata.descriptions[char]
+ if description then
+ italic = description.italic
+ if not italic then
+ local boundingbox = description.boundingbox
+ italic = boundingbox[3] - description.width + autoitalicamount
+ if italic < 0 then -- < 0 indicates no overshoot or a very small auto italic
+ italic = 0
+ end
+ end
+ if italic ~= 0 then
+ italic = italic * tfmdata.parameters.hfactor
+ end
+ end
+ end
+ if trace_italics then
+ report_italics("setting italic correction of %C of font %a to %p",char,font,italic)
+ end
+ character.italic_correction = italic or 0
+ end
+ return italic
+ else
+ return 0
+ end
+end
+
+-- todo: clear attribute
+
+local function process(namespace,attribute,head)
+ local done = false
+ local italic = 0
+ local lastfont = nil
+ local lastattr = nil
+ local previous = nil
+ local prevchar = nil
+ local current = head
+ local inserted = nil
+ while current do
+ local id = current.id
+ if id == glyph_code then
+ local font = current.font
+ local char = current.char
+ local data = italicsdata[font]
+ if font ~= lastfont then
+ if italic ~= 0 then
+ if data then
+ if trace_italics then
+ report_italics("ignoring %p between italic %C and italic %C",italic,prevchar,char)
+ end
+ else
+ if trace_italics then
+ report_italics("inserting %p between italic %C and regular %C",italic,prevchar,char)
+ end
+ insert_node_after(head,previous,new_correction_kern(italic))
+ done = true
+ end
+ elseif inserted and data then
+ if trace_italics then
+ report_italics("deleting last correction before %C",char)
+ end
+ delete_node(head,inserted)
+ else
+ -- nothing
+ end
+ lastfont = font
+ end
+ if data then
+ local attr = forcedvariant or current[attribute]
+ if attr and attr > 0 then
+ local cd = data[char]
+ if not cd then
+ -- this really can happen
+ italic = 0
+ else
+ italic = cd.italic or cd.italic_correction
+ if not italic then
+ italic = setitalicinfont(font,char) -- calculated once
+ -- italic = 0
+ end
+ if italic ~= 0 then
+ lastfont = font
+ lastattr = attr
+ previous = current
+ prevchar = char
+ end
+ end
+ else
+ italic = 0
+ end
+ else
+ italic = 0
+ end
+ inserted = nil
+ elseif id == disc_code then
+ -- skip
+ elseif id == kern_code then
+ inserted = nil
+ italic = 0
+ elseif id == glue_code then
+ if italic ~= 0 then
+ if trace_italics then
+ report_italics("inserting %p between italic %C and glue",italic,prevchar)
+ end
+ inserted = new_correction_glue(italic) -- maybe just add ? else problem with penalties
+ insert_node_after(head,previous,inserted)
+ italic = 0
+ done = true
+ end
+ elseif id == math_code then
+ current = end_of_math(current)
+ elseif italic ~= 0 then
+ if trace_italics then
+ report_italics("inserting %p between italic %C and whatever",italic,prevchar)
+ end
+ inserted = nil
+ insert_node_after(head,previous,new_correction_kern(italic))
+ italic = 0
+ done = true
+ end
+ current = current.next
+ end
+ if italic ~= 0 and lastattr > 1 then -- more control is needed here
+ if trace_italics then
+ report_italics("inserting %p between italic %C and end of list",italic,prevchar)
+ end
+ insert_node_after(head,previous,new_correction_kern(italic))
+ done = true
+ end
+ return head, done
+end
+
+local enable
+
+enable = function()
+ tasks.enableaction("processors","typesetters.italics.handler")
+ if trace_italics then
+ report_italics("enabling text italics")
+ end
+ enable = false
+end
+
+function italics.set(n)
+ if enable then
+ enable()
+ end
+ if n == interfaces.variables.reset then
+ texattribute[a_italics] = unsetvalue
+ else
+ texattribute[a_italics] = tonumber(n) or unsetvalue
+ end
+end
+
+function italics.reset()
+ texattribute[a_italics] = unsetvalue
+end
+
+italics.handler = nodes.installattributehandler {
+ name = "italics",
+ namespace = italics,
+ processor = process,
+}
+
+local variables = interfaces.variables
+local settings_to_hash = utilities.parsers.settings_to_hash
+
+function commands.setupitaliccorrection(option) -- no grouping !
+ if enable then
+ enable()
+ end
+ local options = settings_to_hash(option)
+ local variant = unsetvalue
+ if options[variables.text] then
+ variant = 1
+ elseif options[variables.always] then
+ variant = 2
+ end
+ if options[variables.global] then
+ forcedvariant = variant
+ texattribute[a_italics] = unsetvalue
+ else
+ forcedvariant = false
+ texattribute[a_italics] = variant
+ end
+ if trace_italics then
+ report_italics("forcing %a, variant %a",forcedvariant,variant ~= unsetvalue and variant)
+ end
+end
+
+-- for manuals:
+
+local stack = { }
+
+function commands.pushitaliccorrection()
+ table.insert(stack,{forcedvariant, texattribute[a_italics] })
+end
+
+function commands.popitaliccorrection()
+ local top = table.remove(stack)
+ forcedvariant = top[1]
+ texattribute[a_italics] = top[2]
+end
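
When a character carries no italic correction of its own, setitalicinfont above derives one from the shape: the amount the bounding box sticks out to the right of the advance width (boundingbox[3] - width) plus the font's autoitalicamount, clipped at zero and scaled by the horizontal factor. The sketch below redoes that computation with invented numbers so the formula is easy to check; none of the values are taken from an actual font.

    -- numeric sketch of the auto italic correction in setitalicinfont
    -- (design units and scale factor are invented for the example)

    local function autoitalic(boundingbox, width, autoitalicamount, hfactor)
        local italic = boundingbox[3] - width + autoitalicamount
        if italic < 0 then                -- no overshoot, or a very small one
            italic = 0
        end
        if italic ~= 0 then
            italic = italic * hfactor     -- design units to scaled points
        end
        return italic
    end

    -- glyph overshooting its advance width by 30 units, autoitalicamount 20:
    print(autoitalic({ 10, -200, 530, 700 }, 500, 20, 65.536))   --> 3276.8
    -- glyph staying inside its advance width:
    print(autoitalic({ 10, -200, 450, 700 }, 500, 20, 65.536))   --> 0
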
diff --git a/tex/context/base/typo-krn.lua b/tex/context/base/typo-krn.lua
index fb28d3b2d..eac876262 100644
--- a/tex/context/base/typo-krn.lua
+++ b/tex/context/base/typo-krn.lua
@@ -1,335 +1,335 @@
-if not modules then modules = { } end modules ['typo-krn'] = {
- version = 1.001,
- comment = "companion to typo-krn.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local next, type, tonumber = next, type, tonumber
-local utfchar = utf.char
-
-local nodes, node, fonts = nodes, node, fonts
-
-local find_node_tail = node.tail or node.slide
-local free_node = node.free
-local free_nodelist = node.flush_list
-local copy_node = node.copy
-local copy_nodelist = node.copy_list
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local end_of_math = node.end_of_math
-
-local texattribute = tex.attribute
-local unsetvalue = attributes.unsetvalue
-
-local nodepool = nodes.pool
-local tasks = nodes.tasks
-
-local new_gluespec = nodepool.gluespec
-local new_kern = nodepool.kern
-local new_glue = nodepool.glue
-
-local nodecodes = nodes.nodecodes
-local kerncodes = nodes.kerncodes
-local skipcodes = nodes.skipcodes
-
-local glyph_code = nodecodes.glyph
-local kern_code = nodecodes.kern
-local disc_code = nodecodes.disc
-local glue_code = nodecodes.glue
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local math_code = nodecodes.math
-
-local kerning_code = kerncodes.kerning
-local userkern_code = kerncodes.userkern
-local userskip_code = skipcodes.userskip
-local spaceskip_code = skipcodes.spaceskip
-local xspaceskip_code = skipcodes.xspaceskip
-
-local fonthashes = fonts.hashes
-local fontdata = fonthashes.identifiers
-local chardata = fonthashes.characters
-local quaddata = fonthashes.quads
-local markdata = fonthashes.marks
-
-local v_max = interfaces.variables.max
-
-typesetters = typesetters or { }
-local typesetters = typesetters
-
-typesetters.kerns = typesetters.kerns or { }
-local kerns = typesetters.kerns
-
-kerns.mapping = kerns.mapping or { }
-kerns.factors = kerns.factors or { }
-local a_kerns = attributes.private("kern")
-local a_fontkern = attributes.private('fontkern')
-kerns.attribute = kerns.attribute
-
-storage.register("typesetters/kerns/mapping", kerns.mapping, "typesetters.kerns.mapping")
-storage.register("typesetters/kerns/factors", kerns.factors, "typesetters.kerns.factors")
-
-local mapping = kerns.mapping
-local factors = kerns.factors
-
--- one must use liga=no and mode=base and kern=yes
--- use more helpers
--- make sure it runs after all others
--- there will be a width adaptor field in nodes so this will change
--- todo: interchar kerns / disc nodes / can be made faster
-
-local gluefactor = 4 -- assumes quad = .5 enspace
-
-kerns.keepligature = false -- just for fun (todo: control setting with key/value)
-kerns.keeptogether = false -- just for fun (todo: control setting with key/value)
-
--- can be optimized .. the prev thing .. but hardly worth the effort
-
-local function kern_injector(fillup,kern)
- if fillup then
- local g = new_glue(kern)
- local s = g.spec
- s.stretch = kern
- s.stretch_order = 1
- return g
- else
- return new_kern(kern)
- end
-end
-
-local function spec_injector(fillup,width,stretch,shrink)
- if fillup then
- local s = new_gluespec(width,2*stretch,2*shrink)
- s.stretch_order = 1
- return s
- else
- return new_gluespec(width,stretch,shrink)
- end
-end
-
--- needs checking ... base mode / node mode
-
-local function do_process(namespace,attribute,head,force) -- todo: glue so that we can fully stretch
- local start, done, lastfont = head, false, nil
- local keepligature = kerns.keepligature
- local keeptogether = kerns.keeptogether
- local fillup = false
- while start do
- -- faster to test for attr first
- local attr = force or start[attribute]
- if attr and attr > 0 then
- start[attribute] = unsetvalue
- local krn = mapping[attr]
- if krn == v_max then
- krn = .25
- fillup = true
- else
- fillup = false
- end
- if krn and krn ~= 0 then
- local id = start.id
- if id == glyph_code then
- lastfont = start.font
- local c = start.components
- if c then
- if keepligature and keepligature(start) then
- -- keep 'm
- else
- c = do_process(namespace,attribute,c,attr)
- local s = start
- local p, n = s.prev, s.next
- local tail = find_node_tail(c)
- if p then
- p.next = c
- c.prev = p
- else
- head = c
- end
- if n then
- n.prev = tail
- end
- tail.next = n
- start = c
- s.components = nil
- -- we now leak nodes !
- -- free_node(s)
- done = true
- end
- end
- local prev = start.prev
- if not prev then
- -- skip
- elseif markdata[lastfont][start.char] then
- -- skip
- else
- local pid = prev.id
- if not pid then
- -- nothing
- elseif pid == kern_code then
- if prev.subtype == kerning_code or prev[a_fontkern] then
- if keeptogether and prev.prev.id == glyph_code and keeptogether(prev.prev,start) then -- we could also pass start
- -- keep 'm
- else
- -- not yet ok, as injected kerns can be overlays (from node-inj.lua)
- prev.subtype = userkern_code
- prev.kern = prev.kern + quaddata[lastfont]*krn -- here
- done = true
- end
- end
- elseif pid == glyph_code then
- if prev.font == lastfont then
- local prevchar, lastchar = prev.char, start.char
- if keeptogether and keeptogether(prev,start) then
- -- keep 'm
- else
- local kerns = chardata[lastfont][prevchar].kerns
- local kern = kerns and kerns[lastchar] or 0
- krn = kern + quaddata[lastfont]*krn -- here
- insert_node_before(head,start,kern_injector(fillup,krn))
- done = true
- end
- else
- krn = quaddata[lastfont]*krn -- here
- insert_node_before(head,start,kern_injector(fillup,krn))
- done = true
- end
- elseif pid == disc_code then
- -- a bit too complicated, we can best not copy and just calculate
- -- but we could have multiple glyphs involved so ...
- local disc = prev -- disc
- local pre, post, replace = disc.pre, disc.post, disc.replace
- local prv, nxt = disc.prev, disc.next
- if pre and prv then -- must pair with start.prev
- -- this one happens in most cases
- local before = copy_node(prv)
- pre.prev = before
- before.next = pre
- before.prev = nil
- pre = do_process(namespace,attribute,before,attr)
- pre = pre.next
- pre.prev = nil
- disc.pre = pre
- free_node(before)
- end
- if post and nxt then -- must pair with start
- local after = copy_node(nxt)
- local tail = find_node_tail(post)
- tail.next = after
- after.prev = tail
- after.next = nil
- post = do_process(namespace,attribute,post,attr)
- tail.next = nil
- disc.post = post
- free_node(after)
- end
- if replace and prv and nxt then -- must pair with start and start.prev
- local before = copy_node(prv)
- local after = copy_node(nxt)
- local tail = find_node_tail(replace)
- replace.prev = before
- before.next = replace
- before.prev = nil
- tail.next = after
- after.prev = tail
- after.next = nil
- replace = do_process(namespace,attribute,before,attr)
- replace = replace.next
- replace.prev = nil
- after.prev.next = nil
- disc.replace = replace
- free_node(after)
- free_node(before)
- else
- if prv and prv.id == glyph_code and prv.font == lastfont then
- local prevchar, lastchar = prv.char, start.char
- local kerns = chardata[lastfont][prevchar].kerns
- local kern = kerns and kerns[lastchar] or 0
- krn = kern + quaddata[lastfont]*krn -- here
- else
- krn = quaddata[lastfont]*krn -- here
- end
- disc.replace = kern_injector(false,krn) -- only kerns permitted, no glue
- end
- end
- end
- elseif id == glue_code then
- local subtype = start.subtype
- if subtype == userskip_code or subtype == xspaceskip_code or subtype == spaceskip_code then
- local s = start.spec
- local w = s.width
- if w > 0 then
- local width, stretch, shrink = w+gluefactor*w*krn, s.stretch, s.shrink
- start.spec = spec_injector(fillup,width,stretch*width/w,shrink*width/w)
- done = true
- end
- end
- elseif id == kern_code then
- -- if start.subtype == kerning_code then -- handle with glyphs
- -- local sk = start.kern
- -- if sk > 0 then
- -- start.kern = sk*krn
- -- done = true
- -- end
- -- end
- elseif lastfont and (id == hlist_code or id == vlist_code) then -- todo: lookahead
- local p = start.prev
- if p and p.id ~= glue_code then
- insert_node_before(head,start,kern_injector(fillup,quaddata[lastfont]*krn))
- done = true
- end
- local n = start.next
- if n and n.id ~= glue_code then
- insert_node_after(head,start,kern_injector(fillup,quaddata[lastfont]*krn))
- done = true
- end
- elseif id == math_code then
- start = end_of_math(start)
- end
- end
- end
- if start then
- start = start.next
- end
- end
- return head, done
-end
-
-local enabled = false
-
-function kerns.set(factor)
- if factor ~= v_max then
- factor = tonumber(factor) or 0
- end
- if factor == v_max or factor ~= 0 then
- if not enabled then
- tasks.enableaction("processors","typesetters.kerns.handler")
- enabled = true
- end
- local a = factors[factor]
- if not a then
- a = #mapping + 1
- factors[factors], mapping[a] = a, factor
- end
- factor = a
- else
- factor = unsetvalue
- end
- texattribute[a_kerns] = factor
- return factor
-end
-
-local function process(namespace,attribute,head)
- return do_process(namespace,attribute,head) -- no direct map, because else fourth argument is tail == true
-end
-
-kerns.handler = nodes.installattributehandler {
- name = "kern",
- namespace = kerns,
- processor = process,
-}
-
--- interface
-
-commands.setcharacterkerning = kerns.set
+if not modules then modules = { } end modules ['typo-krn'] = {
+ version = 1.001,
+ comment = "companion to typo-krn.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local next, type, tonumber = next, type, tonumber
+local utfchar = utf.char
+
+local nodes, node, fonts = nodes, node, fonts
+
+local find_node_tail = node.tail or node.slide
+local free_node = node.free
+local free_nodelist = node.flush_list
+local copy_node = node.copy
+local copy_nodelist = node.copy_list
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local end_of_math = node.end_of_math
+
+local texattribute = tex.attribute
+local unsetvalue = attributes.unsetvalue
+
+local nodepool = nodes.pool
+local tasks = nodes.tasks
+
+local new_gluespec = nodepool.gluespec
+local new_kern = nodepool.kern
+local new_glue = nodepool.glue
+
+local nodecodes = nodes.nodecodes
+local kerncodes = nodes.kerncodes
+local skipcodes = nodes.skipcodes
+
+local glyph_code = nodecodes.glyph
+local kern_code = nodecodes.kern
+local disc_code = nodecodes.disc
+local glue_code = nodecodes.glue
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local math_code = nodecodes.math
+
+local kerning_code = kerncodes.kerning
+local userkern_code = kerncodes.userkern
+local userskip_code = skipcodes.userskip
+local spaceskip_code = skipcodes.spaceskip
+local xspaceskip_code = skipcodes.xspaceskip
+
+local fonthashes = fonts.hashes
+local fontdata = fonthashes.identifiers
+local chardata = fonthashes.characters
+local quaddata = fonthashes.quads
+local markdata = fonthashes.marks
+
+local v_max = interfaces.variables.max
+
+typesetters = typesetters or { }
+local typesetters = typesetters
+
+typesetters.kerns = typesetters.kerns or { }
+local kerns = typesetters.kerns
+
+kerns.mapping = kerns.mapping or { }
+kerns.factors = kerns.factors or { }
+local a_kerns = attributes.private("kern")
+local a_fontkern = attributes.private('fontkern')
+kerns.attribute = kerns.attribute
+
+storage.register("typesetters/kerns/mapping", kerns.mapping, "typesetters.kerns.mapping")
+storage.register("typesetters/kerns/factors", kerns.factors, "typesetters.kerns.factors")
+
+local mapping = kerns.mapping
+local factors = kerns.factors
+
+-- one must use liga=no and mode=base and kern=yes
+-- use more helpers
+-- make sure it runs after all others
+-- there will be a width adaptor field in nodes so this will change
+-- todo: interchar kerns / disc nodes / can be made faster
+
+local gluefactor = 4 -- assumes quad = .5 enspace
+
+kerns.keepligature = false -- just for fun (todo: control setting with key/value)
+kerns.keeptogether = false -- just for fun (todo: control setting with key/value)
+
+-- can be optimized .. the prev thing .. but hardly worth the effort
+
+local function kern_injector(fillup,kern)
+ if fillup then
+ local g = new_glue(kern)
+ local s = g.spec
+ s.stretch = kern
+ s.stretch_order = 1
+ return g
+ else
+ return new_kern(kern)
+ end
+end
+
+local function spec_injector(fillup,width,stretch,shrink)
+ if fillup then
+ local s = new_gluespec(width,2*stretch,2*shrink)
+ s.stretch_order = 1
+ return s
+ else
+ return new_gluespec(width,stretch,shrink)
+ end
+end
+
+-- needs checking ... base mode / node mode
+
+local function do_process(namespace,attribute,head,force) -- todo: glue so that we can fully stretch
+ local start, done, lastfont = head, false, nil
+ local keepligature = kerns.keepligature
+ local keeptogether = kerns.keeptogether
+ local fillup = false
+ while start do
+ -- faster to test for attr first
+ local attr = force or start[attribute]
+ if attr and attr > 0 then
+ start[attribute] = unsetvalue
+ local krn = mapping[attr]
+ if krn == v_max then
+ krn = .25
+ fillup = true
+ else
+ fillup = false
+ end
+ if krn and krn ~= 0 then
+ local id = start.id
+ if id == glyph_code then
+ lastfont = start.font
+ local c = start.components
+ if c then
+ if keepligature and keepligature(start) then
+ -- keep 'm
+ else
+ c = do_process(namespace,attribute,c,attr)
+ local s = start
+ local p, n = s.prev, s.next
+ local tail = find_node_tail(c)
+ if p then
+ p.next = c
+ c.prev = p
+ else
+ head = c
+ end
+ if n then
+ n.prev = tail
+ end
+ tail.next = n
+ start = c
+ s.components = nil
+ -- we now leak nodes !
+ -- free_node(s)
+ done = true
+ end
+ end
+ local prev = start.prev
+ if not prev then
+ -- skip
+ elseif markdata[lastfont][start.char] then
+ -- skip
+ else
+ local pid = prev.id
+ if not pid then
+ -- nothing
+ elseif pid == kern_code then
+ if prev.subtype == kerning_code or prev[a_fontkern] then
+ if keeptogether and prev.prev.id == glyph_code and keeptogether(prev.prev,start) then -- we could also pass start
+ -- keep 'm
+ else
+ -- not yet ok, as injected kerns can be overlays (from node-inj.lua)
+ prev.subtype = userkern_code
+ prev.kern = prev.kern + quaddata[lastfont]*krn -- here
+ done = true
+ end
+ end
+ elseif pid == glyph_code then
+ if prev.font == lastfont then
+ local prevchar, lastchar = prev.char, start.char
+ if keeptogether and keeptogether(prev,start) then
+ -- keep 'm
+ else
+ local kerns = chardata[lastfont][prevchar].kerns
+ local kern = kerns and kerns[lastchar] or 0
+ krn = kern + quaddata[lastfont]*krn -- here
+ insert_node_before(head,start,kern_injector(fillup,krn))
+ done = true
+ end
+ else
+ krn = quaddata[lastfont]*krn -- here
+ insert_node_before(head,start,kern_injector(fillup,krn))
+ done = true
+ end
+ elseif pid == disc_code then
+ -- a bit too complicated, we can best not copy and just calculate
+ -- but we could have multiple glyphs involved so ...
+ local disc = prev -- disc
+ local pre, post, replace = disc.pre, disc.post, disc.replace
+ local prv, nxt = disc.prev, disc.next
+ if pre and prv then -- must pair with start.prev
+ -- this one happens in most cases
+ local before = copy_node(prv)
+ pre.prev = before
+ before.next = pre
+ before.prev = nil
+ pre = do_process(namespace,attribute,before,attr)
+ pre = pre.next
+ pre.prev = nil
+ disc.pre = pre
+ free_node(before)
+ end
+ if post and nxt then -- must pair with start
+ local after = copy_node(nxt)
+ local tail = find_node_tail(post)
+ tail.next = after
+ after.prev = tail
+ after.next = nil
+ post = do_process(namespace,attribute,post,attr)
+ tail.next = nil
+ disc.post = post
+ free_node(after)
+ end
+ if replace and prv and nxt then -- must pair with start and start.prev
+ local before = copy_node(prv)
+ local after = copy_node(nxt)
+ local tail = find_node_tail(replace)
+ replace.prev = before
+ before.next = replace
+ before.prev = nil
+ tail.next = after
+ after.prev = tail
+ after.next = nil
+ replace = do_process(namespace,attribute,before,attr)
+ replace = replace.next
+ replace.prev = nil
+ after.prev.next = nil
+ disc.replace = replace
+ free_node(after)
+ free_node(before)
+ else
+ if prv and prv.id == glyph_code and prv.font == lastfont then
+ local prevchar, lastchar = prv.char, start.char
+ local kerns = chardata[lastfont][prevchar].kerns
+ local kern = kerns and kerns[lastchar] or 0
+ krn = kern + quaddata[lastfont]*krn -- here
+ else
+ krn = quaddata[lastfont]*krn -- here
+ end
+ disc.replace = kern_injector(false,krn) -- only kerns permitted, no glue
+ end
+ end
+ end
+ elseif id == glue_code then
+ local subtype = start.subtype
+ if subtype == userskip_code or subtype == xspaceskip_code or subtype == spaceskip_code then
+ local s = start.spec
+ local w = s.width
+ if w > 0 then
+ local width, stretch, shrink = w+gluefactor*w*krn, s.stretch, s.shrink
+ start.spec = spec_injector(fillup,width,stretch*width/w,shrink*width/w)
+ done = true
+ end
+ end
+ elseif id == kern_code then
+ -- if start.subtype == kerning_code then -- handle with glyphs
+ -- local sk = start.kern
+ -- if sk > 0 then
+ -- start.kern = sk*krn
+ -- done = true
+ -- end
+ -- end
+ elseif lastfont and (id == hlist_code or id == vlist_code) then -- todo: lookahead
+ local p = start.prev
+ if p and p.id ~= glue_code then
+ insert_node_before(head,start,kern_injector(fillup,quaddata[lastfont]*krn))
+ done = true
+ end
+ local n = start.next
+ if n and n.id ~= glue_code then
+ insert_node_after(head,start,kern_injector(fillup,quaddata[lastfont]*krn))
+ done = true
+ end
+ elseif id == math_code then
+ start = end_of_math(start)
+ end
+ end
+ end
+ if start then
+ start = start.next
+ end
+ end
+ return head, done
+end
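-- Editorial sketch (not part of the module): the glue branch above widens a
-- space by gluefactor*w*krn and scales stretch and shrink by the same ratio,
-- so the space keeps its relative flexibility. A minimal standalone
-- illustration in plain Lua with made-up values follows.

local gluefactor = 4 -- same assumption as above: quad = .5 enspace

local function adjustspace(width,stretch,shrink,krn)
    local newwidth = width + gluefactor*width*krn
    local ratio = newwidth/width
    return newwidth, stretch*ratio, shrink*ratio
end

-- a space of 196608sp (3pt) with some stretch/shrink and krn = 0.1
print(adjustspace(196608,98304,65536,0.1)) --> 275251.2  137625.6  91750.4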
+
+local enabled = false
+
+function kerns.set(factor)
+ if factor ~= v_max then
+ factor = tonumber(factor) or 0
+ end
+ if factor == v_max or factor ~= 0 then
+ if not enabled then
+ tasks.enableaction("processors","typesetters.kerns.handler")
+ enabled = true
+ end
+ local a = factors[factor]
+ if not a then
+ a = #mapping + 1
+ factors[factor], mapping[a] = a, factor -- factor -> slot and slot -> factor
+ end
+ factor = a
+ else
+ factor = unsetvalue
+ end
+ texattribute[a_kerns] = factor
+ return factor
+end
+
+local function process(namespace,attribute,head)
+ return do_process(namespace,attribute,head) -- no direct map, because else fourth argument is tail == true
+end
+
+kerns.handler = nodes.installattributehandler {
+ name = "kern",
+ namespace = kerns,
+ processor = process,
+}
+
+-- interface
+
+commands.setcharacterkerning = kerns.set
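One detail worth seeing in isolation is the factor interning in kerns.set: every distinct kern factor is assigned a stable slot number, that slot ends up in the node attribute, and do_process resolves it back through mapping[attr]. The sketch below is plain Lua outside TeX; intern and the local mapping/factors tables are illustrative stand-ins for the shared storage registered above.

local mapping = { } -- slot -> factor
local factors = { } -- factor -> slot

local function intern(factor)
    local a = factors[factor]
    if not a then
        a = #mapping + 1
        factors[factor], mapping[a] = a, factor
    end
    return a
end

print(intern(0.1)) --> 1
print(intern(0.2)) --> 2
print(intern(0.1)) --> 1, the slot is reused
print(mapping[2])  --> 0.2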
diff --git a/tex/context/base/typo-lan.lua b/tex/context/base/typo-lan.lua
index 50927f744..a17732900 100644
--- a/tex/context/base/typo-lan.lua
+++ b/tex/context/base/typo-lan.lua
@@ -1,72 +1,72 @@
-if not modules then modules = { } end modules ['typo-lan'] = {
- version = 1.001,
- comment = "companion to typo-lan.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local type, next = type, next
-
-local currentfont = font.current
-local setmetatableindex = table.setmetatableindex
-local utfbyte = utf.byte
-
-local hashes = fonts.hashes
-local fontdata = hashes.characters
-local emwidths = hashes.emwidths
-
-local frequencies = languages.frequencies or { }
-languages.frequencies = frequencies
-
-local frequencydata = { }
-local frequencyfile = string.formatters["lang-frq-%s.lua"]
-local frequencycache = { }
-
-setmetatableindex(frequencydata, function(t,language)
- local fullname = resolvers.findfile(frequencyfile(language))
- local v = fullname ~= "" and dofile(fullname)
- if not v or not v.frequencies then
- v = t.en
- end
- t[language] = v
- return v
-end)
-
-setmetatableindex(frequencycache, function(t,language)
- local dataset = frequencydata[language]
- local frequencies = dataset.frequencies
- if not frequencies then
- return t.en
- end
- local v = { }
- setmetatableindex(v, function(t,font)
- local average = emwidths[font] / 2
- if frequencies then
- local characters = fontdata[font]
- local sum, tot = 0, 0
- for k, v in next, frequencies do
- local character = characters[k] -- characters[type(k) == "number" and k or utfbyte(k)]
- tot = tot + v
- sum = sum + v * (character and character.width or average)
- end
- average = sum / tot -- widths
- end
- t[font] = average
- return average
- end)
- t[language] = v
- return v
-end)
-
-function frequencies.getdata(language)
- return frequencydata[language]
-end
-
-function frequencies.averagecharwidth(language,font)
- return frequencycache[language or "en"][font or currentfont()]
-end
-
-function commands.averagecharwidth(language,font)
- context(frequencycache[language or "en"][font or currentfont()])
-end
+if not modules then modules = { } end modules ['typo-lan'] = {
+ version = 1.001,
+ comment = "companion to typo-lan.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local type, next = type, next
+
+local currentfont = font.current
+local setmetatableindex = table.setmetatableindex
+local utfbyte = utf.byte
+
+local hashes = fonts.hashes
+local fontdata = hashes.characters
+local emwidths = hashes.emwidths
+
+local frequencies = languages.frequencies or { }
+languages.frequencies = frequencies
+
+local frequencydata = { }
+local frequencyfile = string.formatters["lang-frq-%s.lua"]
+local frequencycache = { }
+
+setmetatableindex(frequencydata, function(t,language)
+ local fullname = resolvers.findfile(frequencyfile(language))
+ local v = fullname ~= "" and dofile(fullname)
+ if not v or not v.frequencies then
+ v = t.en
+ end
+ t[language] = v
+ return v
+end)
+
+setmetatableindex(frequencycache, function(t,language)
+ local dataset = frequencydata[language]
+ local frequencies = dataset.frequencies
+ if not frequencies then
+ return t.en
+ end
+ local v = { }
+ setmetatableindex(v, function(t,font)
+ local average = emwidths[font] / 2
+ if frequencies then
+ local characters = fontdata[font]
+ local sum, tot = 0, 0
+ for k, v in next, frequencies do
+ local character = characters[k] -- characters[type(k) == "number" and k or utfbyte(k)]
+ tot = tot + v
+ sum = sum + v * (character and character.width or average)
+ end
+ average = sum / tot -- widths
+ end
+ t[font] = average
+ return average
+ end)
+ t[language] = v
+ return v
+end)
+
+function frequencies.getdata(language)
+ return frequencydata[language]
+end
+
+function frequencies.averagecharwidth(language,font)
+ return frequencycache[language or "en"][font or currentfont()]
+end
+
+function commands.averagecharwidth(language,font)
+ context(frequencycache[language or "en"][font or currentfont()])
+end
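The cache in typo-lan computes a frequency-weighted mean glyph width, average = sum(f_k * w_k) / sum(f_k), with half an em as the fallback for characters the font lacks. The following is a minimal standalone sketch of that weighted average in plain Lua; the function name and the sample tables are made up for illustration.

local function averagecharwidth(frequencies,widths,emwidth)
    local fallback = emwidth/2
    local sum, tot = 0, 0
    for char, freq in next, frequencies do
        tot = tot + freq
        sum = sum + freq * (widths[char] or fallback)
    end
    return tot > 0 and sum/tot or fallback
end

-- hypothetical frequencies (percentages) and widths (scaled points)
print(averagecharwidth(
    { e = 12.7, t = 9.1, a = 8.2, z = 0.1 },
    { e = 291000, t = 220000, a = 287000 }, -- no width for z: fallback applies
    655360
))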
diff --git a/tex/context/base/typo-mar.lua b/tex/context/base/typo-mar.lua
index ec827883d..65b205098 100644
--- a/tex/context/base/typo-mar.lua
+++ b/tex/context/base/typo-mar.lua
@@ -1,879 +1,879 @@
-if not modules then modules = { } end modules ['typo-mar'] = {
- version = 1.001,
- comment = "companion to typo-mar.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- todo:
---
--- * autoleft/right depending on available space (or distance to margin)
--- * stack across paragraphs, but that is messy and one should reconsider
--- using margin data then as also vertical spacing kicks in
--- * floating margin data, with close-to-call anchoring
-
--- -- experiment (does not work, too much interference)
---
--- local pdfprint = pdf.print
--- local format = string.format
---
--- anchors = anchors or { }
---
--- local whatever = { }
--- local factor = (7200/7227)/65536
---
--- function anchors.set(tag)
--- whatever[tag] = { pdf.h, pdf.v }
--- end
---
--- function anchors.reset(tag)
--- whatever[tag] = nil
--- end
---
--- function anchors.startmove(tag,how) -- save/restore nodes but they don't support moves
--- local w = whatever[tag]
--- if not w then
--- -- error
--- elseif how == "horizontal" or how == "h" then
--- pdfprint("page",format(" q 1 0 0 1 %f 0 cm ", (w[1] - pdf.h) * factor))
--- elseif how == "vertical" or how == "v" then
--- pdfprint("page",format(" q 1 0 0 1 0 %f cm ", (w[2] - pdf.v) * factor))
--- else
--- pdfprint("page",format(" q 1 0 0 1 %f %f cm ", (w[1] - pdf.h) * factor, (w[2] - pdf.v) * factor))
--- end
--- end
---
--- function anchors.stopmove(tag)
--- local w = whatever[tag]
--- if not w then
--- -- error
--- else
--- pdfprint("page"," Q ")
--- end
--- end
---
--- local latelua = nodes.pool.latelua
---
--- function anchors.node_set(tag)
--- return latelua(formatters["anchors.set(%q)"](tag))
--- end
---
--- function anchors.node_reset(tag)
--- return latelua(formatters["anchors.reset(%q)"](tag))
--- end
---
--- function anchors.node_start_move(tag,how)
--- return latelua(formatters["anchors.startmove(%q,%q)](tag,how))
--- end
---
--- function anchors.node_stop_move(tag)
--- return latelua(formatters["anchors.stopmove(%q)"](tag))
--- end
-
--- so far
-
-local format, validstring = string.format, string.valid
-local insert, remove = table.insert, table.remove
-local setmetatable, next = setmetatable, next
-
-local attributes, nodes, node, variables = attributes, nodes, node, variables
-
-local trace_margindata = false trackers.register("typesetters.margindata", function(v) trace_margindata = v end)
-local trace_marginstack = false trackers.register("typesetters.margindata.stack", function(v) trace_marginstack = v end)
-local trace_margingroup = false trackers.register("typesetters.margindata.group", function(v) trace_margingroup = v end)
-
-local report_margindata = logs.reporter("typesetters","margindata")
-
-local tasks = nodes.tasks
-local prependaction = tasks.prependaction
-local disableaction = tasks.disableaction
-local enableaction = tasks.enableaction
-
-local variables = interfaces.variables
-
-local conditionals = tex.conditionals
-local systemmodes = tex.systemmodes
-
-local v_top = variables.top
-local v_depth = variables.depth
-local v_local = variables["local"]
-local v_global = variables["global"]
-local v_left = variables.left
-local v_right = variables.right
-local v_flushleft = variables.flushleft
-local v_flushright = variables.flushright
-local v_inner = variables.inner
-local v_outer = variables.outer
-local v_margin = variables.margin
-local v_edge = variables.edge
-local v_default = variables.default
-local v_normal = variables.normal
-local v_yes = variables.yes
-local v_continue = variables.continue
-local v_first = variables.first
-local v_text = variables.text
-local v_column = variables.column
-
-local copy_node_list = node.copy_list
-local slide_nodes = node.slide
-local hpack_nodes = node.hpack -- nodes.fasthpack not really faster here
-local traverse_id = node.traverse_id
-local free_node_list = node.flush_list
-local insert_node_after = node.insert_after
-local insert_node_before = node.insert_before
-
-local concat_nodes = nodes.concat
-
-local nodecodes = nodes.nodecodes
-local listcodes = nodes.listcodes
-local gluecodes = nodes.gluecodes
-local whatsitcodes = nodes.whatsitcodes
-
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local glue_code = nodecodes.glue
-local kern_code = nodecodes.kern
-local penalty_code = nodecodes.penalty
-local whatsit_code = nodecodes.whatsit
-local line_code = listcodes.line
-local cell_code = listcodes.cell
-local alignment_code = listcodes.alignment
-local leftskip_code = gluecodes.leftskip
-local rightskip_code = gluecodes.rightskip
-local userdefined_code = whatsitcodes.userdefined
-
-local dir_code = whatsitcodes.dir
-local localpar_code = whatsitcodes.localpar
-
-local nodepool = nodes.pool
-
-local new_kern = nodepool.kern
-local new_glue = nodepool.glue
-local new_penalty = nodepool.penalty
-local new_stretch = nodepool.stretch
-local new_usernumber = nodepool.usernumber
-local new_latelua = nodepool.latelua
-
-local texcount = tex.count
-local texdimen = tex.dimen
-local texbox = tex.box
-
-local points = number.points
-
-local isleftpage = layouts.status.isleftpage
-local registertogether = builders.paragraphs.registertogether
-
-local jobpositions = job.positions
-local getposition = jobpositions.position
-
-local a_margindata = attributes.private("margindata")
-
-local inline_mark = nodepool.userids["margins.inline"]
-
-local margins = { }
-typesetters.margins = margins
-
-local locations = { v_left, v_right, v_inner, v_outer } -- order might change
-local categories = { }
-local displaystore = { } -- [category][location][scope]
-local inlinestore = { } -- [number]
-local nofsaved = 0
-local nofstored = 0
-local nofinlined = 0
-local nofdelayed = 0
-local h_anchors = 0
-local v_anchors = 0
-
-local mt1 = {
- __index = function(t,location)
- local v = { [v_local] = { }, [v_global] = { } }
- t[location] = v
- return v
- end
-}
-
-local mt2 = {
- __index = function(stores,category)
- categories[#categories+1] = category
- local v = { }
- setmetatable(v,mt1)
- stores[category] = v
- return v
- end
-}
-
-setmetatable(displaystore,mt2)
-
-local defaults = {
- __index = {
- location = v_left,
- align = v_normal,
- method = "",
- name = "",
- threshold = 0, -- .25ex
- margin = v_normal,
- scope = v_global,
- distance = 0,
- hoffset = 0,
- voffset = 0,
- category = v_default,
- line = 0,
- vstack = 0,
- dy = 0,
- baseline = false,
- inline = false,
- leftskip = 0,
- rightskip = 0,
- }
-}
-
-local enablelocal, enableglobal -- forward reference (delayed initialization)
-
-local function showstore(store,banner,location)
- if next(store) then
- for i, si in table.sortedpairs(store) do
- local si =store[i]
- report_margindata("%s: stored in %a at %s: %a => %s",banner,location,i,validstring(si.name,"no name"),nodes.toutf(si.box.list))
- end
- else
- report_margindata("%s: nothing stored in location %a",banner,location)
- end
-end
-
-function margins.save(t)
- setmetatable(t,defaults)
- local content = texbox[t.number]
- local location = t.location
- local category = t.category
- local inline = t.inline
- local scope = t.scope or v_global
- if not content then
- report_margindata("ignoring empty margin data %a",location or "unknown")
- return
- end
- local store
- if inline then
- store = inlinestore
- else
- store = displaystore[category][location]
- if not store then
- report_margindata("invalid location %a",location)
- return
- end
- store = store[scope]
- end
- if not store then
- report_margindata("invalid scope %a",scope)
- return
- end
- if enablelocal and scope == v_local then
- enablelocal()
- if enableglobal then
- enableglobal() -- is the fallback
- end
- elseif enableglobal and scope == v_global then
- enableglobal()
- end
- nofsaved = nofsaved + 1
- nofstored = nofstored + 1
- local name = t.name
- if trace_marginstack then
- showstore(store,"before",location)
- end
- if name and name ~= "" then
- if inlinestore then -- todo: inline store has to be done differently (not sparse)
- local t = table.sortedkeys(store) for j=#t,1,-1 do local i = t[j]
- local si = store[i]
- if si.name == name then
- local s = remove(store,i)
- free_node_list(s.box)
- end
- end
- else
- for i=#store,1,-1 do
- local si = store[i]
- if si.name == name then
- local s = remove(store,i)
- free_node_list(s.box)
- end
- end
- end
- if trace_marginstack then
- showstore(store,"between",location)
- end
- end
- if t.number then
- -- better make a new table and make t entry in t
- t.box = copy_node_list(content)
- t.n = nofsaved
- -- used later (we will clean up this natural mess later)
- -- nice is to make a special status table mechanism
- local leftmargindistance = texdimen.naturalleftmargindistance
- local rightmargindistance = texdimen.naturalrightmargindistance
- t.strutdepth = texbox.strutbox.depth
- t.strutheight = texbox.strutbox.height
- t.leftskip = tex.leftskip.width -- we're not in forgetall
- t.rightskip = tex.rightskip.width -- we're not in forgetall
- t.leftmargindistance = leftmargindistance -- todo:layoutstatus table
- t.rightmargindistance = rightmargindistance
- t.leftedgedistance = texdimen.naturalleftedgedistance
- + texdimen.leftmarginwidth
- + leftmargindistance
- t.rightedgedistance = texdimen.naturalrightedgedistance
- + texdimen.rightmarginwidth
- + rightmargindistance
- t.lineheight = texdimen.lineheight
- --
- -- t.realpageno = texcount.realpageno
- if inline then
- context(new_usernumber(inline_mark,nofsaved))
- store[nofsaved] = t -- no insert
- nofinlined = nofinlined + 1
- else
- insert(store,t)
- end
- end
- if trace_marginstack then
- showstore(store,"after",location)
- end
- if trace_margindata then
- report_margindata("saved %a, location %a, scope %a, inline %a",nofsaved,location,scope,inline)
- end
-end
-
--- Actually it's an advantage to have them all anchored left (tags and such)
--- we could keep them in store and flush in stage two but we might want to
--- do more before that so we need the content to be there unless we can be
--- sure that we flush this first which might not be the case in the future.
---
--- When the prototype inner/outer code that was part of this proved to be
--- okay it was moved elsewhere.
-
-local status, nofstatus = { }, 0
-
-local function realign(current,candidate)
- local location = candidate.location
- local margin = candidate.margin
- local hoffset = candidate.hoffset
- local distance = candidate.distance
- local hsize = candidate.hsize
- local width = candidate.width
- local align = candidate.align
- -- local realpageno = candidate.realpageno
- local leftpage = isleftpage(false,true)
- local delta = 0
- local leftdelta = 0
- local rightdelta = 0
- local leftdistance = distance
- local rightdistance = distance
- if margin == v_normal then
- --
- elseif margin == v_local then
- leftdelta = - candidate.leftskip
- rightdelta = candidate.rightskip
- elseif margin == v_margin then
- leftdistance = candidate.leftmargindistance
- rightdistance = candidate.rightmargindistance
- elseif margin == v_edge then
- leftdistance = candidate.leftedgedistance
- rightdistance = candidate.rightedgedistance
- end
- if leftpage then
- leftdistance, rightdistance = rightdistance, leftdistance
- end
-
- if location == v_left then
- delta = hoffset + width + leftdistance + leftdelta
- elseif location == v_right then
- delta = -hoffset - hsize - rightdistance + rightdelta
- elseif location == v_inner then
- if leftpage then
- delta = -hoffset - hsize - rightdistance + rightdelta
- else
- delta = hoffset + width + leftdistance + leftdelta
- end
- elseif location == v_outer then
- if leftpage then
- delta = hoffset + width + leftdistance + leftdelta
- else
- delta = -hoffset - hsize - rightdistance + rightdelta
- end
- end
-
- -- we assume that list is a hbox, otherwise we had to take the whole current
- -- in order to get it right
-
- current.width = 0
- local anchornode, move_x
-
- -- this mess is needed for alignments (combinations) so we use that
- -- oportunity to add arbitrary anchoring
-
- -- always increment anchor is nicer for multipass when we add new ..
-
- local inline = candidate.inline
- local anchor = candidate.anchor
- if not anchor or anchor == "" then
- anchor = v_text
- end
- if inline or anchor ~= v_text or candidate.psubtype == alignment_code then
- -- the alignment_code check catches margintexts ste before a tabulate
- h_anchors = h_anchors + 1
- anchornode = new_latelua(format("_plib_.set('md:h',%i,{x=true,c=true})",h_anchors))
- local blob = jobpositions.get('md:h', h_anchors)
- if blob then
- local reference = jobpositions.getreserved(anchor,blob.c)
- if reference then
- if location == v_left then
- move_x = (reference.x or 0) - (blob.x or 0)
- elseif location == v_right then
- move_x = (reference.x or 0) - (blob.x or 0) + (reference.w or 0) - hsize
- else
- -- not yet done
- end
- end
- end
- end
-
- if move_x then
- delta = delta - move_x
- if trace_margindata then
- report_margindata("realigned %a, location %a, margin %a, move %p",candidate.n,location,margin,move_x)
- end
- else
- if trace_margindata then
- report_margindata("realigned %a, location %a, margin %a",candidate.n,location,margin)
- end
- end
-
- current.list = hpack_nodes(concat_nodes{anchornode,new_kern(-delta),current.list,new_kern(delta)})
- current.width = 0
-end
-
-local function realigned(current,a)
- local candidate = status[a]
- realign(current,candidate)
- nofdelayed = nofdelayed - 1
- status[a] = nil
- return true
-end
-
--- Stacking is done in two ways: the v_yes option stacks per paragraph (or line,
--- depending on what gets by) and mostly concerns margin data dat got set at more or
--- less the same time. The v_continue option uses position tracking and works on
--- larger range. However, crossing pages is not part of it. Anyway, when you have
--- such messed up margin data you'd better think twice.
---
--- The stacked table keeps track (per location) of the offsets (the v_yes case). This
--- table gets saved when the v_continue case is active. We use a special variant
--- of position tracking, after all we only need the page number and vertical position.
-
-local stacked = { } -- left/right keys depending on location
-local cache = { }
-
-local function resetstacked()
- stacked = { }
-end
-
--- resetstacked()
-
-function margins.ha(tag) -- maybe l/r keys ipv left/right keys
- local p = cache[tag]
- p.p = true
- p.y = true
- jobpositions.set('md:v',tag,p)
- cache[tag] = nil
-end
-
-local function markovershoot(current)
- v_anchors = v_anchors + 1
- cache[v_anchors] = stacked
- local anchor = new_latelua(format("typesetters.margins.ha(%s)",v_anchors)) -- todo: alleen als offset > line
- current.list = hpack_nodes(concat_nodes{anchor,current.list})
-end
-
-local function getovershoot(location)
- local p = jobpositions.get("md:v",v_anchors)
- local c = jobpositions.get("md:v",v_anchors+1)
- if p and c and p.p and p.p == c.p then
- local distance = p.y - c.y
- local offset = p[location] or 0
- local overshoot = offset - distance
- if trace_marginstack then
- report_margindata("location %a, distance %p, offset %p, overshoot %p",location,distance,offset,overshoot)
- end
- if overshoot > 0 then
- return overshoot
- end
- end
- return 0
-end
-
-local function inject(parent,head,candidate)
- local box = candidate.box
- local width = box.width
- local height = box.height
- local depth = box.depth
- local shift = box.shift
- local stack = candidate.stack
- local location = candidate.location
- local method = candidate.method
- local voffset = candidate.voffset
- local line = candidate.line
- local baseline = candidate.baseline
- local strutheight = candidate.strutheight
- local strutdepth = candidate.strutdepth
- local psubtype = parent.subtype
- local offset = stacked[location]
- local firstonstack = offset == false or offset == nil
- nofstatus = nofstatus + 1
- nofdelayed = nofdelayed + 1
- status[nofstatus] = candidate
- -- yet untested
- if baseline == true then
- baseline = false
- -- hbox vtop
---~ for h in traverse_id(hlist_code,box.list.list) do
---~ baseline = h.height
---~ break
---~ end
- else
- baseline = tonumber(baseline)
- if not baseline or baseline <= 0 then
- -- in case we have a box of width 0 that is not analyzed
- baseline = false -- strutheight -- actually a hack
- end
- end
- candidate.width = width
- candidate.hsize = parent.width -- we can also pass textwidth
- candidate.psubtype = psubtype
- if trace_margindata then
- report_margindata("processing, index %s, height %p, depth %p, parent %s",candidate.n,height,depth,listcodes[psubtype])
- end
- if firstonstack then
- offset = 0
- else
--- offset = offset + height
- end
- if stack == v_yes then
- offset = offset + candidate.dy
- shift = shift + offset
- elseif stack == v_continue then
- offset = offset + candidate.dy
- if firstonstack then
- offset = offset + getovershoot(location)
- end
- shift = shift + offset
- end
- -- -- --
- -- Maybe we also need to patch offset when we apply methods, but how ...
- -- This needs a bit of playing as it depends on the stack setting of the
- -- following which we don't know yet ... so, consider stacking partially
- -- experimental.
- -- -- --
- if method == v_top then
- local delta = height - parent.height
- if trace_margindata then
- report_margindata("top aligned by %p",delta)
- end
- if delta < candidate.threshold then
- shift = shift + voffset + delta
- end
- elseif method == v_first then
- if baseline then
- shift = shift + voffset + height - baseline -- option
- else
- shift = shift + voffset -- normal
- end
- if trace_margindata then
- report_margindata("first aligned")
- end
- elseif method == v_depth then
- local delta = strutdepth
- if trace_margindata then
- report_margindata("depth aligned by %p",delta)
- end
- shift = shift + voffset + delta
- elseif method == v_height then
- local delta = - strutheight
- if trace_margindata then
- report_margindata("height aligned by %p",delta)
- end
- shift = shift + voffset + delta
- elseif voffset ~= 0 then
- if trace_margindata then
- report_margindata("voffset %p applied",voffset)
- end
- shift = shift + voffset
- end
- -- -- --
- if line ~= 0 then
- local delta = line * candidate.lineheight
- if trace_margindata then
- report_margindata("offset %p applied to line %s",delta,line)
- end
- shift = shift + delta
- offset = offset + delta
- end
- box.shift = shift
- box.width = 0
- if not head then
- head = box
- elseif head.id == whatsit_code and head.subtype == localpar_code then
- -- experimental
- if head.dir == "TRT" then
- box.list = hpack_nodes(concat_nodes{new_kern(candidate.hsize),box.list,new_kern(-candidate.hsize)})
- end
- insert_node_after(head,head,box)
- else
- head.prev = box
- box.next = head
- head = box
- end
- box[a_margindata] = nofstatus
- if trace_margindata then
- report_margindata("injected, location %a, shift %p",location,shift)
- end
- -- we need to add line etc to offset as well
- offset = offset + depth
- local room = {
- height = height,
- depth = offset,
- slack = candidate.bottomspace, -- todo: 'depth' => strutdepth
- lineheight = candidate.lineheight, -- only for tracing
- }
- offset = offset + height
- stacked[location] = offset -- weird, no table ?
- -- todo: if no real depth then zero
- if trace_margindata then
- report_margindata("status, offset %s",offset)
- end
- return head, room, stack == v_continue
-end
-
-local function flushinline(parent,head)
- local current = head
- local done = false
- local continue = false
- local room, don, con
- while current and nofinlined > 0 do
- local id = current.id
- if id == whatsit_code then
- if current.subtype == userdefined_code and current.user_id == inline_mark then
- local n = current.value
- local candidate = inlinestore[n]
- if candidate then -- no vpack, as we want to realign
- inlinestore[n] = nil
- nofinlined = nofinlined - 1
- head, room, con = inject(parent,head,candidate) -- maybe return applied offset
- continue = continue or con
- done = true
- nofstored = nofstored - 1
- end
- end
- elseif id == hlist_code or id == vlist_code then
- -- optional (but sometimes needed)
- current.list, don, con = flushinline(current,current.list)
- continue = continue or con
- done = done or don
- end
- current = current.next
- end
- return head, done, continue
-end
-
-local a_linenumber = attributes.private('linenumber')
-
-local function flushed(scope,parent) -- current is hlist
- local head = parent.list
- local done = false
- local continue = false
- local room, con, don
- for c=1,#categories do
- local category = categories[c]
- for l=1,#locations do
- local location = locations[l]
- local store = displaystore[category][location][scope]
- while true do
- local candidate = remove(store,1) -- brr, local stores are sparse
- if candidate then -- no vpack, as we want to realign
- head, room, con = inject(parent,head,candidate)
- done = true
- continue = continue or con
- nofstored = nofstored - 1
- registertogether(parent,room)
- else
- break
- end
- end
- end
- end
- if nofinlined > 0 then
- if done then
- parent.list = head
- end
- head, don, con = flushinline(parent,head)
- continue = continue or con
- done = done or don
- end
- if done then
- local a = head[a_linenumber] -- hack .. we need a more decent critical attribute inheritance mechanism
- parent.list = hpack_nodes(head,parent.width,"exactly")
- if a then
- parent.list[a_linenumber] = a
- end
- -- resetstacked()
- end
- return done, continue
-end
-
--- only when group : vbox|vmode_par
--- only when subtype : line, box (no indent alignment cell)
-
-local function handler(scope,head,group)
- if nofstored > 0 then
- if trace_margindata then
- report_margindata("flushing stage one, stored %s, scope %s, delayed %s, group %a",nofstored,scope,nofdelayed,group)
- end
- local current = head
- local done = false
- while current do
- local id = current.id
- if (id == vlist_code or id == hlist_code) and not current[a_margindata] then
- local don, continue = flushed(scope,current)
- if don then
- current[a_margindata] = 0 -- signal to prevent duplicate processing
- if continue then
- markovershoot(current)
- end
- if nofstored <= 0 then
- break
- end
- done = true
- end
- end
- current = current.next
- end
- -- if done then
- resetstacked() -- why doesn't done work ok here?
- -- end
- return head, done
- else
- return head, false
- end
-end
-
-function margins.localhandler(head,group) -- sometimes group is "" which is weird
- local inhibit = conditionals.inhibitmargindata
- if inhibit then
- if trace_margingroup then
- report_margindata("ignored 3, group %a, stored %s, inhibit %a",group,nofstored,inhibit)
- end
- return head, false
- elseif nofstored > 0 then
- return handler(v_local,head,group)
- else
- if trace_margingroup then
- report_margindata("ignored 4, group %a, stored %s, inhibit %a",group,nofstored,inhibit)
- end
- return head, false
- end
-end
-
-function margins.globalhandler(head,group) -- check group
- local inhibit = conditionals.inhibitmargindata
- if inhibit or nofstored == 0 then
- if trace_margingroup then
- report_margindata("ignored 1, group %a, stored %s, inhibit %a",group,nofstored,inhibit)
- end
- return head, false
- elseif group == "hmode_par" then
- return handler("global",head,group)
- elseif group == "vmode_par" then -- experiment (for alignments)
- return handler("global",head,group)
- -- this needs checking as we then get quite some one liners to process and
- -- we cannot look ahead then:
- elseif group == "box" then -- experiment (for alignments)
- return handler("global",head,group)
- elseif group == "alignment" then -- experiment (for alignments)
- return handler("global",head,group)
- else
- if trace_margingroup then
- report_margindata("ignored 2, group %a, stored %s, inhibit %a",group,nofstored,inhibit)
- end
- return head, false
- end
-end
-
-local function finalhandler(head)
- if nofdelayed > 0 then
- local current = head
- local done = false
- while current do
- local id = current.id
- if id == hlist_code then
- local a = current[a_margindata]
- if not a or a == 0 then
- finalhandler(current.list)
- elseif realigned(current,a) then
- done = true
- if nofdelayed == 0 then
- return head, true
- end
- end
- elseif id == vlist_code then
- finalhandler(current.list)
- end
- current = current.next
- end
- return head, done
- else
- return head, false
- end
-end
-
-function margins.finalhandler(head)
- if nofdelayed > 0 then
- -- if trace_margindata then
- -- report_margindata("flushing stage two, instore: %s, delayed: %s",nofstored,nofdelayed)
- -- end
- return finalhandler(head)
- else
- return head, false
- end
-end
-
--- Somehow the vbox builder (in combinations) gets pretty confused and decides to
--- go horizontal. So this needs more testing.
-
-prependaction("finalizers", "lists", "typesetters.margins.localhandler")
--- ("vboxbuilders", "normalizers", "typesetters.margins.localhandler")
-prependaction("mvlbuilders", "normalizers", "typesetters.margins.globalhandler")
-prependaction("shipouts", "normalizers", "typesetters.margins.finalhandler")
-
-disableaction("finalizers", "typesetters.margins.localhandler")
--- ("vboxbuilders", "typesetters.margins.localhandler")
-disableaction("mvlbuilders", "typesetters.margins.globalhandler")
-disableaction("shipouts", "typesetters.margins.finalhandler")
-
-enablelocal = function()
- enableaction("finalizers", "typesetters.margins.localhandler")
- -- enableaction("vboxbuilders", "typesetters.margins.localhandler")
- enableaction("shipouts", "typesetters.margins.finalhandler")
- enablelocal = nil
-end
-
-enableglobal = function()
- enableaction("mvlbuilders", "typesetters.margins.globalhandler")
- enableaction("shipouts", "typesetters.margins.finalhandler")
- enableglobal = nil
-end
-
-statistics.register("margin data", function()
- if nofsaved > 0 then
- return format("%s entries, %s pending",nofsaved,nofdelayed)
- else
- return nil
- end
-end)
+if not modules then modules = { } end modules ['typo-mar'] = {
+ version = 1.001,
+ comment = "companion to typo-mar.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- todo:
+--
+-- * autoleft/right depending on available space (or distance to margin)
+-- * stack across paragraphs, but that is messy and one should reconsider
+-- using margin data then as also vertical spacing kicks in
+-- * floating margin data, with close-to-call anchoring
+
+-- -- experiment (does not work, too much interference)
+--
+-- local pdfprint = pdf.print
+-- local format = string.format
+--
+-- anchors = anchors or { }
+--
+-- local whatever = { }
+-- local factor = (7200/7227)/65536
+--
+-- function anchors.set(tag)
+-- whatever[tag] = { pdf.h, pdf.v }
+-- end
+--
+-- function anchors.reset(tag)
+-- whatever[tag] = nil
+-- end
+--
+-- function anchors.startmove(tag,how) -- save/restore nodes but they don't support moves
+-- local w = whatever[tag]
+-- if not w then
+-- -- error
+-- elseif how == "horizontal" or how == "h" then
+-- pdfprint("page",format(" q 1 0 0 1 %f 0 cm ", (w[1] - pdf.h) * factor))
+-- elseif how == "vertical" or how == "v" then
+-- pdfprint("page",format(" q 1 0 0 1 0 %f cm ", (w[2] - pdf.v) * factor))
+-- else
+-- pdfprint("page",format(" q 1 0 0 1 %f %f cm ", (w[1] - pdf.h) * factor, (w[2] - pdf.v) * factor))
+-- end
+-- end
+--
+-- function anchors.stopmove(tag)
+-- local w = whatever[tag]
+-- if not w then
+-- -- error
+-- else
+-- pdfprint("page"," Q ")
+-- end
+-- end
+--
+-- local latelua = nodes.pool.latelua
+--
+-- function anchors.node_set(tag)
+-- return latelua(formatters["anchors.set(%q)"](tag))
+-- end
+--
+-- function anchors.node_reset(tag)
+-- return latelua(formatters["anchors.reset(%q)"](tag))
+-- end
+--
+-- function anchors.node_start_move(tag,how)
+-- return latelua(formatters["anchors.startmove(%q,%q)](tag,how))
+-- end
+--
+-- function anchors.node_stop_move(tag)
+-- return latelua(formatters["anchors.stopmove(%q)"](tag))
+-- end
+
+-- so far
+
+local format, validstring = string.format, string.valid
+local insert, remove = table.insert, table.remove
+local setmetatable, next = setmetatable, next
+
+local attributes, nodes, node, variables = attributes, nodes, node, variables
+
+local trace_margindata = false trackers.register("typesetters.margindata", function(v) trace_margindata = v end)
+local trace_marginstack = false trackers.register("typesetters.margindata.stack", function(v) trace_marginstack = v end)
+local trace_margingroup = false trackers.register("typesetters.margindata.group", function(v) trace_margingroup = v end)
+
+local report_margindata = logs.reporter("typesetters","margindata")
+
+local tasks = nodes.tasks
+local prependaction = tasks.prependaction
+local disableaction = tasks.disableaction
+local enableaction = tasks.enableaction
+
+local variables = interfaces.variables
+
+local conditionals = tex.conditionals
+local systemmodes = tex.systemmodes
+
+local v_top = variables.top
+local v_depth = variables.depth
+local v_local = variables["local"]
+local v_global = variables["global"]
+local v_left = variables.left
+local v_right = variables.right
+local v_flushleft = variables.flushleft
+local v_flushright = variables.flushright
+local v_inner = variables.inner
+local v_outer = variables.outer
+local v_margin = variables.margin
+local v_edge = variables.edge
+local v_default = variables.default
+local v_normal = variables.normal
+local v_yes = variables.yes
+local v_continue = variables.continue
+local v_first = variables.first
+local v_text = variables.text
+local v_column = variables.column
+
+local copy_node_list = node.copy_list
+local slide_nodes = node.slide
+local hpack_nodes = node.hpack -- nodes.fasthpack not really faster here
+local traverse_id = node.traverse_id
+local free_node_list = node.flush_list
+local insert_node_after = node.insert_after
+local insert_node_before = node.insert_before
+
+local concat_nodes = nodes.concat
+
+local nodecodes = nodes.nodecodes
+local listcodes = nodes.listcodes
+local gluecodes = nodes.gluecodes
+local whatsitcodes = nodes.whatsitcodes
+
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local glue_code = nodecodes.glue
+local kern_code = nodecodes.kern
+local penalty_code = nodecodes.penalty
+local whatsit_code = nodecodes.whatsit
+local line_code = listcodes.line
+local cell_code = listcodes.cell
+local alignment_code = listcodes.alignment
+local leftskip_code = gluecodes.leftskip
+local rightskip_code = gluecodes.rightskip
+local userdefined_code = whatsitcodes.userdefined
+
+local dir_code = whatsitcodes.dir
+local localpar_code = whatsitcodes.localpar
+
+local nodepool = nodes.pool
+
+local new_kern = nodepool.kern
+local new_glue = nodepool.glue
+local new_penalty = nodepool.penalty
+local new_stretch = nodepool.stretch
+local new_usernumber = nodepool.usernumber
+local new_latelua = nodepool.latelua
+
+local texcount = tex.count
+local texdimen = tex.dimen
+local texbox = tex.box
+
+local points = number.points
+
+local isleftpage = layouts.status.isleftpage
+local registertogether = builders.paragraphs.registertogether
+
+local jobpositions = job.positions
+local getposition = jobpositions.position
+
+local a_margindata = attributes.private("margindata")
+
+local inline_mark = nodepool.userids["margins.inline"]
+
+local margins = { }
+typesetters.margins = margins
+
+local locations = { v_left, v_right, v_inner, v_outer } -- order might change
+local categories = { }
+local displaystore = { } -- [category][location][scope]
+local inlinestore = { } -- [number]
+local nofsaved = 0
+local nofstored = 0
+local nofinlined = 0
+local nofdelayed = 0
+local h_anchors = 0
+local v_anchors = 0
+
+local mt1 = {
+ __index = function(t,location)
+ local v = { [v_local] = { }, [v_global] = { } }
+ t[location] = v
+ return v
+ end
+}
+
+local mt2 = {
+ __index = function(stores,category)
+ categories[#categories+1] = category
+ local v = { }
+ setmetatable(v,mt1)
+ stores[category] = v
+ return v
+ end
+}
+
+setmetatable(displaystore,mt2)
+
+local defaults = {
+ __index = {
+ location = v_left,
+ align = v_normal,
+ method = "",
+ name = "",
+ threshold = 0, -- .25ex
+ margin = v_normal,
+ scope = v_global,
+ distance = 0,
+ hoffset = 0,
+ voffset = 0,
+ category = v_default,
+ line = 0,
+ vstack = 0,
+ dy = 0,
+ baseline = false,
+ inline = false,
+ leftskip = 0,
+ rightskip = 0,
+ }
+}
+
+local enablelocal, enableglobal -- forward reference (delayed initialization)
+
+local function showstore(store,banner,location)
+ if next(store) then
+ for i, si in table.sortedpairs(store) do
+ local si = store[i]
+ report_margindata("%s: stored in %a at %s: %a => %s",banner,location,i,validstring(si.name,"no name"),nodes.toutf(si.box.list))
+ end
+ else
+ report_margindata("%s: nothing stored in location %a",banner,location)
+ end
+end
+
+function margins.save(t)
+ setmetatable(t,defaults)
+ local content = texbox[t.number]
+ local location = t.location
+ local category = t.category
+ local inline = t.inline
+ local scope = t.scope or v_global
+ if not content then
+ report_margindata("ignoring empty margin data %a",location or "unknown")
+ return
+ end
+ local store
+ if inline then
+ store = inlinestore
+ else
+ store = displaystore[category][location]
+ if not store then
+ report_margindata("invalid location %a",location)
+ return
+ end
+ store = store[scope]
+ end
+ if not store then
+ report_margindata("invalid scope %a",scope)
+ return
+ end
+ if enablelocal and scope == v_local then
+ enablelocal()
+ if enableglobal then
+ enableglobal() -- is the fallback
+ end
+ elseif enableglobal and scope == v_global then
+ enableglobal()
+ end
+ nofsaved = nofsaved + 1
+ nofstored = nofstored + 1
+ local name = t.name
+ if trace_marginstack then
+ showstore(store,"before",location)
+ end
+ if name and name ~= "" then
+ if inlinestore then -- todo: inline store has to be done differently (not sparse)
+ local t = table.sortedkeys(store) for j=#t,1,-1 do local i = t[j]
+ local si = store[i]
+ if si.name == name then
+ local s = remove(store,i)
+ free_node_list(s.box)
+ end
+ end
+ else
+ for i=#store,1,-1 do
+ local si = store[i]
+ if si.name == name then
+ local s = remove(store,i)
+ free_node_list(s.box)
+ end
+ end
+ end
+ if trace_marginstack then
+ showstore(store,"between",location)
+ end
+ end
+ if t.number then
+ -- better make a new table and make t entry in t
+ t.box = copy_node_list(content)
+ t.n = nofsaved
+ -- used later (we will clean up this natural mess later)
+ -- nice is to make a special status table mechanism
+ local leftmargindistance = texdimen.naturalleftmargindistance
+ local rightmargindistance = texdimen.naturalrightmargindistance
+ t.strutdepth = texbox.strutbox.depth
+ t.strutheight = texbox.strutbox.height
+ t.leftskip = tex.leftskip.width -- we're not in forgetall
+ t.rightskip = tex.rightskip.width -- we're not in forgetall
+ t.leftmargindistance = leftmargindistance -- todo:layoutstatus table
+ t.rightmargindistance = rightmargindistance
+ t.leftedgedistance = texdimen.naturalleftedgedistance
+ + texdimen.leftmarginwidth
+ + leftmargindistance
+ t.rightedgedistance = texdimen.naturalrightedgedistance
+ + texdimen.rightmarginwidth
+ + rightmargindistance
+ t.lineheight = texdimen.lineheight
+ --
+ -- t.realpageno = texcount.realpageno
+ if inline then
+ context(new_usernumber(inline_mark,nofsaved))
+ store[nofsaved] = t -- no insert
+ nofinlined = nofinlined + 1
+ else
+ insert(store,t)
+ end
+ end
+ if trace_marginstack then
+ showstore(store,"after",location)
+ end
+ if trace_margindata then
+ report_margindata("saved %a, location %a, scope %a, inline %a",nofsaved,location,scope,inline)
+ end
+end
+
+-- Actually it's an advantage to have them all anchored left (tags and such).
+-- We could keep them in the store and flush in stage two, but we might want
+-- to do more before that, so we need the content to be there, unless we can
+-- be sure that we flush this first, which might not be the case in the future.
+--
+-- When the prototype inner/outer code that was part of this proved to be
+-- okay it was moved elsewhere.
+
+local status, nofstatus = { }, 0
+
+local function realign(current,candidate)
+ local location = candidate.location
+ local margin = candidate.margin
+ local hoffset = candidate.hoffset
+ local distance = candidate.distance
+ local hsize = candidate.hsize
+ local width = candidate.width
+ local align = candidate.align
+ -- local realpageno = candidate.realpageno
+ local leftpage = isleftpage(false,true)
+ local delta = 0
+ local leftdelta = 0
+ local rightdelta = 0
+ local leftdistance = distance
+ local rightdistance = distance
+ if margin == v_normal then
+ --
+ elseif margin == v_local then
+ leftdelta = - candidate.leftskip
+ rightdelta = candidate.rightskip
+ elseif margin == v_margin then
+ leftdistance = candidate.leftmargindistance
+ rightdistance = candidate.rightmargindistance
+ elseif margin == v_edge then
+ leftdistance = candidate.leftedgedistance
+ rightdistance = candidate.rightedgedistance
+ end
+ if leftpage then
+ leftdistance, rightdistance = rightdistance, leftdistance
+ end
+
+ if location == v_left then
+ delta = hoffset + width + leftdistance + leftdelta
+ elseif location == v_right then
+ delta = -hoffset - hsize - rightdistance + rightdelta
+ elseif location == v_inner then
+ if leftpage then
+ delta = -hoffset - hsize - rightdistance + rightdelta
+ else
+ delta = hoffset + width + leftdistance + leftdelta
+ end
+ elseif location == v_outer then
+ if leftpage then
+ delta = hoffset + width + leftdistance + leftdelta
+ else
+ delta = -hoffset - hsize - rightdistance + rightdelta
+ end
+ end
+
+ -- we assume that the list is an hbox, otherwise we would have to take the
+ -- whole current node in order to get it right
+
+ current.width = 0
+ local anchornode, move_x
+
+ -- this mess is needed for alignments (combinations) so we use that
+ -- opportunity to add arbitrary anchoring
+
+ -- always incrementing the anchor is nicer for multipass runs when we add new ones
+
+ local inline = candidate.inline
+ local anchor = candidate.anchor
+ if not anchor or anchor == "" then
+ anchor = v_text
+ end
+ if inline or anchor ~= v_text or candidate.psubtype == alignment_code then
+ -- the alignment_code check catches margintexts set before a tabulate
+ h_anchors = h_anchors + 1
+ anchornode = new_latelua(format("_plib_.set('md:h',%i,{x=true,c=true})",h_anchors))
+ local blob = jobpositions.get('md:h', h_anchors)
+ if blob then
+ local reference = jobpositions.getreserved(anchor,blob.c)
+ if reference then
+ if location == v_left then
+ move_x = (reference.x or 0) - (blob.x or 0)
+ elseif location == v_right then
+ move_x = (reference.x or 0) - (blob.x or 0) + (reference.w or 0) - hsize
+ else
+ -- not yet done
+ end
+ end
+ end
+ end
+
+ if move_x then
+ delta = delta - move_x
+ if trace_margindata then
+ report_margindata("realigned %a, location %a, margin %a, move %p",candidate.n,location,margin,move_x)
+ end
+ else
+ if trace_margindata then
+ report_margindata("realigned %a, location %a, margin %a",candidate.n,location,margin)
+ end
+ end
+
+ current.list = hpack_nodes(concat_nodes{anchornode,new_kern(-delta),current.list,new_kern(delta)})
+ current.width = 0
+end
+
+local function realigned(current,a)
+ local candidate = status[a]
+ realign(current,candidate)
+ nofdelayed = nofdelayed - 1
+ status[a] = nil
+ return true
+end
+
+-- Stacking is done in two ways: the v_yes option stacks per paragraph (or line,
+-- depending on what comes by) and mostly concerns margin data that got set at more
+-- or less the same time. The v_continue option uses position tracking and works on
+-- a larger range. However, crossing pages is not part of it. Anyway, when you have
+-- such messed up margin data you'd better think twice.
+--
+-- The stacked table keeps track (per location) of the offsets (the v_yes case). This
+-- table gets saved when the v_continue case is active. We use a special variant
+-- of position tracking; after all, we only need the page number and vertical position.
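+--
+-- A minimal sketch of the v_yes bookkeeping (the names 'stackit', 'roomheight'
+-- and 'roomdepth' are made up for illustration; the real work happens in the
+-- inject function below):
+--
+-- local function stackit(location,dy,roomheight,roomdepth)
+--     local offset = (stacked[location] or 0) + dy        -- requested extra shift
+--     local shift  = offset                               -- applied to the margin box
+--     stacked[location] = offset + roomheight + roomdepth -- the next one starts below
+--     return shift
+-- end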
+
+local stacked = { } -- left/right keys depending on location
+local cache = { }
+
+local function resetstacked()
+ stacked = { }
+end
+
+-- resetstacked()
+
+function margins.ha(tag) -- maybe l/r keys instead of left/right keys
+ local p = cache[tag]
+ p.p = true
+ p.y = true
+ jobpositions.set('md:v',tag,p)
+ cache[tag] = nil
+end
+
+local function markovershoot(current)
+ v_anchors = v_anchors + 1
+ cache[v_anchors] = stacked
+ local anchor = new_latelua(format("typesetters.margins.ha(%s)",v_anchors)) -- todo: only when offset > line
+ current.list = hpack_nodes(concat_nodes{anchor,current.list})
+end
+
+local function getovershoot(location)
+ local p = jobpositions.get("md:v",v_anchors)
+ local c = jobpositions.get("md:v",v_anchors+1)
+ if p and c and p.p and p.p == c.p then
+ local distance = p.y - c.y
+ local offset = p[location] or 0
+ local overshoot = offset - distance
+ if trace_marginstack then
+ report_margindata("location %a, distance %p, offset %p, overshoot %p",location,distance,offset,overshoot)
+ end
+ if overshoot > 0 then
+ return overshoot
+ end
+ end
+ return 0
+end
+
+local function inject(parent,head,candidate)
+ local box = candidate.box
+ local width = box.width
+ local height = box.height
+ local depth = box.depth
+ local shift = box.shift
+ local stack = candidate.stack
+ local location = candidate.location
+ local method = candidate.method
+ local voffset = candidate.voffset
+ local line = candidate.line
+ local baseline = candidate.baseline
+ local strutheight = candidate.strutheight
+ local strutdepth = candidate.strutdepth
+ local psubtype = parent.subtype
+ local offset = stacked[location]
+ local firstonstack = offset == false or offset == nil
+ nofstatus = nofstatus + 1
+ nofdelayed = nofdelayed + 1
+ status[nofstatus] = candidate
+ -- yet untested
+ if baseline == true then
+ baseline = false
+ -- hbox vtop
+--~ for h in traverse_id(hlist_code,box.list.list) do
+--~ baseline = h.height
+--~ break
+--~ end
+ else
+ baseline = tonumber(baseline)
+ if not baseline or baseline <= 0 then
+ -- in case we have a box of width 0 that is not analyzed
+ baseline = false -- strutheight -- actually a hack
+ end
+ end
+ candidate.width = width
+ candidate.hsize = parent.width -- we can also pass textwidth
+ candidate.psubtype = psubtype
+ if trace_margindata then
+ report_margindata("processing, index %s, height %p, depth %p, parent %s",candidate.n,height,depth,listcodes[psubtype])
+ end
+ if firstonstack then
+ offset = 0
+ else
+-- offset = offset + height
+ end
+ if stack == v_yes then
+ offset = offset + candidate.dy
+ shift = shift + offset
+ elseif stack == v_continue then
+ offset = offset + candidate.dy
+ if firstonstack then
+ offset = offset + getovershoot(location)
+ end
+ shift = shift + offset
+ end
+ -- -- --
+ -- Maybe we also need to patch the offset when we apply methods, but how ...
+ -- This needs a bit of playing as it depends on the stack setting of what
+ -- follows, which we don't know yet ... so, consider stacking partially
+ -- experimental.
+ -- -- --
+ if method == v_top then
+ local delta = height - parent.height
+ if trace_margindata then
+ report_margindata("top aligned by %p",delta)
+ end
+ if delta < candidate.threshold then
+ shift = shift + voffset + delta
+ end
+ elseif method == v_first then
+ if baseline then
+ shift = shift + voffset + height - baseline -- option
+ else
+ shift = shift + voffset -- normal
+ end
+ if trace_margindata then
+ report_margindata("first aligned")
+ end
+ elseif method == v_depth then
+ local delta = strutdepth
+ if trace_margindata then
+ report_margindata("depth aligned by %p",delta)
+ end
+ shift = shift + voffset + delta
+ elseif method == v_height then
+ local delta = - strutheight
+ if trace_margindata then
+ report_margindata("height aligned by %p",delta)
+ end
+ shift = shift + voffset + delta
+ elseif voffset ~= 0 then
+ if trace_margindata then
+ report_margindata("voffset %p applied",voffset)
+ end
+ shift = shift + voffset
+ end
+ -- -- --
+ if line ~= 0 then
+ local delta = line * candidate.lineheight
+ if trace_margindata then
+ report_margindata("offset %p applied to line %s",delta,line)
+ end
+ shift = shift + delta
+ offset = offset + delta
+ end
+ box.shift = shift
+ box.width = 0
+ if not head then
+ head = box
+ elseif head.id == whatsit_code and head.subtype == localpar_code then
+ -- experimental
+ if head.dir == "TRT" then
+ box.list = hpack_nodes(concat_nodes{new_kern(candidate.hsize),box.list,new_kern(-candidate.hsize)})
+ end
+ insert_node_after(head,head,box)
+ else
+ head.prev = box
+ box.next = head
+ head = box
+ end
+ box[a_margindata] = nofstatus
+ if trace_margindata then
+ report_margindata("injected, location %a, shift %p",location,shift)
+ end
+ -- we need to add line etc to offset as well
+ offset = offset + depth
+ local room = {
+ height = height,
+ depth = offset,
+ slack = candidate.bottomspace, -- todo: 'depth' => strutdepth
+ lineheight = candidate.lineheight, -- only for tracing
+ }
+ offset = offset + height
+ stacked[location] = offset -- weird, no table ?
+ -- todo: if no real depth then zero
+ if trace_margindata then
+ report_margindata("status, offset %s",offset)
+ end
+ return head, room, stack == v_continue
+end
+
+local function flushinline(parent,head)
+ local current = head
+ local done = false
+ local continue = false
+ local room, don, con
+ while current and nofinlined > 0 do
+ local id = current.id
+ if id == whatsit_code then
+ if current.subtype == userdefined_code and current.user_id == inline_mark then
+ local n = current.value
+ local candidate = inlinestore[n]
+ if candidate then -- no vpack, as we want to realign
+ inlinestore[n] = nil
+ nofinlined = nofinlined - 1
+ head, room, con = inject(parent,head,candidate) -- maybe return applied offset
+ continue = continue or con
+ done = true
+ nofstored = nofstored - 1
+ end
+ end
+ elseif id == hlist_code or id == vlist_code then
+ -- optional (but sometimes needed)
+ current.list, don, con = flushinline(current,current.list)
+ continue = continue or con
+ done = done or don
+ end
+ current = current.next
+ end
+ return head, done, continue
+end
+
+local a_linenumber = attributes.private('linenumber')
+
+local function flushed(scope,parent) -- current is hlist
+ local head = parent.list
+ local done = false
+ local continue = false
+ local room, con, don
+ for c=1,#categories do
+ local category = categories[c]
+ for l=1,#locations do
+ local location = locations[l]
+ local store = displaystore[category][location][scope]
+ while true do
+ local candidate = remove(store,1) -- brr, local stores are sparse
+ if candidate then -- no vpack, as we want to realign
+ head, room, con = inject(parent,head,candidate)
+ done = true
+ continue = continue or con
+ nofstored = nofstored - 1
+ registertogether(parent,room)
+ else
+ break
+ end
+ end
+ end
+ end
+ if nofinlined > 0 then
+ if done then
+ parent.list = head
+ end
+ head, don, con = flushinline(parent,head)
+ continue = continue or con
+ done = done or don
+ end
+ if done then
+ local a = head[a_linenumber] -- hack .. we need a more decent critical attribute inheritance mechanism
+ parent.list = hpack_nodes(head,parent.width,"exactly")
+ if a then
+ parent.list[a_linenumber] = a
+ end
+ -- resetstacked()
+ end
+ return done, continue
+end
+
+-- only when group : vbox|vmode_par
+-- only when subtype : line, box (no indent alignment cell)
+
+local function handler(scope,head,group)
+ if nofstored > 0 then
+ if trace_margindata then
+ report_margindata("flushing stage one, stored %s, scope %s, delayed %s, group %a",nofstored,scope,nofdelayed,group)
+ end
+ local current = head
+ local done = false
+ while current do
+ local id = current.id
+ if (id == vlist_code or id == hlist_code) and not current[a_margindata] then
+ local don, continue = flushed(scope,current)
+ if don then
+ current[a_margindata] = 0 -- signal to prevent duplicate processing
+ if continue then
+ markovershoot(current)
+ end
+ if nofstored <= 0 then
+ break
+ end
+ done = true
+ end
+ end
+ current = current.next
+ end
+ -- if done then
+ resetstacked() -- why doesn't done work ok here?
+ -- end
+ return head, done
+ else
+ return head, false
+ end
+end
+
+function margins.localhandler(head,group) -- sometimes group is "" which is weird
+ local inhibit = conditionals.inhibitmargindata
+ if inhibit then
+ if trace_margingroup then
+ report_margindata("ignored 3, group %a, stored %s, inhibit %a",group,nofstored,inhibit)
+ end
+ return head, false
+ elseif nofstored > 0 then
+ return handler(v_local,head,group)
+ else
+ if trace_margingroup then
+ report_margindata("ignored 4, group %a, stored %s, inhibit %a",group,nofstored,inhibit)
+ end
+ return head, false
+ end
+end
+
+function margins.globalhandler(head,group) -- check group
+ local inhibit = conditionals.inhibitmargindata
+ if inhibit or nofstored == 0 then
+ if trace_margingroup then
+ report_margindata("ignored 1, group %a, stored %s, inhibit %a",group,nofstored,inhibit)
+ end
+ return head, false
+ elseif group == "hmode_par" then
+ return handler("global",head,group)
+ elseif group == "vmode_par" then -- experiment (for alignments)
+ return handler("global",head,group)
+ -- this needs checking as we then get quite a few one-liners to process and
+ -- we cannot look ahead then:
+ elseif group == "box" then -- experiment (for alignments)
+ return handler("global",head,group)
+ elseif group == "alignment" then -- experiment (for alignments)
+ return handler("global",head,group)
+ else
+ if trace_margingroup then
+ report_margindata("ignored 2, group %a, stored %s, inhibit %a",group,nofstored,inhibit)
+ end
+ return head, false
+ end
+end
+
+local function finalhandler(head)
+ if nofdelayed > 0 then
+ local current = head
+ local done = false
+ while current do
+ local id = current.id
+ if id == hlist_code then
+ local a = current[a_margindata]
+ if not a or a == 0 then
+ finalhandler(current.list)
+ elseif realigned(current,a) then
+ done = true
+ if nofdelayed == 0 then
+ return head, true
+ end
+ end
+ elseif id == vlist_code then
+ finalhandler(current.list)
+ end
+ current = current.next
+ end
+ return head, done
+ else
+ return head, false
+ end
+end
+
+function margins.finalhandler(head)
+ if nofdelayed > 0 then
+ -- if trace_margindata then
+ -- report_margindata("flushing stage two, instore: %s, delayed: %s",nofstored,nofdelayed)
+ -- end
+ return finalhandler(head)
+ else
+ return head, false
+ end
+end
+
+-- Somehow the vbox builder (in combinations) gets pretty confused and decides to
+-- go horizontal. So this needs more testing.
+
+prependaction("finalizers", "lists", "typesetters.margins.localhandler")
+-- ("vboxbuilders", "normalizers", "typesetters.margins.localhandler")
+prependaction("mvlbuilders", "normalizers", "typesetters.margins.globalhandler")
+prependaction("shipouts", "normalizers", "typesetters.margins.finalhandler")
+
+disableaction("finalizers", "typesetters.margins.localhandler")
+-- ("vboxbuilders", "typesetters.margins.localhandler")
+disableaction("mvlbuilders", "typesetters.margins.globalhandler")
+disableaction("shipouts", "typesetters.margins.finalhandler")
+
+enablelocal = function()
+ enableaction("finalizers", "typesetters.margins.localhandler")
+ -- enableaction("vboxbuilders", "typesetters.margins.localhandler")
+ enableaction("shipouts", "typesetters.margins.finalhandler")
+ enablelocal = nil
+end
+
+enableglobal = function()
+ enableaction("mvlbuilders", "typesetters.margins.globalhandler")
+ enableaction("shipouts", "typesetters.margins.finalhandler")
+ enableglobal = nil
+end
+
+statistics.register("margin data", function()
+ if nofsaved > 0 then
+ return format("%s entries, %s pending",nofsaved,nofdelayed)
+ else
+ return nil
+ end
+end)
diff --git a/tex/context/base/typo-pag.lua b/tex/context/base/typo-pag.lua
index 0dd75ddf9..d39748d26 100644
--- a/tex/context/base/typo-pag.lua
+++ b/tex/context/base/typo-pag.lua
@@ -1,179 +1,179 @@
-if not modules then modules = { } end modules ['typo-pag'] = {
- version = 1.001,
- comment = "companion to typo-pag.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local nodecodes = nodes.nodecodes
-
-local hlist_code = nodecodes.hlist
-local vlist_code = nodecodes.vlist
-local glue_code = nodecodes.glue
-local kern_code = nodecodes.kern
-local penalty_code = nodecodes.penalty
-
-local insert_node_after = node.insert_after
-local new_penalty = nodes.pool.penalty
-
-local unsetvalue = attributes.unsetvalue
-
-local a_keeptogether = attributes.private("keeptogether")
-
-local trace_keeptogether = false
-local report_keeptogether = logs.reporter("parbuilders","keeptogether")
-
-local cache = { }
-local last = 0
-local enabled = false
-
-trackers.register("parbuilders.keeptogether", function(v) trace_keeptogether = v end)
-
--- todo: also support lines = 3 etc (e.g. dropped caps) but how to set that
--- when no hlists are there ? ... maybe the local_par
-
-function builders.paragraphs.registertogether(line,specification) -- might change
- if not enabled then
- nodes.tasks.enableaction("finalizers","builders.paragraphs.keeptogether")
- end
- local a = line[a_keeptogether]
- local c = a and cache[a]
- if c then
- local height = specification.height
- local depth = specification.depth
- local slack = specification.slack
- if height and height > c.height then
- c.height = height
- end
- if depth and depth > c.depth then
- c.depth = depth
- end
- if slack and slack > c.slack then
- c.slack = slack
- end
- else
- last = last + 1
- cache[last] = specification
- if not specification.height then
- specification.height = 0
- end
- if not specification.depth then
- specification.depth = 0
- end
- if not specification.slack then
- specification.slack = 0
- end
- line[a_keeptogether] = last
- end
- if trace_keeptogether then
- local a = a or last
- local c = cache[a]
- if trace_keeptogether then
- local noflines = specification.lineheight
- local height = c.height
- local depth = c.depth
- local slack = c.slack
- if not noflines or noflines == 0 then
- noflines = "unknown"
- else
- noflines = math.round((height + depth - slack) / noflines)
- end
- report_keeptogether("registered, index %s, height %p, depth %p, slack %p, noflines %a",a,height,depth,slack,noflines)
- end
- end
-end
-
-local function keeptogether(start,a)
- if start then
- local specification = cache[a]
- if a then
- local current = start.next
- local previous = start
- local total = previous.depth
- local slack = specification.slack
- local threshold = specification.depth - slack
- if trace_keeptogether then
- report_keeptogether("%s, index %s, total %p, threshold %p, slack %p","list",a,total,threshold,slack)
- end
- while current do
- local id = current.id
- if id == vlist_code or id == hlist_code then
- total = total + current.height + current.depth
- if trace_keeptogether then
- report_keeptogether("%s, index %s, total %p, threshold %p","list",a,total,threshold)
- end
- if total <= threshold then
- if previous.id == penalty_code then
- previous.penalty = 10000
- else
- insert_node_after(head,previous,new_penalty(10000))
- end
- else
- break
- end
- elseif id == glue_code then
- -- hm, breakpoint, maybe turn this into kern
- total = total + current.spec.width
- if trace_keeptogether then
- report_keeptogether("%s, index %s, total %p, threshold %p","glue",a,total,threshold)
- end
- if total <= threshold then
- if previous.id == penalty_code then
- previous.penalty = 10000
- else
- insert_node_after(head,previous,new_penalty(10000))
- end
- else
- break
- end
- elseif id == kern_code then
- total = total + current.kern
- if trace_keeptogether then
- report_keeptogether("%s, index %s, total %s, threshold %s","kern",a,total,threshold)
- end
- if total <= threshold then
- if previous.id == penalty_code then
- previous.penalty = 10000
- else
- insert_node_after(head,previous,new_penalty(10000))
- end
- else
- break
- end
- elseif id == penalty_code then
- if total <= threshold then
- if previous.id == penalty_code then
- previous.penalty = 10000
- end
- current.penalty = 10000
- else
- break
- end
- end
- previous = current
- current = current.next
- end
- end
- end
-end
-
--- also look at first non glue/kern node e.g for a dropped caps
-
-function builders.paragraphs.keeptogether(head)
- local done = false
- local current = head
- while current do
- if current.id == hlist_code then
- local a = current[a_keeptogether]
- if a and a > 0 then
- keeptogether(current,a)
- current[a_keeptogether] = unsetvalue
- cache[a] = nil
- done = true
- end
- end
- current = current.next
- end
- return head, done
-end
+if not modules then modules = { } end modules ['typo-pag'] = {
+ version = 1.001,
+ comment = "companion to typo-pag.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local nodecodes = nodes.nodecodes
+
+local hlist_code = nodecodes.hlist
+local vlist_code = nodecodes.vlist
+local glue_code = nodecodes.glue
+local kern_code = nodecodes.kern
+local penalty_code = nodecodes.penalty
+
+local insert_node_after = node.insert_after
+local new_penalty = nodes.pool.penalty
+
+local unsetvalue = attributes.unsetvalue
+
+local a_keeptogether = attributes.private("keeptogether")
+
+local trace_keeptogether = false
+local report_keeptogether = logs.reporter("parbuilders","keeptogether")
+
+local cache = { }
+local last = 0
+local enabled = false
+
+trackers.register("parbuilders.keeptogether", function(v) trace_keeptogether = v end)
+
+-- todo: also support lines = 3 etc. (e.g. dropped caps) but how to set that
+-- when no hlists are there? ... maybe the local_par
+
+function builders.paragraphs.registertogether(line,specification) -- might change
+ if not enabled then
+ nodes.tasks.enableaction("finalizers","builders.paragraphs.keeptogether")
+ end
+ local a = line[a_keeptogether]
+ local c = a and cache[a]
+ if c then
+ local height = specification.height
+ local depth = specification.depth
+ local slack = specification.slack
+ if height and height > c.height then
+ c.height = height
+ end
+ if depth and depth > c.depth then
+ c.depth = depth
+ end
+ if slack and slack > c.slack then
+ c.slack = slack
+ end
+ else
+ last = last + 1
+ cache[last] = specification
+ if not specification.height then
+ specification.height = 0
+ end
+ if not specification.depth then
+ specification.depth = 0
+ end
+ if not specification.slack then
+ specification.slack = 0
+ end
+ line[a_keeptogether] = last
+ end
+ if trace_keeptogether then
+ local a = a or last
+ local c = cache[a]
+ if trace_keeptogether then
+ local noflines = specification.lineheight
+ local height = c.height
+ local depth = c.depth
+ local slack = c.slack
+ if not noflines or noflines == 0 then
+ noflines = "unknown"
+ else
+ noflines = math.round((height + depth - slack) / noflines)
+ end
+ report_keeptogether("registered, index %s, height %p, depth %p, slack %p, noflines %a",a,height,depth,slack,noflines)
+ end
+ end
+end
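+
+-- The margin data code (typo-mar.lua) calls this with the 'room' table it
+-- builds when injecting a margin box (height, depth, slack, lineheight), so
+-- several margin blobs attached to the same line share one cache entry that
+-- keeps the maxima of their dimensions.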
+
+local function keeptogether(start,a)
+ if start then
+ local specification = cache[a]
+ if a then
+ local current = start.next
+ local previous = start
+ local total = previous.depth
+ local slack = specification.slack
+ local threshold = specification.depth - slack
+ if trace_keeptogether then
+ report_keeptogether("%s, index %s, total %p, threshold %p, slack %p","list",a,total,threshold,slack)
+ end
+ while current do
+ local id = current.id
+ if id == vlist_code or id == hlist_code then
+ total = total + current.height + current.depth
+ if trace_keeptogether then
+ report_keeptogether("%s, index %s, total %p, threshold %p","list",a,total,threshold)
+ end
+ if total <= threshold then
+ if previous.id == penalty_code then
+ previous.penalty = 10000
+ else
+ insert_node_after(start,previous,new_penalty(10000)) -- 'start' is the head we have here
+ end
+ else
+ break
+ end
+ elseif id == glue_code then
+ -- hm, breakpoint, maybe turn this into kern
+ total = total + current.spec.width
+ if trace_keeptogether then
+ report_keeptogether("%s, index %s, total %p, threshold %p","glue",a,total,threshold)
+ end
+ if total <= threshold then
+ if previous.id == penalty_code then
+ previous.penalty = 10000
+ else
+ insert_node_after(start,previous,new_penalty(10000))
+ end
+ else
+ break
+ end
+ elseif id == kern_code then
+ total = total + current.kern
+ if trace_keeptogether then
+ report_keeptogether("%s, index %s, total %s, threshold %s","kern",a,total,threshold)
+ end
+ if total <= threshold then
+ if previous.id == penalty_code then
+ previous.penalty = 10000
+ else
+ insert_node_after(start,previous,new_penalty(10000))
+ end
+ else
+ break
+ end
+ elseif id == penalty_code then
+ if total <= threshold then
+ if previous.id == penalty_code then
+ previous.penalty = 10000
+ end
+ current.penalty = 10000
+ else
+ break
+ end
+ end
+ previous = current
+ current = current.next
+ end
+ end
+ end
+end
+
+-- also look at the first non glue/kern node, e.g. for dropped caps
+
+function builders.paragraphs.keeptogether(head)
+ local done = false
+ local current = head
+ while current do
+ if current.id == hlist_code then
+ local a = current[a_keeptogether]
+ if a and a > 0 then
+ keeptogether(current,a)
+ current[a_keeptogether] = unsetvalue
+ cache[a] = nil
+ done = true
+ end
+ end
+ current = current.next
+ end
+ return head, done
+end
diff --git a/tex/context/base/typo-par.lua b/tex/context/base/typo-par.lua
index b25ae4a5b..0449becbf 100644
--- a/tex/context/base/typo-par.lua
+++ b/tex/context/base/typo-par.lua
@@ -1,181 +1,181 @@
-if not modules then modules = { } end modules ['typo-par'] = {
- version = 1.001,
- comment = "companion to typo-par.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- A playground for experiments.
-
-local utfbyte = utf.byte
-local utfchar = utf.char
-
-local trace_paragraphs = false trackers.register("typesetters.paragraphs", function(v) trace_paragraphs = v end)
-local trace_dropper = false trackers.register("typesetters.paragraphs.dropper",function(v) trace_dropper = v end)
-
-local report_paragraphs = logs.reporter("nodes","paragraphs")
-local report_dropper = logs.reporter("nodes","dropped")
-
-typesetters.paragraphs = typesetters.paragraphs or { }
-local paragraphs = typesetters.paragraphs
-
-local nodecodes = nodes.nodecodes
-local whatsitcodes = nodes.whatsitcodes
-local tasks = nodes.tasks
-
-local variables = interfaces.variables
-
-local texattribute = tex.attribute
-local unsetvalue = attributes.unsetvalue
-
-local glyph_code = nodecodes.glyph
-local hlist_code = nodecodes.hlist
-local kern_node = nodecodes.kern
-local whatsit_code = nodecodes.whatsit
-local localpar_code = whatsitcodes.localpar
-
-local a_paragraph = attributes.private("paragraphspecial")
-local a_color = attributes.private('color')
-local a_transparency = attributes.private('transparency')
-local a_colorspace = attributes.private('colormodel')
-
-local dropper = {
- enabled = false,
- -- font = 0,
- -- n = 0,
- -- distance = 0,
- -- hoffset = 0,
- -- voffset = 0,
-}
-
-local droppers = { }
-
-typesetters.paragraphs.droppers = droppers
-
-function droppers.set(specification)
- dropper = specification or { }
-end
-
-function droppers.freeze()
- if dropper.enabled then
- dropper.font = font.current()
- end
-end
-
--- dropped caps experiment (will be done properly when luatex
--- stores the state in the local par node) .. btw, search still
--- works with dropped caps, as does an export
-
--- we need a 'par' attribute and in fact for dropped caps we don't need
--- need an attribute ... dropit will become s state counter (or end up
--- in the localpar user data
-
--- for the moment, each paragraph gets a number as id (attribute) ..problem
--- with nesting .. or anyhow, needed for tagging anyway
-
--- todo: prevent linebreak .. but normally a dropper ends up atthe top of
--- a page so this has a low priority
-
-local function process(namespace,attribute,head)
- local done = false
- if head.id == whatsit_code and head.subtype == localpar_code then
- -- begin of par
- local a = head[attribute]
- if a and a > 0 then
- if dropper.enabled then
- dropper.enabled = false -- dangerous for e.g. nested || in tufte
- local first = head.next
- if first and first.id == hlist_code then
- -- parbox .. needs to be set at 0
- first = first.next
- end
- if first and first.id == glyph_code then
--- if texattribute[a_paragraph] >= 0 then
--- texattribute[a_paragraph] = unsetvalue
--- end
- local char = first.char
- local prev = first.prev
- local next = first.next
- -- if prev.id == hlist_code then
- -- -- set the width to 0
- -- end
- if next and next.id == kern_node then
- next.kern = 0
- end
- first.font = dropper.font or first.font
- -- can be a helper
- local ma = dropper.ma or 0
- local ca = dropper.ca
- local ta = dropper.ta
- if ca and ca > 0 then
- first[a_colorspace] = ma == 0 and 1 or ma
- first[a_color] = ca
- end
- if ta and ta > 0 then
- first[a_transparency] = ta
- end
- --
- local width = first.width
- local height = first.height
- local depth = first.depth
- local distance = dropper.distance or 0
- local voffset = dropper.voffset or 0
- local hoffset = dropper.hoffset or 0
- first.xoffset = - width - hoffset - distance
- first.yoffset = - height - voffset
- if true then
- -- needed till we can store parindent with localpar
- first.prev = nil
- first.next = nil
- local h = node.hpack(first)
- h.width = 0
- h.height = 0
- h.depth = 0
- prev.next = h
- next.prev = h
- h.next = next
- h.prev = prev
- end
- if dropper.location == variables.margin then
- -- okay
- else
- local lines = tonumber(dropper.n) or 0
- if lines == 0 then -- safeguard, not too precise
- lines = math.ceil((height+voffset) / tex.baselineskip.width)
- end
- tex.hangafter = - lines
- tex.hangindent = width + distance
- end
- done = true
- end
- end
- end
- end
- return head, done
-end
-
-local enabled = false
-
-function paragraphs.set(n)
- if n == variables.reset or not tonumber(n) or n == 0 then
- texattribute[a_paragraph] = unsetvalue
- else
- if not enabled then
- tasks.enableaction("processors","typesetters.paragraphs.handler")
- if trace_paragraphs then
- report_paragraphs("enabling paragraphs")
- end
- enabled = true
- end
- texattribute[a_paragraph] = n
- end
-end
-
-paragraphs.attribute = a_paragraph
-
-paragraphs.handler = nodes.installattributehandler {
- name = "paragraphs",
- namespace = paragraphs,
- processor = process,
-}
+if not modules then modules = { } end modules ['typo-par'] = {
+ version = 1.001,
+ comment = "companion to typo-par.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- A playground for experiments.
+
+local utfbyte = utf.byte
+local utfchar = utf.char
+
+local trace_paragraphs = false trackers.register("typesetters.paragraphs", function(v) trace_paragraphs = v end)
+local trace_dropper = false trackers.register("typesetters.paragraphs.dropper",function(v) trace_dropper = v end)
+
+local report_paragraphs = logs.reporter("nodes","paragraphs")
+local report_dropper = logs.reporter("nodes","dropped")
+
+typesetters.paragraphs = typesetters.paragraphs or { }
+local paragraphs = typesetters.paragraphs
+
+local nodecodes = nodes.nodecodes
+local whatsitcodes = nodes.whatsitcodes
+local tasks = nodes.tasks
+
+local variables = interfaces.variables
+
+local texattribute = tex.attribute
+local unsetvalue = attributes.unsetvalue
+
+local glyph_code = nodecodes.glyph
+local hlist_code = nodecodes.hlist
+local kern_node = nodecodes.kern
+local whatsit_code = nodecodes.whatsit
+local localpar_code = whatsitcodes.localpar
+
+local a_paragraph = attributes.private("paragraphspecial")
+local a_color = attributes.private('color')
+local a_transparency = attributes.private('transparency')
+local a_colorspace = attributes.private('colormodel')
+
+local dropper = {
+ enabled = false,
+ -- font = 0,
+ -- n = 0,
+ -- distance = 0,
+ -- hoffset = 0,
+ -- voffset = 0,
+}
+
+local droppers = { }
+
+typesetters.paragraphs.droppers = droppers
+
+function droppers.set(specification)
+ dropper = specification or { }
+end
+
+function droppers.freeze()
+ if dropper.enabled then
+ dropper.font = font.current()
+ end
+end
+
+-- dropped caps experiment (will be done properly when luatex
+-- stores the state in the local par node) .. btw, search still
+-- works with dropped caps, as does an export
+
+-- we need a 'par' attribute and in fact for dropped caps we don't need an
+-- attribute at all ... dropit will become a state counter (or end up in the
+-- localpar user data)
+
+-- for the moment, each paragraph gets a number as id (attribute) .. problem
+-- with nesting .. or anyhow, needed for tagging anyway
+
+-- todo: prevent linebreak .. but normally a dropper ends up at the top of
+-- a page so this has a low priority
+
+local function process(namespace,attribute,head)
+ local done = false
+ if head.id == whatsit_code and head.subtype == localpar_code then
+ -- begin of par
+ local a = head[attribute]
+ if a and a > 0 then
+ if dropper.enabled then
+ dropper.enabled = false -- dangerous for e.g. nested || in tufte
+ local first = head.next
+ if first and first.id == hlist_code then
+ -- parbox .. needs to be set at 0
+ first = first.next
+ end
+ if first and first.id == glyph_code then
+-- if texattribute[a_paragraph] >= 0 then
+-- texattribute[a_paragraph] = unsetvalue
+-- end
+ local char = first.char
+ local prev = first.prev
+ local next = first.next
+ -- if prev.id == hlist_code then
+ -- -- set the width to 0
+ -- end
+ if next and next.id == kern_node then
+ next.kern = 0
+ end
+ first.font = dropper.font or first.font
+ -- can be a helper
+ local ma = dropper.ma or 0
+ local ca = dropper.ca
+ local ta = dropper.ta
+ if ca and ca > 0 then
+ first[a_colorspace] = ma == 0 and 1 or ma
+ first[a_color] = ca
+ end
+ if ta and ta > 0 then
+ first[a_transparency] = ta
+ end
+ --
+ local width = first.width
+ local height = first.height
+ local depth = first.depth
+ local distance = dropper.distance or 0
+ local voffset = dropper.voffset or 0
+ local hoffset = dropper.hoffset or 0
+ first.xoffset = - width - hoffset - distance
+ first.yoffset = - height - voffset
+ if true then
+ -- needed till we can store parindent with localpar
+ first.prev = nil
+ first.next = nil
+ local h = node.hpack(first)
+ h.width = 0
+ h.height = 0
+ h.depth = 0
+ prev.next = h
+ next.prev = h
+ h.next = next
+ h.prev = prev
+ end
+ if dropper.location == variables.margin then
+ -- okay
+ else
+ local lines = tonumber(dropper.n) or 0
+ if lines == 0 then -- safeguard, not too precise
+ lines = math.ceil((height+voffset) / tex.baselineskip.width)
+ end
+ tex.hangafter = - lines
+ tex.hangindent = width + distance
+ end
+ done = true
+ end
+ end
+ end
+ end
+ return head, done
+end
+
+local enabled = false
+
+function paragraphs.set(n)
+ if n == variables.reset or not tonumber(n) or n == 0 then
+ texattribute[a_paragraph] = unsetvalue
+ else
+ if not enabled then
+ tasks.enableaction("processors","typesetters.paragraphs.handler")
+ if trace_paragraphs then
+ report_paragraphs("enabling paragraphs")
+ end
+ enabled = true
+ end
+ texattribute[a_paragraph] = n
+ end
+end
+
+paragraphs.attribute = a_paragraph
+
+paragraphs.handler = nodes.installattributehandler {
+ name = "paragraphs",
+ namespace = paragraphs,
+ processor = process,
+}
diff --git a/tex/context/base/typo-prc.lua b/tex/context/base/typo-prc.lua
index 5b74abd0b..4fb64d0f5 100644
--- a/tex/context/base/typo-prc.lua
+++ b/tex/context/base/typo-prc.lua
@@ -1,125 +1,125 @@
-if not modules then modules = { } end modules ['typo-prc'] = {
- version = 1.001,
- comment = "companion to typo-prc.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- moved from strc-ini.lua
-
-
-local formatters = string.formatters
-local lpegmatch, patterns, P, C, Cs = lpeg.match, lpeg.patterns, lpeg.P, lpeg.C, lpeg.Cs
-
--- processors: syntax: processor->data ... not ok yet
-
-typesetters.processors = typesetters.processors or { }
-local processors = typesetters.processors
-
-local trace_processors = false
-local report_processors = logs.reporter("processors")
-local registered = { }
-
-trackers.register("typesetters.processors", function(v) trace_processors = v end)
-
-function processors.register(p)
- registered[p] = true
-end
-
-function processors.reset(p)
- registered[p] = nil
-end
-
---~ local splitter = lpeg.splitat("->",true) -- also support =>
-
-local becomes = P('->')
-local processor = (1-becomes)^1
-local splitter = C(processor) * becomes * Cs(patterns.argument + patterns.content)
-
-function processors.split(str)
- local p, s = lpegmatch(splitter,str)
- if registered[p] then
- return p, s
- else
- return false, str
- end
-end
-
-function processors.apply(p,s)
- local str = p
- if s == nil then
- p, s = lpegmatch(splitter,p)
- end
- if p and registered[p] then
- if trace_processors then
- report_processors("applying %s processor %a, argument: %s","known",p,s)
- end
- context.applyprocessor(p,s)
- elseif s then
- if trace_processors then
- report_processors("applying %s processor %a, argument: %s","unknown",p,s)
- end
- context(s)
- elseif str then
- if trace_processors then
- report_processors("applying %s processor, data: %s","ignored",str)
- end
- context(str)
- end
-end
-
-function processors.startapply(p,s)
- local str = p
- if s == nil then
- p, s = lpegmatch(splitter,p)
- end
- if p and registered[p] then
- if trace_processors then
- report_processors("start applying %s processor %a","known",p)
- end
- context.applyprocessor(p)
- context("{")
- return s
- elseif p then
- if trace_processors then
- report_processors("start applying %s processor %a","unknown",p)
- end
- context.firstofoneargument()
- context("{")
- return s
- else
- if trace_processors then
- report_processors("start applying %s processor","ignored")
- end
- context.firstofoneargument()
- context("{")
- return str
- end
-end
-
-function processors.stopapply()
- context("}")
- if trace_processors then
- report_processors("stop applying processor")
- end
-end
-
-function processors.tostring(str)
- local p, s = lpegmatch(splitter,str)
- if registered[p] then
- return formatters["\\applyprocessor{%s}{%s}"](p,s)
- else
- return str
- end
-end
-
-function processors.stripped(str)
- local p, s = lpegmatch(splitter,str)
- return s or str
-end
-
--- interface
-
-commands.registerstructureprocessor = processors.register
-commands.resetstructureprocessor = processors.reset
+if not modules then modules = { } end modules ['typo-prc'] = {
+ version = 1.001,
+ comment = "companion to typo-prc.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- moved from strc-ini.lua
+
+
+local formatters = string.formatters
+local lpegmatch, patterns, P, C, Cs = lpeg.match, lpeg.patterns, lpeg.P, lpeg.C, lpeg.Cs
+
+-- processors: syntax: processor->data ... not ok yet
+
+typesetters.processors = typesetters.processors or { }
+local processors = typesetters.processors
+
+local trace_processors = false
+local report_processors = logs.reporter("processors")
+local registered = { }
+
+trackers.register("typesetters.processors", function(v) trace_processors = v end)
+
+function processors.register(p)
+ registered[p] = true
+end
+
+function processors.reset(p)
+ registered[p] = nil
+end
+
+--~ local splitter = lpeg.splitat("->",true) -- also support =>
+
+local becomes = P('->')
+local processor = (1-becomes)^1
+local splitter = C(processor) * becomes * Cs(patterns.argument + patterns.content)
+
+function processors.split(str)
+ local p, s = lpegmatch(splitter,str)
+ if registered[p] then
+ return p, s
+ else
+ return false, str
+ end
+end
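+
+-- For instance (assuming a processor named "uppercase" has been registered
+-- with processors.register; the name is only an example):
+--
+-- processors.split("uppercase->sometext") -- "uppercase", "sometext"
+-- processors.split("unknown->sometext")   -- false, "unknown->sometext"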
+
+function processors.apply(p,s)
+ local str = p
+ if s == nil then
+ p, s = lpegmatch(splitter,p)
+ end
+ if p and registered[p] then
+ if trace_processors then
+ report_processors("applying %s processor %a, argument: %s","known",p,s)
+ end
+ context.applyprocessor(p,s)
+ elseif s then
+ if trace_processors then
+ report_processors("applying %s processor %a, argument: %s","unknown",p,s)
+ end
+ context(s)
+ elseif str then
+ if trace_processors then
+ report_processors("applying %s processor, data: %s","ignored",str)
+ end
+ context(str)
+ end
+end
+
+function processors.startapply(p,s)
+ local str = p
+ if s == nil then
+ p, s = lpegmatch(splitter,p)
+ end
+ if p and registered[p] then
+ if trace_processors then
+ report_processors("start applying %s processor %a","known",p)
+ end
+ context.applyprocessor(p)
+ context("{")
+ return s
+ elseif p then
+ if trace_processors then
+ report_processors("start applying %s processor %a","unknown",p)
+ end
+ context.firstofoneargument()
+ context("{")
+ return s
+ else
+ if trace_processors then
+ report_processors("start applying %s processor","ignored")
+ end
+ context.firstofoneargument()
+ context("{")
+ return str
+ end
+end
+
+function processors.stopapply()
+ context("}")
+ if trace_processors then
+ report_processors("stop applying processor")
+ end
+end
+
+function processors.tostring(str)
+ local p, s = lpegmatch(splitter,str)
+ if registered[p] then
+ return formatters["\\applyprocessor{%s}{%s}"](p,s)
+ else
+ return str
+ end
+end
+
+function processors.stripped(str)
+ local p, s = lpegmatch(splitter,str)
+ return s or str
+end
+
+-- interface
+
+commands.registerstructureprocessor = processors.register
+commands.resetstructureprocessor = processors.reset
diff --git a/tex/context/base/typo-rep.lua b/tex/context/base/typo-rep.lua
index 8451ce52b..e7e11bbf0 100644
--- a/tex/context/base/typo-rep.lua
+++ b/tex/context/base/typo-rep.lua
@@ -1,128 +1,128 @@
-if not modules then modules = { } end modules ['typo-rep'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This was rather boring to program (more of the same) but I could
--- endure it by listening to a couple cd's by The Scene and The Lau
--- on the squeezebox on my desk.
-
-local trace_stripping = false trackers.register("nodes.stripping", function(v) trace_stripping = v end)
- trackers.register("fonts.stripping", function(v) trace_stripping = v end)
-
-local report_stripping = logs.reporter("fonts","stripping")
-
-local nodes, node = nodes, node
-
-local delete_node = nodes.delete
-local replace_node = nodes.replace
-local copy_node = node.copy
-
-local chardata = characters.data
-local collected = false
-local a_stripping = attributes.private("stripping")
-local fontdata = fonts.hashes.identifiers
-local tasks = nodes.tasks
-
-local texattribute = tex.attribute
-local unsetvalue = attributes.unsetvalue
-
-local v_reset = interfaces.variables.reset
-
-local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
-
--- todo: other namespace -> typesetters
-
-nodes.stripping = nodes.stripping or { } local stripping = nodes.stripping
-stripping.glyphs = stripping.glyphs or { } local glyphs = stripping.glyphs
-
-local function initialize()
- for k,v in next, chardata do
- if v.category == "cf" and v.visible ~= "yes" then
- if not glyphs[k] then
- glyphs[k] = true
- end
- end
- end
- initialize = nil
-end
-
-local function process(what,head,current,char)
- if what == true then
- if trace_stripping then
- report_stripping("deleting %C from text",char)
- end
- head, current = delete_node(head,current)
- elseif type(what) == "function" then
- head, current = what(head,current)
- current = current.next
- if trace_stripping then
- report_stripping("processing %C in text",char)
- end
- elseif what then -- assume node
- head, current = replace_node(head,current,copy_node(what))
- current = current.next
- if trace_stripping then
- report_stripping("replacing %C in text",char)
- end
- end
- return head, current
-end
-
-function nodes.handlers.stripping(head)
- local current, done = head, false
- while current do
- if current.id == glyph_code then
- -- it's more efficient to keep track of what needs to be kept
- local todo = current[a_stripping]
- if todo == 1 then
- local char = current.char
- local what = glyphs[char]
- if what then
- head, current = process(what,head,current,char)
- done = true
- else -- handling of spacing etc has to be done elsewhere
- current = current.next
- end
- else
- current = current.next
- end
- else
- current = current.next
- end
- end
- return head, done
-end
-
-local enabled = false
-
-function stripping.set(n) -- number or 'reset'
- if n == v_reset then
- n = unsetvalue
- else
- n = tonumber(n)
- if n then
- if not enabled then
- if initialize then initialize() end
- tasks.enableaction("processors","nodes.handlers.stripping")
- enabled = true
- end
- else
- n = unsetvalue
- end
- end
- texattribute[a_stripping] = n
-end
-
--- why not in task-ini?
-
-tasks.appendaction("processors","fonts","nodes.handlers.stripping",nil,"nodes.handlers.characters")
-tasks.disableaction("processors","nodes.handlers.stripping")
-
--- interface
-
-commands.setcharacterstripping = stripping.set
+if not modules then modules = { } end modules ['typo-rep'] = {
+ version = 1.001,
+ comment = "companion to node-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This was rather boring to program (more of the same) but I could
+-- endure it by listening to a couple of CDs by The Scene and The Lau
+-- on the Squeezebox on my desk.
+
+local trace_stripping = false trackers.register("nodes.stripping", function(v) trace_stripping = v end)
+ trackers.register("fonts.stripping", function(v) trace_stripping = v end)
+
+local report_stripping = logs.reporter("fonts","stripping")
+
+local nodes, node = nodes, node
+
+local delete_node = nodes.delete
+local replace_node = nodes.replace
+local copy_node = node.copy
+
+local chardata = characters.data
+local collected = false
+local a_stripping = attributes.private("stripping")
+local fontdata = fonts.hashes.identifiers
+local tasks = nodes.tasks
+
+local texattribute = tex.attribute
+local unsetvalue = attributes.unsetvalue
+
+local v_reset = interfaces.variables.reset
+
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
+
+-- todo: other namespace -> typesetters
+
+nodes.stripping = nodes.stripping or { } local stripping = nodes.stripping
+stripping.glyphs = stripping.glyphs or { } local glyphs = stripping.glyphs
+
+local function initialize()
+ for k,v in next, chardata do
+ if v.category == "cf" and v.visible ~= "yes" then
+ if not glyphs[k] then
+ glyphs[k] = true
+ end
+ end
+ end
+ initialize = nil
+end
+
+local function process(what,head,current,char)
+ if what == true then
+ if trace_stripping then
+ report_stripping("deleting %C from text",char)
+ end
+ head, current = delete_node(head,current)
+ elseif type(what) == "function" then
+ head, current = what(head,current)
+ current = current.next
+ if trace_stripping then
+ report_stripping("processing %C in text",char)
+ end
+ elseif what then -- assume node
+ head, current = replace_node(head,current,copy_node(what))
+ current = current.next
+ if trace_stripping then
+ report_stripping("replacing %C in text",char)
+ end
+ end
+ return head, current
+end
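+
+-- So a value in the glyphs table can be 'true' (delete the glyph), a function
+-- (which gets and returns head and current), or a node that replaces the
+-- glyph. For example (the character is only an illustration):
+--
+-- nodes.stripping.glyphs[0xFEFF] = true -- strip zero width no-break spaces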
+
+function nodes.handlers.stripping(head)
+ local current, done = head, false
+ while current do
+ if current.id == glyph_code then
+ -- it's more efficient to keep track of what needs to be kept
+ local todo = current[a_stripping]
+ if todo == 1 then
+ local char = current.char
+ local what = glyphs[char]
+ if what then
+ head, current = process(what,head,current,char)
+ done = true
+ else -- handling of spacing etc has to be done elsewhere
+ current = current.next
+ end
+ else
+ current = current.next
+ end
+ else
+ current = current.next
+ end
+ end
+ return head, done
+end
+
+local enabled = false
+
+function stripping.set(n) -- number or 'reset'
+ if n == v_reset then
+ n = unsetvalue
+ else
+ n = tonumber(n)
+ if n then
+ if not enabled then
+ if initialize then initialize() end
+ tasks.enableaction("processors","nodes.handlers.stripping")
+ enabled = true
+ end
+ else
+ n = unsetvalue
+ end
+ end
+ texattribute[a_stripping] = n
+end
+
+-- why not in task-ini?
+
+tasks.appendaction("processors","fonts","nodes.handlers.stripping",nil,"nodes.handlers.characters")
+tasks.disableaction("processors","nodes.handlers.stripping")
+
+-- interface
+
+commands.setcharacterstripping = stripping.set
diff --git a/tex/context/base/typo-spa.lua b/tex/context/base/typo-spa.lua
index 5eba22889..11de65f7b 100644
--- a/tex/context/base/typo-spa.lua
+++ b/tex/context/base/typo-spa.lua
@@ -1,229 +1,229 @@
-if not modules then modules = { } end modules ['typo-spa'] = {
- version = 1.001,
- comment = "companion to typo-spa.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local next, type = next, type
-local utfchar = utf.char
-
-local trace_spacing = false trackers.register("typesetters.spacing", function(v) trace_spacing = v end)
-
-local report_spacing = logs.reporter("typesetting","spacing")
-
-local nodes, fonts, node = nodes, fonts, node
-
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-local remove_node = nodes.remove
-local end_of_math = node.end_of_math
-
-local fonthashes = fonts.hashes
-local fontdata = fonthashes.identifiers
-local quaddata = fonthashes.quads
-
-local texattribute = tex.attribute
-local unsetvalue = attributes.unsetvalue
-
-local v_reset = interfaces.variables.reset
-
-local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
-local math_code = nodecodes.math
-
-local somespace = nodes.somespace
-local somepenalty = nodes.somepenalty
-
-local nodepool = nodes.pool
-local tasks = nodes.tasks
-
-local new_penalty = nodepool.penalty
-local new_glue = nodepool.glue
-
-typesetters = typesetters or { }
-local typesetters = typesetters
-
-typesetters.spacings = typesetters.spacings or { }
-local spacings = typesetters.spacings
-
-spacings.mapping = spacings.mapping or { }
-spacings.numbers = spacings.numbers or { }
-
-local a_spacings = attributes.private("spacing")
-spacings.attribute = a_spacings
-
-storage.register("typesetters/spacings/mapping", spacings.mapping, "typesetters.spacings.mapping")
-
-local mapping = spacings.mapping
-local numbers = spacings.numbers
-
-for i=1,#mapping do
- local m = mapping[i]
- numbers[m.name] = m
-end
-
--- todo cache lastattr
-
-local function process(namespace,attribute,head)
- local done = false
- local start = head
- -- head is always begin of par (whatsit), so we have at least two prev nodes
- -- penalty followed by glue
- while start do
- local id = start.id
- if id == glyph_code then
- local attr = start[attribute]
- if attr and attr > 0 then
- local data = mapping[attr]
- if data then
- local char = start.char
- local map = data.characters[char]
- start[attribute] = unsetvalue -- needed?
- if map then
- local left = map.left
- local right = map.right
- local alternative = map.alternative
- local quad = quaddata[start.font]
- local prev = start.prev
- if left and left ~= 0 and prev then
- local ok = false
- local prevprev = prev.prev
- if alternative == 1 then
- local somespace = somespace(prev,true)
- if somespace then
- local somepenalty = somepenalty(prevprev,10000)
- if somepenalty then
- if trace_spacing then
- report_spacing("removing penalty and space before %C (left)",char)
- end
- head = remove_node(head,prev,true)
- head = remove_node(head,prevprev,true)
- else
- if trace_spacing then
- report_spacing("removing space before %C (left)",char)
- end
- head = remove_node(head,prev,true)
- end
- end
- ok = true
- else
- ok = not (somespace(prev,true) and somepenalty(prevprev,true)) or somespace(prev,true)
- end
- if ok then
- if trace_spacing then
- report_spacing("inserting penalty and space before %C (left)",char)
- end
- insert_node_before(head,start,new_penalty(10000))
- insert_node_before(head,start,new_glue(left*quad))
- done = true
- end
- end
- local next = start.next
- if right and right ~= 0 and next then
- local ok = false
- local nextnext = next.next
- if alternative == 1 then
- local somepenalty = somepenalty(next,10000)
- if somepenalty then
- local somespace = somespace(nextnext,true)
- if somespace then
- if trace_spacing then
- report_spacing("removing penalty and space after %C right",char)
- end
- head = remove_node(head,next,true)
- head = remove_node(head,nextnext,true)
- end
- else
- local somespace = somespace(next,true)
- if somespace then
- if trace_spacing then
- report_spacing("removing space after %C (right)", char)
- end
- head = remove_node(head,next,true)
- end
- end
- ok = true
- else
- ok = not (somepenalty(next,10000) and somespace(nextnext,true)) or somespace(next,true)
- end
- if ok then
- if trace_spacing then
- report_spacing("inserting penalty and space after %C (right)",char)
- end
- insert_node_after(head,start,new_glue(right*quad))
- insert_node_after(head,start,new_penalty(10000))
- done = true
- end
- end
- end
- end
- end
- elseif id == math_code then
- start = end_of_math(start) -- weird, can return nil .. no math end?
- end
- if start then
- start = start.next
- end
- end
- return head, done
-end
-
-local enabled = false
-
-function spacings.define(name)
- local data = numbers[name]
- if data then
- -- error
- else
- local number = #mapping + 1
- local data = {
- name = name,
- number = number,
- characters = { },
- }
- mapping[number] = data
- numbers[name] = data
- end
-end
-
-function spacings.setup(name,char,settings)
- local data = numbers[name]
- if not data then
- -- error
- else
- data.characters[char] = settings
- end
-end
-
-function spacings.set(name)
- local n = unsetvalue
- if name ~= v_reset then
- local data = numbers[name]
- if data then
- if not enabled then
- tasks.enableaction("processors","typesetters.spacings.handler")
- enabled = true
- end
- n = data.number or unsetvalue
- end
- end
- texattribute[a_spacings] = n
-end
-
-function spacings.reset()
- texattribute[a_spacings] = unsetvalue
-end
-
-spacings.handler = nodes.installattributehandler {
- name = "spacing",
- namespace = spacings,
- processor = process,
-}
-
--- interface
-
-commands.definecharacterspacing = spacings.define
-commands.setupcharacterspacing = spacings.setup
-commands.setcharacterspacing = spacings.set
+if not modules then modules = { } end modules ['typo-spa'] = {
+ version = 1.001,
+ comment = "companion to typo-spa.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local next, type = next, type
+local utfchar = utf.char
+
+local trace_spacing = false trackers.register("typesetters.spacing", function(v) trace_spacing = v end)
+
+local report_spacing = logs.reporter("typesetting","spacing")
+
+local nodes, fonts, node = nodes, fonts, node
+
+local insert_node_before = node.insert_before
+local insert_node_after = node.insert_after
+local remove_node = nodes.remove
+local end_of_math = node.end_of_math
+
+local fonthashes = fonts.hashes
+local fontdata = fonthashes.identifiers
+local quaddata = fonthashes.quads
+
+local texattribute = tex.attribute
+local unsetvalue = attributes.unsetvalue
+
+local v_reset = interfaces.variables.reset
+
+local nodecodes = nodes.nodecodes
+local glyph_code = nodecodes.glyph
+local math_code = nodecodes.math
+
+local somespace = nodes.somespace
+local somepenalty = nodes.somepenalty
+
+local nodepool = nodes.pool
+local tasks = nodes.tasks
+
+local new_penalty = nodepool.penalty
+local new_glue = nodepool.glue
+
+typesetters = typesetters or { }
+local typesetters = typesetters
+
+typesetters.spacings = typesetters.spacings or { }
+local spacings = typesetters.spacings
+
+spacings.mapping = spacings.mapping or { }
+spacings.numbers = spacings.numbers or { }
+
+local a_spacings = attributes.private("spacing")
+spacings.attribute = a_spacings
+
+storage.register("typesetters/spacings/mapping", spacings.mapping, "typesetters.spacings.mapping")
+
+local mapping = spacings.mapping
+local numbers = spacings.numbers
+
+for i=1,#mapping do
+ local m = mapping[i]
+ numbers[m.name] = m
+end
+
+-- todo cache lastattr
+
+local function process(namespace,attribute,head)
+ local done = false
+ local start = head
+ -- head is always begin of par (whatsit), so we have at least two prev nodes
+ -- penalty followed by glue
+ while start do
+ local id = start.id
+ if id == glyph_code then
+ local attr = start[attribute]
+ if attr and attr > 0 then
+ local data = mapping[attr]
+ if data then
+ local char = start.char
+ local map = data.characters[char]
+ start[attribute] = unsetvalue -- needed?
+ if map then
+ local left = map.left
+ local right = map.right
+ local alternative = map.alternative
+ local quad = quaddata[start.font]
+ local prev = start.prev
+ if left and left ~= 0 and prev then
+ local ok = false
+ local prevprev = prev.prev
+ if alternative == 1 then
+ local somespace = somespace(prev,true)
+ if somespace then
+ local somepenalty = somepenalty(prevprev,10000)
+ if somepenalty then
+ if trace_spacing then
+ report_spacing("removing penalty and space before %C (left)",char)
+ end
+ head = remove_node(head,prev,true)
+ head = remove_node(head,prevprev,true)
+ else
+ if trace_spacing then
+ report_spacing("removing space before %C (left)",char)
+ end
+ head = remove_node(head,prev,true)
+ end
+ end
+ ok = true
+ else
+ ok = not (somespace(prev,true) and somepenalty(prevprev,true)) or somespace(prev,true)
+ end
+ if ok then
+ if trace_spacing then
+ report_spacing("inserting penalty and space before %C (left)",char)
+ end
+ insert_node_before(head,start,new_penalty(10000))
+ insert_node_before(head,start,new_glue(left*quad))
+ done = true
+ end
+ end
+ local next = start.next
+ if right and right ~= 0 and next then
+ local ok = false
+ local nextnext = next.next
+ if alternative == 1 then
+ local somepenalty = somepenalty(next,10000)
+ if somepenalty then
+ local somespace = somespace(nextnext,true)
+ if somespace then
+ if trace_spacing then
+ report_spacing("removing penalty and space after %C right",char)
+ end
+ head = remove_node(head,next,true)
+ head = remove_node(head,nextnext,true)
+ end
+ else
+ local somespace = somespace(next,true)
+ if somespace then
+ if trace_spacing then
+ report_spacing("removing space after %C (right)", char)
+ end
+ head = remove_node(head,next,true)
+ end
+ end
+ ok = true
+ else
+ ok = not (somepenalty(next,10000) and somespace(nextnext,true)) or somespace(next,true)
+ end
+ if ok then
+ if trace_spacing then
+ report_spacing("inserting penalty and space after %C (right)",char)
+ end
+ insert_node_after(head,start,new_glue(right*quad))
+ insert_node_after(head,start,new_penalty(10000))
+ done = true
+ end
+ end
+ end
+ end
+ end
+ elseif id == math_code then
+ start = end_of_math(start) -- weird, can return nil .. no math end?
+ end
+ if start then
+ start = start.next
+ end
+ end
+ return head, done
+end
+
+local enabled = false
+
+function spacings.define(name)
+ local data = numbers[name]
+ if data then
+ -- error
+ else
+ local number = #mapping + 1
+ local data = {
+ name = name,
+ number = number,
+ characters = { },
+ }
+ mapping[number] = data
+ numbers[name] = data
+ end
+end
+
+function spacings.setup(name,char,settings)
+ local data = numbers[name]
+ if not data then
+ -- error
+ else
+ data.characters[char] = settings
+ end
+end
+
+function spacings.set(name)
+ local n = unsetvalue
+ if name ~= v_reset then
+ local data = numbers[name]
+ if data then
+ if not enabled then
+ tasks.enableaction("processors","typesetters.spacings.handler")
+ enabled = true
+ end
+ n = data.number or unsetvalue
+ end
+ end
+ texattribute[a_spacings] = n
+end
+
+function spacings.reset()
+ texattribute[a_spacings] = unsetvalue
+end
+
+spacings.handler = nodes.installattributehandler {
+ name = "spacing",
+ namespace = spacings,
+ processor = process,
+}
+
+-- interface
+
+commands.definecharacterspacing = spacings.define
+commands.setupcharacterspacing = spacings.setup
+commands.setcharacterspacing = spacings.set
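
As a quick illustration of the interface defined above, here is a minimal usage
sketch in Lua; the mapping name "frenchpunctuation" and the factors are made-up
examples, not values shipped with the distribution:

    local spacings = typesetters.spacings
    spacings.define("frenchpunctuation")
    -- insert 0.25 quad (preceded by a penalty) before a colon, nothing after it
    spacings.setup("frenchpunctuation", 0x3A, { left = 0.25, right = 0, alternative = 1 })
    spacings.set("frenchpunctuation")   -- enables the handler and sets the attribute
    -- ... typeset some text ...
    spacings.reset()
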
diff --git a/tex/context/base/unic-ini.lua b/tex/context/base/unic-ini.lua
index cca1f0617..6a0c387d3 100644
--- a/tex/context/base/unic-ini.lua
+++ b/tex/context/base/unic-ini.lua
@@ -1,19 +1,19 @@
-if not modules then modules = { } end modules ['unic-ini'] = {
- version = 1.001,
- comment = "companion to unic-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local utfchar = utf.char
-
--- Beware, initializing unicodechar happens at first usage and takes
--- 0.05 to 0.1 seconds (lots of function calls).
-
-function commands.unicodechar(asked)
- local n = characters.unicodechar(asked)
- if n then
- context(utfchar(n))
- end
-end
+if not modules then modules = { } end modules ['unic-ini'] = {
+ version = 1.001,
+ comment = "companion to unic-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local utfchar = utf.char
+
+-- Beware, initializing unicodechar happens at first usage and takes
+-- 0.05 to 0.1 seconds (lots of function calls).
+
+function commands.unicodechar(asked)
+ local n = characters.unicodechar(asked)
+ if n then
+ context(utfchar(n))
+ end
+end
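
A tiny usage sketch of the command above, assuming that characters.unicodechar
resolves both names and numbers (the name "copyright" is only an illustration):

    commands.unicodechar("copyright")   -- typesets the character when the lookup succeeds
    commands.unicodechar(0x00A9)        -- the same character, asked for by number
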
diff --git a/tex/context/base/util-deb.lua b/tex/context/base/util-deb.lua
index 785373f86..9e5233774 100644
--- a/tex/context/base/util-deb.lua
+++ b/tex/context/base/util-deb.lua
@@ -1,128 +1,128 @@
-if not modules then modules = { } end modules ['util-deb'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- the tag is kind of generic and used for functions that are not
--- bound to a variable, like node.new, node.copy etc (contrary to for instance
--- node.has_attribute which is bound to a has_attribute local variable in mkiv)
-
-local debug = require "debug"
-
-local getinfo = debug.getinfo
-local type, next, tostring = type, next, tostring
-local format, find = string.format, string.find
-local is_boolean = string.is_boolean
-
-utilities = utilities or { }
-local debugger = utilities.debugger or { }
-utilities.debugger = debugger
-
-local counters = { }
-local names = { }
-
-local report = logs.reporter("debugger")
-
--- one
-
-local function hook()
- local f = getinfo(2) -- "nS"
- if f then
- local n = "unknown"
- if f.what == "C" then
- n = f.name or ''
- if not names[n] then
- names[n] = format("%42s",n)
- end
- else
- -- source short_src linedefined what name namewhat nups func
- n = f.name or f.namewhat or f.what
- if not n or n == "" then
- n = "?"
- end
- if not names[n] then
- names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
- end
- end
- counters[n] = (counters[n] or 0) + 1
- end
-end
-
-function debugger.showstats(printer,threshold) -- hm, something has changed, rubbish now
- printer = printer or report
- threshold = threshold or 0
- local total, grandtotal, functions = 0, 0, 0
- local dataset = { }
- for name, count in next, counters do
- dataset[#dataset+1] = { name, count }
- end
- table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end)
- for i=1,#dataset do
- local d = dataset[i]
- local name = d[1]
- local count = d[2]
- if count > threshold and not find(name,"for generator") then -- move up
- printer(format("%8i %s\n", count, names[name]))
- total = total + count
- end
- grandtotal = grandtotal + count
- functions = functions + 1
- end
- printer("\n")
- printer(format("functions : % 10i\n", functions))
- printer(format("total : % 10i\n", total))
- printer(format("grand total: % 10i\n", grandtotal))
- printer(format("threshold : % 10i\n", threshold))
-end
-
-function debugger.savestats(filename,threshold)
- local f = io.open(filename,'w')
- if f then
- debugger.showstats(function(str) f:write(str) end,threshold)
- f:close()
- end
-end
-
-function debugger.enable()
- debug.sethook(hook,"c")
-end
-
-function debugger.disable()
- debug.sethook()
---~ counters[debug.getinfo(2,"f").func] = nil
-end
-
---~ debugger.enable()
-
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
---~ print(math.sin(1*.5))
-
---~ debugger.disable()
-
---~ print("")
---~ debugger.showstats()
---~ print("")
---~ debugger.showstats(print,3)
-
--- from the lua book:
-
-function traceback()
- local level = 1
- while true do
- local info = debug.getinfo(level, "Sl")
- if not info then
- break
- elseif info.what == "C" then
- print(format("%3i : C function",level))
- else
- print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
- end
- level = level + 1
- end
-end
+if not modules then modules = { } end modules ['util-deb'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- the tag is kind of generic and used for functions that are not
+-- bound to a variable, like node.new, node.copy etc (contrary to for instance
+-- node.has_attribute which is bound to a has_attribute local variable in mkiv)
+
+local debug = require "debug"
+
+local getinfo = debug.getinfo
+local type, next, tostring = type, next, tostring
+local format, find = string.format, string.find
+local is_boolean = string.is_boolean
+
+utilities = utilities or { }
+local debugger = utilities.debugger or { }
+utilities.debugger = debugger
+
+local counters = { }
+local names = { }
+
+local report = logs.reporter("debugger")
+
+-- one
+
+local function hook()
+ local f = getinfo(2) -- "nS"
+ if f then
+ local n = "unknown"
+ if f.what == "C" then
+ n = f.name or ''
+ if not names[n] then
+ names[n] = format("%42s",n)
+ end
+ else
+ -- source short_src linedefined what name namewhat nups func
+ n = f.name or f.namewhat or f.what
+ if not n or n == "" then
+ n = "?"
+ end
+ if not names[n] then
+ names[n] = format("%42s : % 5i : %s",n,f.linedefined or 0,f.short_src or "unknown source")
+ end
+ end
+ counters[n] = (counters[n] or 0) + 1
+ end
+end
+
+function debugger.showstats(printer,threshold) -- hm, something has changed, rubbish now
+ printer = printer or report
+ threshold = threshold or 0
+ local total, grandtotal, functions = 0, 0, 0
+ local dataset = { }
+ for name, count in next, counters do
+ dataset[#dataset+1] = { name, count }
+ end
+ table.sort(dataset,function(a,b) return a[2] == b[2] and b[1] > a[1] or a[2] > b[2] end)
+ for i=1,#dataset do
+ local d = dataset[i]
+ local name = d[1]
+ local count = d[2]
+ if count > threshold and not find(name,"for generator") then -- move up
+ printer(format("%8i %s\n", count, names[name]))
+ total = total + count
+ end
+ grandtotal = grandtotal + count
+ functions = functions + 1
+ end
+ printer("\n")
+ printer(format("functions : % 10i\n", functions))
+ printer(format("total : % 10i\n", total))
+ printer(format("grand total: % 10i\n", grandtotal))
+ printer(format("threshold : % 10i\n", threshold))
+end
+
+function debugger.savestats(filename,threshold)
+ local f = io.open(filename,'w')
+ if f then
+ debugger.showstats(function(str) f:write(str) end,threshold)
+ f:close()
+ end
+end
+
+function debugger.enable()
+ debug.sethook(hook,"c")
+end
+
+function debugger.disable()
+ debug.sethook()
+--~ counters[debug.getinfo(2,"f").func] = nil
+end
+
+--~ debugger.enable()
+
+--~ print(math.sin(1*.5))
+--~ print(math.sin(1*.5))
+--~ print(math.sin(1*.5))
+--~ print(math.sin(1*.5))
+--~ print(math.sin(1*.5))
+
+--~ debugger.disable()
+
+--~ print("")
+--~ debugger.showstats()
+--~ print("")
+--~ debugger.showstats(print,3)
+
+-- from the lua book:
+
+function traceback()
+ local level = 1
+ while true do
+ local info = debug.getinfo(level, "Sl")
+ if not info then
+ break
+ elseif info.what == "C" then
+ print(format("%3i : C function",level))
+ else
+ print(format("%3i : [%s]:%d",level,info.short_src,info.currentline))
+ end
+ level = level + 1
+ end
+end
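
For completeness, a short sketch of how the call-counting hook above can be
used; the log file name and the threshold are arbitrary examples:

    local debugger = utilities.debugger
    debugger.enable()                    -- installs the "call" hook
    -- ... run the code to be profiled ...
    debugger.disable()
    debugger.showstats(print,10)         -- report functions called more than 10 times
    debugger.savestats("calls.log",10)   -- the same report, written to a file
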
diff --git a/tex/context/base/util-dim.lua b/tex/context/base/util-dim.lua
index 47b2706b7..bbfeae7d4 100644
--- a/tex/context/base/util-dim.lua
+++ b/tex/context/base/util-dim.lua
@@ -1,449 +1,449 @@
-if not modules then modules = { } end modules ['util-dim'] = {
- version = 1.001,
- comment = "support for dimensions",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-Internally LuaTeX works with scaled points, which are represented by
-integers. However, in practice, at least at the TeX end we work with
-more generic units like points (pt). Going from scaled points (numbers)
-to one of those units can be done by using the conversion factors
-collected in the following table.
-
-A conversion function that takes a number, a unit (string) and an
-optional format (string) is implemented using this table.
---ldx]]--
-
-
-local function numbertodimen(n,unit,fmt)
- if type(n) == 'string' then
- return n
- else
- unit = unit or 'pt'
- if not fmt then
- fmt = "%s%s"
- elseif fmt == true then
- fmt = "%0.5f%s"
- end
- return format(fmt,n*dimenfactors[unit],unit)
- -- if fmt then
- -- return format(fmt,n*dimenfactors[unit],unit)
- -- else
- -- return match(format("%.20f",n*dimenfactors[unit]),"(.-0?)0*$") .. unit
- -- end
- end
-end
-
---[[ldx--
-We collect a bunch of converters in the number namespace.
---ldx]]--
-
-number.maxdimen = 1073741823
-number.todimen = numbertodimen
-number.dimenfactors = dimenfactors
-
-function number.topoints (n,fmt) return numbertodimen(n,"pt",fmt) end
-function number.toinches (n,fmt) return numbertodimen(n,"in",fmt) end
-function number.tocentimeters (n,fmt) return numbertodimen(n,"cm",fmt) end
-function number.tomillimeters (n,fmt) return numbertodimen(n,"mm",fmt) end
-function number.toscaledpoints(n,fmt) return numbertodimen(n,"sp",fmt) end
-function number.toscaledpoints(n) return n .. "sp" end
-function number.tobasepoints (n,fmt) return numbertodimen(n,"bp",fmt) end
-function number.topicas (n,fmt) return numbertodimen(n,"pc",fmt) end
-function number.todidots (n,fmt) return numbertodimen(n,"dd",fmt) end
-function number.tociceros (n,fmt) return numbertodimen(n,"cc",fmt) end
-function number.tonewdidots (n,fmt) return numbertodimen(n,"nd",fmt) end
-function number.tonewciceros (n,fmt) return numbertodimen(n,"nc",fmt) end
-
---[[ldx--
-More interesting is to implement a (sort of) dimen datatype, one
-that permits calculations too. First we define a function that
-converts a string to scaled points. We use lpeg. We capture
-a number and optionally a unit. When no unit is given, a constant
-capture takes place.
-
-We use a metatable to intercept errors. When no key is found in
-the table with factors, the metatable will be consulted for an
-alternative index function.
---ldx]]--
-
-setmetatableindex(dimenfactors, function(t,s)
- -- error("wrong dimension: " .. (s or "?")) -- better a message
- return false
-end)
-
---[[ldx--
-We redefine the following function later on, so we comment it
-here (which saves us some bytecode).
---ldx]]--
-
--- function string.todimen(str)
--- if type(str) == "number" then
--- return str
--- else
--- local value, unit = lpegmatch(dimenpair,str)
--- return value/unit
--- end
--- end
---
--- local stringtodimen = string.todimen
-
-local stringtodimen -- assigned later (commenting saves bytecode)
-
-local amount = S("+-")^0 * R("09")^0 * S(".,")^0 * R("09")^0
-local unit = P("pt") + P("cm") + P("mm") + P("sp") + P("bp") + P("in") +
- P("pc") + P("dd") + P("cc") + P("nd") + P("nc")
-
-local validdimen = amount * unit
-
-lpeg.patterns.validdimen = validdimen
-
---[[ldx--
-The main (and globally) visible representation of a dimen is defined next: it is
-a one-element table. The unit that is returned from the match is normally a number
-(one of the previously defined factors) but we also accept functions. Later we will
-see why. This function is redefined later.
---ldx]]--
-
--- function dimen(a)
--- if a then
--- local ta= type(a)
--- if ta == "string" then
--- local value, unit = lpegmatch(pattern,a)
--- if type(unit) == "function" then
--- k = value/unit()
--- else
--- k = value/unit
--- end
--- a = k
--- elseif ta == "table" then
--- a = a[1]
--- end
--- return setmetatable({ a }, dimensions)
--- else
--- return setmetatable({ 0 }, dimensions)
--- end
--- end
-
---[[ldx--
-This function returns a small hash with a metatable attached. It is
-through this metatable that we can do the calculations. We could have
-shared some of the code but for reasons of speed we don't.
---ldx]]--
-
-function dimensions.__add(a, b)
- local ta, tb = type(a), type(b)
- if ta == "string" then a = stringtodimen(a) elseif ta == "table" then a = a[1] end
- if tb == "string" then b = stringtodimen(b) elseif tb == "table" then b = b[1] end
- return setmetatable({ a + b }, dimensions)
-end
-
-function dimensions.__sub(a, b)
- local ta, tb = type(a), type(b)
- if ta == "string" then a = stringtodimen(a) elseif ta == "table" then a = a[1] end
- if tb == "string" then b = stringtodimen(b) elseif tb == "table" then b = b[1] end
- return setmetatable({ a - b }, dimensions)
-end
-
-function dimensions.__mul(a, b)
- local ta, tb = type(a), type(b)
- if ta == "string" then a = stringtodimen(a) elseif ta == "table" then a = a[1] end
- if tb == "string" then b = stringtodimen(b) elseif tb == "table" then b = b[1] end
- return setmetatable({ a * b }, dimensions)
-end
-
-function dimensions.__div(a, b)
- local ta, tb = type(a), type(b)
- if ta == "string" then a = stringtodimen(a) elseif ta == "table" then a = a[1] end
- if tb == "string" then b = stringtodimen(b) elseif tb == "table" then b = b[1] end
- return setmetatable({ a / b }, dimensions)
-end
-
-function dimensions.__unm(a)
- local ta = type(a)
- if ta == "string" then a = stringtodimen(a) elseif ta == "table" then a = a[1] end
- return setmetatable({ - a }, dimensions)
-end
-
---[[ldx--
-It makes no sense to implement the power and modulo functions, but
-the next two do make sense because they permit code like:
-
-local a, b = dimen "10pt", dimen "11pt"
-...
-if a > b then
- ...
-end
-
---ldx]]--
-
--- makes no sense: dimensions.__pow and dimensions.__mod
-
-function dimensions.__lt(a, b)
- return a[1] < b[1]
-end
-
-function dimensions.__eq(a, b)
- return a[1] == b[1]
-end
-
---[[ldx--
-We also need to provide a function for conversion to string (so that
-we can print dimensions). We print them as points, just like TeX does.
-
-Since it does not take much code, we also provide a few accessors:
-
-print(dimen().pt)
-print(dimen().sp)
-
---ldx]]--
-
-function dimensions.__index(tab,key)
- local d = dimenfactors[key]
- if not d then
- error("illegal property of dimen: " .. key)
- d = 1
- end
- return 1/d
-end
-
---[[ldx--
-In the converter from string to dimension we support functions as
-factors. This is because in TeX we have a few more units: ex and em.
-These are not constant factors but depend on the current font. They are
-not defined by default, but need an explicit function call. This is
-because at the moment that this code is loaded, the relevant tables that
-hold the functions needed may not yet be available.
-
-The previous code is rather efficient (also thanks to lpeg) but we can
-speed it up by caching converted dimensions. On my machine (2008) a test
-loop takes about 25.5 seconds.
-
-When we cache converted strings this becomes 16.3 seconds. In order not
-to waste too much memory on it, we tag the values of the cache as being
-weak, which means that the garbage collector will collect them in a next
-sweep. This means that in most cases the speedup mostly affects the
-current couple of calculations and as such the speed penalty is small.
-
-We redefine two previously defined functions that can benefit from
-this:
---ldx]]--
-
-local known = { } setmetatable(known, { __mode = "v" })
-
-function dimen(a)
- if a then
- local ta= type(a)
- if ta == "string" then
- local k = known[a]
- if k then
- a = k
- else
- local value, unit = lpegmatch(dimenpair,a)
- if type(unit) == "function" then
- k = value/unit()
- else
- k = value/unit
- end
- known[a] = k
- a = k
- end
- elseif ta == "table" then
- a = a[1]
- end
- return setmetatable({ a }, dimensions)
- else
- return setmetatable({ 0 }, dimensions)
- end
-end
-
-function string.todimen(str) -- maybe use tex.sp when available
- if type(str) == "number" then
- return str
- else
- local k = known[str]
- if not k then
- local value, unit = lpegmatch(dimenpair,str)
- if value and unit then
- k = value/unit -- to be considered: round
- else
- k = 0
- end
- -- print(str,value,unit)
- known[str] = k
- end
- return k
- end
-end
-
---~ local known = { }
-
---~ function string.todimen(str) -- maybe use tex.sp
---~ local k = known[str]
---~ if not k then
---~ k = tex.sp(str)
---~ known[str] = k
---~ end
---~ return k
---~ end
-
-stringtodimen = string.todimen -- local variable defined earlier
-
-function number.toscaled(d)
- return format("%0.5f",d/2^16)
-end
-
---[[ldx--
-In a similar fashion we can define a glue datatype. In that case we
-probably use a hash instead of a one-element table.
---ldx]]--
-
---[[ldx--
-Goodies:
---ldx]]--
-
-function number.percent(n,d) -- will be cleaned up once luatex 0.30 is out
- d = d or tex.hsize
- if type(d) == "string" then
- d = stringtodimen(d)
- end
- return (n/100) * d
-end
-
-number["%"] = number.percent
+if not modules then modules = { } end modules ['util-dim'] = {
+ version = 1.001,
+ comment = "support for dimensions",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[ldx--
+Internally LuaTeX works with scaled points, which are represented by
+integers. However, in practice, at least at the TeX end we work with
+more generic units like points (pt). Going from scaled points (numbers)
+to one of those units can be done by using the conversion factors
+collected in the following table.
+
+A conversion function that takes a number, a unit (string) and an
+optional format (string) is implemented using this table.
+--ldx]]--
+
+
+local function numbertodimen(n,unit,fmt)
+ if type(n) == 'string' then
+ return n
+ else
+ unit = unit or 'pt'
+ if not fmt then
+ fmt = "%s%s"
+ elseif fmt == true then
+ fmt = "%0.5f%s"
+ end
+ return format(fmt,n*dimenfactors[unit],unit)
+ -- if fmt then
+ -- return format(fmt,n*dimenfactors[unit],unit)
+ -- else
+ -- return match(format("%.20f",n*dimenfactors[unit]),"(.-0?)0*$") .. unit
+ -- end
+ end
+end
+
+--[[ldx--
+We collect a bunch of converters in the number namespace.
+--ldx]]--
+
+number.maxdimen = 1073741823
+number.todimen = numbertodimen
+number.dimenfactors = dimenfactors
+
+function number.topoints (n,fmt) return numbertodimen(n,"pt",fmt) end
+function number.toinches (n,fmt) return numbertodimen(n,"in",fmt) end
+function number.tocentimeters (n,fmt) return numbertodimen(n,"cm",fmt) end
+function number.tomillimeters (n,fmt) return numbertodimen(n,"mm",fmt) end
+function number.toscaledpoints(n,fmt) return numbertodimen(n,"sp",fmt) end
+function number.toscaledpoints(n) return n .. "sp" end
+function number.tobasepoints (n,fmt) return numbertodimen(n,"bp",fmt) end
+function number.topicas (n,fmt) return numbertodimen(n,"pc",fmt) end
+function number.todidots (n,fmt) return numbertodimen(n,"dd",fmt) end
+function number.tociceros (n,fmt) return numbertodimen(n,"cc",fmt) end
+function number.tonewdidots (n,fmt) return numbertodimen(n,"nd",fmt) end
+function number.tonewciceros (n,fmt) return numbertodimen(n,"nc",fmt) end
+
+--[[ldx--
+More interesting is to implement a (sort of) dimen datatype, one
+that permits calculations too. First we define a function that
+converts a string to scaled points. We use lpeg. We capture
+a number and optionally a unit. When no unit is given, a constant
+capture takes place.
+
+We use a metatable to intercept errors. When no key is found in
+the table with factors, the metatable will be consulted for an
+alternative index function.
+--ldx]]--
+
+setmetatableindex(dimenfactors, function(t,s)
+ -- error("wrong dimension: " .. (s or "?")) -- better a message
+ return false
+end)
+
+--[[ldx--
+We redefine the following function later on, so we comment it
+here (which saves us some bytecode).
+--ldx]]--
+
+-- function string.todimen(str)
+-- if type(str) == "number" then
+-- return str
+-- else
+-- local value, unit = lpegmatch(dimenpair,str)
+-- return value/unit
+-- end
+-- end
+--
+-- local stringtodimen = string.todimen
+
+local stringtodimen -- assigned later (commenting saves bytecode)
+
+local amount = S("+-")^0 * R("09")^0 * S(".,")^0 * R("09")^0
+local unit = P("pt") + P("cm") + P("mm") + P("sp") + P("bp") + P("in") +
+ P("pc") + P("dd") + P("cc") + P("nd") + P("nc")
+
+local validdimen = amount * unit
+
+lpeg.patterns.validdimen = validdimen
+
+--[[ldx--
+The main (and globally) visible representation of a dimen is defined next: it is
+a one-element table. The unit that is returned from the match is normally a number
+(one of the previously defined factors) but we also accept functions. Later we will
+see why. This function is redefined later.
+--ldx]]--
+
+-- function dimen(a)
+-- if a then
+-- local ta= type(a)
+-- if ta == "string" then
+-- local value, unit = lpegmatch(pattern,a)
+-- if type(unit) == "function" then
+-- k = value/unit()
+-- else
+-- k = value/unit
+-- end
+-- a = k
+-- elseif ta == "table" then
+-- a = a[1]
+-- end
+-- return setmetatable({ a }, dimensions)
+-- else
+-- return setmetatable({ 0 }, dimensions)
+-- end
+-- end
+
+--[[ldx--
+This function returns a small hash with a metatable attached. It is
+through this metatable that we can do the calculations. We could have
+shared some of the code but for reasons of speed we don't.
+--ldx]]--
+
+function dimensions.__add(a, b)
+ local ta, tb = type(a), type(b)
+ if ta == "string" then a = stringtodimen(a) elseif ta == "table" then a = a[1] end
+ if tb == "string" then b = stringtodimen(b) elseif tb == "table" then b = b[1] end
+ return setmetatable({ a + b }, dimensions)
+end
+
+function dimensions.__sub(a, b)
+ local ta, tb = type(a), type(b)
+ if ta == "string" then a = stringtodimen(a) elseif ta == "table" then a = a[1] end
+ if tb == "string" then b = stringtodimen(b) elseif tb == "table" then b = b[1] end
+ return setmetatable({ a - b }, dimensions)
+end
+
+function dimensions.__mul(a, b)
+ local ta, tb = type(a), type(b)
+ if ta == "string" then a = stringtodimen(a) elseif ta == "table" then a = a[1] end
+ if tb == "string" then b = stringtodimen(b) elseif tb == "table" then b = b[1] end
+ return setmetatable({ a * b }, dimensions)
+end
+
+function dimensions.__div(a, b)
+ local ta, tb = type(a), type(b)
+ if ta == "string" then a = stringtodimen(a) elseif ta == "table" then a = a[1] end
+ if tb == "string" then b = stringtodimen(b) elseif tb == "table" then b = b[1] end
+ return setmetatable({ a / b }, dimensions)
+end
+
+function dimensions.__unm(a)
+ local ta = type(a)
+ if ta == "string" then a = stringtodimen(a) elseif ta == "table" then a = a[1] end
+ return setmetatable({ - a }, dimensions)
+end
+
+--[[ldx--
+It makes no sense to implement the power and modulo functions, but
+the next two do make sense because they permit code like:
+
+local a, b = dimen "10pt", dimen "11pt"
+...
+if a > b then
+ ...
+end
+
+--ldx]]--
+
+-- makes no sense: dimensions.__pow and dimensions.__mod
+
+function dimensions.__lt(a, b)
+ return a[1] < b[1]
+end
+
+function dimensions.__eq(a, b)
+ return a[1] == b[1]
+end
+
+--[[ldx--
+We also need to provide a function for conversion to string (so that
+we can print dimensions). We print them as points, just like TeX does.
+
+Since it does not take much code, we also provide a few accessors:
+
+print(dimen().pt)
+print(dimen().sp)
+
+--ldx]]--
+
+function dimensions.__index(tab,key)
+ local d = dimenfactors[key]
+ if not d then
+ error("illegal property of dimen: " .. key)
+ d = 1
+ end
+ return 1/d
+end
+
+--[[ldx--
+In the converter from string to dimension we support functions as
+factors. This is because in TeX we have a few more units: ex and em.
+These are not constant factors but depend on the current font. They are
+not defined by default, but need an explicit function call. This is
+because at the moment that this code is loaded, the relevant tables that
+hold the functions needed may not yet be available.
+
+The previous code is rather efficient (also thanks to lpeg) but we can
+speed it up by caching converted dimensions. On my machine (2008) a test
+loop takes about 25.5 seconds.
+
+When we cache converted strings this becomes 16.3 seconds. In order not
+to waste too much memory on it, we tag the values of the cache as being
+weak, which means that the garbage collector will collect them in a next
+sweep. This means that in most cases the speedup mostly affects the
+current couple of calculations and as such the speed penalty is small.
+
+We redefine two previously defined functions that can benefit from
+this:
+--ldx]]--
+
+local known = { } setmetatable(known, { __mode = "v" })
+
+function dimen(a)
+ if a then
+ local ta= type(a)
+ if ta == "string" then
+ local k = known[a]
+ if k then
+ a = k
+ else
+ local value, unit = lpegmatch(dimenpair,a)
+ if type(unit) == "function" then
+ k = value/unit()
+ else
+ k = value/unit
+ end
+ known[a] = k
+ a = k
+ end
+ elseif ta == "table" then
+ a = a[1]
+ end
+ return setmetatable({ a }, dimensions)
+ else
+ return setmetatable({ 0 }, dimensions)
+ end
+end
+
+function string.todimen(str) -- maybe use tex.sp when available
+ if type(str) == "number" then
+ return str
+ else
+ local k = known[str]
+ if not k then
+ local value, unit = lpegmatch(dimenpair,str)
+ if value and unit then
+ k = value/unit -- to be considered: round
+ else
+ k = 0
+ end
+ -- print(str,value,unit)
+ known[str] = k
+ end
+ return k
+ end
+end
+
+--~ local known = { }
+
+--~ function string.todimen(str) -- maybe use tex.sp
+--~ local k = known[str]
+--~ if not k then
+--~ k = tex.sp(str)
+--~ known[str] = k
+--~ end
+--~ return k
+--~ end
+
+stringtodimen = string.todimen -- local variable defined earlier
+
+function number.toscaled(d)
+ return format("%0.5f",d/2^16)
+end
+
+--[[ldx--
+In a similar fashion we can define a glue datatype. In that case we
+probably use a hash instead of a one-element table.
+--ldx]]--
+
+--[[ldx--
+Goodies:
+--ldx]]--
+
+function number.percent(n,d) -- will be cleaned up once luatex 0.30 is out
+ d = d or tex.hsize
+ if type(d) == "string" then
+ d = stringtodimen(d)
+ end
+ return (n/100) * d
+end
+
+number["%"] = number.percent
diff --git a/tex/context/base/util-env.lua b/tex/context/base/util-env.lua
index f4f3ef69f..1b1157931 100644
--- a/tex/context/base/util-env.lua
+++ b/tex/context/base/util-env.lua
@@ -1,287 +1,287 @@
-if not modules then modules = { } end modules ['util-env'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local allocate, mark = utilities.storage.allocate, utilities.storage.mark
-
-local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find
-local unquoted, quoted = string.unquoted, string.quoted
-local concat, insert, remove = table.concat, table.insert, table.remove
-
-environment = environment or { }
-local environment = environment
-
--- precautions
-
-os.setlocale(nil,nil) -- useless feature and even dangerous in luatex
-
-function os.setlocale()
- -- no way you can mess with it
-end
-
--- dirty tricks (we will replace the texlua call by luatex --luaonly)
-
-local validengines = allocate {
- ["luatex"] = true,
- ["luajittex"] = true,
- -- ["luatex.exe"] = true,
- -- ["luajittex.exe"] = true,
-}
-
-local basicengines = allocate {
- ["luatex"] = "luatex",
- ["texlua"] = "luatex",
- ["texluac"] = "luatex",
- ["luajittex"] = "luajittex",
- ["texluajit"] = "luajittex",
- -- ["texlua.exe"] = "luatex",
- -- ["texluajit.exe"] = "luajittex",
-}
-
-local luaengines=allocate {
- ["lua"] = true,
- ["luajit"] = true,
-}
-
-environment.validengines = validengines
-environment.basicengines = basicengines
-
--- [-1] = binary
--- [ 0] = self
--- [ 1] = argument 1 ...
-
--- instead we could set ranges
-
-if not arg then
- -- used as library
-elseif luaengines[file.removesuffix(arg[-1])] then
--- arg[-1] = arg[0]
--- arg[ 0] = arg[1]
--- for k=2,#arg do
--- arg[k-1] = arg[k]
--- end
--- remove(arg) -- last
-elseif validengines[file.removesuffix(arg[0])] then
- if arg[1] == "--luaonly" then
- arg[-1] = arg[0]
- arg[ 0] = arg[2]
- for k=3,#arg do
- arg[k-2] = arg[k]
- end
- remove(arg) -- last
- remove(arg) -- pre-last
- else
- -- tex run
- end
-
- -- This is an ugly hack but it permits symlinking a script (say 'context') to 'mtxrun' as in:
- --
- -- ln -s /opt/minimals/tex/texmf-linux-64/bin/mtxrun context
- --
- -- The special mapping hack is needed because 'luatools' boils down to 'mtxrun --script base'
- -- but it's unlikely that there will be more of this
-
- local originalzero = file.basename(arg[0])
- local specialmapping = { luatools = "base" }
-
- if originalzero ~= "mtxrun" and originalzero ~= "mtxrun.lua" then
- arg[0] = specialmapping[originalzero] or originalzero
- insert(arg,0,"--script")
- insert(arg,0,"mtxrun")
- end
-
-end
-
--- environment
-
-environment.arguments = allocate()
-environment.files = allocate()
-environment.sortedflags = nil
-
--- context specific arguments (in order not to confuse the engine)
-
-function environment.initializearguments(arg)
- local arguments, files = { }, { }
- environment.arguments, environment.files, environment.sortedflags = arguments, files, nil
- for index=1,#arg do
- local argument = arg[index]
- if index > 0 then
- local flag, value = match(argument,"^%-+(.-)=(.-)$")
- if flag then
- flag = gsub(flag,"^c:","")
- arguments[flag] = unquoted(value or "")
- else
- flag = match(argument,"^%-+(.+)")
- if flag then
- flag = gsub(flag,"^c:","")
- arguments[flag] = true
- else
- files[#files+1] = argument
- end
- end
- end
- end
- environment.ownname = file.reslash(environment.ownname or arg[0] or 'unknown.lua')
-end
-
-function environment.setargument(name,value)
- environment.arguments[name] = value
-end
-
--- todo: defaults, better checks e.g on type (boolean versus string)
---
--- tricky: too many hits when we support partials unless we add
--- a registration of arguments so from now on we have 'partial'
-
-function environment.getargument(name,partial)
- local arguments, sortedflags = environment.arguments, environment.sortedflags
- if arguments[name] then
- return arguments[name]
- elseif partial then
- if not sortedflags then
- sortedflags = allocate(table.sortedkeys(arguments))
- for k=1,#sortedflags do
- sortedflags[k] = "^" .. sortedflags[k]
- end
- environment.sortedflags = sortedflags
- end
- -- example of potential clash: ^mode ^modefile
- for k=1,#sortedflags do
- local v = sortedflags[k]
- if find(name,v) then
- return arguments[sub(v,2,#v)]
- end
- end
- end
- return nil
-end
-
-environment.argument = environment.getargument
-
-function environment.splitarguments(separator) -- rather special, cut-off before separator
- local done, before, after = false, { }, { }
- local originalarguments = environment.originalarguments
- for k=1,#originalarguments do
- local v = originalarguments[k]
- if not done and v == separator then
- done = true
- elseif done then
- after[#after+1] = v
- else
- before[#before+1] = v
- end
- end
- return before, after
-end
-
-function environment.reconstructcommandline(arg,noquote)
- arg = arg or environment.originalarguments
- if noquote and #arg == 1 then
- -- we could just do: return unquoted(resolvers.resolve(arg[i]))
- local a = arg[1]
- a = resolvers.resolve(a)
- a = unquoted(a)
- return a
- elseif #arg > 0 then
- local result = { }
- for i=1,#arg do
- -- we could just do: result[#result+1] = format("%q",unquoted(resolvers.resolve(arg[i])))
- local a = arg[i]
- a = resolvers.resolve(a)
- a = unquoted(a)
- a = gsub(a,'"','\\"') -- tricky
- if find(a," ") then
- result[#result+1] = quoted(a)
- else
- result[#result+1] = a
- end
- end
- return concat(result," ")
- else
- return ""
- end
-end
-
--- handy in e.g. package.addluapath(environment.relativepath("scripts"))
-
-function environment.relativepath(path,root)
- if not path then
- path = ""
- end
- if not file.is_rootbased_path(path) then
- if not root then
- root = file.pathpart(environment.ownscript or environment.ownname or ".")
- end
- if root == "" then
- root = "."
- end
- path = root .. "/" .. path
- end
- return file.collapsepath(path,true)
-end
-
--- -- when script lives on e:/tmp we get this:
---
--- print(environment.relativepath("x/y/z","c:/w")) -- c:/w/x/y/z
--- print(environment.relativepath("x")) -- e:/tmp/x
--- print(environment.relativepath("../x")) -- e:/x
--- print(environment.relativepath("./x")) -- e:/tmp/x
--- print(environment.relativepath("/x")) -- /x
--- print(environment.relativepath("c:/x")) -- c:/x
--- print(environment.relativepath("//x")) -- //x
--- print(environment.relativepath()) -- e:/tmp
-
--- -- to be tested:
---
--- function environment.reconstructcommandline(arg,noquote)
--- arg = arg or environment.originalarguments
--- if noquote and #arg == 1 then
--- return unquoted(resolvers.resolve(arg[1]))
--- elseif #arg > 0 then
--- local result = { }
--- for i=1,#arg do
--- result[#result+1] = format("%q",unquoted(resolvers.resolve(arg[i]))) -- always quote
--- end
--- return concat(result," ")
--- else
--- return ""
--- end
--- end
-
-if arg then
-
- -- new, reconstruct quoted snippets (maybe better just remove the " then and add them later)
- local newarg, instring = { }, false
-
- for index=1,#arg do
- local argument = arg[index]
- if find(argument,"^\"") then
- newarg[#newarg+1] = gsub(argument,"^\"","")
- if not find(argument,"\"$") then
- instring = true
- end
- elseif find(argument,"\"$") then
- newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","")
- instring = false
- elseif instring then
- newarg[#newarg] = newarg[#newarg] .. " " .. argument
- else
- newarg[#newarg+1] = argument
- end
- end
- for i=1,-5,-1 do
- newarg[i] = arg[i]
- end
-
- environment.initializearguments(newarg)
-
- environment.originalarguments = mark(newarg)
- environment.rawarguments = mark(arg)
-
- arg = { } -- prevent duplicate handling
-
-end
+if not modules then modules = { } end modules ['util-env'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local allocate, mark = utilities.storage.allocate, utilities.storage.mark
+
+local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find
+local unquoted, quoted = string.unquoted, string.quoted
+local concat, insert, remove = table.concat, table.insert, table.remove
+
+environment = environment or { }
+local environment = environment
+
+-- precautions
+
+os.setlocale(nil,nil) -- useless feature and even dangerous in luatex
+
+function os.setlocale()
+ -- no way you can mess with it
+end
+
+-- dirty tricks (we will replace the texlua call by luatex --luaonly)
+
+local validengines = allocate {
+ ["luatex"] = true,
+ ["luajittex"] = true,
+ -- ["luatex.exe"] = true,
+ -- ["luajittex.exe"] = true,
+}
+
+local basicengines = allocate {
+ ["luatex"] = "luatex",
+ ["texlua"] = "luatex",
+ ["texluac"] = "luatex",
+ ["luajittex"] = "luajittex",
+ ["texluajit"] = "luajittex",
+ -- ["texlua.exe"] = "luatex",
+ -- ["texluajit.exe"] = "luajittex",
+}
+
+local luaengines=allocate {
+ ["lua"] = true,
+ ["luajit"] = true,
+}
+
+environment.validengines = validengines
+environment.basicengines = basicengines
+
+-- [-1] = binary
+-- [ 0] = self
+-- [ 1] = argument 1 ...
+
+-- instead we could set ranges
+
+if not arg then
+ -- used as library
+elseif luaengines[file.removesuffix(arg[-1])] then
+-- arg[-1] = arg[0]
+-- arg[ 0] = arg[1]
+-- for k=2,#arg do
+-- arg[k-1] = arg[k]
+-- end
+-- remove(arg) -- last
+elseif validengines[file.removesuffix(arg[0])] then
+ if arg[1] == "--luaonly" then
+ arg[-1] = arg[0]
+ arg[ 0] = arg[2]
+ for k=3,#arg do
+ arg[k-2] = arg[k]
+ end
+ remove(arg) -- last
+ remove(arg) -- pre-last
+ else
+ -- tex run
+ end
+
+ -- This is an ugly hack but it permits symlinking a script (say 'context') to 'mtxrun' as in:
+ --
+ -- ln -s /opt/minimals/tex/texmf-linux-64/bin/mtxrun context
+ --
+ -- The special mapping hack is needed because 'luatools' boils down to 'mtxrun --script base'
+ -- but it's unlikely that there will be more of this
+
+ local originalzero = file.basename(arg[0])
+ local specialmapping = { luatools = "base" }
+
+ if originalzero ~= "mtxrun" and originalzero ~= "mtxrun.lua" then
+ arg[0] = specialmapping[originalzero] or originalzero
+ insert(arg,0,"--script")
+ insert(arg,0,"mtxrun")
+ end
+
+end
+
+-- environment
+
+environment.arguments = allocate()
+environment.files = allocate()
+environment.sortedflags = nil
+
+-- context specific arguments (in order not to confuse the engine)
+
+function environment.initializearguments(arg)
+ local arguments, files = { }, { }
+ environment.arguments, environment.files, environment.sortedflags = arguments, files, nil
+ for index=1,#arg do
+ local argument = arg[index]
+ if index > 0 then
+ local flag, value = match(argument,"^%-+(.-)=(.-)$")
+ if flag then
+ flag = gsub(flag,"^c:","")
+ arguments[flag] = unquoted(value or "")
+ else
+ flag = match(argument,"^%-+(.+)")
+ if flag then
+ flag = gsub(flag,"^c:","")
+ arguments[flag] = true
+ else
+ files[#files+1] = argument
+ end
+ end
+ end
+ end
+ environment.ownname = file.reslash(environment.ownname or arg[0] or 'unknown.lua')
+end
+
+function environment.setargument(name,value)
+ environment.arguments[name] = value
+end
+
+-- todo: defaults, better checks e.g on type (boolean versus string)
+--
+-- tricky: too many hits when we support partials unless we add
+-- a registration of arguments so from now on we have 'partial'
+
+function environment.getargument(name,partial)
+ local arguments, sortedflags = environment.arguments, environment.sortedflags
+ if arguments[name] then
+ return arguments[name]
+ elseif partial then
+ if not sortedflags then
+ sortedflags = allocate(table.sortedkeys(arguments))
+ for k=1,#sortedflags do
+ sortedflags[k] = "^" .. sortedflags[k]
+ end
+ environment.sortedflags = sortedflags
+ end
+ -- example of potential clash: ^mode ^modefile
+ for k=1,#sortedflags do
+ local v = sortedflags[k]
+ if find(name,v) then
+ return arguments[sub(v,2,#v)]
+ end
+ end
+ end
+ return nil
+end
+
+environment.argument = environment.getargument
+
+function environment.splitarguments(separator) -- rather special, cut-off before separator
+ local done, before, after = false, { }, { }
+ local originalarguments = environment.originalarguments
+ for k=1,#originalarguments do
+ local v = originalarguments[k]
+ if not done and v == separator then
+ done = true
+ elseif done then
+ after[#after+1] = v
+ else
+ before[#before+1] = v
+ end
+ end
+ return before, after
+end
+
+function environment.reconstructcommandline(arg,noquote)
+ arg = arg or environment.originalarguments
+ if noquote and #arg == 1 then
+ -- we could just do: return unquoted(resolvers.resolve(arg[i]))
+ local a = arg[1]
+ a = resolvers.resolve(a)
+ a = unquoted(a)
+ return a
+ elseif #arg > 0 then
+ local result = { }
+ for i=1,#arg do
+ -- we could just do: result[#result+1] = format("%q",unquoted(resolvers.resolve(arg[i])))
+ local a = arg[i]
+ a = resolvers.resolve(a)
+ a = unquoted(a)
+ a = gsub(a,'"','\\"') -- tricky
+ if find(a," ") then
+ result[#result+1] = quoted(a)
+ else
+ result[#result+1] = a
+ end
+ end
+ return concat(result," ")
+ else
+ return ""
+ end
+end
+
+-- handy in e.g. package.addluapath(environment.relativepath("scripts"))
+
+function environment.relativepath(path,root)
+ if not path then
+ path = ""
+ end
+ if not file.is_rootbased_path(path) then
+ if not root then
+ root = file.pathpart(environment.ownscript or environment.ownname or ".")
+ end
+ if root == "" then
+ root = "."
+ end
+ path = root .. "/" .. path
+ end
+ return file.collapsepath(path,true)
+end
+
+-- -- when script lives on e:/tmp we get this:
+--
+-- print(environment.relativepath("x/y/z","c:/w")) -- c:/w/x/y/z
+-- print(environment.relativepath("x")) -- e:/tmp/x
+-- print(environment.relativepath("../x")) -- e:/x
+-- print(environment.relativepath("./x")) -- e:/tmp/x
+-- print(environment.relativepath("/x")) -- /x
+-- print(environment.relativepath("c:/x")) -- c:/x
+-- print(environment.relativepath("//x")) -- //x
+-- print(environment.relativepath()) -- e:/tmp
+
+-- -- to be tested:
+--
+-- function environment.reconstructcommandline(arg,noquote)
+-- arg = arg or environment.originalarguments
+-- if noquote and #arg == 1 then
+-- return unquoted(resolvers.resolve(arg[1]))
+-- elseif #arg > 0 then
+-- local result = { }
+-- for i=1,#arg do
+-- result[#result+1] = format("%q",unquoted(resolvers.resolve(arg[i]))) -- always quote
+-- end
+-- return concat(result," ")
+-- else
+-- return ""
+-- end
+-- end
+
+if arg then
+
+ -- new, reconstruct quoted snippets (maybe better just remove the " then and add them later)
+ local newarg, instring = { }, false
+
+ for index=1,#arg do
+ local argument = arg[index]
+ if find(argument,"^\"") then
+ newarg[#newarg+1] = gsub(argument,"^\"","")
+ if not find(argument,"\"$") then
+ instring = true
+ end
+ elseif find(argument,"\"$") then
+ newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","")
+ instring = false
+ elseif instring then
+ newarg[#newarg] = newarg[#newarg] .. " " .. argument
+ else
+ newarg[#newarg+1] = argument
+ end
+ end
+ for i=1,-5,-1 do
+ newarg[i] = arg[i]
+ end
+
+ environment.initializearguments(newarg)
+
+ environment.originalarguments = mark(newarg)
+ environment.rawarguments = mark(arg)
+
+ arg = { } -- prevent duplicate handling
+
+end
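
A small illustrative sketch of the argument parsing above; the flags and the
file name are made up:

    environment.initializearguments { "--mode=draft", "--c:verbose", "myfile.tex" }
    print(environment.getargument("mode"))      -- "draft"
    print(environment.getargument("verbose"))   -- true (the "c:" prefix is stripped)
    print(environment.files[1])                 -- "myfile.tex"
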
diff --git a/tex/context/base/util-fmt.lua b/tex/context/base/util-fmt.lua
index 371a5dfce..8ec7236a9 100644
--- a/tex/context/base/util-fmt.lua
+++ b/tex/context/base/util-fmt.lua
@@ -1,76 +1,76 @@
-if not modules then modules = { } end modules ['util-fmt'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-utilities = utilities or { }
-utilities.formatters = utilities.formatters or { }
-local formatters = utilities.formatters
-
-local concat, format = table.concat, string.format
-local tostring, type = tostring, type
-local strip = string.strip
-
-local lpegmatch = lpeg.match
-local stripper = lpeg.patterns.stripzeros
-
-function formatters.stripzeros(str)
- return lpegmatch(stripper,str)
-end
-
-function formatters.formatcolumns(result,between)
- if result and #result > 0 then
- between = between or " "
- local widths, numbers = { }, { }
- local first = result[1]
- local n = #first
- for i=1,n do
- widths[i] = 0
- end
- for i=1,#result do
- local r = result[i]
- for j=1,n do
- local rj = r[j]
- local tj = type(rj)
- if tj == "number" then
- numbers[j] = true
- end
- if tj ~= "string" then
- rj = tostring(rj)
- r[j] = rj
- end
- local w = #rj
- if w > widths[j] then
- widths[j] = w
- end
- end
- end
- for i=1,n do
- local w = widths[i]
- if numbers[i] then
- if w > 80 then
- widths[i] = "%s" .. between
- else
- widths[i] = "%0" .. w .. "i" .. between
- end
- else
- if w > 80 then
- widths[i] = "%s" .. between
- elseif w > 0 then
- widths[i] = "%-" .. w .. "s" .. between
- else
- widths[i] = "%s"
- end
- end
- end
- local template = strip(concat(widths))
- for i=1,#result do
- local str = format(template,unpack(result[i]))
- result[i] = strip(str)
- end
- end
- return result
-end
+if not modules then modules = { } end modules ['util-fmt'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+utilities = utilities or { }
+utilities.formatters = utilities.formatters or { }
+local formatters = utilities.formatters
+
+local concat, format = table.concat, string.format
+local tostring, type = tostring, type
+local strip = string.strip
+
+local lpegmatch = lpeg.match
+local stripper = lpeg.patterns.stripzeros
+
+function formatters.stripzeros(str)
+ return lpegmatch(stripper,str)
+end
+
+function formatters.formatcolumns(result,between)
+ if result and #result > 0 then
+ between = between or " "
+ local widths, numbers = { }, { }
+ local first = result[1]
+ local n = #first
+ for i=1,n do
+ widths[i] = 0
+ end
+ for i=1,#result do
+ local r = result[i]
+ for j=1,n do
+ local rj = r[j]
+ local tj = type(rj)
+ if tj == "number" then
+ numbers[j] = true
+ end
+ if tj ~= "string" then
+ rj = tostring(rj)
+ r[j] = rj
+ end
+ local w = #rj
+ if w > widths[j] then
+ widths[j] = w
+ end
+ end
+ end
+ for i=1,n do
+ local w = widths[i]
+ if numbers[i] then
+ if w > 80 then
+ widths[i] = "%s" .. between
+ else
+ widths[i] = "%0" .. w .. "i" .. between
+ end
+ else
+ if w > 80 then
+ widths[i] = "%s" .. between
+ elseif w > 0 then
+ widths[i] = "%-" .. w .. "s" .. between
+ else
+ widths[i] = "%s"
+ end
+ end
+ end
+ local template = strip(concat(widths))
+ for i=1,#result do
+ local str = format(template,unpack(result[i]))
+ result[i] = strip(str)
+ end
+ end
+ return result
+end
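
A brief sketch of the column formatter above; the rows and the expected output
are illustrative:

    local rows = {
        { "alpha", 1,   "x" },
        { "beta",  200, "y" },
    }
    rows = utilities.formatters.formatcolumns(rows,"  ")
    print(rows[1])   -- "alpha  001  x"
    print(rows[2])   -- "beta   200  y"
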
diff --git a/tex/context/base/util-jsn.lua b/tex/context/base/util-jsn.lua
index 29587cd38..9870d0896 100644
--- a/tex/context/base/util-jsn.lua
+++ b/tex/context/base/util-jsn.lua
@@ -1,146 +1,146 @@
-if not modules then modules = { } end modules ['util-jsn'] = {
- version = 1.001,
- comment = "companion to m-json.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- Of course we could make a nice complete parser with proper error messages but
--- as json is generated programmatically errors are systematic and we can assume
--- a correct stream. If not, we have some fatal error anyway. So, we can just rely
--- on strings being strings (apart from the unicode escape which is not in 5.1) and
--- as we first catch known types we just assume that anything else is a number.
---
--- Reminder for me: check usage in framework and extend when needed. Also document
--- it in the cld lib documentation.
-
-local P, V, R, S, C, Cc, Cs, Ct, Cf, Cg = lpeg.P, lpeg.V, lpeg.R, lpeg.S, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Cf, lpeg.Cg
-local lpegmatch = lpeg.match
-local format = string.format
-local utfchar = utf.char
-local concat = table.concat
-
-local tonumber, tostring, rawset, type = tonumber, tostring, rawset, type
-
-local json = utilities.json or { }
-utilities.json = json
-
--- moduledata = moduledata or { }
--- moduledata.json = json
-
--- \\ \/ \b \f \n \r \t \uHHHH
-
-local lbrace = P("{")
-local rbrace = P("}")
-local lparent = P("[")
-local rparent = P("]")
-local comma = P(",")
-local colon = P(":")
-local dquote = P('"')
-
-local whitespace = lpeg.patterns.whitespace
-local optionalws = whitespace^0
-
-local escape = C(P("\\u") / "0x" * S("09","AF","af")) / function(s) return utfchar(tonumber(s)) end
-local jstring = dquote * Cs((escape + (1-dquote))^0) * dquote
-local jtrue = P("true") * Cc(true)
-local jfalse = P("false") * Cc(false)
-local jnull = P("null") * Cc(nil)
-local jnumber = (1-whitespace-rparent-rbrace-comma)^1 / tonumber
-
-local key = jstring
-
-local jsonconverter = { "value",
- object = lbrace * Cf(Ct("") * V("pair") * (comma * V("pair"))^0,rawset) * rbrace,
- pair = Cg(optionalws * key * optionalws * colon * V("value")),
- array = Ct(lparent * V("value") * (comma * V("value"))^0 * rparent),
- value = optionalws * (jstring + V("object") + V("array") + jtrue + jfalse + jnull + jnumber + #rparent) * optionalws,
-}
-
--- local jsonconverter = { "value",
--- object = lbrace * Cf(Ct("") * V("pair") * (comma * V("pair"))^0,rawset) * rbrace,
--- pair = Cg(optionalws * V("string") * optionalws * colon * V("value")),
--- array = Ct(lparent * V("value") * (comma * V("value"))^0 * rparent),
--- string = jstring,
--- value = optionalws * (V("string") + V("object") + V("array") + jtrue + jfalse + jnull + jnumber) * optionalws,
--- }
-
--- lpeg.print(jsonconverter) -- size 181
-
-function json.tolua(str)
- return lpegmatch(jsonconverter,str)
-end
-
-local function tojson(value,t) -- we could optimize #t
- local kind = type(value)
- if kind == "table" then
- local done = false
- local size = #value
- if size == 0 then
- for k, v in next, value do
- if done then
- t[#t+1] = ","
- else
- t[#t+1] = "{"
- done = true
- end
- t[#t+1] = format("%q:",k)
- tojson(v,t)
- end
- if done then
- t[#t+1] = "}"
- else
- t[#t+1] = "{}"
- end
- elseif size == 1 then
- -- we can optimize for non tables
- t[#t+1] = "["
- tojson(value[1],t)
- t[#t+1] = "]"
- else
- for i=1,size do
- if done then
- t[#t+1] = ","
- else
- t[#t+1] = "["
- done = true
- end
- tojson(value[i],t)
- end
- t[#t+1] = "]"
- end
- elseif kind == "string" then
- t[#t+1] = format("%q",value)
- elseif kind == "number" then
- t[#t+1] = value
- elseif kind == "boolean" then
- t[#t+1] = tostring(value)
- end
- return t
-end
-
-function json.tostring(value)
- -- todo optimize for non table
- local kind = type(value)
- if kind == "table" then
- return concat(tojson(value,{}),"")
- elseif kind == "string" or kind == "number" then
- return value
- else
- return tostring(value)
- end
-end
-
--- local tmp = [[ { "a" : true, "b" : [ 123 , 456E-10, { "a" : true, "b" : [ 123 , 456 ] } ] } ]]
-
--- tmp = json.tolua(tmp)
--- inspect(tmp)
--- tmp = json.tostring(tmp)
--- inspect(tmp)
--- tmp = json.tolua(tmp)
--- inspect(tmp)
--- tmp = json.tostring(tmp)
--- inspect(tmp)
-
--- inspect(json.tostring(true))
+if not modules then modules = { } end modules ['util-jsn'] = {
+ version = 1.001,
+ comment = "companion to m-json.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- Of course we could make a nice complete parser with proper error messages but
+-- as json is generated programmatically errors are systematic and we can assume
+-- a correct stream. If not, we have some fatal error anyway. So, we can just rely
+-- on strings being strings (apart from the unicode escape which is not in 5.1) and
+-- as we first catch known types we just assume that anything else is a number.
+--
+-- Reminder for me: check usage in framework and extend when needed. Also document
+-- it in the cld lib documentation.
+
+local P, V, R, S, C, Cc, Cs, Ct, Cf, Cg = lpeg.P, lpeg.V, lpeg.R, lpeg.S, lpeg.C, lpeg.Cc, lpeg.Cs, lpeg.Ct, lpeg.Cf, lpeg.Cg
+local lpegmatch = lpeg.match
+local format = string.format
+local utfchar = utf.char
+local concat = table.concat
+
+local tonumber, tostring, rawset, type = tonumber, tostring, rawset, type
+
+local json = utilities.json or { }
+utilities.json = json
+
+-- moduledata = moduledata or { }
+-- moduledata.json = json
+
+-- \\ \/ \b \f \n \r \t \uHHHH
+
+local lbrace = P("{")
+local rbrace = P("}")
+local lparent = P("[")
+local rparent = P("]")
+local comma = P(",")
+local colon = P(":")
+local dquote = P('"')
+
+local whitespace = lpeg.patterns.whitespace
+local optionalws = whitespace^0
+
+local escape = C(P("\\u") / "0x" * S("09","AF","af")) / function(s) return utfchar(tonumber(s)) end
+local jstring = dquote * Cs((escape + (1-dquote))^0) * dquote
+local jtrue = P("true") * Cc(true)
+local jfalse = P("false") * Cc(false)
+local jnull = P("null") * Cc(nil)
+local jnumber = (1-whitespace-rparent-rbrace-comma)^1 / tonumber
+
+local key = jstring
+
+local jsonconverter = { "value",
+ object = lbrace * Cf(Ct("") * V("pair") * (comma * V("pair"))^0,rawset) * rbrace,
+ pair = Cg(optionalws * key * optionalws * colon * V("value")),
+ array = Ct(lparent * V("value") * (comma * V("value"))^0 * rparent),
+ value = optionalws * (jstring + V("object") + V("array") + jtrue + jfalse + jnull + jnumber + #rparent) * optionalws,
+}
+
+-- local jsonconverter = { "value",
+-- object = lbrace * Cf(Ct("") * V("pair") * (comma * V("pair"))^0,rawset) * rbrace,
+-- pair = Cg(optionalws * V("string") * optionalws * colon * V("value")),
+-- array = Ct(lparent * V("value") * (comma * V("value"))^0 * rparent),
+-- string = jstring,
+-- value = optionalws * (V("string") + V("object") + V("array") + jtrue + jfalse + jnull + jnumber) * optionalws,
+-- }
+
+-- lpeg.print(jsonconverter) -- size 181
+
+function json.tolua(str)
+ return lpegmatch(jsonconverter,str)
+end
+
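+-- A small usage sketch (kept as a comment, like the tests at the end of this
+-- file); the literal and the resulting table are illustrative only:
+--
+-- local t = json.tolua([[ { "ok" : true, "n" : [ 1, 2, 3 ] } ]])
+-- -- t is now (the equivalent of) { ok = true, n = { 1, 2, 3 } }
+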
+local function tojson(value,t) -- we could optimize #t
+ local kind = type(value)
+ if kind == "table" then
+ local done = false
+ local size = #value
+ if size == 0 then
+ for k, v in next, value do
+ if done then
+ t[#t+1] = ","
+ else
+ t[#t+1] = "{"
+ done = true
+ end
+ t[#t+1] = format("%q:",k)
+ tojson(v,t)
+ end
+ if done then
+ t[#t+1] = "}"
+ else
+ t[#t+1] = "{}"
+ end
+ elseif size == 1 then
+ -- we can optimize for non tables
+ t[#t+1] = "["
+ tojson(value[1],t)
+ t[#t+1] = "]"
+ else
+ for i=1,size do
+ if done then
+ t[#t+1] = ","
+ else
+ t[#t+1] = "["
+ done = true
+ end
+ tojson(value[i],t)
+ end
+ t[#t+1] = "]"
+ end
+ elseif kind == "string" then
+ t[#t+1] = format("%q",value)
+ elseif kind == "number" then
+ t[#t+1] = value
+ elseif kind == "boolean" then
+ t[#t+1] = tostring(value)
+ end
+ return t
+end
+
+function json.tostring(value)
+ -- todo optimize for non table
+ local kind = type(value)
+ if kind == "table" then
+ return concat(tojson(value,{}),"")
+ elseif kind == "string" or kind == "number" then
+ return value
+ else
+ return tostring(value)
+ end
+end
+
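+-- Round trip sketch (illustrative; the key order in the output follows Lua's
+-- 'next' and is therefore not guaranteed):
+--
+-- print(json.tostring { ok = true, n = { 1, 2, 3 } })
+-- -- {"ok":true,"n":[1,2,3]}   (or with the two pairs swapped)
+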
+-- local tmp = [[ { "a" : true, "b" : [ 123 , 456E-10, { "a" : true, "b" : [ 123 , 456 ] } ] } ]]
+
+-- tmp = json.tolua(tmp)
+-- inspect(tmp)
+-- tmp = json.tostring(tmp)
+-- inspect(tmp)
+-- tmp = json.tolua(tmp)
+-- inspect(tmp)
+-- tmp = json.tostring(tmp)
+-- inspect(tmp)
+
+-- inspect(json.tostring(true))
diff --git a/tex/context/base/util-lib.lua b/tex/context/base/util-lib.lua
index c5c999113..065f91091 100644
--- a/tex/context/base/util-lib.lua
+++ b/tex/context/base/util-lib.lua
@@ -1,288 +1,288 @@
-if not modules then modules = { } end modules ['util-lib'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
-}
-
--- This is experimental code for Hans and Luigi. Don't depend on it! There
--- will be a plain variant.
-
---[[
-
-The problem with library bindings is manyfold. They are of course platform
-dependent and while a binary with its directly related libraries are often
-easy to maintain and load, additional libraries can each have their demands.
-
-One important aspect is that loading additional libraries from within the
-loaded one is also operating system dependent. There can be shared libraries
-elsewhere on the system and as there can be multiple libraries with the same
-name but different usage and versioning there can be clashes. So there has to
-be some logic in where to look for these sublibraries.
-
-We found out that for instance on windows libraries are by default sought on
-the parents path and then on the binary paths and these of course can be in
-an out of our control, thereby enlarging the changes on a clash. A rather
-safe solution for that to load the library on the path where it sits.
-
-Another aspect is initialization. When you ask for a library t.e.x it will
-try to initialize luaopen_t_e_x no matter if such an inializer is present.
-However, because loading is configurable and in the case of luatex is already
-partly under out control, this is easy to deal with. We only have to make
-sure that we inform the loader that the library has been loaded so that
-it won't load it twice.
-
-In swiglib we have chosen for a clear organization and although one can use
-variants normally in the tex directory structure predictability is more or
-less the standard. For instance:
-
-.../tex/texmf-mswin/bin/lib/luatex/lua/swiglib/mysql/core.dll
-.../tex/texmf-mswin/bin/lib/luajittex/lua/swiglib/mysql/core.dll
-.../tex/texmf-mswin/bin/lib/luatex/context/lua/swiglib/mysql/core.dll
-.../tex/texmf-mswin/bin/lib/swiglib/lua/mysql/core.dll
-.../tex/texmf-mswin/bin/lib/swiglib/lua/mysql/5.6/core.dll
-
-The lookups are determined via an entry in texmfcnf.lua:
-
-CLUAINPUTS = ".;$SELFAUTOLOC/lib/{$engine,luatex}/lua//",
-
-A request for t.e.x is converted to t/e/x.dll or t/e/x.so depending on the
-platform. Then we use the regular finder to locate the file in the tex
-directory structure. Once located we goto the path where it sits, load the
-file and return to the original path. We register as t.e.x in order to
-prevent reloading and also because the base name is seldom unique.
-
-The main function is a big one and evolved out of experiments that Luigi
-Scarso and I conducted when playing with variants of SwigLib. The function
-locates the library using the context mkiv resolver that operates on the
-tds tree and if that doesn't work out well, the normal clib path is used.
-
-The lookups is somewhat clever in the sense that it can deal with (optional)
-versions and can fall back on non versioned alternatives if needed, either
-or not using a wildcard lookup.
-
-This code is experimental and by providing a special abstract loader (called
-swiglib) we can start using the libraries.
-
-A complication is that we might end up with a luajittex path matching before a
-luatex path due to the path spec. One solution is to first check with the engine
-prefixed. This could be prevented by a more strict lib pattern but that is not
-always under our control. So, we first check for paths with engine in their name
-and then without.
-
-]]--
-
--- seems to be clua in recent texlive
-
-local gsub, find = string.gsub, string.find
-local pathpart, nameonly, joinfile = file.pathpart, file.nameonly, file.join
-local findfile, findfiles = resolvers and resolvers.findfile, resolvers and resolvers.findfiles
-
-local loaded = package.loaded
-
-local report_swiglib = logs.reporter("swiglib")
-local trace_swiglib = false trackers.register("resolvers.swiglib", function(v) trace_swiglib = v end)
-
--- We can check if there are more that one component, and if not, we can
--- append 'core'.
-
-local done = false
-
-local function requireswiglib(required,version)
- local trace_swiglib = trace_swiglib or package.helpers.trace
- local library = loaded[required]
- if library == nil then
- -- initialize a few variables
- local required_full = gsub(required,"%.","/") -- package.helpers.lualibfile
- local required_path = pathpart(required_full)
- local required_base = nameonly(required_full)
- local required_name = required_base .. "." .. os.libsuffix
- local version = type(version) == "string" and version ~= "" and version or false
- local engine = environment.ownmain or false
- --
- if trace_swiglib and not done then
- local list = resolvers.expandedpathlistfromvariable("lib") -- fresh, no reuse
- for i=1,#list do
- report_swiglib("tds path %i: %s",i,list[i])
- end
- end
- -- helpers
- local function found(locate,asked_library,how,...)
- if trace_swiglib then
- report_swiglib("checking %s: %a",how,asked_library)
- end
- return locate(asked_library,...)
- end
- local function check(locate,...)
- local found = nil
- if version then
- local asked_library = joinfile(required_path,version,required_name)
- if trace_swiglib then
- report_swiglib("checking %s: %a","with version",asked_library)
- end
- found = locate(asked_library,...)
- end
- if not found or found == "" then
- local asked_library = joinfile(required_path,required_name)
- if trace_swiglib then
- report_swiglib("checking %s: %a","with version",asked_library)
- end
- found = locate(asked_library,...)
- end
- return found and found ~= "" and found or false
- end
- -- Alternatively we could first collect the locations and then do the two attempts
- -- on this list but in practice this is not more efficient as we might have a fast
- -- match anyway.
- local function attempt(checkpattern)
- -- check cnf spec using name and version
- if trace_swiglib then
- report_swiglib("checking tds lib paths strictly")
- end
- local found = findfile and check(findfile,"lib")
- if found and (not checkpattern or find(found,checkpattern)) then
- return found
- end
- -- check cnf spec using wildcard
- if trace_swiglib then
- report_swiglib("checking tds lib paths with wildcard")
- end
- local asked_library = joinfile(required_path,".*",required_name)
- if trace_swiglib then
- report_swiglib("checking %s: %a","latest version",asked_library)
- end
- local list = findfiles(asked_library,"lib",true)
- if list and #list > 0 then
- table.sort(list)
- local found = list[#list]
- if found and (not checkpattern or find(found,checkpattern)) then
- return found
- end
- end
- -- Check lib paths using name and version.
- if trace_swiglib then
- report_swiglib("checking lib paths")
- end
- package.extralibpath(environment.ownpath)
- local paths = package.libpaths()
- for i=1,#paths do
- local found = check(lfs.isfile)
- if found and (not checkpattern or find(found,checkpattern)) then
- return found
- end
- end
- return false
- end
- local found_library = nil
- if engine then
- if trace_swiglib then
- report_swiglib("attemp 1, engine %a",engine)
- end
- found_library = attempt("/"..engine.."/")
- if not found_library then
- if trace_swiglib then
- report_swiglib("attemp 2, no engine",asked_library)
- end
- found_library = attempt()
- end
- else
- found_library = attempt()
- end
- -- load and initialize when found
- if not found_library then
- if trace_swiglib then
- report_swiglib("not found: %a",required)
- end
- library = false
- else
- local path = pathpart(found_library)
- local base = nameonly(found_library)
- dir.push(path)
- if trace_swiglib then
- report_swiglib("found: %a",found_library)
- end
- local message = nil
- local opener = "luaopen_" .. required_base
- library, message = package.loadlib(found_library,opener)
- local libtype = type(library)
- if libtype == "function" then
- library = library()
- else
- report_swiglib("load error: %a returns %a, message %a",opener,libtype,message or "no message")
- library = false
- end
- dir.pop()
- end
- -- cache result
- if not library then
- report_swiglib("unknown: %a",required)
- elseif trace_swiglib then
- report_swiglib("stored: %a",required)
- end
- loaded[required] = library
- else
- report_swiglib("reused: %a",required)
- end
- return library
-end
-
---[[
-
-For convenience we make the require loader function swiglib aware. Alternatively
-we could put the specific loader in the global namespace.
-
-]]--
-
-local savedrequire = require
-
-function require(name,version)
- if find(name,"^swiglib%.") then
- return requireswiglib(name,version)
- else
- return savedrequire(name)
- end
-end
-
---[[
-
-At the cost of some overhead we provide a specific loader so that we can keep
-track of swiglib usage which is handy for development. In context this is the
-recommended loader.
-
-]]--
-
-local swiglibs = { }
-
-function swiglib(name,version)
- local library = swiglibs[name]
- if not library then
- statistics.starttiming(swiglibs)
- if trace_swiglib then
- report_swiglib("loading %a",name)
- end
- library = requireswiglib("swiglib." .. name,version)
- swiglibs[name] = library
- statistics.stoptiming(swiglibs)
- end
- return library
-end
-
-statistics.register("used swiglibs", function()
- if next(swiglibs) then
- return string.format("%s, initial load time %s seconds",table.concat(table.sortedkeys(swiglibs)," "),statistics.elapsedtime(swiglibs))
- end
-end)
-
---[[
-
-So, we now have:
-
-local gm = require("swiglib.gmwand.core")
-local gm = swiglib("gmwand.core")
-local sq = swiglib("mysql.core")
-local sq = swiglib("mysql.core","5.6")
-
-Watch out, the last one is less explicit and lacks the swiglib prefix.
-
-]]--
+if not modules then modules = { } end modules ['util-lib'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
+-- This is experimental code for Hans and Luigi. Don't depend on it! There
+-- will be a plain variant.
+
+--[[
+
+The problem with library bindings is manifold. They are of course platform
+dependent, and while a binary with its directly related libraries is often
+easy to maintain and load, additional libraries each have their own demands.
+
+One important aspect is that loading additional libraries from within the
+loaded one is also operating system dependent. There can be shared libraries
+elsewhere on the system, and as there can be multiple libraries with the same
+name but different usage and versioning, there can be clashes. So there has to
+be some logic in deciding where to look for these sublibraries.
+
+We found out that, for instance on windows, libraries are by default sought on
+the parent's path and then on the binary paths, and these of course can be out
+of our control, thereby increasing the chance of a clash. A rather safe
+solution for that is to load the library from the path where it sits.
+
+Another aspect is initialization. When you ask for a library t.e.x it will
+try to initialize luaopen_t_e_x no matter whether such an initializer is
+present. However, because loading is configurable and in the case of luatex is
+already partly under our control, this is easy to deal with. We only have to
+make sure that we inform the loader that the library has been loaded so that
+it won't be loaded twice.
+
+In swiglib we have chosen a clear organization, and although one can use
+variants, in the tex directory structure predictability is more or less the
+standard. For instance:
+
+.../tex/texmf-mswin/bin/lib/luatex/lua/swiglib/mysql/core.dll
+.../tex/texmf-mswin/bin/lib/luajittex/lua/swiglib/mysql/core.dll
+.../tex/texmf-mswin/bin/lib/luatex/context/lua/swiglib/mysql/core.dll
+.../tex/texmf-mswin/bin/lib/swiglib/lua/mysql/core.dll
+.../tex/texmf-mswin/bin/lib/swiglib/lua/mysql/5.6/core.dll
+
+The lookups are determined via an entry in texmfcnf.lua:
+
+CLUAINPUTS = ".;$SELFAUTOLOC/lib/{$engine,luatex}/lua//",
+
+A request for t.e.x is converted to t/e/x.dll or t/e/x.so depending on the
+platform. Then we use the regular finder to locate the file in the tex
+directory structure. Once located, we go to the path where it sits, load the
+file and return to the original path. We register it as t.e.x in order to
+prevent reloading and also because the base name is seldom unique.
+
+The main function is a big one and evolved out of experiments that Luigi
+Scarso and I conducted when playing with variants of SwigLib. The function
+locates the library using the context mkiv resolver that operates on the
+tds tree and if that doesn't work out well, the normal clib path is used.
+
+The lookup is somewhat clever in the sense that it can deal with (optional)
+versions and can fall back on non-versioned alternatives if needed, whether or
+not a wildcard lookup is used.
+
+This code is experimental and by providing a special abstract loader (called
+swiglib) we can start using the libraries.
+
+A complication is that we might end up with a luajittex path matching before a
+luatex path due to the path spec. One solution is to first check with the
+engine prefixed. This could be prevented by a stricter lib pattern, but that is
+not always under our control. So, we first check for paths with the engine in
+their name and then without.
+
+]]--
+
+-- seems to be clua in recent texlive
+
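+-- For illustration only (hypothetical helper, not part of this module): the
+-- dotted request is split into a path part and a platform specific file name,
+-- which is what the loader below does with gsub, pathpart and nameonly:
+--
+-- local function splitrequest(required)                       -- e.g. "swiglib.mysql.core"
+--     local full = string.gsub(required,"%.","/")             -- "swiglib/mysql/core"
+--     local path = file.pathpart(full)                        -- "swiglib/mysql"
+--     local name = file.nameonly(full) .. "." .. os.libsuffix -- "core.dll" or "core.so"
+--     return path, name
+-- end
+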
+local gsub, find = string.gsub, string.find
+local pathpart, nameonly, joinfile = file.pathpart, file.nameonly, file.join
+local findfile, findfiles = resolvers and resolvers.findfile, resolvers and resolvers.findfiles
+
+local loaded = package.loaded
+
+local report_swiglib = logs.reporter("swiglib")
+local trace_swiglib = false trackers.register("resolvers.swiglib", function(v) trace_swiglib = v end)
+
+-- We can check if there is more than one component, and if not, we can
+-- append 'core'.
+
+local done = false
+
+local function requireswiglib(required,version)
+ local trace_swiglib = trace_swiglib or package.helpers.trace
+ local library = loaded[required]
+ if library == nil then
+ -- initialize a few variables
+ local required_full = gsub(required,"%.","/") -- package.helpers.lualibfile
+ local required_path = pathpart(required_full)
+ local required_base = nameonly(required_full)
+ local required_name = required_base .. "." .. os.libsuffix
+ local version = type(version) == "string" and version ~= "" and version or false
+ local engine = environment.ownmain or false
+ --
+ if trace_swiglib and not done then
+ local list = resolvers.expandedpathlistfromvariable("lib") -- fresh, no reuse
+ for i=1,#list do
+ report_swiglib("tds path %i: %s",i,list[i])
+ end
+ end
+ -- helpers
+ local function found(locate,asked_library,how,...)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a",how,asked_library)
+ end
+ return locate(asked_library,...)
+ end
+ local function check(locate,...)
+ local found = nil
+ if version then
+ local asked_library = joinfile(required_path,version,required_name)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a","with version",asked_library)
+ end
+ found = locate(asked_library,...)
+ end
+ if not found or found == "" then
+ local asked_library = joinfile(required_path,required_name)
+ if trace_swiglib then
+                    report_swiglib("checking %s: %a","without version",asked_library)
+ end
+ found = locate(asked_library,...)
+ end
+ return found and found ~= "" and found or false
+ end
+ -- Alternatively we could first collect the locations and then do the two attempts
+ -- on this list but in practice this is not more efficient as we might have a fast
+ -- match anyway.
+ local function attempt(checkpattern)
+ -- check cnf spec using name and version
+ if trace_swiglib then
+ report_swiglib("checking tds lib paths strictly")
+ end
+ local found = findfile and check(findfile,"lib")
+ if found and (not checkpattern or find(found,checkpattern)) then
+ return found
+ end
+ -- check cnf spec using wildcard
+ if trace_swiglib then
+ report_swiglib("checking tds lib paths with wildcard")
+ end
+ local asked_library = joinfile(required_path,".*",required_name)
+ if trace_swiglib then
+ report_swiglib("checking %s: %a","latest version",asked_library)
+ end
+ local list = findfiles(asked_library,"lib",true)
+ if list and #list > 0 then
+ table.sort(list)
+ local found = list[#list]
+ if found and (not checkpattern or find(found,checkpattern)) then
+ return found
+ end
+ end
+ -- Check lib paths using name and version.
+ if trace_swiglib then
+ report_swiglib("checking lib paths")
+ end
+ package.extralibpath(environment.ownpath)
+ local paths = package.libpaths()
+ for i=1,#paths do
+ local found = check(lfs.isfile)
+ if found and (not checkpattern or find(found,checkpattern)) then
+ return found
+ end
+ end
+ return false
+ end
+ local found_library = nil
+ if engine then
+ if trace_swiglib then
+                report_swiglib("attempt 1, engine %a",engine)
+ end
+ found_library = attempt("/"..engine.."/")
+ if not found_library then
+ if trace_swiglib then
+                    report_swiglib("attempt 2, no engine")
+ end
+ found_library = attempt()
+ end
+ else
+ found_library = attempt()
+ end
+ -- load and initialize when found
+ if not found_library then
+ if trace_swiglib then
+ report_swiglib("not found: %a",required)
+ end
+ library = false
+ else
+ local path = pathpart(found_library)
+ local base = nameonly(found_library)
+ dir.push(path)
+ if trace_swiglib then
+ report_swiglib("found: %a",found_library)
+ end
+ local message = nil
+ local opener = "luaopen_" .. required_base
+ library, message = package.loadlib(found_library,opener)
+ local libtype = type(library)
+ if libtype == "function" then
+ library = library()
+ else
+ report_swiglib("load error: %a returns %a, message %a",opener,libtype,message or "no message")
+ library = false
+ end
+ dir.pop()
+ end
+ -- cache result
+ if not library then
+ report_swiglib("unknown: %a",required)
+ elseif trace_swiglib then
+ report_swiglib("stored: %a",required)
+ end
+ loaded[required] = library
+ else
+ report_swiglib("reused: %a",required)
+ end
+ return library
+end
+
+--[[
+
+For convenience we make the require loader function swiglib aware. Alternatively
+we could put the specific loader in the global namespace.
+
+]]--
+
+local savedrequire = require
+
+function require(name,version)
+ if find(name,"^swiglib%.") then
+ return requireswiglib(name,version)
+ else
+ return savedrequire(name)
+ end
+end
+
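+-- Usage sketch (names as in the examples at the end of this file):
+--
+-- local core = require("swiglib.gmwand.core") -- routed through requireswiglib
+-- local lpeg = require("lpeg")                 -- handled by the regular require
+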
+--[[
+
+At the cost of some overhead we provide a specific loader so that we can keep
+track of swiglib usage which is handy for development. In context this is the
+recommended loader.
+
+]]--
+
+local swiglibs = { }
+
+function swiglib(name,version)
+ local library = swiglibs[name]
+ if not library then
+ statistics.starttiming(swiglibs)
+ if trace_swiglib then
+ report_swiglib("loading %a",name)
+ end
+ library = requireswiglib("swiglib." .. name,version)
+ swiglibs[name] = library
+ statistics.stoptiming(swiglibs)
+ end
+ return library
+end
+
+statistics.register("used swiglibs", function()
+ if next(swiglibs) then
+ return string.format("%s, initial load time %s seconds",table.concat(table.sortedkeys(swiglibs)," "),statistics.elapsedtime(swiglibs))
+ end
+end)
+
+--[[
+
+So, we now have:
+
+local gm = require("swiglib.gmwand.core")
+local gm = swiglib("gmwand.core")
+local sq = swiglib("mysql.core")
+local sq = swiglib("mysql.core","5.6")
+
+Watch out, the last one is less explicit and lacks the swiglib prefix.
+
+]]--
diff --git a/tex/context/base/util-lua.lua b/tex/context/base/util-lua.lua
index f3be9dcd2..a69fa9cdd 100644
--- a/tex/context/base/util-lua.lua
+++ b/tex/context/base/util-lua.lua
@@ -1,351 +1,351 @@
-if not modules then modules = { } end modules ['util-lua'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- comment = "the strip code is written by Peter Cawley",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- we will remove the 5.1 code some day soon
-
-local rep, sub, byte, dump, format = string.rep, string.sub, string.byte, string.dump, string.format
-local load, loadfile, type = load, loadfile, type
-
-utilities = utilities or {}
-utilities.lua = utilities.lua or { }
-local luautilities = utilities.lua
-
-local report_lua = logs.reporter("system","lua")
-
-local tracestripping = false
-local forcestupidcompile = true -- use internal bytecode compiler
-luautilities.stripcode = true -- support stripping when asked for
-luautilities.alwaysstripcode = false -- saves 1 meg on 7 meg compressed format file (2012.08.12)
-luautilities.nofstrippedchunks = 0
-luautilities.nofstrippedbytes = 0
-local strippedchunks = { } -- allocate()
-luautilities.strippedchunks = strippedchunks
-
-luautilities.suffixes = {
- tma = "tma",
- tmc = jit and "tmb" or "tmc",
- lua = "lua",
- luc = jit and "lub" or "luc",
- lui = "lui",
- luv = "luv",
- luj = "luj",
- tua = "tua",
- tuc = "tuc",
-}
-
--- environment.loadpreprocessedfile can be set to a preprocessor
-
-if jit or status.luatex_version >= 74 then
-
- local function register(name)
- if tracestripping then
- report_lua("stripped bytecode from %a",name or "unknown")
- end
- strippedchunks[#strippedchunks+1] = name
- luautilities.nofstrippedchunks = luautilities.nofstrippedchunks + 1
- end
-
- local function stupidcompile(luafile,lucfile,strip)
- local code = io.loaddata(luafile)
- if code and code ~= "" then
- code = load(code)
- if code then
- code = dump(code,strip and luautilities.stripcode or luautilities.alwaysstripcode)
- if code and code ~= "" then
- register(name)
- io.savedata(lucfile,code)
- return true, 0
- end
- else
- report_lua("fatal error %a in file %a",1,luafile)
- end
- else
- report_lua("fatal error %a in file %a",2,luafile)
- end
- return false, 0
- end
-
- -- quite subtle ... doing this wrong incidentally can give more bytes
-
- function luautilities.loadedluacode(fullname,forcestrip,name)
- -- quite subtle ... doing this wrong incidentally can give more bytes
- name = name or fullname
- local code = environment.loadpreprocessedfile and environment.loadpreprocessedfile(fullname) or loadfile(fullname)
- if code then
- code()
- end
- if forcestrip and luautilities.stripcode then
- if type(forcestrip) == "function" then
- forcestrip = forcestrip(fullname)
- end
- if forcestrip or luautilities.alwaysstripcode then
- register(name)
- return load(dump(code,true)), 0
- else
- return code, 0
- end
- elseif luautilities.alwaysstripcode then
- register(name)
- return load(dump(code,true)), 0
- else
- return code, 0
- end
- end
-
- function luautilities.strippedloadstring(code,forcestrip,name) -- not executed
- if forcestrip and luautilities.stripcode or luautilities.alwaysstripcode then
- code = load(code)
- if not code then
- report_lua("fatal error %a in file %a",3,name)
- end
- register(name)
- code = dump(code,true)
- end
- return load(code), 0
- end
-
- function luautilities.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
- report_lua("compiling %a into %a",luafile,lucfile)
- os.remove(lucfile)
- local done = stupidcompile(luafile,lucfile,strip ~= false)
- if done then
- report_lua("dumping %a into %a stripped",luafile,lucfile)
- if cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
- report_lua("removing %a",luafile)
- os.remove(luafile)
- end
- end
- return done
- end
-
- function luautilities.loadstripped(...)
- local l = load(...)
- if l then
- return load(dump(l,true))
- end
- end
-
-else
-
- -- The next function was posted by Peter Cawley on the lua list and strips line
- -- number information etc. from the bytecode data blob. We only apply this trick
- -- when we store data tables. Stripping makes the compressed format file about
- -- 1MB smaller (and uncompressed we save at least 6MB).
- --
- -- You can consider this feature an experiment, so it might disappear. There is
- -- no noticeable gain in runtime although the memory footprint should be somewhat
- -- smaller (and the file system has a bit less to deal with).
- --
- -- Begin of borrowed code ... works for Lua 5.1 which LuaTeX currently uses ...
-
- local function register(name,before,after)
- local delta = before - after
- if tracestripping then
- report_lua("bytecodes stripped from %a, # before %s, # after %s, delta %s",name,before,after,delta)
- end
- strippedchunks[#strippedchunks+1] = name
- luautilities.nofstrippedchunks = luautilities.nofstrippedchunks + 1
- luautilities.nofstrippedbytes = luautilities.nofstrippedbytes + delta
- return delta
- end
-
- local strip_code_pc
-
- if _MAJORVERSION == 5 and _MINORVERSION == 1 then
-
- strip_code_pc = function(dump,name)
- local before = #dump
- local version, format, endian, int, size, ins, num = byte(dump,5,11)
- local subint
- if endian == 1 then
- subint = function(dump, i, l)
- local val = 0
- for n = l, 1, -1 do
- val = val * 256 + byte(dump,i + n - 1)
- end
- return val, i + l
- end
- else
- subint = function(dump, i, l)
- local val = 0
- for n = 1, l, 1 do
- val = val * 256 + byte(dump,i + n - 1)
- end
- return val, i + l
- end
- end
- local strip_function
- strip_function = function(dump)
- local count, offset = subint(dump, 1, size)
- local stripped, dirty = rep("\0", size), offset + count
- offset = offset + count + int * 2 + 4
- offset = offset + int + subint(dump, offset, int) * ins
- count, offset = subint(dump, offset, int)
- for n = 1, count do
- local t
- t, offset = subint(dump, offset, 1)
- if t == 1 then
- offset = offset + 1
- elseif t == 4 then
- offset = offset + size + subint(dump, offset, size)
- elseif t == 3 then
- offset = offset + num
- end
- end
- count, offset = subint(dump, offset, int)
- stripped = stripped .. sub(dump,dirty, offset - 1)
- for n = 1, count do
- local proto, off = strip_function(sub(dump,offset, -1))
- stripped, offset = stripped .. proto, offset + off - 1
- end
- offset = offset + subint(dump, offset, int) * int + int
- count, offset = subint(dump, offset, int)
- for n = 1, count do
- offset = offset + subint(dump, offset, size) + size + int * 2
- end
- count, offset = subint(dump, offset, int)
- for n = 1, count do
- offset = offset + subint(dump, offset, size) + size
- end
- stripped = stripped .. rep("\0", int * 3)
- return stripped, offset
- end
- dump = sub(dump,1,12) .. strip_function(sub(dump,13,-1))
- local after = #dump
- local delta = register(name,before,after)
- return dump, delta
- end
-
- else
-
- strip_code_pc = function(dump,name)
- return dump, 0
- end
-
- end
-
- -- ... end of borrowed code.
-
- -- quite subtle ... doing this wrong incidentally can give more bytes
-
- function luautilities.loadedluacode(fullname,forcestrip,name)
- -- quite subtle ... doing this wrong incidentally can give more bytes
- local code = environment.loadpreprocessedfile and environment.preprocessedloadfile(fullname) or loadfile(fullname)
- if code then
- code()
- end
- if forcestrip and luautilities.stripcode then
- if type(forcestrip) == "function" then
- forcestrip = forcestrip(fullname)
- end
- if forcestrip then
- local code, n = strip_code_pc(dump(code),name)
- return load(code), n
- elseif luautilities.alwaysstripcode then
- return load(strip_code_pc(dump(code),name))
- else
- return code, 0
- end
- elseif luautilities.alwaysstripcode then
- return load(strip_code_pc(dump(code),name))
- else
- return code, 0
- end
- end
-
- function luautilities.strippedloadstring(code,forcestrip,name) -- not executed
- local n = 0
- if (forcestrip and luautilities.stripcode) or luautilities.alwaysstripcode then
- code = load(code)
- if not code then
- report_lua("fatal error in file %a",name)
- end
- code, n = strip_code_pc(dump(code),name)
- end
- return load(code), n
- end
-
- local function stupidcompile(luafile,lucfile,strip)
- local code = io.loaddata(luafile)
- local n = 0
- if code and code ~= "" then
- code = load(code)
- if not code then
- report_lua("fatal error in file %a",luafile)
- end
- code = dump(code)
- if strip then
- code, n = strip_code_pc(code,luautilities.stripcode or luautilities.alwaysstripcode,luafile) -- last one is reported
- end
- if code and code ~= "" then
- io.savedata(lucfile,code)
- end
- end
- return n
- end
-
- local luac_normal = "texluac -o %q %q"
- local luac_strip = "texluac -s -o %q %q"
-
- function luautilities.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
- report_lua("compiling %a into %a",luafile,lucfile)
- os.remove(lucfile)
- local done = false
- if strip ~= false then
- strip = true
- end
- if forcestupidcompile then
- fallback = true
- elseif strip then
- done = os.spawn(format(luac_strip, lucfile,luafile)) == 0
- else
- done = os.spawn(format(luac_normal,lucfile,luafile)) == 0
- end
- if not done and fallback then
- local n = stupidcompile(luafile,lucfile,strip)
- if n > 0 then
- report_lua("%a dumped into %a (%i bytes stripped)",luafile,lucfile,n)
- else
- report_lua("%a dumped into %a (unstripped)",luafile,lucfile)
- end
- cleanup = false -- better see how bad it is
- done = true -- hm
- end
- if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
- report_lua("removing %a",luafile)
- os.remove(luafile)
- end
- return done
- end
-
- luautilities.loadstripped = loadstring
-
-end
-
--- local getmetatable, type = getmetatable, type
---
--- local types = { }
---
--- function luautilities.registerdatatype(d,name)
--- types[getmetatable(d)] = name
--- end
---
--- function luautilities.datatype(d)
--- local t = type(d)
--- if t == "userdata" then
--- local m = getmetatable(d)
--- return m and types[m] or "userdata"
--- else
--- return t
--- end
--- end
---
--- luautilities.registerdatatype(lpeg.P("!"),"lpeg")
---
--- print(luautilities.datatype(lpeg.P("oeps")))
+if not modules then modules = { } end modules ['util-lua'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ comment = "the strip code is written by Peter Cawley",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- we will remove the 5.1 code some day soon
+
+local rep, sub, byte, dump, format = string.rep, string.sub, string.byte, string.dump, string.format
+local load, loadfile, type = load, loadfile, type
+
+utilities = utilities or {}
+utilities.lua = utilities.lua or { }
+local luautilities = utilities.lua
+
+local report_lua = logs.reporter("system","lua")
+
+local tracestripping = false
+local forcestupidcompile = true -- use internal bytecode compiler
+luautilities.stripcode = true -- support stripping when asked for
+luautilities.alwaysstripcode = false -- saves 1 meg on 7 meg compressed format file (2012.08.12)
+luautilities.nofstrippedchunks = 0
+luautilities.nofstrippedbytes = 0
+local strippedchunks = { } -- allocate()
+luautilities.strippedchunks = strippedchunks
+
+luautilities.suffixes = {
+ tma = "tma",
+ tmc = jit and "tmb" or "tmc",
+ lua = "lua",
+ luc = jit and "lub" or "luc",
+ lui = "lui",
+ luv = "luv",
+ luj = "luj",
+ tua = "tua",
+ tuc = "tuc",
+}
+
+-- environment.loadpreprocessedfile can be set to a preprocessor
+
+if jit or status.luatex_version >= 74 then
+
+ local function register(name)
+ if tracestripping then
+ report_lua("stripped bytecode from %a",name or "unknown")
+ end
+ strippedchunks[#strippedchunks+1] = name
+ luautilities.nofstrippedchunks = luautilities.nofstrippedchunks + 1
+ end
+
+ local function stupidcompile(luafile,lucfile,strip)
+ local code = io.loaddata(luafile)
+ if code and code ~= "" then
+ code = load(code)
+ if code then
+ code = dump(code,strip and luautilities.stripcode or luautilities.alwaysstripcode)
+ if code and code ~= "" then
+                    register(luafile) -- there is no separate 'name' in this scope
+ io.savedata(lucfile,code)
+ return true, 0
+ end
+ else
+ report_lua("fatal error %a in file %a",1,luafile)
+ end
+ else
+ report_lua("fatal error %a in file %a",2,luafile)
+ end
+ return false, 0
+ end
+
+ -- quite subtle ... doing this wrong incidentally can give more bytes
+
+ function luautilities.loadedluacode(fullname,forcestrip,name)
+ -- quite subtle ... doing this wrong incidentally can give more bytes
+ name = name or fullname
+ local code = environment.loadpreprocessedfile and environment.loadpreprocessedfile(fullname) or loadfile(fullname)
+ if code then
+ code()
+ end
+ if forcestrip and luautilities.stripcode then
+ if type(forcestrip) == "function" then
+ forcestrip = forcestrip(fullname)
+ end
+ if forcestrip or luautilities.alwaysstripcode then
+ register(name)
+ return load(dump(code,true)), 0
+ else
+ return code, 0
+ end
+ elseif luautilities.alwaysstripcode then
+ register(name)
+ return load(dump(code,true)), 0
+ else
+ return code, 0
+ end
+ end
+
+ function luautilities.strippedloadstring(code,forcestrip,name) -- not executed
+ if forcestrip and luautilities.stripcode or luautilities.alwaysstripcode then
+ code = load(code)
+ if not code then
+ report_lua("fatal error %a in file %a",3,name)
+ end
+ register(name)
+ code = dump(code,true)
+ end
+ return load(code), 0
+ end
+
+ function luautilities.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
+ report_lua("compiling %a into %a",luafile,lucfile)
+ os.remove(lucfile)
+ local done = stupidcompile(luafile,lucfile,strip ~= false)
+ if done then
+ report_lua("dumping %a into %a stripped",luafile,lucfile)
+ if cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
+ report_lua("removing %a",luafile)
+ os.remove(luafile)
+ end
+ end
+ return done
+ end
+
+ function luautilities.loadstripped(...)
+ local l = load(...)
+ if l then
+ return load(dump(l,true))
+ end
+ end
+
+else
+
+ -- The next function was posted by Peter Cawley on the lua list and strips line
+ -- number information etc. from the bytecode data blob. We only apply this trick
+ -- when we store data tables. Stripping makes the compressed format file about
+ -- 1MB smaller (and uncompressed we save at least 6MB).
+ --
+ -- You can consider this feature an experiment, so it might disappear. There is
+ -- no noticeable gain in runtime although the memory footprint should be somewhat
+ -- smaller (and the file system has a bit less to deal with).
+ --
+    -- Beginning of borrowed code ... works for Lua 5.1, which LuaTeX currently uses ...
+
+ local function register(name,before,after)
+ local delta = before - after
+ if tracestripping then
+ report_lua("bytecodes stripped from %a, # before %s, # after %s, delta %s",name,before,after,delta)
+ end
+ strippedchunks[#strippedchunks+1] = name
+ luautilities.nofstrippedchunks = luautilities.nofstrippedchunks + 1
+ luautilities.nofstrippedbytes = luautilities.nofstrippedbytes + delta
+ return delta
+ end
+
+ local strip_code_pc
+
+ if _MAJORVERSION == 5 and _MINORVERSION == 1 then
+
+ strip_code_pc = function(dump,name)
+ local before = #dump
+ local version, format, endian, int, size, ins, num = byte(dump,5,11)
+ local subint
+ if endian == 1 then
+ subint = function(dump, i, l)
+ local val = 0
+ for n = l, 1, -1 do
+ val = val * 256 + byte(dump,i + n - 1)
+ end
+ return val, i + l
+ end
+ else
+ subint = function(dump, i, l)
+ local val = 0
+ for n = 1, l, 1 do
+ val = val * 256 + byte(dump,i + n - 1)
+ end
+ return val, i + l
+ end
+ end
+ local strip_function
+ strip_function = function(dump)
+ local count, offset = subint(dump, 1, size)
+ local stripped, dirty = rep("\0", size), offset + count
+ offset = offset + count + int * 2 + 4
+ offset = offset + int + subint(dump, offset, int) * ins
+ count, offset = subint(dump, offset, int)
+ for n = 1, count do
+ local t
+ t, offset = subint(dump, offset, 1)
+ if t == 1 then
+ offset = offset + 1
+ elseif t == 4 then
+ offset = offset + size + subint(dump, offset, size)
+ elseif t == 3 then
+ offset = offset + num
+ end
+ end
+ count, offset = subint(dump, offset, int)
+ stripped = stripped .. sub(dump,dirty, offset - 1)
+ for n = 1, count do
+ local proto, off = strip_function(sub(dump,offset, -1))
+ stripped, offset = stripped .. proto, offset + off - 1
+ end
+ offset = offset + subint(dump, offset, int) * int + int
+ count, offset = subint(dump, offset, int)
+ for n = 1, count do
+ offset = offset + subint(dump, offset, size) + size + int * 2
+ end
+ count, offset = subint(dump, offset, int)
+ for n = 1, count do
+ offset = offset + subint(dump, offset, size) + size
+ end
+ stripped = stripped .. rep("\0", int * 3)
+ return stripped, offset
+ end
+ dump = sub(dump,1,12) .. strip_function(sub(dump,13,-1))
+ local after = #dump
+ local delta = register(name,before,after)
+ return dump, delta
+ end
+
+ else
+
+ strip_code_pc = function(dump,name)
+ return dump, 0
+ end
+
+ end
+
+ -- ... end of borrowed code.
+
+ -- quite subtle ... doing this wrong incidentally can give more bytes
+
+ function luautilities.loadedluacode(fullname,forcestrip,name)
+ -- quite subtle ... doing this wrong incidentally can give more bytes
+        local code = environment.loadpreprocessedfile and environment.loadpreprocessedfile(fullname) or loadfile(fullname)
+ if code then
+ code()
+ end
+ if forcestrip and luautilities.stripcode then
+ if type(forcestrip) == "function" then
+ forcestrip = forcestrip(fullname)
+ end
+ if forcestrip then
+ local code, n = strip_code_pc(dump(code),name)
+ return load(code), n
+ elseif luautilities.alwaysstripcode then
+ return load(strip_code_pc(dump(code),name))
+ else
+ return code, 0
+ end
+ elseif luautilities.alwaysstripcode then
+ return load(strip_code_pc(dump(code),name))
+ else
+ return code, 0
+ end
+ end
+
+ function luautilities.strippedloadstring(code,forcestrip,name) -- not executed
+ local n = 0
+ if (forcestrip and luautilities.stripcode) or luautilities.alwaysstripcode then
+ code = load(code)
+ if not code then
+ report_lua("fatal error in file %a",name)
+ end
+ code, n = strip_code_pc(dump(code),name)
+ end
+ return load(code), n
+ end
+
+ local function stupidcompile(luafile,lucfile,strip)
+ local code = io.loaddata(luafile)
+ local n = 0
+ if code and code ~= "" then
+ code = load(code)
+ if not code then
+ report_lua("fatal error in file %a",luafile)
+ end
+ code = dump(code)
+ if strip then
+ code, n = strip_code_pc(code,luautilities.stripcode or luautilities.alwaysstripcode,luafile) -- last one is reported
+ end
+ if code and code ~= "" then
+ io.savedata(lucfile,code)
+ end
+ end
+ return n
+ end
+
+ local luac_normal = "texluac -o %q %q"
+ local luac_strip = "texluac -s -o %q %q"
+
+ function luautilities.compile(luafile,lucfile,cleanup,strip,fallback) -- defaults: cleanup=false strip=true
+ report_lua("compiling %a into %a",luafile,lucfile)
+ os.remove(lucfile)
+ local done = false
+ if strip ~= false then
+ strip = true
+ end
+ if forcestupidcompile then
+ fallback = true
+ elseif strip then
+ done = os.spawn(format(luac_strip, lucfile,luafile)) == 0
+ else
+ done = os.spawn(format(luac_normal,lucfile,luafile)) == 0
+ end
+ if not done and fallback then
+ local n = stupidcompile(luafile,lucfile,strip)
+ if n > 0 then
+ report_lua("%a dumped into %a (%i bytes stripped)",luafile,lucfile,n)
+ else
+ report_lua("%a dumped into %a (unstripped)",luafile,lucfile)
+ end
+ cleanup = false -- better see how bad it is
+ done = true -- hm
+ end
+ if done and cleanup == true and lfs.isfile(lucfile) and lfs.isfile(luafile) then
+ report_lua("removing %a",luafile)
+ os.remove(luafile)
+ end
+ return done
+ end
+
+ luautilities.loadstripped = loadstring
+
+end
+
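+-- Usage sketch (hypothetical file names; cleanup=false and strip=true are the
+-- documented defaults of luautilities.compile, and loadedluacode also runs the
+-- chunk as a side effect):
+--
+-- local done = luautilities.compile("data-tmp.lua","data-tmp.luc",false,true,true)
+-- local code = luautilities.loadedluacode("data-tmp.lua",true,"data-tmp")
+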
+-- local getmetatable, type = getmetatable, type
+--
+-- local types = { }
+--
+-- function luautilities.registerdatatype(d,name)
+-- types[getmetatable(d)] = name
+-- end
+--
+-- function luautilities.datatype(d)
+-- local t = type(d)
+-- if t == "userdata" then
+-- local m = getmetatable(d)
+-- return m and types[m] or "userdata"
+-- else
+-- return t
+-- end
+-- end
+--
+-- luautilities.registerdatatype(lpeg.P("!"),"lpeg")
+--
+-- print(luautilities.datatype(lpeg.P("oeps")))
diff --git a/tex/context/base/util-mrg.lua b/tex/context/base/util-mrg.lua
index 690188ef8..c50ae8a75 100644
--- a/tex/context/base/util-mrg.lua
+++ b/tex/context/base/util-mrg.lua
@@ -1,228 +1,228 @@
-if not modules then modules = { } end modules ['util-mrg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- hm, quite unreadable
-
-local gsub, format = string.gsub, string.format
-local concat = table.concat
-local type, next = type, next
-
-local P, R, S, V, Ct, C, Cs, Cc, Cp, Cmt, Cb, Cg = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Cp, lpeg.Cmt, lpeg.Cb, lpeg.Cg
-local lpegmatch, patterns = lpeg.match, lpeg.patterns
-
-utilities = utilities or { }
-local merger = utilities.merger or { }
-utilities.merger = merger
-merger.strip_comment = true
-
-local report = logs.reporter("system","merge")
-utilities.report = report
-
-local m_begin_merge = "begin library merge"
-local m_end_merge = "end library merge"
-local m_begin_closure = "do -- create closure to overcome 200 locals limit"
-local m_end_closure = "end -- of closure"
-
-local m_pattern =
- "%c+" ..
- "%-%-%s+" .. m_begin_merge ..
- "%c+(.-)%c+" ..
- "%-%-%s+" .. m_end_merge ..
- "%c+"
-
-local m_format =
- "\n\n-- " .. m_begin_merge ..
- "\n%s\n" ..
- "-- " .. m_end_merge .. "\n\n"
-
-local m_faked =
- "-- " .. "created merged file" .. "\n\n" ..
- "-- " .. m_begin_merge .. "\n\n" ..
- "-- " .. m_end_merge .. "\n\n"
-
-local m_report = [[
--- used libraries : %s
--- skipped libraries : %s
--- original bytes : %s
--- stripped bytes : %s
-]]
-
-local m_preloaded = [[package.loaded[%q] = package.loaded[%q] or true]]
-
-local function self_fake()
- return m_faked
-end
-
-local function self_nothing()
- return ""
-end
-
-local function self_load(name)
- local data = io.loaddata(name) or ""
- if data == "" then
- report("unknown file %a",name)
- else
- report("inserting file %a",name)
- end
- return data or ""
-end
-
--- -- saves some 20K .. scite comments
--- data = gsub(data,"%-%-~[^\n\r]*[\r\n]","")
--- -- saves some 20K .. ldx comments
--- data = gsub(data,"%-%-%[%[ldx%-%-.-%-%-ldx%]%]%-%-","")
-
-local space = patterns.space
-local eol = patterns.newline
-local equals = P("=")^0
-local open = P("[") * Cg(equals,"init") * P("[") * P("\n")^-1
-local close = P("]") * C(equals) * P("]")
-local closeeq = Cmt(close * Cb("init"), function(s,i,a,b) return a == b end)
-local longstring = open * (1 - closeeq)^0 * close
-
-local quoted = patterns.quoted
-local digit = patterns.digit
-local emptyline = space^0 * eol
-local operator1 = P("<=") + P(">=") + P("~=") + P("..") + S("/^<>=*+%%")
-local operator2 = S("*+/")
-local operator3 = S("-")
-local operator4 = P("..")
-local separator = S(",;")
-
-local ignore = (P("]") * space^1 * P("=") * space^1 * P("]")) / "]=[" +
- (P("=") * space^1 * P("{")) / "={" +
- (P("(") * space^1) / "(" +
- (P("{") * (space+eol)^1 * P("}")) / "{}"
-local strings = quoted -- / function (s) print("<<"..s..">>") return s end
-local longcmt = (emptyline^0 * P("--") * longstring * emptyline^0) / ""
-local longstr = longstring
-local comment = emptyline^0 * P("--") * P("-")^0 * (1-eol)^0 * emptyline^1 / "\n"
-local optionalspaces = space^0 / ""
-local mandatespaces = space^1 / ""
-local optionalspacing = (eol+space)^0 / ""
-local mandatespacing = (eol+space)^1 / ""
-local pack = digit * space^1 * operator4 * optionalspacing +
- optionalspacing * operator1 * optionalspacing +
- optionalspacing * operator2 * optionalspaces +
- mandatespacing * operator3 * mandatespaces +
- optionalspaces * separator * optionalspaces
-local lines = emptyline^2 / "\n"
-local spaces = (space * space) / " "
------ spaces = ((space+eol)^1 ) / " "
-
-local compact = Cs ( (
- ignore +
- strings +
- longcmt +
- longstr +
- comment +
- pack +
- lines +
- spaces +
- 1
-)^1 )
-
-local strip = Cs((emptyline^2/"\n" + 1)^0)
-local stripreturn = Cs((1-P("return") * space^1 * P(1-space-eol)^1 * (space+eol)^0 * P(-1))^1)
-
-function merger.compact(data)
- return lpegmatch(strip,lpegmatch(compact,data))
-end
-
-local function self_compact(data)
- local delta = 0
- if merger.strip_comment then
- local before = #data
- data = lpegmatch(compact,data)
- data = lpegmatch(strip,data) -- also strips in longstrings ... alas
- -- data = string.strip(data)
- local after = #data
- delta = before - after
- report("original size %s, compacted to %s, stripped %s",before,after,delta)
- data = format("-- original size: %s, stripped down to: %s\n\n%s",before,after,data)
- end
- return lpegmatch(stripreturn,data) or data, delta
-end
-
-local function self_save(name, data)
- if data ~= "" then
- io.savedata(name,data)
- report("saving %s with size %s",name,#data)
- end
-end
-
-local function self_swap(data,code)
- return data ~= "" and (gsub(data,m_pattern, function() return format(m_format,code) end, 1)) or ""
-end
-
-local function self_libs(libs,list)
- local result, f, frozen, foundpath = { }, nil, false, nil
- result[#result+1] = "\n"
- if type(libs) == 'string' then libs = { libs } end
- if type(list) == 'string' then list = { list } end
- for i=1,#libs do
- local lib = libs[i]
- for j=1,#list do
- local pth = gsub(list[j],"\\","/") -- file.clean_path
- report("checking library path %a",pth)
- local name = pth .. "/" .. lib
- if lfs.isfile(name) then
- foundpath = pth
- end
- end
- if foundpath then break end
- end
- if foundpath then
- report("using library path %a",foundpath)
- local right, wrong, original, stripped = { }, { }, 0, 0
- for i=1,#libs do
- local lib = libs[i]
- local fullname = foundpath .. "/" .. lib
- if lfs.isfile(fullname) then
- report("using library %a",fullname)
- local preloaded = file.nameonly(lib)
- local data = io.loaddata(fullname,true)
- original = original + #data
- local data, delta = self_compact(data)
- right[#right+1] = lib
- result[#result+1] = m_begin_closure
- result[#result+1] = format(m_preloaded,preloaded,preloaded)
- result[#result+1] = data
- result[#result+1] = m_end_closure
- stripped = stripped + delta
- else
- report("skipping library %a",fullname)
- wrong[#wrong+1] = lib
- end
- end
- right = #right > 0 and concat(right," ") or "-"
- wrong = #wrong > 0 and concat(wrong," ") or "-"
- report("used libraries: %a",right)
- report("skipped libraries: %a",wrong)
- report("original bytes: %a",original)
- report("stripped bytes: %a",stripped)
- result[#result+1] = format(m_report,right,wrong,original,stripped)
- else
- report("no valid library path found")
- end
- return concat(result, "\n\n")
-end
-
-function merger.selfcreate(libs,list,target)
- if target then
- self_save(target,self_swap(self_fake(),self_libs(libs,list)))
- end
-end
-
-function merger.selfmerge(name,libs,list,target)
- self_save(target or name,self_swap(self_load(name),self_libs(libs,list)))
-end
-
-function merger.selfclean(name)
- self_save(name,self_swap(self_load(name),self_nothing()))
-end
+if not modules then modules = { } end modules ['util-mrg'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- hm, quite unreadable
+
+local gsub, format = string.gsub, string.format
+local concat = table.concat
+local type, next = type, next
+
+local P, R, S, V, Ct, C, Cs, Cc, Cp, Cmt, Cb, Cg = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Cp, lpeg.Cmt, lpeg.Cb, lpeg.Cg
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
+
+utilities = utilities or { }
+local merger = utilities.merger or { }
+utilities.merger = merger
+merger.strip_comment = true
+
+local report = logs.reporter("system","merge")
+utilities.report = report
+
+local m_begin_merge = "begin library merge"
+local m_end_merge = "end library merge"
+local m_begin_closure = "do -- create closure to overcome 200 locals limit"
+local m_end_closure = "end -- of closure"
+
+local m_pattern =
+ "%c+" ..
+ "%-%-%s+" .. m_begin_merge ..
+ "%c+(.-)%c+" ..
+ "%-%-%s+" .. m_end_merge ..
+ "%c+"
+
+local m_format =
+ "\n\n-- " .. m_begin_merge ..
+ "\n%s\n" ..
+ "-- " .. m_end_merge .. "\n\n"
+
+local m_faked =
+ "-- " .. "created merged file" .. "\n\n" ..
+ "-- " .. m_begin_merge .. "\n\n" ..
+ "-- " .. m_end_merge .. "\n\n"
+
+local m_report = [[
+-- used libraries : %s
+-- skipped libraries : %s
+-- original bytes : %s
+-- stripped bytes : %s
+]]
+
+local m_preloaded = [[package.loaded[%q] = package.loaded[%q] or true]]
+
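+-- For illustration (layout assumed from the templates above, with an arbitrary
+-- library name): a merged block in the target file looks roughly like this:
+--
+-- -- begin library merge
+-- do -- create closure to overcome 200 locals limit
+-- package.loaded["l-lpeg"] = package.loaded["l-lpeg"] or true
+-- ... the (compacted) library code ...
+-- end -- of closure
+-- -- used libraries    : ...
+-- -- skipped libraries : ...
+-- -- end library merge
+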
+local function self_fake()
+ return m_faked
+end
+
+local function self_nothing()
+ return ""
+end
+
+local function self_load(name)
+ local data = io.loaddata(name) or ""
+ if data == "" then
+ report("unknown file %a",name)
+ else
+ report("inserting file %a",name)
+ end
+ return data or ""
+end
+
+-- -- saves some 20K .. scite comments
+-- data = gsub(data,"%-%-~[^\n\r]*[\r\n]","")
+-- -- saves some 20K .. ldx comments
+-- data = gsub(data,"%-%-%[%[ldx%-%-.-%-%-ldx%]%]%-%-","")
+
+local space = patterns.space
+local eol = patterns.newline
+local equals = P("=")^0
+local open = P("[") * Cg(equals,"init") * P("[") * P("\n")^-1
+local close = P("]") * C(equals) * P("]")
+local closeeq = Cmt(close * Cb("init"), function(s,i,a,b) return a == b end)
+local longstring = open * (1 - closeeq)^0 * close
+
+local quoted = patterns.quoted
+local digit = patterns.digit
+local emptyline = space^0 * eol
+local operator1 = P("<=") + P(">=") + P("~=") + P("..") + S("/^<>=*+%%")
+local operator2 = S("*+/")
+local operator3 = S("-")
+local operator4 = P("..")
+local separator = S(",;")
+
+local ignore = (P("]") * space^1 * P("=") * space^1 * P("]")) / "]=[" +
+ (P("=") * space^1 * P("{")) / "={" +
+ (P("(") * space^1) / "(" +
+ (P("{") * (space+eol)^1 * P("}")) / "{}"
+local strings = quoted -- / function (s) print("<<"..s..">>") return s end
+local longcmt = (emptyline^0 * P("--") * longstring * emptyline^0) / ""
+local longstr = longstring
+local comment = emptyline^0 * P("--") * P("-")^0 * (1-eol)^0 * emptyline^1 / "\n"
+local optionalspaces = space^0 / ""
+local mandatespaces = space^1 / ""
+local optionalspacing = (eol+space)^0 / ""
+local mandatespacing = (eol+space)^1 / ""
+local pack = digit * space^1 * operator4 * optionalspacing +
+ optionalspacing * operator1 * optionalspacing +
+ optionalspacing * operator2 * optionalspaces +
+ mandatespacing * operator3 * mandatespaces +
+ optionalspaces * separator * optionalspaces
+local lines = emptyline^2 / "\n"
+local spaces = (space * space) / " "
+----- spaces = ((space+eol)^1 ) / " "
+
+local compact = Cs ( (
+ ignore +
+ strings +
+ longcmt +
+ longstr +
+ comment +
+ pack +
+ lines +
+ spaces +
+ 1
+)^1 )
+
+local strip = Cs((emptyline^2/"\n" + 1)^0)
+local stripreturn = Cs((1-P("return") * space^1 * P(1-space-eol)^1 * (space+eol)^0 * P(-1))^1)
+
+function merger.compact(data)
+ return lpegmatch(strip,lpegmatch(compact,data))
+end
+
+local function self_compact(data)
+ local delta = 0
+ if merger.strip_comment then
+ local before = #data
+ data = lpegmatch(compact,data)
+ data = lpegmatch(strip,data) -- also strips in longstrings ... alas
+ -- data = string.strip(data)
+ local after = #data
+ delta = before - after
+ report("original size %s, compacted to %s, stripped %s",before,after,delta)
+ data = format("-- original size: %s, stripped down to: %s\n\n%s",before,after,data)
+ end
+ return lpegmatch(stripreturn,data) or data, delta
+end
+
+local function self_save(name, data)
+ if data ~= "" then
+ io.savedata(name,data)
+ report("saving %s with size %s",name,#data)
+ end
+end
+
+local function self_swap(data,code)
+ return data ~= "" and (gsub(data,m_pattern, function() return format(m_format,code) end, 1)) or ""
+end
+
+local function self_libs(libs,list)
+ local result, f, frozen, foundpath = { }, nil, false, nil
+ result[#result+1] = "\n"
+ if type(libs) == 'string' then libs = { libs } end
+ if type(list) == 'string' then list = { list } end
+ for i=1,#libs do
+ local lib = libs[i]
+ for j=1,#list do
+ local pth = gsub(list[j],"\\","/") -- file.clean_path
+ report("checking library path %a",pth)
+ local name = pth .. "/" .. lib
+ if lfs.isfile(name) then
+ foundpath = pth
+ end
+ end
+ if foundpath then break end
+ end
+ if foundpath then
+ report("using library path %a",foundpath)
+ local right, wrong, original, stripped = { }, { }, 0, 0
+ for i=1,#libs do
+ local lib = libs[i]
+ local fullname = foundpath .. "/" .. lib
+ if lfs.isfile(fullname) then
+ report("using library %a",fullname)
+ local preloaded = file.nameonly(lib)
+ local data = io.loaddata(fullname,true)
+ original = original + #data
+ local data, delta = self_compact(data)
+ right[#right+1] = lib
+ result[#result+1] = m_begin_closure
+ result[#result+1] = format(m_preloaded,preloaded,preloaded)
+ result[#result+1] = data
+ result[#result+1] = m_end_closure
+ stripped = stripped + delta
+ else
+ report("skipping library %a",fullname)
+ wrong[#wrong+1] = lib
+ end
+ end
+ right = #right > 0 and concat(right," ") or "-"
+ wrong = #wrong > 0 and concat(wrong," ") or "-"
+ report("used libraries: %a",right)
+ report("skipped libraries: %a",wrong)
+ report("original bytes: %a",original)
+ report("stripped bytes: %a",stripped)
+ result[#result+1] = format(m_report,right,wrong,original,stripped)
+ else
+ report("no valid library path found")
+ end
+ return concat(result, "\n\n")
+end
+
+function merger.selfcreate(libs,list,target)
+ if target then
+ self_save(target,self_swap(self_fake(),self_libs(libs,list)))
+ end
+end
+
+function merger.selfmerge(name,libs,list,target)
+ self_save(target or name,self_swap(self_load(name),self_libs(libs,list)))
+end
+
+function merger.selfclean(name)
+ self_save(name,self_swap(self_load(name),self_nothing()))
+end
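+
+-- Usage sketch (hypothetical file and path names):
+--
+-- merger.selfmerge("mtxrun.lua", { "l-lpeg.lua", "l-table.lua" },
+--     { "tex/context/base" }, "mtxrun-merged.lua")
+-- merger.selfclean("mtxrun-merged.lua")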
diff --git a/tex/context/base/util-pck.lua b/tex/context/base/util-pck.lua
index 7be5e8f42..fe9911946 100644
--- a/tex/context/base/util-pck.lua
+++ b/tex/context/base/util-pck.lua
@@ -1,144 +1,144 @@
-if not modules then modules = { } end modules ['util-pck'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- moved from core-uti
-
-local next, tostring, type = next, tostring, type
-local sort, concat = table.sort, table.concat
-local sortedhashkeys, sortedkeys = table.sortedhashkeys, table.sortedkeys
-
-utilities = utilities or { }
-utilities.packers = utilities.packers or { }
-local packers = utilities.packers
-packers.version = 1.00
-
-local function hashed(t)
- local s, ns = { }, 0
- for k, v in next, t do
- ns = ns + 1
- if type(v) == "table" then
- s[ns] = k .. "={" .. hashed(v) .. "}"
- else
- s[ns] = k .. "=" .. tostring(v)
- end
- end
- sort(s)
- return concat(s,",")
-end
-
-local function simplehashed(t)
- local s, ns = { }, 0
- for k, v in next, t do
- ns = ns + 1
- s[ns] = k .. "=" .. v
- end
- sort(s)
- return concat(s,",")
-end
-
-packers.hashed = hashed
-packers.simplehashed = simplehashed
-
--- In luatex < 0.74 (lua 5.1) a next chain was the same for each run so no sort was needed,
--- but in the latest greatest versions (lua 5.2) we really need to sort the keys in order
--- not to get endless runs due to a difference in tuc files.
-
-local function pack(t,keys,hash,index)
- if t then
- -- for k, v in next, t do
- -- local sk = sortedkeys(t)
- local sk = sortedhashkeys(t)
- for i=1,#sk do
- local k = sk[i]
- local v = t[k]
- --
- if type(v) == "table" then
- pack(v,keys,hash,index)
- if keys[k] then
- local h = hashed(v)
- local i = hash[h]
- if not i then
- i = #index + 1
- index[i] = v
- hash[h] = i
- end
- t[k] = i
- end
- end
- end
- end
-end
-
-local function unpack(t,keys,index)
- if t then
- for k, v in next, t do
- if keys[k] and type(v) == "number" then
- local iv = index[v]
- if iv then
- v = iv
- t[k] = v
- end
- end
- if type(v) == "table" then
- unpack(v,keys,index)
- end
- end
- end
-end
-
-function packers.new(keys,version)
- return {
- version = version or packers.version,
- keys = table.tohash(keys),
- hash = { },
- index = { },
- }
-end
-
-function packers.pack(t,p,shared)
- if shared then
- pack(t,p.keys,p.hash,p.index)
- elseif not t.packer then
- pack(t,p.keys,p.hash,p.index)
- if #p.index > 0 then
- t.packer = {
- version = p.version or packers.version,
- keys = p.keys,
- index = p.index,
- }
- end
- p.hash = { }
- p.index = { }
- end
-end
-
-function packers.unpack(t,p,shared)
- if shared then
- if p then
- unpack(t,p.keys,p.index)
- end
- else
- local tp = t.packer
- if tp then
- if tp.version == (p and p.version or packers.version) then
- unpack(t,tp.keys,tp.index)
- else
- return false
- end
- t.packer = nil
- end
- end
- return true
-end
-
-function packers.strip(p)
- p.hash = nil
-end
-
--- We could have a packer.serialize where we first flush the shared table
--- and then use inline a reference . This saves an unpack.
+if not modules then modules = { } end modules ['util-pck'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- moved from core-uti
+
+local next, tostring, type = next, tostring, type
+local sort, concat = table.sort, table.concat
+local sortedhashkeys, sortedkeys = table.sortedhashkeys, table.sortedkeys
+
+utilities = utilities or { }
+utilities.packers = utilities.packers or { }
+local packers = utilities.packers
+packers.version = 1.00
+
+local function hashed(t)
+ local s, ns = { }, 0
+ for k, v in next, t do
+ ns = ns + 1
+ if type(v) == "table" then
+ s[ns] = k .. "={" .. hashed(v) .. "}"
+ else
+ s[ns] = k .. "=" .. tostring(v)
+ end
+ end
+ sort(s)
+ return concat(s,",")
+end
+
+local function simplehashed(t)
+ local s, ns = { }, 0
+ for k, v in next, t do
+ ns = ns + 1
+ s[ns] = k .. "=" .. v
+ end
+ sort(s)
+ return concat(s,",")
+end
+
+packers.hashed = hashed
+packers.simplehashed = simplehashed
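+
+-- An illustrative call: hashed serializes a (possibly nested) table into a canonical
+-- sorted string, so structurally equal tables always yield the same key, e.g.
+--
+--   packers.hashed { b = 2, a = { x = 1 } }  -- "a={x=1},b=2"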
+
+-- In luatex < 0.74 (lua 5.1) a next chain was the same for each run so no sort was needed,
+-- but in the latest greatest versions (lua 5.2) we really need to sort the keys in order
+-- not to get endless runs due to a difference in tuc files.
+
+local function pack(t,keys,hash,index)
+ if t then
+ -- for k, v in next, t do
+ -- local sk = sortedkeys(t)
+ local sk = sortedhashkeys(t)
+ for i=1,#sk do
+ local k = sk[i]
+ local v = t[k]
+ --
+ if type(v) == "table" then
+ pack(v,keys,hash,index)
+ if keys[k] then
+ local h = hashed(v)
+ local i = hash[h]
+ if not i then
+ i = #index + 1
+ index[i] = v
+ hash[h] = i
+ end
+ t[k] = i
+ end
+ end
+ end
+ end
+end
+
+local function unpack(t,keys,index)
+ if t then
+ for k, v in next, t do
+ if keys[k] and type(v) == "number" then
+ local iv = index[v]
+ if iv then
+ v = iv
+ t[k] = v
+ end
+ end
+ if type(v) == "table" then
+ unpack(v,keys,index)
+ end
+ end
+ end
+end
+
+function packers.new(keys,version)
+ return {
+ version = version or packers.version,
+ keys = table.tohash(keys),
+ hash = { },
+ index = { },
+ }
+end
+
+function packers.pack(t,p,shared)
+ if shared then
+ pack(t,p.keys,p.hash,p.index)
+ elseif not t.packer then
+ pack(t,p.keys,p.hash,p.index)
+ if #p.index > 0 then
+ t.packer = {
+ version = p.version or packers.version,
+ keys = p.keys,
+ index = p.index,
+ }
+ end
+ p.hash = { }
+ p.index = { }
+ end
+end
+
+function packers.unpack(t,p,shared)
+ if shared then
+ if p then
+ unpack(t,p.keys,p.index)
+ end
+ else
+ local tp = t.packer
+ if tp then
+ if tp.version == (p and p.version or packers.version) then
+ unpack(t,tp.keys,tp.index)
+ else
+ return false
+ end
+ t.packer = nil
+ end
+ end
+ return true
+end
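+
+-- A round trip sketch (made up data): subtables stored under a registered key are
+-- replaced by an index into a shared pool when packing and expanded again when
+-- unpacking, e.g.
+--
+--   local p = packers.new { "metadata" }
+--   local t = {
+--       one = { metadata = { foo = 1 } },
+--       two = { metadata = { foo = 1 } }, -- same content, so it shares pool slot 1
+--   }
+--   packers.pack(t,p)   -- t.one.metadata and t.two.metadata both become 1
+--   packers.unpack(t)   -- restores the subtables via t.packer and then clears it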
+
+function packers.strip(p)
+ p.hash = nil
+end
+
+-- We could have a packer.serialize where we first flush the shared table
+-- and then use inline a reference . This saves an unpack.
diff --git a/tex/context/base/util-prs.lua b/tex/context/base/util-prs.lua
index cdf497588..ed1e32a99 100644
--- a/tex/context/base/util-prs.lua
+++ b/tex/context/base/util-prs.lua
@@ -1,593 +1,593 @@
-if not modules then modules = { } end modules ['util-prs'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local lpeg, table, string = lpeg, table, string
-local P, R, V, S, C, Ct, Cs, Carg, Cc, Cg, Cf, Cp = lpeg.P, lpeg.R, lpeg.V, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.Cp
-local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
-local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
-local tostring, type, next, rawset = tostring, type, next, rawset
-
-utilities = utilities or {}
-local parsers = utilities.parsers or { }
-utilities.parsers = parsers
-local patterns = parsers.patterns or { }
-parsers.patterns = patterns
-
-local setmetatableindex = table.setmetatableindex
-local sortedhash = table.sortedhash
-
--- we share some patterns
-
-local digit = R("09")
-local space = P(' ')
-local equal = P("=")
-local comma = P(",")
-local lbrace = P("{")
-local rbrace = P("}")
-local lparent = P("(")
-local rparent = P(")")
-local period = S(".")
-local punctuation = S(".,:;")
-local spacer = lpegpatterns.spacer
-local whitespace = lpegpatterns.whitespace
-local newline = lpegpatterns.newline
-local anything = lpegpatterns.anything
-local endofstring = lpegpatterns.endofstring
-
-local nobrace = 1 - ( lbrace + rbrace )
-local noparent = 1 - ( lparent + rparent)
-
--- we could use a Cf Cg construct
-
-local escape, left, right = P("\\"), P('{'), P('}')
-
-lpegpatterns.balanced = P {
- [1] = ((escape * (left+right)) + (1 - (left+right)) + V(2))^0,
- [2] = left * V(1) * right
-}
-
-local nestedbraces = P { lbrace * (nobrace + V(1))^0 * rbrace }
-local nestedparents = P { lparent * (noparent + V(1))^0 * rparent }
-local spaces = space^0
-local argument = Cs((lbrace/"") * ((nobrace + nestedbraces)^0) * (rbrace/""))
-local content = (1-endofstring)^0
-
-lpegpatterns.nestedbraces = nestedbraces -- no capture
-lpegpatterns.nestedparents = nestedparents -- no capture
-lpegpatterns.nested = nestedbraces -- no capture
-lpegpatterns.argument = argument -- argument after e.g. =
-lpegpatterns.content = content -- rest after e.g =
-
-local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace) + C((nestedbraces + (1-comma))^0)
-
-local key = C((1-equal-comma)^1)
-local pattern_a = (space+comma)^0 * (key * equal * value + key * C(""))
-local pattern_c = (space+comma)^0 * (key * equal * value)
-
-local key = C((1-space-equal-comma)^1)
-local pattern_b = spaces * comma^0 * spaces * (key * ((spaces * equal * spaces * value) + C("")))
-
--- "a=1, b=2, c=3, d={a{b,c}d}, e=12345, f=xx{a{b,c}d}xx, g={}" : outer {} removes, leading spaces ignored
-
--- todo: rewrite to fold etc
---
--- parse = lpeg.Cf(lpeg.Carg(1) * lpeg.Cg(key * equal * value) * separator^0,rawset)^0 -- lpeg.match(parse,"...",1,hash)
-
-local hash = { }
-
-local function set(key,value)
- hash[key] = value
-end
-
-local pattern_a_s = (pattern_a/set)^1
-local pattern_b_s = (pattern_b/set)^1
-local pattern_c_s = (pattern_c/set)^1
-
-patterns.settings_to_hash_a = pattern_a_s
-patterns.settings_to_hash_b = pattern_b_s
-patterns.settings_to_hash_c = pattern_c_s
-
-function parsers.make_settings_to_hash_pattern(set,how)
- if type(str) == "table" then
- return set
- elseif how == "strict" then
- return (pattern_c/set)^1
- elseif how == "tolerant" then
- return (pattern_b/set)^1
- else
- return (pattern_a/set)^1
- end
-end
-
-function parsers.settings_to_hash(str,existing)
- if type(str) == "table" then
- if existing then
- for k, v in next, str do
- existing[k] = v
- end
- return exiting
- else
- return str
- end
- elseif str and str ~= "" then
- hash = existing or { }
- lpegmatch(pattern_a_s,str)
- return hash
- else
- return { }
- end
-end
-
-function parsers.settings_to_hash_tolerant(str,existing)
- if type(str) == "table" then
- if existing then
- for k, v in next, str do
- existing[k] = v
- end
- return exiting
- else
- return str
- end
- elseif str and str ~= "" then
- hash = existing or { }
- lpegmatch(pattern_b_s,str)
- return hash
- else
- return { }
- end
-end
-
-function parsers.settings_to_hash_strict(str,existing)
- if type(str) == "table" then
- if existing then
- for k, v in next, str do
- existing[k] = v
- end
- return exiting
- else
- return str
- end
- elseif str and str ~= "" then
- hash = existing or { }
- lpegmatch(pattern_c_s,str)
- return next(hash) and hash
- else
- return nil
- end
-end
-
-local separator = comma * space^0
-local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace)
- + C((nestedbraces + (1-comma))^0)
-local pattern = spaces * Ct(value*(separator*value)^0)
-
--- "aap, {noot}, mies" : outer {} removes, leading spaces ignored
-
-patterns.settings_to_array = pattern
-
--- we could use a weak table as cache
-
-function parsers.settings_to_array(str,strict)
- if type(str) == "table" then
- return str
- elseif not str or str == "" then
- return { }
- elseif strict then
- if find(str,"{") then
- return lpegmatch(pattern,str)
- else
- return { str }
- end
- else
- return lpegmatch(pattern,str)
- end
-end
-
-local function set(t,v)
- t[#t+1] = v
-end
-
-local value = P(Carg(1)*value) / set
-local pattern = value*(separator*value)^0 * Carg(1)
-
-function parsers.add_settings_to_array(t,str)
- return lpegmatch(pattern,str,nil,t)
-end
-
-function parsers.hash_to_string(h,separator,yes,no,strict,omit)
- if h then
- local t, tn, s = { }, 0, table.sortedkeys(h)
- omit = omit and table.tohash(omit)
- for i=1,#s do
- local key = s[i]
- if not omit or not omit[key] then
- local value = h[key]
- if type(value) == "boolean" then
- if yes and no then
- if value then
- tn = tn + 1
- t[tn] = key .. '=' .. yes
- elseif not strict then
- tn = tn + 1
- t[tn] = key .. '=' .. no
- end
- elseif value or not strict then
- tn = tn + 1
- t[tn] = key .. '=' .. tostring(value)
- end
- else
- tn = tn + 1
- t[tn] = key .. '=' .. value
- end
- end
- end
- return concat(t,separator or ",")
- else
- return ""
- end
-end
-
-function parsers.array_to_string(a,separator)
- if a then
- return concat(a,separator or ",")
- else
- return ""
- end
-end
-
-function parsers.settings_to_set(str,t) -- tohash? -- todo: lpeg -- duplicate anyway
- t = t or { }
--- for s in gmatch(str,"%s*([^, ]+)") do -- space added
- for s in gmatch(str,"[^, ]+") do -- space added
- t[s] = true
- end
- return t
-end
-
-function parsers.simple_hash_to_string(h, separator)
- local t, tn = { }, 0
- for k, v in sortedhash(h) do
- if v then
- tn = tn + 1
- t[tn] = k
- end
- end
- return concat(t,separator or ",")
-end
-
--- for chem (currently one level)
-
-local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace)
- + C(digit^1 * lparent * (noparent + nestedparents)^1 * rparent)
- + C((nestedbraces + (1-comma))^1)
-local pattern_a = spaces * Ct(value*(separator*value)^0)
-
-local function repeater(n,str)
- if not n then
- return str
- else
- local s = lpegmatch(pattern_a,str)
- if n == 1 then
- return unpack(s)
- else
- local t, tn = { }, 0
- for i=1,n do
- for j=1,#s do
- tn = tn + 1
- t[tn] = s[j]
- end
- end
- return unpack(t)
- end
- end
-end
-
-local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace)
- + (C(digit^1)/tonumber * lparent * Cs((noparent + nestedparents)^1) * rparent) / repeater
- + C((nestedbraces + (1-comma))^1)
-local pattern_b = spaces * Ct(value*(separator*value)^0)
-
-function parsers.settings_to_array_with_repeat(str,expand) -- beware: "" => { }
- if expand then
- return lpegmatch(pattern_b,str) or { }
- else
- return lpegmatch(pattern_a,str) or { }
- end
-end
-
---
-
-local value = lbrace * C((nobrace + nestedbraces)^0) * rbrace
-local pattern = Ct((space + value)^0)
-
-function parsers.arguments_to_table(str)
- return lpegmatch(pattern,str)
-end
-
--- temporary here (unoptimized)
-
-function parsers.getparameters(self,class,parentclass,settings)
- local sc = self[class]
- if not sc then
- sc = { }
- self[class] = sc
- if parentclass then
- local sp = self[parentclass]
- if not sp then
- sp = { }
- self[parentclass] = sp
- end
- setmetatableindex(sc,sp)
- end
- end
- parsers.settings_to_hash(settings,sc)
-end
-
-function parsers.listitem(str)
- return gmatch(str,"[^, ]+")
-end
-
---
-
-local pattern = Cs { "start",
- start = V("one") + V("two") + V("three"),
- rest = (Cc(",") * V("thousand"))^0 * (P(".") + endofstring) * anything^0,
- thousand = digit * digit * digit,
- one = digit * V("rest"),
- two = digit * digit * V("rest"),
- three = V("thousand") * V("rest"),
-}
-
-lpegpatterns.splitthousands = pattern -- maybe better in the parsers namespace ?
-
-function parsers.splitthousands(str)
- return lpegmatch(pattern,str) or str
-end
-
--- print(parsers.splitthousands("11111111111.11"))
-
-local optionalwhitespace = whitespace^0
-
-lpegpatterns.words = Ct((Cs((1-punctuation-whitespace)^1) + anything)^1)
-lpegpatterns.sentences = Ct((optionalwhitespace * Cs((1-period)^0 * period))^1)
-lpegpatterns.paragraphs = Ct((optionalwhitespace * Cs((whitespace^1*endofstring/"" + 1 - (spacer^0*newline*newline))^1))^1)
-
--- local str = " Word1 word2. \n Word3 word4. \n\n Word5 word6.\n "
--- inspect(lpegmatch(lpegpatterns.paragraphs,str))
--- inspect(lpegmatch(lpegpatterns.sentences,str))
--- inspect(lpegmatch(lpegpatterns.words,str))
-
--- handy for k="v" [, ] k="v"
-
-local dquote = P('"')
-local equal = P('=')
-local escape = P('\\')
-local separator = S(' ,')
-
-local key = C((1-equal)^1)
-local value = dquote * C((1-dquote-escape*dquote)^0) * dquote
-
-local pattern = Cf(Ct("") * Cg(key * equal * value) * separator^0,rawset)^0 * P(-1)
-
-patterns.keq_to_hash_c = pattern
-
-function parsers.keq_to_hash(str)
- if str and str ~= "" then
- return lpegmatch(pattern,str)
- else
- return { }
- end
-end
-
--- inspect(lpeg.match(pattern,[[key="value"]]))
-
-local defaultspecification = { separator = ",", quote = '"' }
-
--- this version accepts multiple separators and quotes as used in the
--- database module
-
-function parsers.csvsplitter(specification)
- specification = specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
- local separator = specification.separator
- local quotechar = specification.quote
- local separator = S(separator ~= "" and separator or ",")
- local whatever = C((1 - separator - newline)^0)
- if quotechar and quotechar ~= "" then
- local quotedata = nil
- for chr in gmatch(quotechar,".") do
- local quotechar = P(chr)
- local quoteword = quotechar * C((1 - quotechar)^0) * quotechar
- if quotedata then
- quotedata = quotedata + quoteword
- else
- quotedata = quoteword
- end
- end
- whatever = quotedata + whatever
- end
- local parser = Ct((Ct(whatever * (separator * whatever)^0) * S("\n\r"))^0 )
- return function(data)
- return lpegmatch(parser,data)
- end
-end
-
--- and this is a slightly patched version of a version posted by Philipp Gesang
-
--- local mycsvsplitter = utilities.parsers.rfc4180splitter()
---
--- local crap = [[
--- first,second,third,fourth
--- "1","2","3","4"
--- "a","b","c","d"
--- "foo","bar""baz","boogie","xyzzy"
--- ]]
---
--- local list, names = mycsvsplitter(crap,true) inspect(list) inspect(names)
--- local list, names = mycsvsplitter(crap) inspect(list) inspect(names)
-
-function parsers.rfc4180splitter(specification)
- specification = specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
- local separator = specification.separator --> rfc: COMMA
- local quotechar = P(specification.quote) --> DQUOTE
- local dquotechar = quotechar * quotechar --> 2DQUOTE
- / specification.quote
- local separator = S(separator ~= "" and separator or ",")
- local escaped = quotechar
- * Cs((dquotechar + (1 - quotechar))^0)
- * quotechar
- local non_escaped = C((1 - quotechar - newline - separator)^1)
- local field = escaped + non_escaped
- local record = Ct((field * separator^-1)^1)
- local headerline = record * Cp()
- local wholeblob = Ct((newline^-1 * record)^0)
- return function(data,getheader)
- if getheader then
- local header, position = lpegmatch(headerline,data)
- local data = lpegmatch(wholeblob,data,position)
- return data, header
- else
- return lpegmatch(wholeblob,data)
- end
- end
-end
-
--- utilities.parsers.stepper("1,7-",9,function(i) print(">>>",i) end)
--- utilities.parsers.stepper("1-3,7,8,9")
--- utilities.parsers.stepper("1-3,6,7",function(i) print(">>>",i) end)
--- utilities.parsers.stepper(" 1 : 3, ,7 ")
--- utilities.parsers.stepper("1:4,9:13,24:*",30)
-
-local function ranger(first,last,n,action)
- if not first then
- -- forget about it
- elseif last == true then
- for i=first,n or first do
- action(i)
- end
- elseif last then
- for i=first,last do
- action(i)
- end
- else
- action(first)
- end
-end
-
-local cardinal = lpegpatterns.cardinal / tonumber
-local spacers = lpegpatterns.spacer^0
-local endofstring = lpegpatterns.endofstring
-
-local stepper = spacers * ( C(cardinal) * ( spacers * S(":-") * spacers * ( C(cardinal) + Cc(true) ) + Cc(false) )
- * Carg(1) * Carg(2) / ranger * S(", ")^0 )^1
-
-local stepper = spacers * ( C(cardinal) * ( spacers * S(":-") * spacers * ( C(cardinal) + (P("*") + endofstring) * Cc(true) ) + Cc(false) )
- * Carg(1) * Carg(2) / ranger * S(", ")^0 )^1 * endofstring -- we're sort of strict (could do without endofstring)
-
-function parsers.stepper(str,n,action)
- if type(n) == "function" then
- lpegmatch(stepper,str,1,false,n or print)
- else
- lpegmatch(stepper,str,1,n,action or print)
- end
-end
-
---
-
-local pattern_math = Cs((P("%")/"\\percent " + P("^") * Cc("{") * lpegpatterns.integer * Cc("}") + P(1))^0)
-local pattern_text = Cs((P("%")/"\\percent " + (P("^")/"\\high") * Cc("{") * lpegpatterns.integer * Cc("}") + P(1))^0)
-
-patterns.unittotex = pattern
-
-function parsers.unittotex(str,textmode)
- return lpegmatch(textmode and pattern_text or pattern_math,str)
-end
-
-local pattern = Cs((P("^") / "" * lpegpatterns.integer * Cc("") + P(1))^0)
-
-function parsers.unittoxml(str)
- return lpegmatch(pattern,str)
-end
-
--- print(utilities.parsers.unittotex("10^-32 %"),utilities.parsers.unittoxml("10^32 %"))
-
-local cache = { }
-local spaces = lpeg.patterns.space^0
-local dummy = function() end
-
-table.setmetatableindex(cache,function(t,k)
- local separator = P(k)
- local value = (1-separator)^0
- local pattern = spaces * C(value) * separator^0 * Cp()
- t[k] = pattern
- return pattern
-end)
-
-local commalistiterator = cache[","]
-
-function utilities.parsers.iterator(str,separator)
- local n = #str
- if n == 0 then
- return dummy
- else
- local pattern = separator and cache[separator] or commalistiterator
- local p = 1
- return function()
- if p <= n then
- local s, e = lpegmatch(pattern,str,p)
- if e then
- p = e
- return s
- end
- end
- end
- end
-end
-
--- for s in utilities.parsers.iterator("a b c,b,c") do
--- print(s)
--- end
-
-local function initialize(t,name)
- local source = t[name]
- if source then
- local result = { }
- for k, v in next, t[name] do
- result[k] = v
- end
- return result
- else
- return { }
- end
-end
-
-local function fetch(t,name)
- return t[name] or { }
-end
-
-function process(result,more)
- for k, v in next, more do
- result[k] = v
- end
- return result
-end
-
-local name = C((1-S(", "))^1)
-local parser = (Carg(1) * name / initialize) * (S(", ")^1 * (Carg(1) * name / fetch))^0
-local merge = Cf(parser,process)
-
-function utilities.parsers.mergehashes(hash,list)
- return lpegmatch(merge,list,1,hash)
-end
-
--- local t = {
--- aa = { alpha = 1, beta = 2, gamma = 3, },
--- bb = { alpha = 4, beta = 5, delta = 6, },
--- cc = { epsilon = 3 },
--- }
---
--- inspect(utilities.parsers.mergehashes(t,"aa, bb, cc"))
+if not modules then modules = { } end modules ['util-prs'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local lpeg, table, string = lpeg, table, string
+local P, R, V, S, C, Ct, Cs, Carg, Cc, Cg, Cf, Cp = lpeg.P, lpeg.R, lpeg.V, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.Cp
+local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns
+local concat, format, gmatch, find = table.concat, string.format, string.gmatch, string.find
+local tostring, type, next, rawset = tostring, type, next, rawset
+
+utilities = utilities or {}
+local parsers = utilities.parsers or { }
+utilities.parsers = parsers
+local patterns = parsers.patterns or { }
+parsers.patterns = patterns
+
+local setmetatableindex = table.setmetatableindex
+local sortedhash = table.sortedhash
+
+-- we share some patterns
+
+local digit = R("09")
+local space = P(' ')
+local equal = P("=")
+local comma = P(",")
+local lbrace = P("{")
+local rbrace = P("}")
+local lparent = P("(")
+local rparent = P(")")
+local period = S(".")
+local punctuation = S(".,:;")
+local spacer = lpegpatterns.spacer
+local whitespace = lpegpatterns.whitespace
+local newline = lpegpatterns.newline
+local anything = lpegpatterns.anything
+local endofstring = lpegpatterns.endofstring
+
+local nobrace = 1 - ( lbrace + rbrace )
+local noparent = 1 - ( lparent + rparent)
+
+-- we could use a Cf Cg construct
+
+local escape, left, right = P("\\"), P('{'), P('}')
+
+lpegpatterns.balanced = P {
+ [1] = ((escape * (left+right)) + (1 - (left+right)) + V(2))^0,
+ [2] = left * V(1) * right
+}
+
+local nestedbraces = P { lbrace * (nobrace + V(1))^0 * rbrace }
+local nestedparents = P { lparent * (noparent + V(1))^0 * rparent }
+local spaces = space^0
+local argument = Cs((lbrace/"") * ((nobrace + nestedbraces)^0) * (rbrace/""))
+local content = (1-endofstring)^0
+
+lpegpatterns.nestedbraces = nestedbraces -- no capture
+lpegpatterns.nestedparents = nestedparents -- no capture
+lpegpatterns.nested = nestedbraces -- no capture
+lpegpatterns.argument = argument -- argument after e.g. =
+lpegpatterns.content = content -- rest after e.g =
+
+local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace) + C((nestedbraces + (1-comma))^0)
+
+local key = C((1-equal-comma)^1)
+local pattern_a = (space+comma)^0 * (key * equal * value + key * C(""))
+local pattern_c = (space+comma)^0 * (key * equal * value)
+
+local key = C((1-space-equal-comma)^1)
+local pattern_b = spaces * comma^0 * spaces * (key * ((spaces * equal * spaces * value) + C("")))
+
+-- "a=1, b=2, c=3, d={a{b,c}d}, e=12345, f=xx{a{b,c}d}xx, g={}" : outer {} removes, leading spaces ignored
+
+-- todo: rewrite to fold etc
+--
+-- parse = lpeg.Cf(lpeg.Carg(1) * lpeg.Cg(key * equal * value) * separator^0,rawset)^0 -- lpeg.match(parse,"...",1,hash)
+
+local hash = { }
+
+local function set(key,value)
+ hash[key] = value
+end
+
+local pattern_a_s = (pattern_a/set)^1
+local pattern_b_s = (pattern_b/set)^1
+local pattern_c_s = (pattern_c/set)^1
+
+patterns.settings_to_hash_a = pattern_a_s
+patterns.settings_to_hash_b = pattern_b_s
+patterns.settings_to_hash_c = pattern_c_s
+
+function parsers.make_settings_to_hash_pattern(set,how)
+    if type(set) == "table" then
+ return set
+ elseif how == "strict" then
+ return (pattern_c/set)^1
+ elseif how == "tolerant" then
+ return (pattern_b/set)^1
+ else
+ return (pattern_a/set)^1
+ end
+end
+
+function parsers.settings_to_hash(str,existing)
+ if type(str) == "table" then
+ if existing then
+ for k, v in next, str do
+ existing[k] = v
+ end
+            return existing
+ else
+ return str
+ end
+ elseif str and str ~= "" then
+ hash = existing or { }
+ lpegmatch(pattern_a_s,str)
+ return hash
+ else
+ return { }
+ end
+end
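+
+-- An illustrative call: parsers.settings_to_hash("a=1, b={x,y}, c") returns
+-- { a = "1", b = "x,y", c = "" } (outer braces are removed, a bare key maps to "").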
+
+function parsers.settings_to_hash_tolerant(str,existing)
+ if type(str) == "table" then
+ if existing then
+ for k, v in next, str do
+ existing[k] = v
+ end
+            return existing
+ else
+ return str
+ end
+ elseif str and str ~= "" then
+ hash = existing or { }
+ lpegmatch(pattern_b_s,str)
+ return hash
+ else
+ return { }
+ end
+end
+
+function parsers.settings_to_hash_strict(str,existing)
+ if type(str) == "table" then
+ if existing then
+ for k, v in next, str do
+ existing[k] = v
+ end
+            return existing
+ else
+ return str
+ end
+ elseif str and str ~= "" then
+ hash = existing or { }
+ lpegmatch(pattern_c_s,str)
+ return next(hash) and hash
+ else
+ return nil
+ end
+end
+
+local separator = comma * space^0
+local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace)
+ + C((nestedbraces + (1-comma))^0)
+local pattern = spaces * Ct(value*(separator*value)^0)
+
+-- "aap, {noot}, mies" : outer {} removes, leading spaces ignored
+
+patterns.settings_to_array = pattern
+
+-- we could use a weak table as cache
+
+function parsers.settings_to_array(str,strict)
+ if type(str) == "table" then
+ return str
+ elseif not str or str == "" then
+ return { }
+ elseif strict then
+ if find(str,"{") then
+ return lpegmatch(pattern,str)
+ else
+ return { str }
+ end
+ else
+ return lpegmatch(pattern,str)
+ end
+end
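+
+-- An illustrative call: parsers.settings_to_array("aap, {noot}, mies") returns
+-- { "aap", "noot", "mies" }.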
+
+local function set(t,v)
+ t[#t+1] = v
+end
+
+local value = P(Carg(1)*value) / set
+local pattern = value*(separator*value)^0 * Carg(1)
+
+function parsers.add_settings_to_array(t,str)
+ return lpegmatch(pattern,str,nil,t)
+end
+
+function parsers.hash_to_string(h,separator,yes,no,strict,omit)
+ if h then
+ local t, tn, s = { }, 0, table.sortedkeys(h)
+ omit = omit and table.tohash(omit)
+ for i=1,#s do
+ local key = s[i]
+ if not omit or not omit[key] then
+ local value = h[key]
+ if type(value) == "boolean" then
+ if yes and no then
+ if value then
+ tn = tn + 1
+ t[tn] = key .. '=' .. yes
+ elseif not strict then
+ tn = tn + 1
+ t[tn] = key .. '=' .. no
+ end
+ elseif value or not strict then
+ tn = tn + 1
+ t[tn] = key .. '=' .. tostring(value)
+ end
+ else
+ tn = tn + 1
+ t[tn] = key .. '=' .. value
+ end
+ end
+ end
+ return concat(t,separator or ",")
+ else
+ return ""
+ end
+end
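+
+-- An illustrative call: parsers.hash_to_string({ a = 1, b = true },",","yes","no")
+-- returns "a=1,b=yes" (keys come out sorted, booleans are mapped onto yes/no).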
+
+function parsers.array_to_string(a,separator)
+ if a then
+ return concat(a,separator or ",")
+ else
+ return ""
+ end
+end
+
+function parsers.settings_to_set(str,t) -- tohash? -- todo: lpeg -- duplicate anyway
+ t = t or { }
+-- for s in gmatch(str,"%s*([^, ]+)") do -- space added
+ for s in gmatch(str,"[^, ]+") do -- space added
+ t[s] = true
+ end
+ return t
+end
+
+function parsers.simple_hash_to_string(h, separator)
+ local t, tn = { }, 0
+ for k, v in sortedhash(h) do
+ if v then
+ tn = tn + 1
+ t[tn] = k
+ end
+ end
+ return concat(t,separator or ",")
+end
+
+-- for chem (currently one level)
+
+local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace)
+ + C(digit^1 * lparent * (noparent + nestedparents)^1 * rparent)
+ + C((nestedbraces + (1-comma))^1)
+local pattern_a = spaces * Ct(value*(separator*value)^0)
+
+local function repeater(n,str)
+ if not n then
+ return str
+ else
+ local s = lpegmatch(pattern_a,str)
+ if n == 1 then
+ return unpack(s)
+ else
+ local t, tn = { }, 0
+ for i=1,n do
+ for j=1,#s do
+ tn = tn + 1
+ t[tn] = s[j]
+ end
+ end
+ return unpack(t)
+ end
+ end
+end
+
+local value = P(lbrace * C((nobrace + nestedbraces)^0) * rbrace)
+ + (C(digit^1)/tonumber * lparent * Cs((noparent + nestedparents)^1) * rparent) / repeater
+ + C((nestedbraces + (1-comma))^1)
+local pattern_b = spaces * Ct(value*(separator*value)^0)
+
+function parsers.settings_to_array_with_repeat(str,expand) -- beware: "" => { }
+ if expand then
+ return lpegmatch(pattern_b,str) or { }
+ else
+ return lpegmatch(pattern_a,str) or { }
+ end
+end
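+
+-- An illustrative call: with expansion enabled a count(...) prefix repeats its content,
+--
+--   parsers.settings_to_array_with_repeat("a,3(b,c)",true)
+--   -- { "a", "b", "c", "b", "c", "b", "c" }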
+
+--
+
+local value = lbrace * C((nobrace + nestedbraces)^0) * rbrace
+local pattern = Ct((space + value)^0)
+
+function parsers.arguments_to_table(str)
+ return lpegmatch(pattern,str)
+end
+
+-- temporary here (unoptimized)
+
+function parsers.getparameters(self,class,parentclass,settings)
+ local sc = self[class]
+ if not sc then
+ sc = { }
+ self[class] = sc
+ if parentclass then
+ local sp = self[parentclass]
+ if not sp then
+ sp = { }
+ self[parentclass] = sp
+ end
+ setmetatableindex(sc,sp)
+ end
+ end
+ parsers.settings_to_hash(settings,sc)
+end
+
+function parsers.listitem(str)
+ return gmatch(str,"[^, ]+")
+end
+
+--
+
+local pattern = Cs { "start",
+ start = V("one") + V("two") + V("three"),
+ rest = (Cc(",") * V("thousand"))^0 * (P(".") + endofstring) * anything^0,
+ thousand = digit * digit * digit,
+ one = digit * V("rest"),
+ two = digit * digit * V("rest"),
+ three = V("thousand") * V("rest"),
+}
+
+lpegpatterns.splitthousands = pattern -- maybe better in the parsers namespace ?
+
+function parsers.splitthousands(str)
+ return lpegmatch(pattern,str) or str
+end
+
+-- print(parsers.splitthousands("11111111111.11"))
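+-- (which should print 11,111,111,111.11)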
+
+local optionalwhitespace = whitespace^0
+
+lpegpatterns.words = Ct((Cs((1-punctuation-whitespace)^1) + anything)^1)
+lpegpatterns.sentences = Ct((optionalwhitespace * Cs((1-period)^0 * period))^1)
+lpegpatterns.paragraphs = Ct((optionalwhitespace * Cs((whitespace^1*endofstring/"" + 1 - (spacer^0*newline*newline))^1))^1)
+
+-- local str = " Word1 word2. \n Word3 word4. \n\n Word5 word6.\n "
+-- inspect(lpegmatch(lpegpatterns.paragraphs,str))
+-- inspect(lpegmatch(lpegpatterns.sentences,str))
+-- inspect(lpegmatch(lpegpatterns.words,str))
+
+-- handy for k="v" [, ] k="v"
+
+local dquote = P('"')
+local equal = P('=')
+local escape = P('\\')
+local separator = S(' ,')
+
+local key = C((1-equal)^1)
+local value = dquote * C((1-dquote-escape*dquote)^0) * dquote
+
+local pattern = Cf(Ct("") * Cg(key * equal * value) * separator^0,rawset)^0 * P(-1)
+
+patterns.keq_to_hash_c = pattern
+
+function parsers.keq_to_hash(str)
+ if str and str ~= "" then
+ return lpegmatch(pattern,str)
+ else
+ return { }
+ end
+end
+
+-- inspect(lpeg.match(pattern,[[key="value"]]))
+
+local defaultspecification = { separator = ",", quote = '"' }
+
+-- this version accepts multiple separators and quotes as used in the
+-- database module
+
+function parsers.csvsplitter(specification)
+ specification = specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
+ local separator = specification.separator
+ local quotechar = specification.quote
+ local separator = S(separator ~= "" and separator or ",")
+ local whatever = C((1 - separator - newline)^0)
+ if quotechar and quotechar ~= "" then
+ local quotedata = nil
+ for chr in gmatch(quotechar,".") do
+ local quotechar = P(chr)
+ local quoteword = quotechar * C((1 - quotechar)^0) * quotechar
+ if quotedata then
+ quotedata = quotedata + quoteword
+ else
+ quotedata = quoteword
+ end
+ end
+ whatever = quotedata + whatever
+ end
+ local parser = Ct((Ct(whatever * (separator * whatever)^0) * S("\n\r"))^0 )
+ return function(data)
+ return lpegmatch(parser,data)
+ end
+end
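+
+-- An illustrative sketch (made up data): the returned function splits newline
+-- terminated records into a table of row tables, e.g.
+--
+--   local split = utilities.parsers.csvsplitter { separator = ";", quote = '"' }
+--   split('a;"b;c"\nd;e\n')  -- { { "a", "b;c" }, { "d", "e" } }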
+
+-- and this is a slightly patched version of a version posted by Philipp Gesang
+
+-- local mycsvsplitter = utilities.parsers.rfc4180splitter()
+--
+-- local crap = [[
+-- first,second,third,fourth
+-- "1","2","3","4"
+-- "a","b","c","d"
+-- "foo","bar""baz","boogie","xyzzy"
+-- ]]
+--
+-- local list, names = mycsvsplitter(crap,true) inspect(list) inspect(names)
+-- local list, names = mycsvsplitter(crap) inspect(list) inspect(names)
+
+function parsers.rfc4180splitter(specification)
+ specification = specification and table.setmetatableindex(specification,defaultspecification) or defaultspecification
+ local separator = specification.separator --> rfc: COMMA
+ local quotechar = P(specification.quote) --> DQUOTE
+ local dquotechar = quotechar * quotechar --> 2DQUOTE
+ / specification.quote
+ local separator = S(separator ~= "" and separator or ",")
+ local escaped = quotechar
+ * Cs((dquotechar + (1 - quotechar))^0)
+ * quotechar
+ local non_escaped = C((1 - quotechar - newline - separator)^1)
+ local field = escaped + non_escaped
+ local record = Ct((field * separator^-1)^1)
+ local headerline = record * Cp()
+ local wholeblob = Ct((newline^-1 * record)^0)
+ return function(data,getheader)
+ if getheader then
+ local header, position = lpegmatch(headerline,data)
+ local data = lpegmatch(wholeblob,data,position)
+ return data, header
+ else
+ return lpegmatch(wholeblob,data)
+ end
+ end
+end
+
+-- utilities.parsers.stepper("1,7-",9,function(i) print(">>>",i) end)
+-- utilities.parsers.stepper("1-3,7,8,9")
+-- utilities.parsers.stepper("1-3,6,7",function(i) print(">>>",i) end)
+-- utilities.parsers.stepper(" 1 : 3, ,7 ")
+-- utilities.parsers.stepper("1:4,9:13,24:*",30)
+
+local function ranger(first,last,n,action)
+ if not first then
+ -- forget about it
+ elseif last == true then
+ for i=first,n or first do
+ action(i)
+ end
+ elseif last then
+ for i=first,last do
+ action(i)
+ end
+ else
+ action(first)
+ end
+end
+
+local cardinal = lpegpatterns.cardinal / tonumber
+local spacers = lpegpatterns.spacer^0
+local endofstring = lpegpatterns.endofstring
+
+local stepper = spacers * ( C(cardinal) * ( spacers * S(":-") * spacers * ( C(cardinal) + Cc(true) ) + Cc(false) )
+ * Carg(1) * Carg(2) / ranger * S(", ")^0 )^1
+
+local stepper = spacers * ( C(cardinal) * ( spacers * S(":-") * spacers * ( C(cardinal) + (P("*") + endofstring) * Cc(true) ) + Cc(false) )
+ * Carg(1) * Carg(2) / ranger * S(", ")^0 )^1 * endofstring -- we're sort of strict (could do without endofstring)
+
+function parsers.stepper(str,n,action)
+ if type(n) == "function" then
+ lpegmatch(stepper,str,1,false,n or print)
+ else
+ lpegmatch(stepper,str,1,n,action or print)
+ end
+end
+
+--
+
+local pattern_math = Cs((P("%")/"\\percent " + P("^") * Cc("{") * lpegpatterns.integer * Cc("}") + P(1))^0)
+local pattern_text = Cs((P("%")/"\\percent " + (P("^")/"\\high") * Cc("{") * lpegpatterns.integer * Cc("}") + P(1))^0)
+
+patterns.unittotex = pattern_math
+
+function parsers.unittotex(str,textmode)
+ return lpegmatch(textmode and pattern_text or pattern_math,str)
+end
+
+local pattern = Cs((P("^") / "" * lpegpatterns.integer * Cc("") + P(1))^0)
+
+function parsers.unittoxml(str)
+ return lpegmatch(pattern,str)
+end
+
+-- print(utilities.parsers.unittotex("10^-32 %"),utilities.parsers.unittoxml("10^32 %"))
+
+local cache = { }
+local spaces = lpeg.patterns.space^0
+local dummy = function() end
+
+table.setmetatableindex(cache,function(t,k)
+ local separator = P(k)
+ local value = (1-separator)^0
+ local pattern = spaces * C(value) * separator^0 * Cp()
+ t[k] = pattern
+ return pattern
+end)
+
+local commalistiterator = cache[","]
+
+function utilities.parsers.iterator(str,separator)
+ local n = #str
+ if n == 0 then
+ return dummy
+ else
+ local pattern = separator and cache[separator] or commalistiterator
+ local p = 1
+ return function()
+ if p <= n then
+ local s, e = lpegmatch(pattern,str,p)
+ if e then
+ p = e
+ return s
+ end
+ end
+ end
+ end
+end
+
+-- for s in utilities.parsers.iterator("a b c,b,c") do
+-- print(s)
+-- end
+
+local function initialize(t,name)
+ local source = t[name]
+ if source then
+ local result = { }
+ for k, v in next, t[name] do
+ result[k] = v
+ end
+ return result
+ else
+ return { }
+ end
+end
+
+local function fetch(t,name)
+ return t[name] or { }
+end
+
+local function process(result,more)
+ for k, v in next, more do
+ result[k] = v
+ end
+ return result
+end
+
+local name = C((1-S(", "))^1)
+local parser = (Carg(1) * name / initialize) * (S(", ")^1 * (Carg(1) * name / fetch))^0
+local merge = Cf(parser,process)
+
+function utilities.parsers.mergehashes(hash,list)
+ return lpegmatch(merge,list,1,hash)
+end
+
+-- local t = {
+-- aa = { alpha = 1, beta = 2, gamma = 3, },
+-- bb = { alpha = 4, beta = 5, delta = 6, },
+-- cc = { epsilon = 3 },
+-- }
+--
+-- inspect(utilities.parsers.mergehashes(t,"aa, bb, cc"))
diff --git a/tex/context/base/util-ran.lua b/tex/context/base/util-ran.lua
index 50d0a7082..7e97be2e6 100644
--- a/tex/context/base/util-ran.lua
+++ b/tex/context/base/util-ran.lua
@@ -1,107 +1,107 @@
-if not modules then modules = { } end modules ['util-ran'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local random = math.random
-local concat = table.concat
-local sub, upper = string.sub, string.upper
-
-local randomizers = utilities.randomizers or { }
-utilities.randomizers = randomizers
-
-local l_one = "bcdfghjklmnpqrstvwxz"
-local l_two = "aeiouy"
-
-local u_one = upper(l_one)
-local u_two = upper(l_two)
-
-local n_one = #l_one
-local n_two = #l_two
-
-function randomizers.word(min,max,separator)
- local t = { }
- for i=1,random(min,max) do
- if i % 2 == 0 then
- local r = random(1,n_one)
- t[i] = sub(l_one,r,r)
- else
- local r = random(1,n_two)
- t[i] = sub(l_two,r,r)
- end
- end
- return concat(t,separator)
-end
-
-function randomizers.initials(min,max)
- if not min then
- if not max then
- min, max = 1, 3
- else
- min, max = 1, min
- end
- elseif not max then
- max = min
- end
- local t = { }
- local n = random(min or 1,max or 3)
- local m = 0
- for i=1,n do
- m = m + 1
- if i % 2 == 0 then
- local r = random(1,n_one)
- t[m] = sub(u_one,r,r)
- else
- local r = random(1,n_two)
- t[m] = sub(u_two,r,r)
- end
- m = m + 1
- t[m] = "."
- end
- return concat(t)
-end
-
-function randomizers.firstname(min,max)
- if not min then
- if not max then
- min, max = 3, 10
- else
- min, max = 1, min
- end
- elseif not max then
- max = min
- end
- local t = { }
- local n = random(min,max)
- local b = true
- if n % 2 == 0 then
- local r = random(1,n_two)
- t[1] = sub(u_two,r,r)
- b = true
- else
- local r = random(1,n_one)
- t[1] = sub(u_one,r,r)
- b = false
- end
- for i=2,n do
- if b then
- local r = random(1,n_one)
- t[i] = sub(l_one,r,r)
- b = false
- else
- local r = random(1,n_two)
- t[i] = sub(l_two,r,r)
- b = true
- end
- end
- return concat(t,separator)
-end
-
-randomizers.surname = randomizers.firstname
-
--- for i=1,10 do
--- print(randomizers.initials(1,3),randomizers.firstname(5,10),randomizers.surname(5,15))
--- end
+if not modules then modules = { } end modules ['util-ran'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local random = math.random
+local concat = table.concat
+local sub, upper = string.sub, string.upper
+
+local randomizers = utilities.randomizers or { }
+utilities.randomizers = randomizers
+
+local l_one = "bcdfghjklmnpqrstvwxz"
+local l_two = "aeiouy"
+
+local u_one = upper(l_one)
+local u_two = upper(l_two)
+
+local n_one = #l_one
+local n_two = #l_two
+
+function randomizers.word(min,max,separator)
+ local t = { }
+ for i=1,random(min,max) do
+ if i % 2 == 0 then
+ local r = random(1,n_one)
+ t[i] = sub(l_one,r,r)
+ else
+ local r = random(1,n_two)
+ t[i] = sub(l_two,r,r)
+ end
+ end
+ return concat(t,separator)
+end
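+
+-- An illustrative call: randomizers.word(4,8) builds a pronounceable nonsense word by
+-- alternating vowels and consonants, so it might return something like "ebozi"
+-- (the outcome is random by construction).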
+
+function randomizers.initials(min,max)
+ if not min then
+ if not max then
+ min, max = 1, 3
+ else
+ min, max = 1, min
+ end
+ elseif not max then
+ max = min
+ end
+ local t = { }
+ local n = random(min or 1,max or 3)
+ local m = 0
+ for i=1,n do
+ m = m + 1
+ if i % 2 == 0 then
+ local r = random(1,n_one)
+ t[m] = sub(u_one,r,r)
+ else
+ local r = random(1,n_two)
+ t[m] = sub(u_two,r,r)
+ end
+ m = m + 1
+ t[m] = "."
+ end
+ return concat(t)
+end
+
+function randomizers.firstname(min,max)
+ if not min then
+ if not max then
+ min, max = 3, 10
+ else
+ min, max = 1, min
+ end
+ elseif not max then
+ max = min
+ end
+ local t = { }
+ local n = random(min,max)
+ local b = true
+ if n % 2 == 0 then
+ local r = random(1,n_two)
+ t[1] = sub(u_two,r,r)
+ b = true
+ else
+ local r = random(1,n_one)
+ t[1] = sub(u_one,r,r)
+ b = false
+ end
+ for i=2,n do
+ if b then
+ local r = random(1,n_one)
+ t[i] = sub(l_one,r,r)
+ b = false
+ else
+ local r = random(1,n_two)
+ t[i] = sub(l_two,r,r)
+ b = true
+ end
+ end
+    return concat(t)
+end
+
+randomizers.surname = randomizers.firstname
+
+-- for i=1,10 do
+-- print(randomizers.initials(1,3),randomizers.firstname(5,10),randomizers.surname(5,15))
+-- end
diff --git a/tex/context/base/util-seq.lua b/tex/context/base/util-seq.lua
index 27f95f0ee..1b56bbdba 100644
--- a/tex/context/base/util-seq.lua
+++ b/tex/context/base/util-seq.lua
@@ -1,330 +1,330 @@
-if not modules then modules = { } end modules ['util-seq'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-
-Here we implement a mechanism for chaining the special functions
-that we use in ConTeXt to deal with mode list processing. We
-assume that namespaces for the functions are used, but for speed we
-use locals to refer to them when compiling the chain.
---ldx]]--
-
--- todo: delayed: i.e. we register them in the right order already but delay usage
-
--- todo: protect groups (as in tasks)
-
-local format, gsub, concat, gmatch = string.format, string.gsub, table.concat, string.gmatch
-local type, load = type, load
-
-utilities = utilities or { }
-local tables = utilities.tables
-local allocate = utilities.storage.allocate
-
-local sequencers = { }
-utilities.sequencers = sequencers
-
-local functions = allocate()
-sequencers.functions = functions
-
-local removevalue = tables.removevalue
-local insertaftervalue = tables.insertaftervalue
-local insertbeforevalue = tables.insertbeforevalue
-
-local function validaction(action)
- if type(action) == "string" then
- local g = _G
- for str in gmatch(action,"[^%.]+") do
- g = g[str]
- if not g then
- return false
- end
- end
- end
- return true
-end
-
-local compile
-
-local known = { } -- just a convenience, in case we want public access (only to a few methods)
-
-function sequencers.new(t) -- was reset
- local s = {
- list = { },
- order = { },
- kind = { },
- askip = { },
- gskip = { },
- dirty = true,
- runner = nil,
- }
- if t then
- s.arguments = t.arguments
- s.returnvalues = t.returnvalues
- s.results = t.results
- local name = t.name
- if name and name ~= "" then
- s.name = name
- known[name] = s
- end
- end
- table.setmetatableindex(s,function(t,k)
- -- this will automake a dirty runner
- if k == "runner" then
- local v = compile(t,t.compiler)
- return v
- end
- end)
- known[s] = s -- saves test for string later on
- return s
-end
-
-function sequencers.prependgroup(t,group,where)
- t = known[t]
- if t then
- local order = t.order
- removevalue(order,group)
- insertbeforevalue(order,where,group)
- t.list[group] = { }
- t.dirty = true
- t.runner = nil
- end
-end
-
-function sequencers.appendgroup(t,group,where)
- t = known[t]
- if t then
- local order = t.order
- removevalue(order,group)
- insertaftervalue(order,where,group)
- t.list[group] = { }
- t.dirty = true
- t.runner = nil
- end
-end
-
-function sequencers.prependaction(t,group,action,where,kind,force)
- t = known[t]
- if t then
- local g = t.list[group]
- if g and (force or validaction(action)) then
- removevalue(g,action)
- insertbeforevalue(g,where,action)
- t.kind[action] = kind
- t.dirty = true
- t.runner = nil
- end
- end
-end
-
-function sequencers.appendaction(t,group,action,where,kind,force)
- t = known[t]
- if t then
- local g = t.list[group]
- if g and (force or validaction(action)) then
- removevalue(g,action)
- insertaftervalue(g,where,action)
- t.kind[action] = kind
- t.dirty = true
- t.runner = nil
- end
- end
-end
-
-function sequencers.enableaction(t,action)
- t = known[t]
- if t then
- t.askip[action] = false
- t.dirty = true
- t.runner = nil
- end
-end
-
-function sequencers.disableaction(t,action)
- t = known[t]
- if t then
- t.askip[action] = true
- t.dirty = true
- t.runner = nil
- end
-end
-
-function sequencers.enablegroup(t,group)
- t = known[t]
- if t then
- t.gskip[action] = false
- t.dirty = true
- t.runner = nil
- end
-end
-
-function sequencers.disablegroup(t,group)
- t = known[t]
- if t then
- t.gskip[action] = true
- t.dirty = true
- t.runner = nil
- end
-end
-
-function sequencers.setkind(t,action,kind)
- t = known[t]
- if t then
- t.kind[action] = kind
- t.dirty = true
- t.runner = nil
- end
-end
-
-function sequencers.removeaction(t,group,action,force)
- t = known[t]
- local g = t and t.list[group]
- if g and (force or validaction(action)) then
- removevalue(g,action)
- t.dirty = true
- t.runner = nil
- end
-end
-
-local function localize(str)
- return (gsub(str,"[%.: ]+","_"))
-end
-
-local function construct(t)
- local list, order, kind, gskip, askip = t.list, t.order, t.kind, t.gskip, t.askip
- local arguments, returnvalues, results = t.arguments or "...", t.returnvalues, t.results
- local variables, calls, n = { }, { }, 0
- for i=1,#order do
- local group = order[i]
- if not gskip[group] then
- local actions = list[group]
- for i=1,#actions do
- local action = actions[i]
- if not askip[action] then
- if type(action) == "function" then
- local name = localize(tostring(action))
- functions[name] = action
- action = format("utilities.sequencers.functions.%s",name)
- end
- local localized = localize(action)
- n = n + 1
- variables[n] = format("local %s = %s",localized,action)
- if not returnvalues then
- calls[n] = format("%s(%s)",localized,arguments)
- elseif n == 1 then
- calls[n] = format("local %s = %s(%s)",returnvalues,localized,arguments)
- else
- calls[n] = format("%s = %s(%s)",returnvalues,localized,arguments)
- end
- end
- end
- end
- end
- t.dirty = false
- if n == 0 then
- t.compiled = ""
- else
- variables = concat(variables,"\n")
- calls = concat(calls,"\n")
- if results then
- t.compiled = format("%s\nreturn function(%s)\n%s\nreturn %s\nend",variables,arguments,calls,results)
- else
- t.compiled = format("%s\nreturn function(%s)\n%s\nend",variables,arguments,calls)
- end
- end
--- print(t.compiled)
- return t.compiled -- also stored so that we can trace
-end
-
-sequencers.tostring = construct
-sequencers.localize = localize
-
-compile = function(t,compiler,n) -- already referred to in sequencers.new
- local compiled
- if not t or type(t) == "string" then
- -- weird ... t.compiled = t .. so
- return false
- end
- if compiler then
- compiled = compiler(t,n)
- t.compiled = compiled
- else
- compiled = construct(t,n)
- end
- local runner
- if compiled == "" then
- runner = false
- else
- runner = compiled and load(compiled)() -- we can use loadstripped here
- end
- t.runner = runner
- return runner
-end
-
-sequencers.compile = compile
-
--- we used to deal with tail as well but now that the lists are always
--- double linked and the kernel function no longer expect tail as
--- argument we stick to head and done (done can probably also go
--- as luatex deals with return values efficiently now .. in the
--- past there was some copying involved, but no longer)
-
--- todo: use sequencer (can have arguments and returnvalues etc now)
-
-local template_yes = [[
-%s
-return function(head%s)
- local ok, done = false, false
-%s
- return head, done
-end]]
-
-local template_nop = [[
-return function()
- return false, false
-end]]
-
-function sequencers.nodeprocessor(t,nofarguments) -- todo: handle 'kind' in plug into tostring
- local list, order, kind, gskip, askip = t.list, t.order, t.kind, t.gskip, t.askip
- local vars, calls, args, n = { }, { }, nil, 0
- if nofarguments == 0 then
- args = ""
- elseif nofarguments == 1 then
- args = ",one"
- elseif nofarguments == 2 then
- args = ",one,two"
- elseif nofarguments == 3 then
- args = ",one,two,three"
- elseif nofarguments == 4 then
- args = ",one,two,three,four"
- elseif nofarguments == 5 then
- args = ",one,two,three,four,five"
- else
- args = ",..."
- end
- for i=1,#order do
- local group = order[i]
- if not gskip[group] then
- local actions = list[group]
- for i=1,#actions do
- local action = actions[i]
- if not askip[action] then
- local localized = localize(action)
- n = n + 1
- vars[n] = format("local %s = %s",localized,action)
- -- only difference with tostring is kind and rets (why no return)
- if kind[action] == "nohead" then
- calls[n] = format(" ok = %s(head%s) done = done or ok",localized,args)
- else
- calls[n] = format(" head, ok = %s(head%s) done = done or ok",localized,args)
- end
- end
- end
- end
- end
- local processor = #calls > 0 and format(template_yes,concat(vars,"\n"),args,concat(calls,"\n")) or template_nop
- return processor
-end
+if not modules then modules = { } end modules ['util-seq'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+--[[ldx--
+
+Here we implement a mechanism for chaining the special functions
+that we use in ConTeXt to deal with mode list processing. We
+assume that namespaces for the functions are used, but for speed we
+use locals to refer to them when compiling the chain.
+--ldx]]--
+
+-- todo: delayed: i.e. we register them in the right order already but delay usage
+
+-- todo: protect groups (as in tasks)
+
+local format, gsub, concat, gmatch = string.format, string.gsub, table.concat, string.gmatch
+local type, load = type, load
+
+utilities = utilities or { }
+local tables = utilities.tables
+local allocate = utilities.storage.allocate
+
+local sequencers = { }
+utilities.sequencers = sequencers
+
+local functions = allocate()
+sequencers.functions = functions
+
+local removevalue = tables.removevalue
+local insertaftervalue = tables.insertaftervalue
+local insertbeforevalue = tables.insertbeforevalue
+
+local function validaction(action)
+ if type(action) == "string" then
+ local g = _G
+ for str in gmatch(action,"[^%.]+") do
+ g = g[str]
+ if not g then
+ return false
+ end
+ end
+ end
+ return true
+end
+
+local compile
+
+local known = { } -- just a convenience, in case we want public access (only to a few methods)
+
+function sequencers.new(t) -- was reset
+ local s = {
+ list = { },
+ order = { },
+ kind = { },
+ askip = { },
+ gskip = { },
+ dirty = true,
+ runner = nil,
+ }
+ if t then
+ s.arguments = t.arguments
+ s.returnvalues = t.returnvalues
+ s.results = t.results
+ local name = t.name
+ if name and name ~= "" then
+ s.name = name
+ known[name] = s
+ end
+ end
+ table.setmetatableindex(s,function(t,k)
+ -- this will automake a dirty runner
+ if k == "runner" then
+ local v = compile(t,t.compiler)
+ return v
+ end
+ end)
+ known[s] = s -- saves test for string later on
+ return s
+end
+
+function sequencers.prependgroup(t,group,where)
+ t = known[t]
+ if t then
+ local order = t.order
+ removevalue(order,group)
+ insertbeforevalue(order,where,group)
+ t.list[group] = { }
+ t.dirty = true
+ t.runner = nil
+ end
+end
+
+function sequencers.appendgroup(t,group,where)
+ t = known[t]
+ if t then
+ local order = t.order
+ removevalue(order,group)
+ insertaftervalue(order,where,group)
+ t.list[group] = { }
+ t.dirty = true
+ t.runner = nil
+ end
+end
+
+function sequencers.prependaction(t,group,action,where,kind,force)
+ t = known[t]
+ if t then
+ local g = t.list[group]
+ if g and (force or validaction(action)) then
+ removevalue(g,action)
+ insertbeforevalue(g,where,action)
+ t.kind[action] = kind
+ t.dirty = true
+ t.runner = nil
+ end
+ end
+end
+
+function sequencers.appendaction(t,group,action,where,kind,force)
+ t = known[t]
+ if t then
+ local g = t.list[group]
+ if g and (force or validaction(action)) then
+ removevalue(g,action)
+ insertaftervalue(g,where,action)
+ t.kind[action] = kind
+ t.dirty = true
+ t.runner = nil
+ end
+ end
+end
+
+function sequencers.enableaction(t,action)
+ t = known[t]
+ if t then
+ t.askip[action] = false
+ t.dirty = true
+ t.runner = nil
+ end
+end
+
+function sequencers.disableaction(t,action)
+ t = known[t]
+ if t then
+ t.askip[action] = true
+ t.dirty = true
+ t.runner = nil
+ end
+end
+
+function sequencers.enablegroup(t,group)
+ t = known[t]
+ if t then
+        t.gskip[group] = false
+ t.dirty = true
+ t.runner = nil
+ end
+end
+
+function sequencers.disablegroup(t,group)
+ t = known[t]
+ if t then
+        t.gskip[group] = true
+ t.dirty = true
+ t.runner = nil
+ end
+end
+
+function sequencers.setkind(t,action,kind)
+ t = known[t]
+ if t then
+ t.kind[action] = kind
+ t.dirty = true
+ t.runner = nil
+ end
+end
+
+function sequencers.removeaction(t,group,action,force)
+ t = known[t]
+ local g = t and t.list[group]
+ if g and (force or validaction(action)) then
+ removevalue(g,action)
+ t.dirty = true
+ t.runner = nil
+ end
+end
+
+local function localize(str)
+ return (gsub(str,"[%.: ]+","_"))
+end
+
+local function construct(t)
+ local list, order, kind, gskip, askip = t.list, t.order, t.kind, t.gskip, t.askip
+ local arguments, returnvalues, results = t.arguments or "...", t.returnvalues, t.results
+ local variables, calls, n = { }, { }, 0
+ for i=1,#order do
+ local group = order[i]
+ if not gskip[group] then
+ local actions = list[group]
+ for i=1,#actions do
+ local action = actions[i]
+ if not askip[action] then
+ if type(action) == "function" then
+ local name = localize(tostring(action))
+ functions[name] = action
+ action = format("utilities.sequencers.functions.%s",name)
+ end
+ local localized = localize(action)
+ n = n + 1
+ variables[n] = format("local %s = %s",localized,action)
+ if not returnvalues then
+ calls[n] = format("%s(%s)",localized,arguments)
+ elseif n == 1 then
+ calls[n] = format("local %s = %s(%s)",returnvalues,localized,arguments)
+ else
+ calls[n] = format("%s = %s(%s)",returnvalues,localized,arguments)
+ end
+ end
+ end
+ end
+ end
+ t.dirty = false
+ if n == 0 then
+ t.compiled = ""
+ else
+ variables = concat(variables,"\n")
+ calls = concat(calls,"\n")
+ if results then
+ t.compiled = format("%s\nreturn function(%s)\n%s\nreturn %s\nend",variables,arguments,calls,results)
+ else
+ t.compiled = format("%s\nreturn function(%s)\n%s\nend",variables,arguments,calls)
+ end
+ end
+-- print(t.compiled)
+ return t.compiled -- also stored so that we can trace
+end
+
+sequencers.tostring = construct
+sequencers.localize = localize
+
+compile = function(t,compiler,n) -- already referred to in sequencers.new
+ local compiled
+ if not t or type(t) == "string" then
+ -- weird ... t.compiled = t .. so
+ return false
+ end
+ if compiler then
+ compiled = compiler(t,n)
+ t.compiled = compiled
+ else
+ compiled = construct(t,n)
+ end
+ local runner
+ if compiled == "" then
+ runner = false
+ else
+ runner = compiled and load(compiled)() -- we can use loadstripped here
+ end
+ t.runner = runner
+ return runner
+end
+
+sequencers.compile = compile
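+
+-- An illustrative sketch (hypothetical action): a sequence collects grouped actions
+-- and compiles them into a single runner on first use, e.g.
+--
+--   local s = sequencers.new { arguments = "n", returnvalues = "n", results = "n" }
+--   sequencers.appendgroup(s,"steps")
+--   sequencers.appendaction(s,"steps",function(n) return n + 1 end)
+--   print(s.runner(2)) -- 3, the runner being compiled lazily via the metatable index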
+
+-- we used to deal with tail as well but now that the lists are always
+-- double linked and the kernel function no longer expect tail as
+-- argument we stick to head and done (done can probably also go
+-- as luatex deals with return values efficiently now .. in the
+-- past there was some copying involved, but no longer)
+
+-- todo: use sequencer (can have arguments and returnvalues etc now)
+
+local template_yes = [[
+%s
+return function(head%s)
+ local ok, done = false, false
+%s
+ return head, done
+end]]
+
+local template_nop = [[
+return function()
+ return false, false
+end]]
+
+function sequencers.nodeprocessor(t,nofarguments) -- todo: handle 'kind' in plug into tostring
+ local list, order, kind, gskip, askip = t.list, t.order, t.kind, t.gskip, t.askip
+ local vars, calls, args, n = { }, { }, nil, 0
+ if nofarguments == 0 then
+ args = ""
+ elseif nofarguments == 1 then
+ args = ",one"
+ elseif nofarguments == 2 then
+ args = ",one,two"
+ elseif nofarguments == 3 then
+ args = ",one,two,three"
+ elseif nofarguments == 4 then
+ args = ",one,two,three,four"
+ elseif nofarguments == 5 then
+ args = ",one,two,three,four,five"
+ else
+ args = ",..."
+ end
+ for i=1,#order do
+ local group = order[i]
+ if not gskip[group] then
+ local actions = list[group]
+ for i=1,#actions do
+ local action = actions[i]
+ if not askip[action] then
+ local localized = localize(action)
+ n = n + 1
+ vars[n] = format("local %s = %s",localized,action)
+ -- only difference with tostring is kind and rets (why no return)
+ if kind[action] == "nohead" then
+ calls[n] = format(" ok = %s(head%s) done = done or ok",localized,args)
+ else
+ calls[n] = format(" head, ok = %s(head%s) done = done or ok",localized,args)
+ end
+ end
+ end
+ end
+ end
+ local processor = #calls > 0 and format(template_yes,concat(vars,"\n"),args,concat(calls,"\n")) or template_nop
+ return processor
+end
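+
+-- For reference, the processor compiled from one enabled group with a single
+-- action "x.y" and nofarguments == 1 looks (modulo whitespace) like this,
+-- derived from template_yes above:
+--
+-- local x_y = x.y
+-- return function(head,one)
+--     local ok, done = false, false
+--     head, ok = x_y(head,one) done = done or ok
+--     return head, done
+-- end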
diff --git a/tex/context/base/util-soc.lua b/tex/context/base/util-soc.lua
index 30301c510..ba2f7b507 100644
--- a/tex/context/base/util-soc.lua
+++ b/tex/context/base/util-soc.lua
@@ -1,93 +1,93 @@
-if not modules then modules = { } end modules ['util-soc'] = {
- version = 1.001,
- comment = "support for sockets / protocols",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local format = string.format
-
-local smtp = require("socket.smtp")
-local ltn12 = require("ltn12")
-local mime = require("mime")
-
-local mail = utilities.mail or { }
-utilities.mail = mail
-
-local report_mail = logs.reporter("mail")
-
-function mail.send(specification)
- local presets = specification.presets
- if presets then
- table.setmetatableindex(specification,presets)
- end
- local server = specification.server or ""
- if not server then
- report_mail("no server specified")
- return false
- end
- local to = specification.to or specification.recepient or ""
- if to == "" then
- report_mail("no recepient specified")
- return false
- end
- local from = specification.from or specification.sender or ""
- if from == "" then
- report_mail("no sender specified")
- return false
- end
- local message = { }
- local body = specification.body
- if body then
- message[#message+1] = {
- body = body
- }
- end
- local files = specification.files
- if files then
- for i=1,#files do
- local filename = files[i]
- local handle = io.open(filename, "rb")
- if handle then
- report_mail("attaching file %a",filename)
- message[#message+1] = {
- headers = {
- ["content-type"] = format('application/pdf; name="%s"',filename),
- ["content-disposition"] = format('attachment; filename="%s"',filename),
- ["content-description"] = format('file: %s',filename),
- ["content-transfer-encoding"] = "BASE64"
- },
- body = ltn12.source.chain(
- ltn12.source.file(handle),
- ltn12.filter.chain(mime.encode("base64"),mime.wrap())
- )
- }
- else
- report_mail("file %a not found",filename)
- end
- end
- end
- local result, detail = smtp.send {
- server = specification.server,
- port = specification.port,
- user = specification.user,
- password = specification.password,
- from = from,
- rcpt = to,
- source = smtp.message {
- headers = {
- to = to,
- from = from,
- cc = specification.cc,
- subject = specification.subject or "no subject",
- },
- body = message
- },
- }
- if detail then
- report_mail("error: %s",detail)
- else
- report_mail("message sent")
- end
-end
+if not modules then modules = { } end modules ['util-soc'] = {
+ version = 1.001,
+ comment = "support for sockets / protocols",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local format = string.format
+
+local smtp = require("socket.smtp")
+local ltn12 = require("ltn12")
+local mime = require("mime")
+
+local mail = utilities.mail or { }
+utilities.mail = mail
+
+local report_mail = logs.reporter("mail")
+
+function mail.send(specification)
+ local presets = specification.presets
+ if presets then
+ table.setmetatableindex(specification,presets)
+ end
+ local server = specification.server or ""
+ if server == "" then
+ report_mail("no server specified")
+ return false
+ end
+ local to = specification.to or specification.recepient or ""
+ if to == "" then
+ report_mail("no recepient specified")
+ return false
+ end
+ local from = specification.from or specification.sender or ""
+ if from == "" then
+ report_mail("no sender specified")
+ return false
+ end
+ local message = { }
+ local body = specification.body
+ if body then
+ message[#message+1] = {
+ body = body
+ }
+ end
+ local files = specification.files
+ if files then
+ for i=1,#files do
+ local filename = files[i]
+ local handle = io.open(filename, "rb")
+ if handle then
+ report_mail("attaching file %a",filename)
+ message[#message+1] = {
+ headers = {
+ ["content-type"] = format('application/pdf; name="%s"',filename),
+ ["content-disposition"] = format('attachment; filename="%s"',filename),
+ ["content-description"] = format('file: %s',filename),
+ ["content-transfer-encoding"] = "BASE64"
+ },
+ body = ltn12.source.chain(
+ ltn12.source.file(handle),
+ ltn12.filter.chain(mime.encode("base64"),mime.wrap())
+ )
+ }
+ else
+ report_mail("file %a not found",filename)
+ end
+ end
+ end
+ local result, detail = smtp.send {
+ server = specification.server,
+ port = specification.port,
+ user = specification.user,
+ password = specification.password,
+ from = from,
+ rcpt = to,
+ source = smtp.message {
+ headers = {
+ to = to,
+ from = from,
+ cc = specification.cc,
+ subject = specification.subject or "no subject",
+ },
+ body = message
+ },
+ }
+ if detail then
+ report_mail("error: %s",detail)
+ else
+ report_mail("message sent")
+ end
+end
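+
+-- A minimal usage sketch; all values below are placeholders and "files" is
+-- optional (note that attachments are always sent as application/pdf):
+--
+-- utilities.mail.send {
+--     server  = "smtp.example.com",
+--     port    = 25,
+--     from    = "sender@example.com",
+--     to      = "receiver@example.com",
+--     subject = "test",
+--     body    = "hello",
+--     files   = { "somefile.pdf" },
+-- }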
diff --git a/tex/context/base/util-sql-imp-client.lua b/tex/context/base/util-sql-imp-client.lua
index e09dfde94..7c713a899 100644
--- a/tex/context/base/util-sql-imp-client.lua
+++ b/tex/context/base/util-sql-imp-client.lua
@@ -1,256 +1,256 @@
-if not modules then modules = { } end modules ['util-sql-client'] = {
- version = 1.001,
- comment = "companion to util-sql.lua",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- todo: make a converter
-
-local rawset, setmetatable = rawset, setmetatable
-local P, S, V, C, Cs, Ct, Cc, Cg, Cf, patterns, lpegmatch = lpeg.P, lpeg.S, lpeg.V, lpeg.C, lpeg.Cs, lpeg.Ct, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.patterns, lpeg.match
-
-local trace_sql = false trackers.register("sql.trace", function(v) trace_sql = v end)
-local trace_queries = false trackers.register("sql.queries",function(v) trace_queries = v end)
-local report_state = logs.reporter("sql","client")
-
-local sql = utilities.sql
-local helpers = sql.helpers
-local methods = sql.methods
-local validspecification = helpers.validspecification
-local preparetemplate = helpers.preparetemplate
-local splitdata = helpers.splitdata
-local replacetemplate = utilities.templates.replace
-local serialize = sql.serialize
-local deserialize = sql.deserialize
-
--- Experiments with an p/action demonstrated that there is not much gain. We could do a runtime
--- capture but creating all the small tables is not faster and it doesn't work well anyway.
-
-local separator = P("\t")
-local newline = patterns.newline
-local empty = Cc("")
-
-local entry = C((1-separator-newline)^0) -- C 10% faster than Cs
-
-local unescaped = P("\\n") / "\n"
- + P("\\t") / "\t"
- + P("\\0") / "\000"
- + P("\\\\") / "\\"
-
-local entry = Cs((unescaped + (1-separator-newline))^0) -- C 10% faster than Cs but Cs needed due to nesting
-
-local getfirst = Ct( entry * (separator * (entry+empty))^0) + newline
-local skipfirst = (1-newline)^1 * newline
-local getfirstline = C((1-newline)^0)
-
-local cache = { }
-
-local function splitdata(data) -- todo: hash on first line ... maybe move to client module
- if data == "" then
- if trace_sql then
- report_state("no data")
- end
- return { }, { }
- end
- local first = lpegmatch(getfirstline,data)
- if not first then
- if trace_sql then
- report_state("no data")
- end
- return { }, { }
- end
- local p = cache[first]
- if p then
- -- report_state("reusing: %s",first)
- local entries = lpegmatch(p.parser,data)
- return entries or { }, p.keys
- elseif p == false then
- return { }, { }
- elseif p == nil then
- local keys = lpegmatch(getfirst,first) or { }
- if #keys == 0 then
- if trace_sql then
- report_state("no banner")
- end
- cache[first] = false
- return { }, { }
- end
- -- quite generic, could be a helper
- local n = #keys
- if n == 0 then
- report_state("no fields")
- cache[first] = false
- return { }, { }
- end
- if n == 1 then
- local key = keys[1]
- if trace_sql then
- report_state("one field with name %a",key)
- end
- p = Cg(Cc(key) * entry)
- else
- for i=1,n do
- local key = keys[i]
- if trace_sql then
- report_state("field %s has name %a",i,key)
- end
- local s = Cg(Cc(key) * entry)
- if p then
- p = p * separator * s
- else
- p = s
- end
- end
- end
- p = Cf(Ct("") * p,rawset) * newline^1
- p = skipfirst * Ct(p^0)
- cache[first] = { parser = p, keys = keys }
- local entries = lpegmatch(p,data)
- return entries or { }, keys
- end
-end
-
-local splitter = skipfirst * Ct((Ct(entry * (separator * entry)^0) * newline^1)^0)
-
-local function getdata(data)
- return lpegmatch(splitter,data)
-end
-
-helpers.splitdata = splitdata
-helpers.getdata = getdata
-
-local function dataprepared(specification)
- local query = preparetemplate(specification)
- if query then
- io.savedata(specification.queryfile,query)
- os.remove(specification.resultfile)
- if trace_queries then
- report_state("query: %s",query)
- end
- return true
- else
- -- maybe push an error
- os.remove(specification.queryfile)
- os.remove(specification.resultfile)
- end
-end
-
-local function datafetched(specification,runner)
- local command = replacetemplate(runner,specification)
- if trace_sql then
- local t = osclock()
- report_state("command: %s",command)
- local okay = os.execute(command)
- report_state("fetchtime: %.3f sec",osclock()-t) -- not okay under linux
- return okay == 0
- else
- return os.execute(command) == 0
- end
-end
-
-local function dataloaded(specification)
- if trace_sql then
- local t = osclock()
- local data = io.loaddata(specification.resultfile) or ""
- report_state("datasize: %.3f MB",#data/1024/1024)
- report_state("loadtime: %.3f sec",osclock()-t)
- return data
- else
- return io.loaddata(specification.resultfile) or ""
- end
-end
-
-local function dataconverted(data,converter)
- if converter then
- local data = getdata(data)
- if data then
- data = converter.client(data)
- end
- return data
- elseif trace_sql then
- local t = osclock()
- local data, keys = splitdata(data,target)
- report_state("converttime: %.3f",osclock()-t)
- report_state("keys: %s ",#keys)
- report_state("entries: %s ",#data)
- return data, keys
- else
- return splitdata(data)
- end
-end
-
--- todo: new, etc
-
-local function execute(specification)
- if trace_sql then
- report_state("executing client")
- end
- if not validspecification(specification) then
- report_state("error in specification")
- return
- end
- if not dataprepared(specification) then
- report_state("error in preparation")
- return
- end
- if not datafetched(specification,methods.client.runner) then
- report_state("error in fetching, query: %s",string.collapsespaces(io.loaddata(specification.queryfile)))
- return
- end
- local data = dataloaded(specification)
- if not data then
- report_state("error in loading")
- return
- end
- local data, keys = dataconverted(data,specification.converter)
- if not data then
- report_state("error in converting or no data")
- return
- end
- local one = data[1]
- if one then
- setmetatable(data,{ __index = one } )
- end
- return data, keys
-end
-
--- The following is not that (memory) efficient but normally we will use
--- the lib anyway. Of course we could make a dedicated converter and/or
--- hook into the splitter code but ... it makes not much sense because then
--- we can as well move the builder to the library modules.
---
--- Here we reuse data as the indexes are the same, unless we hash.
-
-local wraptemplate = [[
-local converters = utilities.sql.converters
-local deserialize = utilities.sql.deserialize
-
-local tostring = tostring
-local tonumber = tonumber
-local booleanstring = string.booleanstring
-
-%s
-
-return function(data)
- local target = %s -- data or { }
- for i=1,#data do
- local cells = data[i]
- target[%s] = {
- %s
- }
- end
- return target
-end
-]]
-
-local celltemplate = "cells[%s]"
-
-methods.client = {
- runner = [[mysql --batch --user="%username%" --password="%password%" --host="%host%" --port=%port% --database="%database%" --default-character-set=utf8 < "%queryfile%" > "%resultfile%"]],
- execute = execute,
- usesfiles = true,
- wraptemplate = wraptemplate,
- celltemplate = celltemplate,
-}
+if not modules then modules = { } end modules ['util-sql-client'] = {
+ version = 1.001,
+ comment = "companion to util-sql.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- todo: make a converter
+
+local rawset, setmetatable = rawset, setmetatable
+local P, S, V, C, Cs, Ct, Cc, Cg, Cf, patterns, lpegmatch = lpeg.P, lpeg.S, lpeg.V, lpeg.C, lpeg.Cs, lpeg.Ct, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.patterns, lpeg.match
+local osclock = os.clock -- used in the tracing branches below
+
+local trace_sql = false trackers.register("sql.trace", function(v) trace_sql = v end)
+local trace_queries = false trackers.register("sql.queries",function(v) trace_queries = v end)
+local report_state = logs.reporter("sql","client")
+
+local sql = utilities.sql
+local helpers = sql.helpers
+local methods = sql.methods
+local validspecification = helpers.validspecification
+local preparetemplate = helpers.preparetemplate
+local splitdata = helpers.splitdata
+local replacetemplate = utilities.templates.replace
+local serialize = sql.serialize
+local deserialize = sql.deserialize
+
+-- Experiments with a p/action demonstrated that there is not much gain. We could do a runtime
+-- capture but creating all the small tables is not faster and it doesn't work well anyway.
+
+local separator = P("\t")
+local newline = patterns.newline
+local empty = Cc("")
+
+local entry = C((1-separator-newline)^0) -- C 10% faster than Cs
+
+local unescaped = P("\\n") / "\n"
+ + P("\\t") / "\t"
+ + P("\\0") / "\000"
+ + P("\\\\") / "\\"
+
+local entry = Cs((unescaped + (1-separator-newline))^0) -- C 10% faster than Cs but Cs needed due to nesting
+
+local getfirst = Ct( entry * (separator * (entry+empty))^0) + newline
+local skipfirst = (1-newline)^1 * newline
+local getfirstline = C((1-newline)^0)
+
+local cache = { }
+
+local function splitdata(data) -- todo: hash on first line ... maybe move to client module
+ if data == "" then
+ if trace_sql then
+ report_state("no data")
+ end
+ return { }, { }
+ end
+ local first = lpegmatch(getfirstline,data)
+ if not first then
+ if trace_sql then
+ report_state("no data")
+ end
+ return { }, { }
+ end
+ local p = cache[first]
+ if p then
+ -- report_state("reusing: %s",first)
+ local entries = lpegmatch(p.parser,data)
+ return entries or { }, p.keys
+ elseif p == false then
+ return { }, { }
+ elseif p == nil then
+ local keys = lpegmatch(getfirst,first) or { }
+ if #keys == 0 then
+ if trace_sql then
+ report_state("no banner")
+ end
+ cache[first] = false
+ return { }, { }
+ end
+ -- quite generic, could be a helper
+ local n = #keys
+ if n == 0 then
+ report_state("no fields")
+ cache[first] = false
+ return { }, { }
+ end
+ if n == 1 then
+ local key = keys[1]
+ if trace_sql then
+ report_state("one field with name %a",key)
+ end
+ p = Cg(Cc(key) * entry)
+ else
+ for i=1,n do
+ local key = keys[i]
+ if trace_sql then
+ report_state("field %s has name %a",i,key)
+ end
+ local s = Cg(Cc(key) * entry)
+ if p then
+ p = p * separator * s
+ else
+ p = s
+ end
+ end
+ end
+ p = Cf(Ct("") * p,rawset) * newline^1
+ p = skipfirst * Ct(p^0)
+ cache[first] = { parser = p, keys = keys }
+ local entries = lpegmatch(p,data)
+ return entries or { }, keys
+ end
+end
+
+local splitter = skipfirst * Ct((Ct(entry * (separator * entry)^0) * newline^1)^0)
+
+local function getdata(data)
+ return lpegmatch(splitter,data)
+end
+
+helpers.splitdata = splitdata
+helpers.getdata = getdata
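+
+-- A small sketch of what the splitter produces from "mysql --batch" style output,
+-- where the first line is the column banner and fields are tab separated:
+--
+-- local entries, keys = helpers.splitdata("id\tname\n1\tfoo\n2\tbar\n")
+-- -- keys    : { "id", "name" }
+-- -- entries : { { id = "1", name = "foo" }, { id = "2", name = "bar" } }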
+
+local function dataprepared(specification)
+ local query = preparetemplate(specification)
+ if query then
+ io.savedata(specification.queryfile,query)
+ os.remove(specification.resultfile)
+ if trace_queries then
+ report_state("query: %s",query)
+ end
+ return true
+ else
+ -- maybe push an error
+ os.remove(specification.queryfile)
+ os.remove(specification.resultfile)
+ end
+end
+
+local function datafetched(specification,runner)
+ local command = replacetemplate(runner,specification)
+ if trace_sql then
+ local t = osclock()
+ report_state("command: %s",command)
+ local okay = os.execute(command)
+ report_state("fetchtime: %.3f sec",osclock()-t) -- not okay under linux
+ return okay == 0
+ else
+ return os.execute(command) == 0
+ end
+end
+
+local function dataloaded(specification)
+ if trace_sql then
+ local t = osclock()
+ local data = io.loaddata(specification.resultfile) or ""
+ report_state("datasize: %.3f MB",#data/1024/1024)
+ report_state("loadtime: %.3f sec",osclock()-t)
+ return data
+ else
+ return io.loaddata(specification.resultfile) or ""
+ end
+end
+
+local function dataconverted(data,converter)
+ if converter then
+ local data = getdata(data)
+ if data then
+ data = converter.client(data)
+ end
+ return data
+ elseif trace_sql then
+ local t = osclock()
+ local data, keys = splitdata(data)
+ report_state("converttime: %.3f",osclock()-t)
+ report_state("keys: %s ",#keys)
+ report_state("entries: %s ",#data)
+ return data, keys
+ else
+ return splitdata(data)
+ end
+end
+
+-- todo: new, etc
+
+local function execute(specification)
+ if trace_sql then
+ report_state("executing client")
+ end
+ if not validspecification(specification) then
+ report_state("error in specification")
+ return
+ end
+ if not dataprepared(specification) then
+ report_state("error in preparation")
+ return
+ end
+ if not datafetched(specification,methods.client.runner) then
+ report_state("error in fetching, query: %s",string.collapsespaces(io.loaddata(specification.queryfile)))
+ return
+ end
+ local data = dataloaded(specification)
+ if not data then
+ report_state("error in loading")
+ return
+ end
+ local data, keys = dataconverted(data,specification.converter)
+ if not data then
+ report_state("error in converting or no data")
+ return
+ end
+ local one = data[1]
+ if one then
+ setmetatable(data,{ __index = one } )
+ end
+ return data, keys
+end
+
+-- The following is not that (memory) efficient but normally we will use
+-- the lib anyway. Of course we could make a dedicated converter and/or
+-- hook into the splitter code but ... it doesn't make much sense because then
+-- we might as well move the builder to the library modules.
+--
+-- Here we reuse data as the indexes are the same, unless we hash.
+
+local wraptemplate = [[
+local converters = utilities.sql.converters
+local deserialize = utilities.sql.deserialize
+
+local tostring = tostring
+local tonumber = tonumber
+local booleanstring = string.booleanstring
+
+%s
+
+return function(data)
+ local target = %s -- data or { }
+ for i=1,#data do
+ local cells = data[i]
+ target[%s] = {
+ %s
+ }
+ end
+ return target
+end
+]]
+
+local celltemplate = "cells[%s]"
+
+methods.client = {
+ runner = [[mysql --batch --user="%username%" --password="%password%" --host="%host%" --port=%port% --database="%database%" --default-character-set=utf8 < "%queryfile%" > "%resultfile%"]],
+ execute = execute,
+ usesfiles = true,
+ wraptemplate = wraptemplate,
+ celltemplate = celltemplate,
+}
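+
+-- The runner template above is expanded with utilities.templates.replace, so the
+-- specification is expected to provide the fields referenced by the %...%
+-- placeholders; a rough sketch with placeholder values ("template" as the field
+-- consumed by preparetemplate is an assumption, it is not defined in this file):
+--
+-- local rows, keys = methods.client.execute {
+--     username   = "someuser",
+--     password   = "somepassword",
+--     host       = "localhost",
+--     port       = 3306,
+--     database   = "somedatabase",
+--     queryfile  = "query.sql",
+--     resultfile = "result.dat",
+--     template   = "SELECT * FROM sometable",
+-- }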
diff --git a/tex/context/base/util-sql-imp-library.lua b/tex/context/base/util-sql-imp-library.lua
index 15754e26a..8a83b06d2 100644
--- a/tex/context/base/util-sql-imp-library.lua
+++ b/tex/context/base/util-sql-imp-library.lua
@@ -1,289 +1,289 @@
-if not modules then modules = { } end modules ['util-sql-library'] = {
- version = 1.001,
- comment = "companion to util-sql.lua",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- local function pcall(f,...) return true, f(...) end
-
--- For some reason the sql lib partially fails in luatex when creating hashed row. So far
--- we couldn't figure it out (some issue with adapting the table that is passes as first
--- argument in the fetch routine. Apart from this it looks like the mysql binding has some
--- efficiency issues (like creating a keys and types table for each row) but that could be
--- optimized. Anyhow, fecthing results can be done as follows:
-
--- local function collect_1(r)
--- local t = { }
--- for i=1,r:numrows() do
--- t[#t+1] = r:fetch({},"a")
--- end
--- return t
--- end
---
--- local function collect_2(r)
--- local keys = r:getcolnames()
--- local n = #keys
--- local t = { }
--- for i=1,r:numrows() do
--- local v = { r:fetch() }
--- local r = { }
--- for i=1,n do
--- r[keys[i]] = v[i]
--- end
--- t[#t+1] = r
--- end
--- return t
--- end
---
--- local function collect_3(r)
--- local keys = r:getcolnames()
--- local n = #keys
--- local t = { }
--- for i=1,r:numrows() do
--- local v = r:fetch({},"n")
--- local r = { }
--- for i=1,n do
--- r[keys[i]] = v[i]
--- end
--- t[#t+1] = r
--- end
--- return t
--- end
---
--- On a large table with some 8 columns (mixed text and numbers) we get the following
--- timings (the 'a' alternative is already using the more efficient variant in the
--- binding).
---
--- collect_1 : 1.31
--- collect_2 : 1.39
--- collect_3 : 1.75
---
--- Some, as a workaround for this 'bug' the second alternative can be used.
-
-local format = string.format
-local lpegmatch = lpeg.match
-local setmetatable, type = setmetatable, type
-
-local trace_sql = false trackers.register("sql.trace", function(v) trace_sql = v end)
-local trace_queries = false trackers.register("sql.queries",function(v) trace_queries = v end)
-local report_state = logs.reporter("sql","library")
-
-local sql = utilities.sql
-local mysql = require("luasql.mysql")
-local cache = { }
-local helpers = sql.helpers
-local methods = sql.methods
-local validspecification = helpers.validspecification
-local querysplitter = helpers.querysplitter
-local dataprepared = helpers.preparetemplate
-local serialize = sql.serialize
-local deserialize = sql.deserialize
-local formatters = string.formatters
-
-local initialize = mysql.mysql
-
-local function connect(session,specification)
- return session:connect(
- specification.database or "",
- specification.username or "",
- specification.password or "",
- specification.host or "",
- specification.port
- )
-end
-
-local function fetched(specification,query,converter)
- if not query or query == "" then
- report_state("no valid query")
- return false
- end
- local id = specification.id
- local session, connection
- if id then
- local c = cache[id]
- if c then
- session = c.session
- connection = c.connection
- end
- if not connection then
- session = initialize()
- if not session then
- return formatters["no session for %a"](id)
- end
- connection = connect(session,specification)
- if not connection then
- return formatters["no connection for %a"](id)
- end
- cache[id] = { session = session, connection = connection }
- end
- else
- session = initialize()
- if not session then
- return "no session"
- end
- connection = connect(session,specification)
- if not connection then
- return "no connection"
- end
- end
- if not connection then
- report_state("error in connection: %s@%s to %s:%s",
- specification.database or "no database",
- specification.username or "no username",
- specification.host or "no host",
- specification.port or "no port"
- )
- return "no connection"
- end
- query = lpegmatch(querysplitter,query)
- local result, okay
- for i=1,#query do
- local q = query[i]
- local r, m = connection:execute(q)
- if m then
- report_state("error in query to host %a: %s",specification.host,string.collapsespaces(q))
- if m then
- report_state("message: %s",m)
- end
- end
- local t = type(r)
- if t == "userdata" then
- result = r
- okay = true
- elseif t == "number" then
- okay = true
- end
- end
- if not okay then -- can go
- if session then
- session:close()
- end
- if connection then
- connection:close()
- end
- if id then
- cache[id] = nil
- end
- return "execution error"
- end
- local data, keys
- if result then
- if converter then
- data = converter.library(result)
- else
- keys = result:getcolnames()
- if keys then
- data = { }
- local n = result:numrows() or 0
- if n > 0 then
- local k = #keys
- for i=1,n do
- local v = { result:fetch() }
- local d = { }
- for i=1,k do
- d[keys[i]] = v[i]
- end
- data[#data+1] = d
- end
- end
- end
- end
- result:close()
- end
- if not id then
- if connection then
- connection:close()
- end
- if session then
- session:close()
- end
- end
- return false, data, keys
-end
-
-local function datafetched(specification,query,converter)
- local callokay, connectionerror, data, keys = pcall(fetched,specification,query,converter)
- if not callokay then
- report_state("call error, retrying")
- callokay, connectionerror, data, keys = pcall(fetched,specification,query,converter)
- elseif connectionerror then
- report_state("error: %s, retrying",connectionerror)
- callokay, connectionerror, data, keys = pcall(fetched,specification,query,converter)
- end
- if not callokay then
- report_state("persistent call error")
- elseif connectionerror then
- report_state("persistent error: %s",connectionerror)
- end
- return data or { }, keys or { }
-end
-
-local function execute(specification)
- if trace_sql then
- report_state("executing library")
- end
- if not validspecification(specification) then
- report_state("error in specification")
- return
- end
- local query = dataprepared(specification)
- if not query then
- report_state("error in preparation")
- return
- end
- local data, keys = datafetched(specification,query,specification.converter)
- if not data then
- report_state("error in fetching")
- return
- end
- local one = data[1]
- if one then
- setmetatable(data,{ __index = one } )
- end
- return data, keys
-end
-
--- Here we build the dataset stepwise so we don't use the data hack that
--- is used in the client variant.
-
-local wraptemplate = [[
-local converters = utilities.sql.converters
-local deserialize = utilities.sql.deserialize
-
-local tostring = tostring
-local tonumber = tonumber
-local booleanstring = string.booleanstring
-
-%s
-
-return function(result)
- if not result then
- return { }
- end
- local nofrows = result:numrows() or 0
- if nofrows == 0 then
- return { }
- end
- local target = { } -- no %s needed here
- for i=1,nofrows do
- local cells = { result:fetch() }
- target[%s] = {
- %s
- }
- end
- return target
-end
-]]
-
-local celltemplate = "cells[%s]"
-
-methods.library = {
- runner = function() end, -- never called
- execute = execute,
- initialize = initialize, -- returns session
- usesfiles = false,
- wraptemplate = wraptemplate,
- celltemplate = celltemplate,
-}
+if not modules then modules = { } end modules ['util-sql-library'] = {
+ version = 1.001,
+ comment = "companion to util-sql.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- local function pcall(f,...) return true, f(...) end
+
+-- For some reason the sql lib partially fails in luatex when creating a hashed row. So far
+-- we couldn't figure it out (some issue with adapting the table that is passed as first
+-- argument in the fetch routine). Apart from this it looks like the mysql binding has some
+-- efficiency issues (like creating a keys and types table for each row) but that could be
+-- optimized. Anyhow, fetching results can be done as follows:
+
+-- local function collect_1(r)
+-- local t = { }
+-- for i=1,r:numrows() do
+-- t[#t+1] = r:fetch({},"a")
+-- end
+-- return t
+-- end
+--
+-- local function collect_2(r)
+-- local keys = r:getcolnames()
+-- local n = #keys
+-- local t = { }
+-- for i=1,r:numrows() do
+-- local v = { r:fetch() }
+-- local r = { }
+-- for i=1,n do
+-- r[keys[i]] = v[i]
+-- end
+-- t[#t+1] = r
+-- end
+-- return t
+-- end
+--
+-- local function collect_3(r)
+-- local keys = r:getcolnames()
+-- local n = #keys
+-- local t = { }
+-- for i=1,r:numrows() do
+-- local v = r:fetch({},"n")
+-- local r = { }
+-- for i=1,n do
+-- r[keys[i]] = v[i]
+-- end
+-- t[#t+1] = r
+-- end
+-- return t
+-- end
+--
+-- On a large table with some 8 columns (mixed text and numbers) we get the following
+-- timings (the 'a' alternative is already using the more efficient variant in the
+-- binding).
+--
+-- collect_1 : 1.31
+-- collect_2 : 1.39
+-- collect_3 : 1.75
+--
+-- So, as a workaround for this 'bug', the second alternative can be used.
+
+local format = string.format
+local lpegmatch = lpeg.match
+local setmetatable, type = setmetatable, type
+
+local trace_sql = false trackers.register("sql.trace", function(v) trace_sql = v end)
+local trace_queries = false trackers.register("sql.queries",function(v) trace_queries = v end)
+local report_state = logs.reporter("sql","library")
+
+local sql = utilities.sql
+local mysql = require("luasql.mysql")
+local cache = { }
+local helpers = sql.helpers
+local methods = sql.methods
+local validspecification = helpers.validspecification
+local querysplitter = helpers.querysplitter
+local dataprepared = helpers.preparetemplate
+local serialize = sql.serialize
+local deserialize = sql.deserialize
+local formatters = string.formatters
+
+local initialize = mysql.mysql
+
+local function connect(session,specification)
+ return session:connect(
+ specification.database or "",
+ specification.username or "",
+ specification.password or "",
+ specification.host or "",
+ specification.port
+ )
+end
+
+local function fetched(specification,query,converter)
+ if not query or query == "" then
+ report_state("no valid query")
+ return false
+ end
+ local id = specification.id
+ local session, connection
+ if id then
+ local c = cache[id]
+ if c then
+ session = c.session
+ connection = c.connection
+ end
+ if not connection then
+ session = initialize()
+ if not session then
+ return formatters["no session for %a"](id)
+ end
+ connection = connect(session,specification)
+ if not connection then
+ return formatters["no connection for %a"](id)
+ end
+ cache[id] = { session = session, connection = connection }
+ end
+ else
+ session = initialize()
+ if not session then
+ return "no session"
+ end
+ connection = connect(session,specification)
+ if not connection then
+ return "no connection"
+ end
+ end
+ if not connection then
+ report_state("error in connection: %s@%s to %s:%s",
+ specification.database or "no database",
+ specification.username or "no username",
+ specification.host or "no host",
+ specification.port or "no port"
+ )
+ return "no connection"
+ end
+ query = lpegmatch(querysplitter,query)
+ local result, okay
+ for i=1,#query do
+ local q = query[i]
+ local r, m = connection:execute(q)
+ if m then
+ report_state("error in query to host %a: %s",specification.host,string.collapsespaces(q))
+ if m then
+ report_state("message: %s",m)
+ end
+ end
+ local t = type(r)
+ if t == "userdata" then
+ result = r
+ okay = true
+ elseif t == "number" then
+ okay = true
+ end
+ end
+ if not okay then -- can go
+ if session then
+ session:close()
+ end
+ if connection then
+ connection:close()
+ end
+ if id then
+ cache[id] = nil
+ end
+ return "execution error"
+ end
+ local data, keys
+ if result then
+ if converter then
+ data = converter.library(result)
+ else
+ keys = result:getcolnames()
+ if keys then
+ data = { }
+ local n = result:numrows() or 0
+ if n > 0 then
+ local k = #keys
+ for i=1,n do
+ local v = { result:fetch() }
+ local d = { }
+ for i=1,k do
+ d[keys[i]] = v[i]
+ end
+ data[#data+1] = d
+ end
+ end
+ end
+ end
+ result:close()
+ end
+ if not id then
+ if connection then
+ connection:close()
+ end
+ if session then
+ session:close()
+ end
+ end
+ return false, data, keys
+end
+
+local function datafetched(specification,query,converter)
+ local callokay, connectionerror, data, keys = pcall(fetched,specification,query,converter)
+ if not callokay then
+ report_state("call error, retrying")
+ callokay, connectionerror, data, keys = pcall(fetched,specification,query,converter)
+ elseif connectionerror then
+ report_state("error: %s, retrying",connectionerror)
+ callokay, connectionerror, data, keys = pcall(fetched,specification,query,converter)
+ end
+ if not callokay then
+ report_state("persistent call error")
+ elseif connectionerror then
+ report_state("persistent error: %s",connectionerror)
+ end
+ return data or { }, keys or { }
+end
+
+local function execute(specification)
+ if trace_sql then
+ report_state("executing library")
+ end
+ if not validspecification(specification) then
+ report_state("error in specification")
+ return
+ end
+ local query = dataprepared(specification)
+ if not query then
+ report_state("error in preparation")
+ return
+ end
+ local data, keys = datafetched(specification,query,specification.converter)
+ if not data then
+ report_state("error in fetching")
+ return
+ end
+ local one = data[1]
+ if one then
+ setmetatable(data,{ __index = one } )
+ end
+ return data, keys
+end
+
+-- Here we build the dataset stepwise so we don't use the data hack that
+-- is used in the client variant.
+
+local wraptemplate = [[
+local converters = utilities.sql.converters
+local deserialize = utilities.sql.deserialize
+
+local tostring = tostring
+local tonumber = tonumber
+local booleanstring = string.booleanstring
+
+%s
+
+return function(result)
+ if not result then
+ return { }
+ end
+ local nofrows = result:numrows() or 0
+ if nofrows == 0 then
+ return { }
+ end
+ local target = { } -- no %s needed here
+ for i=1,nofrows do
+ local cells = { result:fetch() }
+ target[%s] = {
+ %s
+ }
+ end
+ return target
+end
+]]
+
+local celltemplate = "cells[%s]"
+
+methods.library = {
+ runner = function() end, -- never called
+ execute = execute,
+ initialize = initialize, -- returns session
+ usesfiles = false,
+ wraptemplate = wraptemplate,
+ celltemplate = celltemplate,
+}
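+
+-- A rough sketch of a library call; the connection fields match what connect()
+-- reads above, and passing an "id" caches the session/connection for subsequent
+-- queries ("template" as the field consumed by preparetemplate is an assumption):
+--
+-- local rows, keys = methods.library.execute {
+--     id       = "mydb", -- optional
+--     database = "somedatabase",
+--     username = "someuser",
+--     password = "somepassword",
+--     host     = "localhost",
+--     port     = 3306,
+--     template = "SELECT * FROM sometable",
+-- }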
diff --git a/tex/context/base/util-sql-imp-swiglib.lua b/tex/context/base/util-sql-imp-swiglib.lua
index 719620a6f..f456c9ccb 100644
--- a/tex/context/base/util-sql-imp-swiglib.lua
+++ b/tex/context/base/util-sql-imp-swiglib.lua
@@ -1,505 +1,505 @@
-if not modules then modules = { } end modules ['util-sql-swiglib'] = {
- version = 1.001,
- comment = "companion to util-sql.lua",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- As the regular library is flawed (i.e. there are crashes in the table
--- construction code) and also not that efficient, Luigi Scarso looked into
--- a swig binding. This is a bit more low level approach but as we stay
--- closer to the original library it's also less dependant.
-
-local concat = table.concat
-local format = string.format
-local lpegmatch = lpeg.match
-local setmetatable, type = setmetatable, type
-local sleep = os.sleep
-
-local trace_sql = false trackers.register("sql.trace", function(v) trace_sql = v end)
-local trace_queries = false trackers.register("sql.queries",function(v) trace_queries = v end)
-local report_state = logs.reporter("sql","swiglib")
-
-local sql = utilities.sql
-local mysql = require("swiglib.mysql.core") -- "5.6"
-
--- inspect(table.sortedkeys(mysql))
-
-local nofretries = 5
-local retrydelay = 1
-
-local cache = { }
-local helpers = sql.helpers
-local methods = sql.methods
-local validspecification = helpers.validspecification
-local querysplitter = helpers.querysplitter
-local dataprepared = helpers.preparetemplate
-local serialize = sql.serialize
-local deserialize = sql.deserialize
-
-local mysql_initialize = mysql.mysql_init
-
-local mysql_open_connection = mysql.mysql_real_connect
-local mysql_execute_query = mysql.mysql_real_query
-local mysql_close_connection = mysql.mysql_close
-
-local mysql_field_seek = mysql.mysql_field_seek
-local mysql_num_fields = mysql.mysql_num_fields
-local mysql_fetch_field = mysql.mysql_fetch_field
-local mysql_num_rows = mysql.mysql_num_rows
-local mysql_fetch_row = mysql.mysql_fetch_row
-local mysql_fetch_lengths = mysql.mysql_fetch_lengths
-local mysql_init = mysql.mysql_init
-local mysql_store_result = mysql.mysql_store_result
-local mysql_free_result = mysql.mysql_free_result
-local mysql_use_result = mysql.mysql_use_result
-
-local mysql_error_message = mysql.mysql_error
-local mysql_options_argument = mysql.mysql_options_argument
-
-local instance = mysql.MYSQL()
-
-local mysql_constant_false = false
-local mysql_constant_true = true
-
--- if mysql_options_argument then
---
--- mysql_constant_false = mysql_options_argument(false) -- 0 "\0"
--- mysql_constant_true = mysql_options_argument(true) -- 1 "\1"
---
--- -- print(swig_type(mysql_constant_false))
--- -- print(swig_type(mysql_constant_true))
---
--- mysql.mysql_options(instance,mysql.MYSQL_OPT_RECONNECT,mysql_constant_true);
---
--- else
---
--- print("")
--- print("incomplete swiglib.mysql interface")
--- print("")
---
--- end
-
-local typemap = mysql.MYSQL_TYPE_VAR_STRING and {
- [mysql.MYSQL_TYPE_VAR_STRING ] = "string",
- [mysql.MYSQL_TYPE_STRING ] = "string",
- [mysql.MYSQL_TYPE_DECIMAL ] = "number",
- [mysql.MYSQL_TYPE_SHORT ] = "number",
- [mysql.MYSQL_TYPE_LONG ] = "number",
- [mysql.MYSQL_TYPE_FLOAT ] = "number",
- [mysql.MYSQL_TYPE_DOUBLE ] = "number",
- [mysql.MYSQL_TYPE_LONGLONG ] = "number",
- [mysql.MYSQL_TYPE_INT24 ] = "number",
- [mysql.MYSQL_TYPE_YEAR ] = "number",
- [mysql.MYSQL_TYPE_TINY ] = "number",
- [mysql.MYSQL_TYPE_TINY_BLOB ] = "binary",
- [mysql.MYSQL_TYPE_MEDIUM_BLOB] = "binary",
- [mysql.MYSQL_TYPE_LONG_BLOB ] = "binary",
- [mysql.MYSQL_TYPE_BLOB ] = "binary",
- [mysql.MYSQL_TYPE_DATE ] = "date",
- [mysql.MYSQL_TYPE_NEWDATE ] = "date",
- [mysql.MYSQL_TYPE_DATETIME ] = "datetime",
- [mysql.MYSQL_TYPE_TIME ] = "time",
- [mysql.MYSQL_TYPE_TIMESTAMP ] = "time",
- [mysql.MYSQL_TYPE_ENUM ] = "set",
- [mysql.MYSQL_TYPE_SET ] = "set",
- [mysql.MYSQL_TYPE_NULL ] = "null",
-}
-
--- real_escape_string
-
-local function finish(t)
- local r = t._result_
- if r then
- mysql_free_result(r)
- end
-end
-
--- will become metatable magic
-
--- local function analyze(result)
--- mysql_field_seek(result,0)
--- local nofrows = mysql_num_rows(result) or 0
--- local noffields = mysql_num_fields(result)
--- local names = { }
--- local types = { }
--- for i=1,noffields do
--- local field = mysql_fetch_field(result)
--- names[i] = field.name
--- types[i] = field.type
--- end
--- return names, types, noffields, nofrows
--- end
-
-local function getcolnames(t)
- return t.names
-end
-
-local function getcoltypes(t)
- return t.types
-end
-
-local function numrows(t)
- return t.nofrows
-end
-
--- swig_type
-
--- local ulongArray_getitem = mysql.ulongArray_getitem
--- local util_getbytearray = mysql.util_getbytearray
-
--- local function list(t)
--- local result = t._result_
--- local row = mysql_fetch_row(result)
--- local len = mysql_fetch_lengths(result)
--- local result = { }
--- for i=1,t.noffields do
--- local r = i - 1 -- zero offset
--- result[i] = util_getbytearray(row,r,ulongArray_getitem(len,r))
--- end
--- return result
--- end
-
--- local function hash(t)
--- local list = util_mysql_fetch_fields_from_current_row(t._result_)
--- local result = t._result_
--- local fields = t.names
--- local row = mysql_fetch_row(result)
--- local len = mysql_fetch_lengths(result)
--- local result = { }
--- for i=1,t.noffields do
--- local r = i - 1 -- zero offset
--- result[fields[i]] = util_getbytearray(row,r,ulongArray_getitem(len,r))
--- end
--- return result
--- end
-
-local util_mysql_fetch_fields_from_current_row = mysql.util_mysql_fetch_fields_from_current_row
-local util_mysql_fetch_all_rows = mysql.util_mysql_fetch_all_rows
-
-local function list(t)
- return util_mysql_fetch_fields_from_current_row(t._result_)
-end
-
-local function hash(t)
- local list = util_mysql_fetch_fields_from_current_row(t._result_)
- local fields = t.names
- local data = { }
- for i=1,t.noffields do
- data[fields[i]] = list[i]
- end
- return data
-end
-
-local function wholelist(t)
- return util_mysql_fetch_all_rows(t._result_)
-end
-
-local mt = { __index = {
- -- regular
- finish = finish,
- list = list,
- hash = hash,
- wholelist = wholelist,
- -- compatibility
- numrows = numrows,
- getcolnames = getcolnames,
- getcoltypes = getcoltypes,
- -- fallback
- _result_ = nil,
- names = { },
- types = { },
- noffields = 0,
- nofrows = 0,
- }
-}
-
-local nt = setmetatable({},mt)
-
--- session
-
-local function close(t)
- mysql_close_connection(t._connection_)
-end
-
-local function execute(t,query)
- if query and query ~= "" then
- local connection = t._connection_
- local result = mysql_execute_query(connection,query,#query)
- if result == 0 then
- local result = mysql_store_result(connection)
- if result then
- mysql_field_seek(result,0)
- local nofrows = mysql_num_rows(result) or 0
- local noffields = mysql_num_fields(result)
- local names = { }
- local types = { }
- for i=1,noffields do
- local field = mysql_fetch_field(result)
- names[i] = field.name
- types[i] = field.type
- end
- local t = {
- _result_ = result,
- names = names,
- types = types,
- noffields = noffields,
- nofrows = nofrows,
- }
- return setmetatable(t,mt)
- else
- return nt
- end
- end
- end
- return false
-end
-
-local mt = { __index = {
- close = close,
- execute = execute,
- }
-}
-
-local function open(t,database,username,password,host,port)
- local connection = mysql_open_connection(t._session_,host or "localhost",username or "",password or "",database or "",port or 0,0,0)
- if connection then
- local t = {
- _connection_ = connection,
- }
- return setmetatable(t,mt)
- end
-end
-
-local function message(t)
- return mysql_error_message(t._session_)
-end
-
-local function close(t)
- -- dummy, as we have a global session
-end
-
-local mt = {
- __index = {
- connect = open,
- close = close,
- message = message,
- }
-}
-
-local function initialize()
- local session = {
- _session_ = mysql_initialize(instance) -- maybe share, single thread anyway
- }
- return setmetatable(session,mt)
-end
-
--- -- -- --
-
-local function connect(session,specification)
- return session:connect(
- specification.database or "",
- specification.username or "",
- specification.password or "",
- specification.host or "",
- specification.port
- )
-end
-
-local function error_in_connection(specification,action)
- report_state("error in connection: [%s] %s@%s to %s:%s",
- action or "unknown",
- specification.database or "no database",
- specification.username or "no username",
- specification.host or "no host",
- specification.port or "no port"
- )
-end
-
-local function datafetched(specification,query,converter)
- if not query or query == "" then
- report_state("no valid query")
- return { }, { }
- end
- local id = specification.id
- local session, connection
- if id then
- local c = cache[id]
- if c then
- session = c.session
- connection = c.connection
- end
- if not connection then
- session = initialize()
- connection = connect(session,specification)
- if not connection then
- for i=1,nofretries do
- sleep(retrydelay)
- report_state("retrying to connect: [%s.%s] %s@%s to %s:%s",
- id,i,
- specification.database or "no database",
- specification.username or "no username",
- specification.host or "no host",
- specification.port or "no port"
- )
- connection = connect(session,specification)
- if connection then
- break
- end
- end
- end
- if connection then
- cache[id] = { session = session, connection = connection }
- end
- end
- else
- session = initialize()
- connection = connect(session,specification)
- if not connection then
- for i=1,nofretries do
- sleep(retrydelay)
- report_state("retrying to connect: [%s] %s@%s to %s:%s",
- i,
- specification.database or "no database",
- specification.username or "no username",
- specification.host or "no host",
- specification.port or "no port"
- )
- connection = connect(session,specification)
- if connection then
- break
- end
- end
- end
- end
- if not connection then
- report_state("error in connection: %s@%s to %s:%s",
- specification.database or "no database",
- specification.username or "no username",
- specification.host or "no host",
- specification.port or "no port"
- )
- return { }, { }
- end
- query = lpegmatch(querysplitter,query)
- local result, message, okay
- for i=1,#query do
- local q = query[i]
- local r, m = connection:execute(q)
- if m then
- report_state("error in query, stage: %s",string.collapsespaces(q))
- message = message and format("%s\n%s",message,m) or m
- end
- if type(r) == "table" then
- result = r
- okay = true
- elseif not m then
- okay = true
- end
- end
- local data, keys
- if result then
- if converter then
- data = converter.swiglib(result)
- else
- keys = result.names
- data = { }
- for i=1,result.nofrows do
- data[i] = result:hash()
- end
- end
- result:finish() -- result:close()
- elseif message then
- report_state("message %s",message)
- end
- if not keys then
- keys = { }
- end
- if not data then
- data = { }
- end
- if not id then
- connection:close()
- session:close()
- end
- return data, keys
-end
-
-local function execute(specification)
- if trace_sql then
- report_state("executing library")
- end
- if not validspecification(specification) then
- report_state("error in specification")
- return
- end
- local query = dataprepared(specification)
- if not query then
- report_state("error in preparation")
- return
- end
- local data, keys = datafetched(specification,query,specification.converter)
- if not data then
- report_state("error in fetching")
- return
- end
- local one = data[1]
- if one then
- setmetatable(data,{ __index = one } )
- end
- return data, keys
-end
-
-local wraptemplate = [[
-local mysql = require("swigluamysql") -- will be stored in method
-
------ mysql_fetch_row = mysql.mysql_fetch_row
------ mysql_fetch_lengths = mysql.mysql_fetch_lengths
------ util_unpackbytearray = mysql.util_unpackbytearray
-local util_mysql_fetch_fields_from_current_row
- = mysql.util_mysql_fetch_fields_from_current_row
-
-local converters = utilities.sql.converters
-local deserialize = utilities.sql.deserialize
-
-local tostring = tostring
-local tonumber = tonumber
-local booleanstring = string.booleanstring
-
-%s
-
-return function(result)
- if not result then
- return { }
- end
- local nofrows = result.nofrows or 0
- if nofrows == 0 then
- return { }
- end
- local noffields = result.noffields or 0
- local target = { } -- no %s needed here
- result = result._result_
- for i=1,nofrows do
- -- local row = mysql_fetch_row(result)
- -- local len = mysql_fetch_lengths(result)
- -- local cells = util_unpackbytearray(row,noffields,len)
- local cells = util_mysql_fetch_fields_from_current_row(result)
- target[%s] = {
- %s
- }
- end
- return target
-end
-]]
-
-local celltemplate = "cells[%s]"
-
-methods.swiglib = {
- runner = function() end, -- never called
- execute = execute,
- initialize = initialize, -- returns session
- usesfiles = false,
- wraptemplate = wraptemplate,
- celltemplate = celltemplate,
-}
+if not modules then modules = { } end modules ['util-sql-swiglib'] = {
+ version = 1.001,
+ comment = "companion to util-sql.lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- As the regular library is flawed (i.e. there are crashes in the table
+-- construction code) and also not that efficient, Luigi Scarso looked into
+-- a swig binding. This is a somewhat more low-level approach but as we stay
+-- closer to the original library it's also less dependent.
+
+local concat = table.concat
+local format = string.format
+local lpegmatch = lpeg.match
+local setmetatable, type = setmetatable, type
+local sleep = os.sleep
+
+local trace_sql = false trackers.register("sql.trace", function(v) trace_sql = v end)
+local trace_queries = false trackers.register("sql.queries",function(v) trace_queries = v end)
+local report_state = logs.reporter("sql","swiglib")
+
+local sql = utilities.sql
+local mysql = require("swiglib.mysql.core") -- "5.6"
+
+-- inspect(table.sortedkeys(mysql))
+
+local nofretries = 5
+local retrydelay = 1
+
+local cache = { }
+local helpers = sql.helpers
+local methods = sql.methods
+local validspecification = helpers.validspecification
+local querysplitter = helpers.querysplitter
+local dataprepared = helpers.preparetemplate
+local serialize = sql.serialize
+local deserialize = sql.deserialize
+
+local mysql_initialize = mysql.mysql_init
+
+local mysql_open_connection = mysql.mysql_real_connect
+local mysql_execute_query = mysql.mysql_real_query
+local mysql_close_connection = mysql.mysql_close
+
+local mysql_field_seek = mysql.mysql_field_seek
+local mysql_num_fields = mysql.mysql_num_fields
+local mysql_fetch_field = mysql.mysql_fetch_field
+local mysql_num_rows = mysql.mysql_num_rows
+local mysql_fetch_row = mysql.mysql_fetch_row
+local mysql_fetch_lengths = mysql.mysql_fetch_lengths
+local mysql_init = mysql.mysql_init
+local mysql_store_result = mysql.mysql_store_result
+local mysql_free_result = mysql.mysql_free_result
+local mysql_use_result = mysql.mysql_use_result
+
+local mysql_error_message = mysql.mysql_error
+local mysql_options_argument = mysql.mysql_options_argument
+
+local instance = mysql.MYSQL()
+
+local mysql_constant_false = false
+local mysql_constant_true = true
+
+-- if mysql_options_argument then
+--
+-- mysql_constant_false = mysql_options_argument(false) -- 0 "\0"
+-- mysql_constant_true = mysql_options_argument(true) -- 1 "\1"
+--
+-- -- print(swig_type(mysql_constant_false))
+-- -- print(swig_type(mysql_constant_true))
+--
+-- mysql.mysql_options(instance,mysql.MYSQL_OPT_RECONNECT,mysql_constant_true);
+--
+-- else
+--
+-- print("")
+-- print("incomplete swiglib.mysql interface")
+-- print("")
+--
+-- end
+
+local typemap = mysql.MYSQL_TYPE_VAR_STRING and {
+ [mysql.MYSQL_TYPE_VAR_STRING ] = "string",
+ [mysql.MYSQL_TYPE_STRING ] = "string",
+ [mysql.MYSQL_TYPE_DECIMAL ] = "number",
+ [mysql.MYSQL_TYPE_SHORT ] = "number",
+ [mysql.MYSQL_TYPE_LONG ] = "number",
+ [mysql.MYSQL_TYPE_FLOAT ] = "number",
+ [mysql.MYSQL_TYPE_DOUBLE ] = "number",
+ [mysql.MYSQL_TYPE_LONGLONG ] = "number",
+ [mysql.MYSQL_TYPE_INT24 ] = "number",
+ [mysql.MYSQL_TYPE_YEAR ] = "number",
+ [mysql.MYSQL_TYPE_TINY ] = "number",
+ [mysql.MYSQL_TYPE_TINY_BLOB ] = "binary",
+ [mysql.MYSQL_TYPE_MEDIUM_BLOB] = "binary",
+ [mysql.MYSQL_TYPE_LONG_BLOB ] = "binary",
+ [mysql.MYSQL_TYPE_BLOB ] = "binary",
+ [mysql.MYSQL_TYPE_DATE ] = "date",
+ [mysql.MYSQL_TYPE_NEWDATE ] = "date",
+ [mysql.MYSQL_TYPE_DATETIME ] = "datetime",
+ [mysql.MYSQL_TYPE_TIME ] = "time",
+ [mysql.MYSQL_TYPE_TIMESTAMP ] = "time",
+ [mysql.MYSQL_TYPE_ENUM ] = "set",
+ [mysql.MYSQL_TYPE_SET ] = "set",
+ [mysql.MYSQL_TYPE_NULL ] = "null",
+}
+
+-- real_escape_string
+
+local function finish(t)
+ local r = t._result_
+ if r then
+ mysql_free_result(r)
+ end
+end
+
+-- will become metatable magic
+
+-- local function analyze(result)
+-- mysql_field_seek(result,0)
+-- local nofrows = mysql_num_rows(result) or 0
+-- local noffields = mysql_num_fields(result)
+-- local names = { }
+-- local types = { }
+-- for i=1,noffields do
+-- local field = mysql_fetch_field(result)
+-- names[i] = field.name
+-- types[i] = field.type
+-- end
+-- return names, types, noffields, nofrows
+-- end
+
+local function getcolnames(t)
+ return t.names
+end
+
+local function getcoltypes(t)
+ return t.types
+end
+
+local function numrows(t)
+ return t.nofrows
+end
+
+-- swig_type
+
+-- local ulongArray_getitem = mysql.ulongArray_getitem
+-- local util_getbytearray = mysql.util_getbytearray
+
+-- local function list(t)
+-- local result = t._result_
+-- local row = mysql_fetch_row(result)
+-- local len = mysql_fetch_lengths(result)
+-- local result = { }
+-- for i=1,t.noffields do
+-- local r = i - 1 -- zero offset
+-- result[i] = util_getbytearray(row,r,ulongArray_getitem(len,r))
+-- end
+-- return result
+-- end
+
+-- local function hash(t)
+-- local list = util_mysql_fetch_fields_from_current_row(t._result_)
+-- local result = t._result_
+-- local fields = t.names
+-- local row = mysql_fetch_row(result)
+-- local len = mysql_fetch_lengths(result)
+-- local result = { }
+-- for i=1,t.noffields do
+-- local r = i - 1 -- zero offset
+-- result[fields[i]] = util_getbytearray(row,r,ulongArray_getitem(len,r))
+-- end
+-- return result
+-- end
+
+local util_mysql_fetch_fields_from_current_row = mysql.util_mysql_fetch_fields_from_current_row
+local util_mysql_fetch_all_rows = mysql.util_mysql_fetch_all_rows
+
+local function list(t)
+ return util_mysql_fetch_fields_from_current_row(t._result_)
+end
+
+local function hash(t)
+ local list = util_mysql_fetch_fields_from_current_row(t._result_)
+ local fields = t.names
+ local data = { }
+ for i=1,t.noffields do
+ data[fields[i]] = list[i]
+ end
+ return data
+end
+
+local function wholelist(t)
+ return util_mysql_fetch_all_rows(t._result_)
+end
+
+local mt = { __index = {
+ -- regular
+ finish = finish,
+ list = list,
+ hash = hash,
+ wholelist = wholelist,
+ -- compatibility
+ numrows = numrows,
+ getcolnames = getcolnames,
+ getcoltypes = getcoltypes,
+ -- fallback
+ _result_ = nil,
+ names = { },
+ types = { },
+ noffields = 0,
+ nofrows = 0,
+ }
+}
+
+local nt = setmetatable({},mt)
+
+-- session
+
+local function close(t)
+ mysql_close_connection(t._connection_)
+end
+
+local function execute(t,query)
+ if query and query ~= "" then
+ local connection = t._connection_
+ local result = mysql_execute_query(connection,query,#query)
+ if result == 0 then
+ local result = mysql_store_result(connection)
+ if result then
+ mysql_field_seek(result,0)
+ local nofrows = mysql_num_rows(result) or 0
+ local noffields = mysql_num_fields(result)
+ local names = { }
+ local types = { }
+ for i=1,noffields do
+ local field = mysql_fetch_field(result)
+ names[i] = field.name
+ types[i] = field.type
+ end
+ local t = {
+ _result_ = result,
+ names = names,
+ types = types,
+ noffields = noffields,
+ nofrows = nofrows,
+ }
+ return setmetatable(t,mt)
+ else
+ return nt
+ end
+ end
+ end
+ return false
+end
+
+local mt = { __index = {
+ close = close,
+ execute = execute,
+ }
+}
+
+local function open(t,database,username,password,host,port)
+ local connection = mysql_open_connection(t._session_,host or "localhost",username or "",password or "",database or "",port or 0,0,0)
+ if connection then
+ local t = {
+ _connection_ = connection,
+ }
+ return setmetatable(t,mt)
+ end
+end
+
+local function message(t)
+ return mysql_error_message(t._session_)
+end
+
+local function close(t)
+ -- dummy, as we have a global session
+end
+
+local mt = {
+ __index = {
+ connect = open,
+ close = close,
+ message = message,
+ }
+}
+
+local function initialize()
+ local session = {
+ _session_ = mysql_initialize(instance) -- maybe share, single thread anyway
+ }
+ return setmetatable(session,mt)
+end
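+
+-- A rough sketch of the low level session api defined above; the fetch loop
+-- mirrors the one used in datafetched below:
+--
+-- local session    = initialize()
+-- local connection = session:connect("somedatabase","someuser","somepassword","localhost",3306)
+-- if connection then
+--     local result = connection:execute("SELECT * FROM sometable")
+--     if result then
+--         for i=1,result:numrows() do
+--             local row = result:hash() -- each call yields the next row
+--             -- ... use row ...
+--         end
+--         result:finish()
+--     end
+--     connection:close()
+-- else
+--     print(session:message())
+-- end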
+
+-- -- -- --
+
+local function connect(session,specification)
+ return session:connect(
+ specification.database or "",
+ specification.username or "",
+ specification.password or "",
+ specification.host or "",
+ specification.port
+ )
+end
+
+local function error_in_connection(specification,action)
+ report_state("error in connection: [%s] %s@%s to %s:%s",
+ action or "unknown",
+ specification.database or "no database",
+ specification.username or "no username",
+ specification.host or "no host",
+ specification.port or "no port"
+ )
+end
+
+local function datafetched(specification,query,converter)
+ if not query or query == "" then
+ report_state("no valid query")
+ return { }, { }
+ end
+ local id = specification.id
+ local session, connection
+ if id then
+ local c = cache[id]
+ if c then
+ session = c.session
+ connection = c.connection
+ end
+ if not connection then
+ session = initialize()
+ connection = connect(session,specification)
+ if not connection then
+ for i=1,nofretries do
+ sleep(retrydelay)
+ report_state("retrying to connect: [%s.%s] %s@%s to %s:%s",
+ id,i,
+ specification.database or "no database",
+ specification.username or "no username",
+ specification.host or "no host",
+ specification.port or "no port"
+ )
+ connection = connect(session,specification)
+ if connection then
+ break
+ end
+ end
+ end
+ if connection then
+ cache[id] = { session = session, connection = connection }
+ end
+ end
+ else
+ session = initialize()
+ connection = connect(session,specification)
+ if not connection then
+ for i=1,nofretries do
+ sleep(retrydelay)
+ report_state("retrying to connect: [%s] %s@%s to %s:%s",
+ i,
+ specification.database or "no database",
+ specification.username or "no username",
+ specification.host or "no host",
+ specification.port or "no port"
+ )
+ connection = connect(session,specification)
+ if connection then
+ break
+ end
+ end
+ end
+ end
+ if not connection then
+ report_state("error in connection: %s@%s to %s:%s",
+ specification.database or "no database",
+ specification.username or "no username",
+ specification.host or "no host",
+ specification.port or "no port"
+ )
+ return { }, { }
+ end
+ query = lpegmatch(querysplitter,query)
+ local result, message, okay
+ for i=1,#query do
+ local q = query[i]
+ local r, m = connection:execute(q)
+ if m then
+ report_state("error in query, stage: %s",string.collapsespaces(q))
+ message = message and format("%s\n%s",message,m) or m
+ end
+ if type(r) == "table" then
+ result = r
+ okay = true
+ elseif not m then
+ okay = true
+ end
+ end
+ local data, keys
+ if result then
+ if converter then
+ data = converter.swiglib(result)
+ else
+ keys = result.names
+ data = { }
+ for i=1,result.nofrows do
+ data[i] = result:hash()
+ end
+ end
+ result:finish() -- result:close()
+ elseif message then
+ report_state("message %s",message)
+ end
+ if not keys then
+ keys = { }
+ end
+ if not data then
+ data = { }
+ end
+ if not id then
+ connection:close()
+ session:close()
+ end
+ return data, keys
+end
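+
+-- Connection caching sketch (illustration only; the credentials are made up):
+-- when a specification carries an id, the session/connection pair is stored
+-- in the cache table and reused on later calls; without an id a fresh
+-- connection is opened and closed for each call.
+--
+-- local data, keys = datafetched({
+--     id       = "mydb", -- cache key for connection reuse
+--     host     = "localhost",
+--     username = "root",
+--     password = "secret",
+--     database = "test",
+-- },"SELECT `name` FROM `users` ;")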
+
+local function execute(specification)
+ if trace_sql then
+ report_state("executing library")
+ end
+ if not validspecification(specification) then
+ report_state("error in specification")
+ return
+ end
+ local query = dataprepared(specification)
+ if not query then
+ report_state("error in preparation")
+ return
+ end
+ local data, keys = datafetched(specification,query,specification.converter)
+ if not data then
+ report_state("error in fetching")
+ return
+ end
+ local one = data[1]
+ if one then
+ setmetatable(data,{ __index = one } )
+ end
+ return data, keys
+end
+
+local wraptemplate = [[
+local mysql = require("swigluamysql") -- will be stored in method
+
+----- mysql_fetch_row = mysql.mysql_fetch_row
+----- mysql_fetch_lengths = mysql.mysql_fetch_lengths
+----- util_unpackbytearray = mysql.util_unpackbytearray
+local util_mysql_fetch_fields_from_current_row
+ = mysql.util_mysql_fetch_fields_from_current_row
+
+local converters = utilities.sql.converters
+local deserialize = utilities.sql.deserialize
+
+local tostring = tostring
+local tonumber = tonumber
+local booleanstring = string.booleanstring
+
+%s
+
+return function(result)
+ if not result then
+ return { }
+ end
+ local nofrows = result.nofrows or 0
+ if nofrows == 0 then
+ return { }
+ end
+ local noffields = result.noffields or 0
+ local target = { } -- no %s needed here
+ result = result._result_
+ for i=1,nofrows do
+ -- local row = mysql_fetch_row(result)
+ -- local len = mysql_fetch_lengths(result)
+ -- local cells = util_unpackbytearray(row,noffields,len)
+ local cells = util_mysql_fetch_fields_from_current_row(result)
+ target[%s] = {
+ %s
+ }
+ end
+ return target
+end
+]]
+
+local celltemplate = "cells[%s]"
+
+methods.swiglib = {
+ runner = function() end, -- never called
+ execute = execute,
+ initialize = initialize, -- returns session
+ usesfiles = false,
+ wraptemplate = wraptemplate,
+ celltemplate = celltemplate,
+}
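+
+-- Rough sketch (an illustration, not generated output) of what a converter
+-- built from wraptemplate and celltemplate can end up doing per row, with
+-- made-up field names and conversions filled in for the %s placeholders:
+--
+-- target[i] = {
+--     id   = tonumber(cells[1]),
+--     name = cells[2],
+--     data = deserialize(cells[3]),
+-- }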
diff --git a/tex/context/base/util-sql-loggers.lua b/tex/context/base/util-sql-loggers.lua
index 7fceb8032..33071f2e3 100644
--- a/tex/context/base/util-sql-loggers.lua
+++ b/tex/context/base/util-sql-loggers.lua
@@ -1,277 +1,277 @@
-if not modules then modules = { } end modules ['util-sql-loggers'] = {
- version = 1.001,
- comment = "companion to lmx-*",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This is experimental code and currently part of the base installation simply
--- because it's easier to distribute this way. Eventually it will be documented
--- and the related scripts will show up as well.
-
-local tonumber = tonumber
-local format = string.format
-local concat = table.concat
-local ostime, uuid, osfulltime = os.time, os.uuid, os.fulltime
-local random = math.random
-
-local sql = utilities.sql
-local loggers = { }
-sql.loggers = loggers
-
-local trace_sql = false trackers.register("sql.loggers.trace", function(v) trace_sql = v end)
-local report = logs.reporter("sql","loggers")
-
-loggers.newtoken = sql.tokens.new
-local makeconverter = sql.makeconverter
-
-local function checkeddb(presets,datatable)
- return sql.usedatabase(presets,datatable or presets.datatable or "loggers")
-end
-
-loggers.usedb = checkeddb
-
-local totype = {
- ["error"] = 1, [1] = 1, ["1"] = 1,
- ["warning"] = 2, [2] = 2, ["2"] = 2,
- ["debug"] = 3, [3] = 3, ["3"] = 3,
- ["info"] = 4, [4] = 4, ["4"] = 4,
-}
-
-local fromtype = {
- ["error"] = "error", [1] = "error", ["1"] = "error",
- ["warning"] = "warning", [2] = "warning", ["2"] = "warning",
- ["debug"] = "debug", [3] = "debug", ["3"] = "debug",
- ["info"] = "info", [4] = "info", ["4"] = "info",
-}
-
-table.setmetatableindex(totype, function() return 4 end)
-table.setmetatableindex(fromtype,function() return "info" end)
-
-loggers.totype = totype
-loggers.fromtype = fromtype
-
-local template =[[
- CREATE TABLE IF NOT EXISTS %basename% (
- `id` int(11) NOT NULL AUTO_INCREMENT,
- `time` int(11) NOT NULL,
- `type` int(11) NOT NULL,
- `action` varchar(15) NOT NULL,
- `data` longtext,
- PRIMARY KEY (`id`),
- UNIQUE KEY `id_unique_key` (`id`)
- )
- DEFAULT CHARSET = utf8 ;
-]]
-
-function loggers.createdb(presets,datatable)
-
- local db = checkeddb(presets,datatable)
-
- db.execute {
- template = template,
- variables = {
- basename = db.basename,
- },
- }
-
- report("datatable %a created in %a",db.name,db.base)
-
- return db
-
-end
-
-local template =[[
- DROP TABLE IF EXISTS %basename% ;
-]]
-
-function loggers.deletedb(presets,datatable)
-
- local db = checkeddb(presets,datatable)
-
- db.execute {
- template = template,
- variables = {
- basename = db.basename,
- },
- }
-
- report("datatable %a removed in %a",db.name,db.base)
-
-end
-
-local template =[[
- INSERT INTO %basename% (
- `time`,
- `type`,
- `action`,
- `data`
- ) VALUES (
- %time%,
- %type%,
- '%action%',
- '%[data]%'
- ) ;
-]]
-
-function loggers.save(db,data) -- beware, we pass type and action in the data (saves a table)
-
- if data then
-
- local time = ostime()
- local kind = totype[data.type]
- local action = data.action or "unknown"
-
- data.type = nil
- data.action = nil
-
- db.execute {
- template = template,
- variables = {
- basename = db.basename,
- time = ostime(),
- type = kind,
- action = action,
- data = data and db.serialize(data,"return") or "",
- },
- }
-
- end
-
-end
-
--- local template =[[
--- REMOVE FROM
--- %basename%
--- WHERE
--- `token` = '%token%' ;
--- ]]
---
--- function loggers.remove(db,token)
---
--- db.execute {
--- template = template,
--- variables = {
--- basename = db.basename,
--- token = token,
--- },
--- }
---
--- if trace_sql then
--- report("removed: %s",token)
--- end
---
--- end
-
-local template_nop =[[
- SELECT
- `time`,
- `type`,
- `action`,
- `data`
- FROM
- %basename%
- ORDER BY
- `time`, `type`, `action`
- DESC LIMIT
- %limit% ;
-]]
-
-local template_yes =[[
- SELECT
- `time`,
- `type`,
- `action`,
- `data`
- FROM
- %basename%
- %WHERE%
- ORDER BY
- `time`, `type`, `action`
- DESC LIMIT
- %limit% ;
-]]
-
-local converter = makeconverter {
- -- { name = "time", type = os.localtime },
- { name = "time", type = "number" },
- { name = "type", type = fromtype },
- { name = "action", type = "string" },
- { name = "data", type = "deserialize" },
-}
-
-function loggers.collect(db,specification)
-
- specification = specification or { }
-
- local start = specification.start
- local stop = specification.stop
- local limit = specification.limit or 100
- local kind = specification.type
- local action = specification.action
-
- local filtered = start or stop
-
- local where = { }
-
- if filtered then
- local today = os.date("*t")
-
- if type(start) ~= "table" then
- start = { }
- end
- start = os.time {
- day = start.day or today.day,
- month = start.month or today.month,
- year = start.year or today.year,
- hour = start.hour or 0,
- minute = start.minute or 0,
- second = start.second or 0,
- isdst = true,
- }
-
- if type(stop) ~= "table" then
- stop = { }
- end
- stop = os.time {
- day = stop.day or today.day,
- month = stop.month or today.month,
- year = stop.year or today.year,
- hour = stop.hour or 24,
- minute = stop.minute or 0,
- second = stop.second or 0,
- isdst = true,
- }
-
- -- report("filter: %s => %s",start,stop)
-
- where[#where+1] = format("`time` BETWEEN %s AND %s",start,stop)
-
- end
-
- if kind then
- where[#where+1] = format("`type` = %s",totype[kind])
- end
-
- if action then
- where[#where+1] = format("`action` = '%s'",action)
- end
-
- local records, keys = db.execute {
- template = filtered and template_yes or template_nop,
- converter = converter,
- variables = {
- basename = db.basename,
- limit = limit,
- WHERE = #where > 0 and format("WHERE\n%s",concat(where," AND ")) or "",
- },
- }
-
- if trace_sql then
- report("collected: %s loggers",#records)
- end
-
- return records, keys
-
-end
+if not modules then modules = { } end modules ['util-sql-loggers'] = {
+ version = 1.001,
+ comment = "companion to lmx-*",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This is experimental code and currently part of the base installation simply
+-- because it's easier to distribute this way. Eventually it will be documented
+-- and the related scripts will show up as well.
+
+local tonumber = tonumber
+local format = string.format
+local concat = table.concat
+local ostime, uuid, osfulltime = os.time, os.uuid, os.fulltime
+local random = math.random
+
+local sql = utilities.sql
+local loggers = { }
+sql.loggers = loggers
+
+local trace_sql = false trackers.register("sql.loggers.trace", function(v) trace_sql = v end)
+local report = logs.reporter("sql","loggers")
+
+loggers.newtoken = sql.tokens.new
+local makeconverter = sql.makeconverter
+
+local function checkeddb(presets,datatable)
+ return sql.usedatabase(presets,datatable or presets.datatable or "loggers")
+end
+
+loggers.usedb = checkeddb
+
+local totype = {
+ ["error"] = 1, [1] = 1, ["1"] = 1,
+ ["warning"] = 2, [2] = 2, ["2"] = 2,
+ ["debug"] = 3, [3] = 3, ["3"] = 3,
+ ["info"] = 4, [4] = 4, ["4"] = 4,
+}
+
+local fromtype = {
+ ["error"] = "error", [1] = "error", ["1"] = "error",
+ ["warning"] = "warning", [2] = "warning", ["2"] = "warning",
+ ["debug"] = "debug", [3] = "debug", ["3"] = "debug",
+ ["info"] = "info", [4] = "info", ["4"] = "info",
+}
+
+table.setmetatableindex(totype, function() return 4 end)
+table.setmetatableindex(fromtype,function() return "info" end)
+
+loggers.totype = totype
+loggers.fromtype = fromtype
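+
+-- A quick illustration (the values follow from the two tables and their
+-- metatable defaults above):
+--
+-- print(totype["warning"], totype[2], totype["oeps"]) -- 2  2  4
+-- print(fromtype[1], fromtype["3"], fromtype[99])     -- error  debug  info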
+
+local template =[[
+ CREATE TABLE IF NOT EXISTS %basename% (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `time` int(11) NOT NULL,
+ `type` int(11) NOT NULL,
+ `action` varchar(15) NOT NULL,
+ `data` longtext,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `id_unique_key` (`id`)
+ )
+ DEFAULT CHARSET = utf8 ;
+]]
+
+function loggers.createdb(presets,datatable)
+
+ local db = checkeddb(presets,datatable)
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ },
+ }
+
+ report("datatable %a created in %a",db.name,db.base)
+
+ return db
+
+end
+
+local template =[[
+ DROP TABLE IF EXISTS %basename% ;
+]]
+
+function loggers.deletedb(presets,datatable)
+
+ local db = checkeddb(presets,datatable)
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ },
+ }
+
+ report("datatable %a removed in %a",db.name,db.base)
+
+end
+
+local template =[[
+ INSERT INTO %basename% (
+ `time`,
+ `type`,
+ `action`,
+ `data`
+ ) VALUES (
+ %time%,
+ %type%,
+ '%action%',
+ '%[data]%'
+ ) ;
+]]
+
+function loggers.save(db,data) -- beware, we pass type and action in the data (saves a table)
+
+ if data then
+
+ local time = ostime()
+ local kind = totype[data.type]
+ local action = data.action or "unknown"
+
+ data.type = nil
+ data.action = nil
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ time = ostime(),
+ type = kind,
+ action = action,
+ data = data and db.serialize(data,"return") or "",
+ },
+ }
+
+ end
+
+end
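+
+-- Saving sketch (the presets fields are assumptions, they are handled by
+-- sql.usedatabase): type and action travel inside the data table and are
+-- stripped before the remainder is serialized into the `data` column.
+--
+-- local db = loggers.createdb { database = "test", username = "root", password = "secret" }
+-- loggers.save(db, { type = "warning", action = "login", user = "someuser", ip = "127.0.0.1" })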
+
+-- local template =[[
+-- REMOVE FROM
+-- %basename%
+-- WHERE
+-- `token` = '%token%' ;
+-- ]]
+--
+-- function loggers.remove(db,token)
+--
+-- db.execute {
+-- template = template,
+-- variables = {
+-- basename = db.basename,
+-- token = token,
+-- },
+-- }
+--
+-- if trace_sql then
+-- report("removed: %s",token)
+-- end
+--
+-- end
+
+local template_nop =[[
+ SELECT
+ `time`,
+ `type`,
+ `action`,
+ `data`
+ FROM
+ %basename%
+ ORDER BY
+ `time`, `type`, `action`
+ DESC LIMIT
+ %limit% ;
+]]
+
+local template_yes =[[
+ SELECT
+ `time`,
+ `type`,
+ `action`,
+ `data`
+ FROM
+ %basename%
+ %WHERE%
+ ORDER BY
+ `time`, `type`, `action`
+ DESC LIMIT
+ %limit% ;
+]]
+
+local converter = makeconverter {
+ -- { name = "time", type = os.localtime },
+ { name = "time", type = "number" },
+ { name = "type", type = fromtype },
+ { name = "action", type = "string" },
+ { name = "data", type = "deserialize" },
+}
+
+function loggers.collect(db,specification)
+
+ specification = specification or { }
+
+ local start = specification.start
+ local stop = specification.stop
+ local limit = specification.limit or 100
+ local kind = specification.type
+ local action = specification.action
+
+ local filtered = start or stop
+
+ local where = { }
+
+ if filtered then
+ local today = os.date("*t")
+
+ if type(start) ~= "table" then
+ start = { }
+ end
+ start = os.time {
+ day = start.day or today.day,
+ month = start.month or today.month,
+ year = start.year or today.year,
+ hour = start.hour or 0,
+ minute = start.minute or 0,
+ second = start.second or 0,
+ isdst = true,
+ }
+
+ if type(stop) ~= "table" then
+ stop = { }
+ end
+ stop = os.time {
+ day = stop.day or today.day,
+ month = stop.month or today.month,
+ year = stop.year or today.year,
+ hour = stop.hour or 24,
+ minute = stop.minute or 0,
+ second = stop.second or 0,
+ isdst = true,
+ }
+
+ -- report("filter: %s => %s",start,stop)
+
+ where[#where+1] = format("`time` BETWEEN %s AND %s",start,stop)
+
+ end
+
+ if kind then
+ where[#where+1] = format("`type` = %s",totype[kind])
+ end
+
+ if action then
+ where[#where+1] = format("`action` = '%s'",action)
+ end
+
+ local records, keys = db.execute {
+ template = filtered and template_yes or template_nop,
+ converter = converter,
+ variables = {
+ basename = db.basename,
+ limit = limit,
+ WHERE = #where > 0 and format("WHERE\n%s",concat(where," AND ")) or "",
+ },
+ }
+
+ if trace_sql then
+ report("collected: %s loggers",#records)
+ end
+
+ return records, keys
+
+end
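+
+-- Collecting sketch (values are made up): start and stop are (partial) date
+-- tables; missing fields default to today and to 0 resp. 24 hours.
+--
+-- local records = loggers.collect(db, {
+--     start  = { day = 1, month = 1, year = 2013 },
+--     stop   = { day = 31, month = 1, year = 2013 },
+--     type   = "error",
+--     action = "login",
+--     limit  = 50,
+-- })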
diff --git a/tex/context/base/util-sql-sessions.lua b/tex/context/base/util-sql-sessions.lua
index 76bb91962..d13293691 100644
--- a/tex/context/base/util-sql-sessions.lua
+++ b/tex/context/base/util-sql-sessions.lua
@@ -1,349 +1,349 @@
-if not modules then modules = { } end modules ['util-sql-sessions'] = {
- version = 1.001,
- comment = "companion to lmx-*",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This is experimental code and currently part of the base installation simply
--- because it's easier to distribute this way. Eventually it will be documented
--- and the related scripts will show up as well.
-
--- maybe store threshold in session (in seconds)
-
-local tonumber = tonumber
-local format = string.format
-local ostime, uuid, osfulltime = os.time, os.uuid, os.fulltime
-local random = math.random
-
--- In older frameworks we kept a session table in memory. This time we
--- follow a route where we store session data in a sql table. Each session
--- has a token (similar to what we do on q2p and pod services), a data
--- blob which is just a serialized lua table (we could consider a dump instead)
--- and two times: the creation and last accessed time. The first one is handy
--- for statistics and the second one for cleanup. Both are just numbers so that
--- we don't have to waste code on conversions. Anyhow, we provide variants so that
--- we can always choose what is best.
-
-local sql = utilities.sql
-local sessions = { }
-sql.sessions = sessions
-
-local trace_sql = false trackers.register("sql.sessions.trace", function(v) trace_sql = v end)
-local report = logs.reporter("sql","sessions")
-
-sessions.newtoken = sql.tokens.new
-
-local function checkeddb(presets,datatable)
- return sql.usedatabase(presets,datatable or presets.datatable or "sessions")
-end
-
-sessions.usedb = checkeddb
-
-local template =[[
- CREATE TABLE IF NOT EXISTS %basename% (
- `token` varchar(50) NOT NULL,
- `data` longtext NOT NULL,
- `created` int(11) NOT NULL,
- `accessed` int(11) NOT NULL,
- UNIQUE KEY `token_unique_key` (`token`)
- )
- DEFAULT CHARSET = utf8 ;
-]]
-
-function sessions.createdb(presets,datatable)
-
- local db = checkeddb(presets,datatable)
-
- db.execute {
- template = template,
- variables = {
- basename = db.basename,
- },
- }
-
- report("datatable %a created in %a",db.name,db.base)
-
- return db
-
-end
-
-local template =[[
- DROP TABLE IF EXISTS %basename% ;
-]]
-
-function sessions.deletedb(presets,datatable)
-
- local db = checkeddb(presets,datatable)
-
- db.execute {
- template = template,
- variables = {
- basename = db.basename,
- },
- }
-
- report("datatable %a removed in %a",db.name,db.base)
-
-end
-
-local template =[[
- INSERT INTO %basename% (
- `token`,
- `created`,
- `accessed`,
- `data`
- ) VALUES (
- '%token%',
- %time%,
- %time%,
- '%[data]%'
- ) ;
-]]
-
-function sessions.create(db,data)
-
- local token = sessions.newtoken()
- local time = ostime()
-
- db.execute {
- template = template,
- variables = {
- basename = db.basename,
- token = token,
- time = time,
- data = db.serialize(data or { },"return")
- },
- }
-
- if trace_sql then
- report("created: %s at %s",token,osfulltime(time))
- end
-
- return {
- token = token,
- created = time,
- accessed = time,
- data = data,
- }
-end
-
-local template =[[
- UPDATE
- %basename%
- SET
- `data` = '%[data]%',
- `accessed` = %time%
- WHERE
- `token` = '%token%' ;
-]]
-
-function sessions.save(db,session)
-
- local time = ostime()
- local data = db.serialize(session.data or { },"return")
- local token = session.token
-
- session.accessed = time
-
- db.execute {
- template = template,
- variables = {
- basename = db.basename,
- token = token,
- time = ostime(),
- data = data,
- },
- }
-
- if trace_sql then
- report("saved: %s at %s",token,osfulltime(time))
- end
-
- return session
-end
-
-local template = [[
- UPDATE
- %basename%
- SET
- `accessed` = %time%
- WHERE
- `token` = '%token%' ;
-]]
-
-function sessions.touch(db,token)
-
- db.execute {
- template = template,
- variables = {
- basename = db.basename,
- token = token,
- time = ostime(),
- },
- }
-
-end
-
-local template = [[
- UPDATE
- %basename%
- SET
- `accessed` = %time%
- WHERE
- `token` = '%token%' ;
- SELECT
- *
- FROM
- %basename%
- WHERE
- `token` = '%token%' ;
-]]
-
-function sessions.restore(db,token)
-
- local records, keys = db.execute {
- template = template,
- variables = {
- basename = db.basename,
- token = token,
- time = ostime(),
- },
- }
-
- local record = records and records[1]
-
- if record then
- if trace_sql then
- report("restored: %s",token)
- end
- record.data = db.deserialize(record.data or "")
- return record, keys
- elseif trace_sql then
- report("unknown: %s",token)
- end
-
-end
-
-local template =[[
- DELETE FROM
- %basename%
- WHERE
- `token` = '%token%' ;
-]]
-
-function sessions.remove(db,token)
-
- db.execute {
- template = template,
- variables = {
- basename = db.basename,
- token = token,
- },
- }
-
- if trace_sql then
- report("removed: %s",token)
- end
-
-end
-
-local template_collect_yes =[[
- SELECT
- *
- FROM
- %basename%
- ORDER BY
- `created` ;
-]]
-
-local template_collect_nop =[[
- SELECT
- `accessed`,
- `created`,
- `accessed`,
- `token`
- FROM
- %basename%
- ORDER BY
- `created` ;
-]]
-
-function sessions.collect(db,nodata)
-
- local records, keys = db.execute {
- template = nodata and template_collect_nop or template_collect_yes,
- variables = {
- basename = db.basename,
- },
- }
-
- if not nodata then
- db.unpackdata(records)
- end
-
- if trace_sql then
- report("collected: %s sessions",#records)
- end
-
- return records, keys
-
-end
-
-local template_cleanup_yes =[[
- SELECT
- *
- FROM
- %basename%
- WHERE
- `accessed` < %time%
- ORDER BY
- `created` ;
- DELETE FROM
- %basename%
- WHERE
- `accessed` < %time% ;
-]]
-
-local template_cleanup_nop =[[
- SELECT
- `accessed`,
- `created`,
- `accessed`,
- `token`
- FROM
- %basename%
- WHERE
- `accessed` < %time%
- ORDER BY
- `created` ;
- DELETE FROM
- %basename%
- WHERE
- `accessed` < %time% ;
-]]
-
-function sessions.cleanupdb(db,delta,nodata)
-
- local time = ostime()
-
- local records, keys = db.execute {
- template = nodata and template_cleanup_nop or template_cleanup_yes,
- variables = {
- basename = db.basename,
- time = time - delta
- },
- }
-
- if not nodata then
- db.unpackdata(records)
- end
-
- if trace_sql then
- report("cleaned: %s seconds before %s",delta,osfulltime(time))
- end
-
- return records, keys
-
-end
+if not modules then modules = { } end modules ['util-sql-sessions'] = {
+ version = 1.001,
+ comment = "companion to lmx-*",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This is experimental code and currently part of the base installation simply
+-- because it's easier to distribute this way. Eventually it will be documented
+-- and the related scripts will show up as well.
+
+-- maybe store threshold in session (in seconds)
+
+local tonumber = tonumber
+local format = string.format
+local ostime, uuid, osfulltime = os.time, os.uuid, os.fulltime
+local random = math.random
+
+-- In older frameworks we kept a session table in memory. This time we
+-- follow a route where we store session data in a sql table. Each session
+-- has a token (similar to what we do on q2p and pod services), a data
+-- blob which is just a serialized lua table (we could consider a dump instead)
+-- and two times: the creation and last accessed time. The first one is handy
+-- for statistics and the second one for cleanup. Both are just numbers so that
+-- we don't have to waste code on conversions. Anyhow, we provide variants so that
+-- we can always choose what is best.
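+
+-- The intended flow in short (a sketch; the presets fields are assumptions):
+--
+-- local db = sessions.createdb { database = "test", username = "root", password = "secret" }
+-- local session = sessions.create(db, { language = "en" })
+-- session.data.language = "nl"
+-- sessions.save(db,session)
+-- local record = sessions.restore(db,session.token)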
+
+local sql = utilities.sql
+local sessions = { }
+sql.sessions = sessions
+
+local trace_sql = false trackers.register("sql.sessions.trace", function(v) trace_sql = v end)
+local report = logs.reporter("sql","sessions")
+
+sessions.newtoken = sql.tokens.new
+
+local function checkeddb(presets,datatable)
+ return sql.usedatabase(presets,datatable or presets.datatable or "sessions")
+end
+
+sessions.usedb = checkeddb
+
+local template =[[
+ CREATE TABLE IF NOT EXISTS %basename% (
+ `token` varchar(50) NOT NULL,
+ `data` longtext NOT NULL,
+ `created` int(11) NOT NULL,
+ `accessed` int(11) NOT NULL,
+ UNIQUE KEY `token_unique_key` (`token`)
+ )
+ DEFAULT CHARSET = utf8 ;
+]]
+
+function sessions.createdb(presets,datatable)
+
+ local db = checkeddb(presets,datatable)
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ },
+ }
+
+ report("datatable %a created in %a",db.name,db.base)
+
+ return db
+
+end
+
+local template =[[
+ DROP TABLE IF EXISTS %basename% ;
+]]
+
+function sessions.deletedb(presets,datatable)
+
+ local db = checkeddb(presets,datatable)
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ },
+ }
+
+ report("datatable %a removed in %a",db.name,db.base)
+
+end
+
+local template =[[
+ INSERT INTO %basename% (
+ `token`,
+ `created`,
+ `accessed`,
+ `data`
+ ) VALUES (
+ '%token%',
+ %time%,
+ %time%,
+ '%[data]%'
+ ) ;
+]]
+
+function sessions.create(db,data)
+
+ local token = sessions.newtoken()
+ local time = ostime()
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ token = token,
+ time = time,
+ data = db.serialize(data or { },"return")
+ },
+ }
+
+ if trace_sql then
+ report("created: %s at %s",token,osfulltime(time))
+ end
+
+ return {
+ token = token,
+ created = time,
+ accessed = time,
+ data = data,
+ }
+end
+
+local template =[[
+ UPDATE
+ %basename%
+ SET
+ `data` = '%[data]%',
+ `accessed` = %time%
+ WHERE
+ `token` = '%token%' ;
+]]
+
+function sessions.save(db,session)
+
+ local time = ostime()
+ local data = db.serialize(session.data or { },"return")
+ local token = session.token
+
+ session.accessed = time
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ token = token,
+ time = ostime(),
+ data = data,
+ },
+ }
+
+ if trace_sql then
+ report("saved: %s at %s",token,osfulltime(time))
+ end
+
+ return session
+end
+
+local template = [[
+ UPDATE
+ %basename%
+ SET
+ `accessed` = %time%
+ WHERE
+ `token` = '%token%' ;
+]]
+
+function sessions.touch(db,token)
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ token = token,
+ time = ostime(),
+ },
+ }
+
+end
+
+local template = [[
+ UPDATE
+ %basename%
+ SET
+ `accessed` = %time%
+ WHERE
+ `token` = '%token%' ;
+ SELECT
+ *
+ FROM
+ %basename%
+ WHERE
+ `token` = '%token%' ;
+]]
+
+function sessions.restore(db,token)
+
+ local records, keys = db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ token = token,
+ time = ostime(),
+ },
+ }
+
+ local record = records and records[1]
+
+ if record then
+ if trace_sql then
+ report("restored: %s",token)
+ end
+ record.data = db.deserialize(record.data or "")
+ return record, keys
+ elseif trace_sql then
+ report("unknown: %s",token)
+ end
+
+end
+
+local template =[[
+ DELETE FROM
+ %basename%
+ WHERE
+ `token` = '%token%' ;
+]]
+
+function sessions.remove(db,token)
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ token = token,
+ },
+ }
+
+ if trace_sql then
+ report("removed: %s",token)
+ end
+
+end
+
+local template_collect_yes =[[
+ SELECT
+ *
+ FROM
+ %basename%
+ ORDER BY
+ `created` ;
+]]
+
+local template_collect_nop =[[
+ SELECT
+ `accessed`,
+ `created`,
+ `accessed`,
+ `token`
+ FROM
+ %basename%
+ ORDER BY
+ `created` ;
+]]
+
+function sessions.collect(db,nodata)
+
+ local records, keys = db.execute {
+ template = nodata and template_collect_nop or template_collect_yes,
+ variables = {
+ basename = db.basename,
+ },
+ }
+
+ if not nodata then
+ db.unpackdata(records)
+ end
+
+ if trace_sql then
+ report("collected: %s sessions",#records)
+ end
+
+ return records, keys
+
+end
+
+local template_cleanup_yes =[[
+ SELECT
+ *
+ FROM
+ %basename%
+ WHERE
+ `accessed` < %time%
+ ORDER BY
+ `created` ;
+ DELETE FROM
+ %basename%
+ WHERE
+ `accessed` < %time% ;
+]]
+
+local template_cleanup_nop =[[
+ SELECT
+ `accessed`,
+ `created`,
+ `accessed`,
+ `token`
+ FROM
+ %basename%
+ WHERE
+ `accessed` < %time%
+ ORDER BY
+ `created` ;
+ DELETE FROM
+ %basename%
+ WHERE
+ `accessed` < %time% ;
+]]
+
+function sessions.cleanupdb(db,delta,nodata)
+
+ local time = ostime()
+
+ local records, keys = db.execute {
+ template = nodata and template_cleanup_nop or template_cleanup_yes,
+ variables = {
+ basename = db.basename,
+ time = time - delta
+ },
+ }
+
+ if not nodata then
+ db.unpackdata(records)
+ end
+
+ if trace_sql then
+ report("cleaned: %s seconds before %s",delta,osfulltime(time))
+ end
+
+ return records, keys
+
+end
diff --git a/tex/context/base/util-sql-tickets.lua b/tex/context/base/util-sql-tickets.lua
index 5e958299d..65eb69bae 100644
--- a/tex/context/base/util-sql-tickets.lua
+++ b/tex/context/base/util-sql-tickets.lua
@@ -1,772 +1,772 @@
-if not modules then modules = { } end modules ['util-sql-tickets'] = {
- version = 1.001,
- comment = "companion to lmx-*",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- TODO: MAKE SOME INTO STORED PROCEDURES
-
--- This is experimental code and currently part of the base installation simply
--- because it's easier to distribute this way. Eventually it will be documented
--- and the related scripts will show up as well.
-
-local tonumber = tonumber
-local format = string.format
-local ostime, uuid, osfulltime = os.time, os.uuid, os.fulltime
-local random = math.random
-local concat = table.concat
-
-local sql = utilities.sql
-local tickets = { }
-sql.tickets = tickets
-
-local trace_sql = false trackers.register("sql.tickets.trace", function(v) trace_sql = v end)
-local report = logs.reporter("sql","tickets")
-
-local serialize = sql.serialize
-local deserialize = sql.deserialize
-local execute = sql.execute
-
-tickets.newtoken = sql.tokens.new
-
--- Beware: an index can be a string or a number, so we create
--- a combination of hash and index.
-
-local statustags = { [0] =
- "unknown",
- "pending",
- "busy",
- "finished",
- "dependent", -- same token but different subtoken (so we only need to find the first)
- "reserved-1",
- "reserved-2",
- "error",
- "deleted",
-}
-
-local status = table.swapped(statustags)
-tickets.status = status
-tickets.statustags = statustags
-
-local s_unknown = status.unknown
-local s_pending = status.pending
-local s_busy = status.busy
-local s_finished = status.finished
-local s_dependent = status.dependent
-local s_error = status.error
-local s_deleted = status.deleted
-
-local s_rubish = s_error -- and higher
-
-local function checkeddb(presets,datatable)
- return sql.usedatabase(presets,datatable or presets.datatable or "tickets")
-end
-
-tickets.usedb = checkeddb
-
-local template =[[
- CREATE TABLE IF NOT EXISTS %basename% (
- `id` int(11) NOT NULL AUTO_INCREMENT,
- `token` varchar(50) NOT NULL,
- `subtoken` INT(11) NOT NULL,
- `created` int(11) NOT NULL,
- `accessed` int(11) NOT NULL,
- `category` int(11) NOT NULL,
- `status` int(11) NOT NULL,
- `usertoken` varchar(50) NOT NULL,
- `data` longtext NOT NULL,
- `comment` longtext NOT NULL,
-
- PRIMARY KEY (`id`),
- UNIQUE INDEX `id_unique_index` (`id` ASC),
- KEY `token_unique_key` (`token`)
- )
- DEFAULT CHARSET = utf8 ;
-]]
-
-function tickets.createdb(presets,datatable)
- local db = checkeddb(presets,datatable)
- local data, keys = db.execute {
- template = template,
- variables = {
- basename = db.basename,
- },
- }
-
- report("datatable %a created in %a",db.name,db.base)
-
- return db
-
-end
-
-local template =[[
- DROP TABLE IF EXISTS %basename% ;
-]]
-
-function tickets.deletedb(presets,datatable)
-
- local db = checkeddb(presets,datatable)
-
- local data, keys = db.execute {
- template = template,
- variables = {
- basename = db.basename,
- },
- }
-
- report("datatable %a removed in %a",db.name,db.base)
-
-end
-
-local template_push =[[
- INSERT INTO %basename% (
- `token`,
- `subtoken`,
- `created`,
- `accessed`,
- `status`,
- `category`,
- `usertoken`,
- `data`,
- `comment`
- ) VALUES (
- '%token%',
- %subtoken%,
- %time%,
- %time%,
- %status%,
- %category%,
- '%usertoken%',
- '%[data]%',
- '%[comment]%'
- ) ;
-]]
-
-local template_fetch =[[
- SELECT
- *
- FROM
- %basename%
- WHERE
- `token` = '%token%'
- AND
- `subtoken` = '%subtoken%'
- ;
-]]
-
-function tickets.create(db,ticket)
-
- -- We assume a unique token .. if not we're toast anyway. We used to lock and
- -- get the last id etc etc but there is no real need for that.
-
- -- we could check for dependent here but we don't want the lookup
-
- local token = ticket.token or tickets.newtoken()
- local time = ostime()
- local status = ticket.status
- local category = ticket.category or 0
- local subtoken = ticket.subtoken or 0
- local usertoken = ticket.usertoken or ""
- local comment = ticket.comment or ""
-
- status = not status and subtoken > 1 and s_dependent or s_pending
-
- local result, message = db.execute {
- template = template_push,
- variables = {
- basename = db.basename,
- token = token,
- subtoken = subtoken,
- time = time,
- status = status,
- category = category,
- usertoken = usertoken,
- data = db.serialize(ticket.data or { },"return"),
- comment = comment,
- },
- }
-
- -- We could stick to only fetching the id and make the table here
- -- but we're not pushing that many tickets so we can as well follow
- -- the lazy approach and fetch the whole.
-
- local result, message = db.execute {
- template = template_fetch,
- variables = {
- basename = db.basename,
- token = token,
- subtoken = subtoken,
- },
- }
-
- if result and #result > 0 then
- if trace_sql then
- report("created: %s at %s",token,osfulltime(time))
- end
- return result[1]
- else
- report("failed: %s at %s",token,osfulltime(time))
- end
-
-end
-
-local template =[[
- UPDATE
- %basename%
- SET
- `data` = '%[data]%',
- `status` = %status%,
- `accessed` = %time%
- WHERE
- `id` = %id% ;
-]]
-
-function tickets.save(db,ticket)
-
- local time = ostime()
- local data = db.serialize(ticket.data or { },"return")
- local status = ticket.status or s_error
-
--- print("SETTING")
--- inspect(data)
-
- ticket.status = status
- ticket.accessed = time
-
- db.execute {
- template = template,
- variables = {
- basename = db.basename,
- id = ticket.id,
- time = ostime(),
- status = status,
- data = data,
- },
- }
-
- if trace_sql then
- report("saved: id %s, time %s",id,osfulltime(time))
- end
-
- return ticket
-end
-
-local template =[[
- UPDATE
- %basename%
- SET
- `accessed` = %time%
- WHERE
- `token` = '%token%' ;
-
- SELECT
- *
- FROM
- %basename%
- WHERE
- `id` = %id% ;
-]]
-
-function tickets.restore(db,id)
-
- local record, keys = db.execute {
- template = template,
- variables = {
- basename = db.basename,
- id = id,
- time = ostime(),
- },
- }
-
- local record = record and record[1]
-
- if record then
- if trace_sql then
- report("restored: id %s",id)
- end
- record.data = db.deserialize(record.data or "")
- return record
- elseif trace_sql then
- report("unknown: id %s",id)
- end
-
-end
-
-local template =[[
- DELETE FROM
- %basename%
- WHERE
- `id` = %id% ;
-]]
-
-function tickets.remove(db,id)
-
- db.execute {
- template = template,
- variables = {
- basename = db.basename,
- id = id,
- },
- }
-
- if trace_sql then
- report("removed: id %s",id)
- end
-
-end
-
-local template_yes =[[
- SELECT
- *
- FROM
- %basename%
- ORDER BY
- `id` ;
-]]
-
-local template_nop =[[
- SELECT
- `created`,
- `usertoken`,
- `accessed`,
- `status`
- FROM
- %basename%
- ORDER BY
- `id` ;
-]]
-
-function tickets.collect(db,nodata)
-
- local records, keys = db.execute {
- template = nodata and template_nop or template_yes,
- variables = {
- basename = db.basename,
- token = token,
- },
- }
-
- if not nodata then
- db.unpackdata(records)
- end
-
- if trace_sql then
- report("collected: %s tickets",#records)
- end
-
- return records, keys
-
-end
-
--- We always keep the last select in the execute so one can have
--- an update afterwards.
-
-local template =[[
- DELETE FROM
- %basename%
- WHERE
- `accessed` < %time% OR `status` >= %rubish% ;
-]]
-
-local template_cleanup_yes =[[
- SELECT
- *
- FROM
- %basename%
- WHERE
- `accessed` < %time%
- ORDER BY
- `id` ;
-]] .. template
-
-local template_cleanup_nop =[[
- SELECT
- `accessed`,
- `created`,
- `accessed`,
- `token`,
- `usertoken`
- FROM
- %basename%
- WHERE
- `accessed` < %time%
- ORDER BY
- `id` ;
-]] .. template
-
-function tickets.cleanupdb(db,delta,nodata) -- maybe delta in db
-
- local time = delta and (ostime() - delta) or 0
-
- local records, keys = db.execute {
- template = nodata and template_cleanup_nop or template_cleanup_yes,
- variables = {
- basename = db.basename,
- time = time,
- rubish = s_rubish,
- },
- }
-
- if not nodata then
- db.unpackdata(records)
- end
-
- if trace_sql then
- report("cleaned: %s seconds before %s",delta,osfulltime(time))
- end
-
- return records, keys
-
-end
-
--- status related functions
-
-local template =[[
- SELECT
- `status`
- FROM
- %basename%
- WHERE
- `token` = '%token%'
- ORDER BY
- `id`
- ;
-]]
-
-function tickets.getstatus(db,token)
-
- local record, keys = db.execute {
- template = template,
- variables = {
- basename = db.basename,
- token = token,
- },
- }
-
- local record = record and record[1]
-
- return record and record.status or s_unknown
-
-end
-
-local template =[[
- SELECT
- `status`
- FROM
- %basename%
- WHERE
- `status` >= %rubish% OR `accessed` < %time%
- ORDER BY
- `id`
- ;
-]]
-
-function tickets.getobsolete(db,delta)
-
- local time = delta and (ostime() - delta) or 0
-
- local records = db.execute {
- template = template,
- variables = {
- basename = db.basename,
- time = time,
- rubish = s_rubish,
- },
- }
-
- db.unpackdata(records)
-
- return records
-
-end
-
-local template =[[
- SELECT
- `id`
- FROM
- %basename%
- WHERE
- `status` = %status%
- LIMIT
- 1 ;
-]]
-
-function tickets.hasstatus(db,status)
-
- local records = db.execute {
- template = template,
- variables = {
- basename = db.basename,
- status = status or s_unknown,
- },
- }
-
- return records and #records > 0 or false
-
-end
-
-local template =[[
- UPDATE
- %basename%
- SET
- `status` = %status%,
- `accessed` = %time%
- WHERE
- `id` = %id% ;
-]]
-
-function tickets.setstatus(db,id,status)
-
- db.execute {
- template = template,
- variables = {
- basename = db.basename,
- id = id,
- time = ostime(),
- status = status or s_error,
- },
- }
-
-end
-
-local template =[[
- DELETE FROM
- %basename%
- WHERE
- `status` IN (%status%) ;
-]]
-
-function tickets.prunedb(db,status)
-
- if type(status) == "table" then
- status = concat(status,",")
- end
-
- local data, keys = db.execute {
- template = template,
- variables = {
- basename = db.basename,
- status = status or s_unknown,
- },
- }
-
- if trace_sql then
- report("pruned: status %s removed",status)
- end
-
-end
-
--- START TRANSACTION ; ... COMMIT ;
--- LOCK TABLES %basename% WRITE ; ... UNLOCK TABLES ;
-
-local template_a = [[
- SET
- @last_ticket_token = '' ;
- UPDATE
- %basename%
- SET
- `token` = (@last_ticket_token := `token`),
- `status` = %newstatus%,
- `accessed` = %time%
- WHERE
- `status` = %status%
- ORDER BY
- `id`
- LIMIT
- 1
- ;
- SELECT
- *
- FROM
- %basename%
- WHERE
- `token` = @last_ticket_token
- ORDER BY
- `id`
- ;
-]]
-
-local template_b = [[
- SELECT
- *
- FROM
- %basename%
- WHERE
- `status` = %status%
- ORDER BY
- `id`
- LIMIT
- 1
- ;
-]]
-
-function tickets.getfirstwithstatus(db,status,newstatus)
-
- local records
-
- if type(newstatus) == "number" then -- todo: also accept string
-
- records = db.execute {
- template = template_a,
- variables = {
- basename = db.basename,
- status = status or s_pending,
- newstatus = newstatus,
- time = ostime(),
- },
- }
-
-
- else
-
- records = db.execute {
- template = template_b,
- variables = {
- basename = db.basename,
- status = status or s_pending,
- },
- }
-
- end
-
- if type(records) == "table" and #records > 0 then
-
- for i=1,#records do
- local record = records[i]
- record.data = db.deserialize(record.data or "")
- record.status = newstatus or s_busy
- end
-
- return records
-
- end
-end
-
--- The next getter assumes that we have a scheduler running so that there is
--- one process in charge of changing the status.
-
-local template = [[
- SET
- @last_ticket_token = '' ;
- UPDATE
- %basename%
- SET
- `token` = (@last_ticket_token := `token`),
- `status` = %newstatus%,
- `accessed` = %time%
- WHERE
- `status` = %status%
- ORDER BY
- `id`
- LIMIT
- 1
- ;
- SELECT
- @last_ticket_token AS `token`
- ;
-]]
-
-function tickets.getfirstinqueue(db,status,newstatus)
-
- local records = db.execute {
- template = template,
- variables = {
- basename = db.basename,
- status = status or s_pending,
- newstatus = newstatus or s_busy,
- time = ostime(),
- },
- }
-
- local token = type(records) == "table" and #records > 0 and records[1].token
-
- return token ~= "" and token
-
-end
-
-local template =[[
- SELECT
- *
- FROM
- %basename%
- WHERE
- `token` = '%token%'
- ORDER BY
- `id` ;
-]]
-
-function tickets.getticketsbytoken(db,token)
-
- local records, keys = db.execute {
- template = template,
- variables = {
- basename = db.basename,
- token = token,
- },
- }
-
- db.unpackdata(records)
-
- return records
-
-end
-
-local template =[[
- SELECT
- *
- FROM
- %basename%
- WHERE
- `usertoken` = '%usertoken%' AND `status` < %rubish%
- ORDER BY
- `id` ;
-]]
-
-function tickets.getusertickets(db,usertoken)
-
- -- todo: update accessed
- -- todo: get less fields
- -- maybe only data for status changed (hard to check)
-
- local records, keys = db.execute {
- template = template,
- variables = {
- basename = db.basename,
- usertoken = usertoken,
- rubish = s_rubish,
- },
- }
-
- db.unpackdata(records)
-
- return records
-
-end
-
-local template =[[
- UPDATE
- %basename%
- SET
- `status` = %deleted%
- WHERE
- `usertoken` = '%usertoken%' ;
-]]
-
-function tickets.removeusertickets(db,usertoken)
-
- db.execute {
- template = template,
- variables = {
- basename = db.basename,
- usertoken = usertoken,
- deleted = s_deleted,
- },
- }
-
- if trace_sql then
- report("removed: usertoken %s",usertoken)
- end
-
-end
+if not modules then modules = { } end modules ['util-sql-tickets'] = {
+ version = 1.001,
+ comment = "companion to lmx-*",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- TODO: MAKE SOME INTO STORED PROCEDURES
+
+-- This is experimental code and currently part of the base installation simply
+-- because it's easier to distribute this way. Eventually it will be documented
+-- and the related scripts will show up as well.
+
+local tonumber = tonumber
+local format = string.format
+local ostime, uuid, osfulltime = os.time, os.uuid, os.fulltime
+local random = math.random
+local concat = table.concat
+
+local sql = utilities.sql
+local tickets = { }
+sql.tickets = tickets
+
+local trace_sql = false trackers.register("sql.tickets.trace", function(v) trace_sql = v end)
+local report = logs.reporter("sql","tickets")
+
+local serialize = sql.serialize
+local deserialize = sql.deserialize
+local execute = sql.execute
+
+tickets.newtoken = sql.tokens.new
+
+-- Beware: an index can be a string or a number, so we create
+-- a combination of hash and index.
+
+local statustags = { [0] =
+ "unknown",
+ "pending",
+ "busy",
+ "finished",
+ "dependent", -- same token but different subtoken (so we only need to find the first)
+ "reserved-1",
+ "reserved-2",
+ "error",
+ "deleted",
+}
+
+local status = table.swapped(statustags)
+tickets.status = status
+tickets.statustags = statustags
+
+local s_unknown = status.unknown
+local s_pending = status.pending
+local s_busy = status.busy
+local s_finished = status.finished
+local s_dependent = status.dependent
+local s_error = status.error
+local s_deleted = status.deleted
+
+local s_rubish = s_error -- and higher
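+
+-- So, for instance (the values follow from the tables above):
+--
+-- print(status.pending, statustags[3], s_rubish) -- 1  finished  7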
+
+local function checkeddb(presets,datatable)
+ return sql.usedatabase(presets,datatable or presets.datatable or "tickets")
+end
+
+tickets.usedb = checkeddb
+
+local template =[[
+ CREATE TABLE IF NOT EXISTS %basename% (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `token` varchar(50) NOT NULL,
+ `subtoken` INT(11) NOT NULL,
+ `created` int(11) NOT NULL,
+ `accessed` int(11) NOT NULL,
+ `category` int(11) NOT NULL,
+ `status` int(11) NOT NULL,
+ `usertoken` varchar(50) NOT NULL,
+ `data` longtext NOT NULL,
+ `comment` longtext NOT NULL,
+
+ PRIMARY KEY (`id`),
+ UNIQUE INDEX `id_unique_index` (`id` ASC),
+ KEY `token_unique_key` (`token`)
+ )
+ DEFAULT CHARSET = utf8 ;
+]]
+
+function tickets.createdb(presets,datatable)
+ local db = checkeddb(presets,datatable)
+ local data, keys = db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ },
+ }
+
+ report("datatable %a created in %a",db.name,db.base)
+
+ return db
+
+end
+
+local template =[[
+ DROP TABLE IF EXISTS %basename% ;
+]]
+
+function tickets.deletedb(presets,datatable)
+
+ local db = checkeddb(presets,datatable)
+
+ local data, keys = db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ },
+ }
+
+ report("datatable %a removed in %a",db.name,db.base)
+
+end
+
+local template_push =[[
+ INSERT INTO %basename% (
+ `token`,
+ `subtoken`,
+ `created`,
+ `accessed`,
+ `status`,
+ `category`,
+ `usertoken`,
+ `data`,
+ `comment`
+ ) VALUES (
+ '%token%',
+ %subtoken%,
+ %time%,
+ %time%,
+ %status%,
+ %category%,
+ '%usertoken%',
+ '%[data]%',
+ '%[comment]%'
+ ) ;
+]]
+
+local template_fetch =[[
+ SELECT
+ *
+ FROM
+ %basename%
+ WHERE
+ `token` = '%token%'
+ AND
+ `subtoken` = '%subtoken%'
+ ;
+]]
+
+function tickets.create(db,ticket)
+
+ -- We assume a unique token .. if not we're toast anyway. We used to lock and
+ -- get the last id etc etc but there is no real need for that.
+
+ -- we could check for dependent here but we don't want the lookup
+
+ local token = ticket.token or tickets.newtoken()
+ local time = ostime()
+ local status = ticket.status
+ local category = ticket.category or 0
+ local subtoken = ticket.subtoken or 0
+ local usertoken = ticket.usertoken or ""
+ local comment = ticket.comment or ""
+
+ status = not status and subtoken > 1 and s_dependent or s_pending
+
+ local result, message = db.execute {
+ template = template_push,
+ variables = {
+ basename = db.basename,
+ token = token,
+ subtoken = subtoken,
+ time = time,
+ status = status,
+ category = category,
+ usertoken = usertoken,
+ data = db.serialize(ticket.data or { },"return"),
+ comment = comment,
+ },
+ }
+
+ -- We could stick to only fetching the id and make the table here
+ -- but we're not pushing that many tickets so we can as well follow
+ -- the lazy approach and fetch the whole.
+
+ local result, message = db.execute {
+ template = template_fetch,
+ variables = {
+ basename = db.basename,
+ token = token,
+ subtoken = subtoken,
+ },
+ }
+
+ if result and #result > 0 then
+ if trace_sql then
+ report("created: %s at %s",token,osfulltime(time))
+ end
+ return result[1]
+ else
+ report("failed: %s at %s",token,osfulltime(time))
+ end
+
+end
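+
+-- Creation sketch (field values are made up): without an explicit token one is
+-- generated, and with subtoken 0 and no status the ticket ends up pending; the
+-- freshly inserted record is fetched back and returned.
+--
+-- local db = tickets.createdb { database = "test", username = "root", password = "secret" }
+-- local ticket = tickets.create(db, {
+--     usertoken = "some-user-token",
+--     category  = 1,
+--     comment   = "test run",
+--     data      = { filename = "demo.tex" },
+-- })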
+
+local template =[[
+ UPDATE
+ %basename%
+ SET
+ `data` = '%[data]%',
+ `status` = %status%,
+ `accessed` = %time%
+ WHERE
+ `id` = %id% ;
+]]
+
+function tickets.save(db,ticket)
+
+ local time = ostime()
+ local data = db.serialize(ticket.data or { },"return")
+ local status = ticket.status or s_error
+
+-- print("SETTING")
+-- inspect(data)
+
+ ticket.status = status
+ ticket.accessed = time
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ id = ticket.id,
+ time = ostime(),
+ status = status,
+ data = data,
+ },
+ }
+
+ if trace_sql then
+ report("saved: id %s, time %s",id,osfulltime(time))
+ end
+
+ return ticket
+end
+
+local template =[[
+ UPDATE
+ %basename%
+ SET
+ `accessed` = %time%
+ WHERE
+ `token` = '%token%' ;
+
+ SELECT
+ *
+ FROM
+ %basename%
+ WHERE
+ `id` = %id% ;
+]]
+
+function tickets.restore(db,id)
+
+ local record, keys = db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ id = id,
+ time = ostime(),
+ },
+ }
+
+ local record = record and record[1]
+
+ if record then
+ if trace_sql then
+ report("restored: id %s",id)
+ end
+ record.data = db.deserialize(record.data or "")
+ return record
+ elseif trace_sql then
+ report("unknown: id %s",id)
+ end
+
+end
+
+local template =[[
+ DELETE FROM
+ %basename%
+ WHERE
+ `id` = %id% ;
+]]
+
+function tickets.remove(db,id)
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ id = id,
+ },
+ }
+
+ if trace_sql then
+ report("removed: id %s",id)
+ end
+
+end
+
+local template_yes =[[
+ SELECT
+ *
+ FROM
+ %basename%
+ ORDER BY
+ `id` ;
+]]
+
+local template_nop =[[
+ SELECT
+ `created`,
+ `usertoken`,
+ `accessed`,
+ `status`
+ FROM
+ %basename%
+ ORDER BY
+ `id` ;
+]]
+
+function tickets.collect(db,nodata)
+
+ local records, keys = db.execute {
+ template = nodata and template_nop or template_yes,
+ variables = {
+ basename = db.basename,
+ token = token,
+ },
+ }
+
+ if not nodata then
+ db.unpackdata(records)
+ end
+
+ if trace_sql then
+ report("collected: %s tickets",#records)
+ end
+
+ return records, keys
+
+end
+
+-- We always keep the last select in the execute so one can have
+-- an update afterwards.
+
+local template =[[
+ DELETE FROM
+ %basename%
+ WHERE
+ `accessed` < %time% OR `status` >= %rubish% ;
+]]
+
+local template_cleanup_yes =[[
+ SELECT
+ *
+ FROM
+ %basename%
+ WHERE
+ `accessed` < %time%
+ ORDER BY
+ `id` ;
+]] .. template
+
+local template_cleanup_nop =[[
+ SELECT
+ `accessed`,
+ `created`,
+ `accessed`,
+ `token`,
+ `usertoken`
+ FROM
+ %basename%
+ WHERE
+ `accessed` < %time%
+ ORDER BY
+ `id` ;
+]] .. template
+
+function tickets.cleanupdb(db,delta,nodata) -- maybe delta in db
+
+ local time = delta and (ostime() - delta) or 0
+
+ local records, keys = db.execute {
+ template = nodata and template_cleanup_nop or template_cleanup_yes,
+ variables = {
+ basename = db.basename,
+ time = time,
+ rubish = s_rubish,
+ },
+ }
+
+ if not nodata then
+ db.unpackdata(records)
+ end
+
+ if trace_sql then
+ report("cleaned: %s seconds before %s",delta,osfulltime(time))
+ end
+
+ return records, keys
+
+end
+
+-- status related functions
+
+local template =[[
+ SELECT
+ `status`
+ FROM
+ %basename%
+ WHERE
+ `token` = '%token%'
+ ORDER BY
+ `id`
+ ;
+]]
+
+function tickets.getstatus(db,token)
+
+ local record, keys = db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ token = token,
+ },
+ }
+
+ local record = record and record[1]
+
+ return record and record.status or s_unknown
+
+end
+
+local template =[[
+ SELECT
+ `status`
+ FROM
+ %basename%
+ WHERE
+ `status` >= %rubish% OR `accessed` < %time%
+ ORDER BY
+ `id`
+ ;
+]]
+
+function tickets.getobsolete(db,delta)
+
+ local time = delta and (ostime() - delta) or 0
+
+ local records = db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ time = time,
+ rubish = s_rubish,
+ },
+ }
+
+ db.unpackdata(records)
+
+ return records
+
+end
+
+local template =[[
+ SELECT
+ `id`
+ FROM
+ %basename%
+ WHERE
+ `status` = %status%
+ LIMIT
+ 1 ;
+]]
+
+function tickets.hasstatus(db,status)
+
+ local records = db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ status = status or s_unknown,
+ },
+ }
+
+ return records and #records > 0 or false
+
+end
+
+local template =[[
+ UPDATE
+ %basename%
+ SET
+ `status` = %status%,
+ `accessed` = %time%
+ WHERE
+ `id` = %id% ;
+]]
+
+function tickets.setstatus(db,id,status)
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ id = id,
+ time = ostime(),
+ status = status or s_error,
+ },
+ }
+
+end
+
+local template =[[
+ DELETE FROM
+ %basename%
+ WHERE
+ `status` IN (%status%) ;
+]]
+
+function tickets.prunedb(db,status)
+
+ if type(status) == "table" then
+ status = concat(status,",")
+ end
+
+ local data, keys = db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ status = status or s_unknown,
+ },
+ }
+
+ if trace_sql then
+ report("pruned: status %s removed",status)
+ end
+
+end
+
+-- START TRANSACTION ; ... COMMIT ;
+-- LOCK TABLES %basename% WRITE ; ... UNLOCK TABLES ;
+
+local template_a = [[
+ SET
+ @last_ticket_token = '' ;
+ UPDATE
+ %basename%
+ SET
+ `token` = (@last_ticket_token := `token`),
+ `status` = %newstatus%,
+ `accessed` = %time%
+ WHERE
+ `status` = %status%
+ ORDER BY
+ `id`
+ LIMIT
+ 1
+ ;
+ SELECT
+ *
+ FROM
+ %basename%
+ WHERE
+ `token` = @last_ticket_token
+ ORDER BY
+ `id`
+ ;
+]]
+
+local template_b = [[
+ SELECT
+ *
+ FROM
+ %basename%
+ WHERE
+ `status` = %status%
+ ORDER BY
+ `id`
+ LIMIT
+ 1
+ ;
+]]
+
+function tickets.getfirstwithstatus(db,status,newstatus)
+
+ local records
+
+ if type(newstatus) == "number" then -- todo: also accept string
+
+ records = db.execute {
+ template = template_a,
+ variables = {
+ basename = db.basename,
+ status = status or s_pending,
+ newstatus = newstatus,
+ time = ostime(),
+ },
+ }
+
+
+ else
+
+ records = db.execute {
+ template = template_b,
+ variables = {
+ basename = db.basename,
+ status = status or s_pending,
+ },
+ }
+
+ end
+
+ if type(records) == "table" and #records > 0 then
+
+ for i=1,#records do
+ local record = records[i]
+ record.data = db.deserialize(record.data or "")
+ record.status = newstatus or s_busy
+ end
+
+ return records
+
+ end
+end
+
+-- The next getter assumes that we have a scheduler running so that there is
+-- one process in charge of changing the status.
+
+local template = [[
+ SET
+ @last_ticket_token = '' ;
+ UPDATE
+ %basename%
+ SET
+ `token` = (@last_ticket_token := `token`),
+ `status` = %newstatus%,
+ `accessed` = %time%
+ WHERE
+ `status` = %status%
+ ORDER BY
+ `id`
+ LIMIT
+ 1
+ ;
+ SELECT
+ @last_ticket_token AS `token`
+ ;
+]]
+
+function tickets.getfirstinqueue(db,status,newstatus)
+
+ local records = db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ status = status or s_pending,
+ newstatus = newstatus or s_busy,
+ time = ostime(),
+ },
+ }
+
+ local token = type(records) == "table" and #records > 0 and records[1].token
+
+ return token ~= "" and token
+
+end
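+
+-- A minimal polling loop sketch (illustration only; the sleep helper and the
+-- processing step are assumptions): one scheduler process claims pending work
+-- by flipping its status to busy, so no other process picks it up.
+--
+-- while true do
+--     local token = tickets.getfirstinqueue(db) -- pending -> busy
+--     if token then
+--         local work = tickets.getticketsbytoken(db,token)
+--         for i=1,#work do
+--             -- ... process work[i].data ...
+--             tickets.setstatus(db,work[i].id,tickets.status.finished)
+--         end
+--     else
+--         os.sleep(1) -- assumption: a sleep helper is available
+--     end
+-- end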
+
+local template =[[
+ SELECT
+ *
+ FROM
+ %basename%
+ WHERE
+ `token` = '%token%'
+ ORDER BY
+ `id` ;
+]]
+
+function tickets.getticketsbytoken(db,token)
+
+ local records, keys = db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ token = token,
+ },
+ }
+
+ db.unpackdata(records)
+
+ return records
+
+end
+
+local template =[[
+ SELECT
+ *
+ FROM
+ %basename%
+ WHERE
+ `usertoken` = '%usertoken%' AND `status` < %rubish%
+ ORDER BY
+ `id` ;
+]]
+
+function tickets.getusertickets(db,usertoken)
+
+ -- todo: update accessed
+ -- todo: get less fields
+ -- maybe only data for status changed (hard to check)
+
+ local records, keys = db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ usertoken = usertoken,
+ rubish = s_rubish,
+ },
+ }
+
+ db.unpackdata(records)
+
+ return records
+
+end
+
+local template =[[
+ UPDATE
+ %basename%
+ SET
+ `status` = %deleted%
+ WHERE
+ `usertoken` = '%usertoken%' ;
+]]
+
+function tickets.removeusertickets(db,usertoken)
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ usertoken = usertoken,
+ deleted = s_deleted,
+ },
+ }
+
+ if trace_sql then
+ report("removed: usertoken %s",usertoken)
+ end
+
+end
diff --git a/tex/context/base/util-sql-users.lua b/tex/context/base/util-sql-users.lua
index ea8fb4e07..b99bfa58a 100644
--- a/tex/context/base/util-sql-users.lua
+++ b/tex/context/base/util-sql-users.lua
@@ -1,410 +1,410 @@
-if not modules then modules = { } end modules ['util-sql-users'] = {
- version = 1.001,
- comment = "companion to lmx-*",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This is experimental code and currently part of the base installation simply
--- because it's easier to distribute this way. Eventually it will be documented
--- and the related scripts will show up as well.
-
--- local sql = sql or (utilities and utilities.sql) or require("util-sql")
--- local md5 = md5 or require("md5")
-
-local sql = utilities.sql
-
-local format, upper, find, gsub, topattern = string.format, string.upper, string.find, string.gsub, string.topattern
-local sumhexa = md5.sumhexa
-local booleanstring = string.booleanstring
-
-local sql = utilities.sql
-local users = { }
-sql.users = users
-
-local trace_sql = false trackers.register("sql.users.trace", function(v) trace_sql = v end)
-local report = logs.reporter("sql","users")
-
-local function encryptpassword(str)
- if not str or str == "" then
- return ""
- elseif find(str,"^MD5:") then
- return str
- else
- return upper(format("MD5:%s",sumhexa(str)))
- end
-end
-
-local function cleanuppassword(str)
- return (gsub(str,"^MD5:",""))
-end
-
-local function samepasswords(one,two)
- if not one or not two then
- return false
- end
- if not find(one,"^MD5:") then
- one = encryptpassword(one)
- end
- if not find(two,"^MD5:") then
- two = encryptpassword(two)
- end
- return one == two
-end
-
-local function validaddress(address,addresses)
- if address and addresses and address ~= "" and addresses ~= "" then
- if find(address,topattern(addresses,true,true)) then
- return true, "valid remote address"
- end
- return false, "invalid remote address"
- else
- return true, "no remote address check"
- end
-end
-
-
-users.encryptpassword = encryptpassword
-users.cleanuppassword = cleanuppassword
-users.samepasswords = samepasswords
-users.validaddress = validaddress
-
--- print(users.encryptpassword("test")) -- MD5:098F6BCD4621D373CADE4E832627B4F6
-
-local function checkeddb(presets,datatable)
- return sql.usedatabase(presets,datatable or presets.datatable or "users")
-end
-
-users.usedb = checkeddb
-
-local groupnames = { }
-local groupnumbers = { }
-
-local function registergroup(name)
- local n = #groupnames + 1
- groupnames [n] = name
- groupnames [tostring(n)] = name
- groupnames [name] = name
- groupnumbers[n] = n
- groupnumbers[tostring(n)] = n
- groupnumbers[name] = n
- return n
-end
-
-registergroup("superuser")
-registergroup("administrator")
-registergroup("user")
-registergroup("guest")
-
-users.groupnames = groupnames
-users.groupnumbers = groupnumbers
-
--- password 'test':
---
--- INSERT insert into users (`name`,`password`,`group`,`enabled`) values ('...','MD5:098F6BCD4621D373CADE4E832627B4F6',1,1) ;
-
-local template =[[
- CREATE TABLE `users` (
- `id` int(11) NOT NULL AUTO_INCREMENT,
- `name` varchar(80) NOT NULL,
- `fullname` varchar(80) NOT NULL,
- `password` varchar(50) DEFAULT NULL,
- `group` int(11) NOT NULL,
- `enabled` int(11) DEFAULT '1',
- `email` varchar(80) DEFAULT NULL,
- `address` varchar(256) DEFAULT NULL,
- `theme` varchar(50) DEFAULT NULL,
- `data` longtext,
- PRIMARY KEY (`id`),
- UNIQUE KEY `name_unique` (`name`)
- ) DEFAULT CHARSET = utf8 ;
-]]
-
-local converter, fields = sql.makeconverter {
- { name = "id", type = "number" },
- { name = "name", type = "string" },
- { name = "fullname", type = "string" },
- { name = "password", type = "string" },
- { name = "group", type = groupnames },
- { name = "enabled", type = "boolean" },
- { name = "email", type = "string" },
- { name = "address", type = "string" },
- { name = "theme", type = "string" },
- { name = "data", type = "deserialize" },
-}
-
-function users.createdb(presets,datatable)
-
- local db = checkeddb(presets,datatable)
-
- db.execute {
- template = template,
- variables = {
- basename = db.basename,
- },
- }
-
- report("datatable %a created in %a",db.name,db.base)
-
- return db
-
-end
-
-local template =[[
- SELECT
- %fields%
- FROM
- %basename%
- WHERE
- `name` = '%[name]%'
- AND
- `password` = '%[password]%'
- ;
-]]
-
-local template =[[
- SELECT
- %fields%
- FROM
- %basename%
- WHERE
- `name` = '%[name]%'
- ;
-]]
-
-function users.valid(db,username,password,address)
-
- local data = db.execute {
- template = template,
- converter = converter,
- variables = {
- basename = db.basename,
- fields = fields,
- name = username,
- },
- }
-
- local data = data and data[1]
-
- if not data then
- return false, "unknown user"
- elseif not data.enabled then
- return false, "disabled user"
- elseif data.password ~= encryptpassword(password) then
- return false, "wrong password"
- elseif not validaddress(address,data.address) then
- return false, "invalid address"
- else
- data.password = nil
- return data, "okay"
- end
-
-end
-
-local template =[[
- INSERT INTO %basename% (
- `name`,
- `fullname`,
- `password`,
- `group`,
- `enabled`,
- `email`,
- `address`,
- `theme`,
- `data`
- ) VALUES (
- '%[name]%',
- '%[fullname]%',
- '%[password]%',
- '%[group]%',
- '%[enabled]%',
- '%[email]%',
- '%[address]%',
- '%[theme]%',
- '%[data]%'
- ) ;
-]]
-
-function users.add(db,specification)
-
- local name = specification.username or specification.name
-
- if not name or name == "" then
- return
- end
-
- local data = specification.data
-
- db.execute {
- template = template,
- variables = {
- basename = db.basename,
- name = name,
- fullname = name or fullname,
- password = encryptpassword(specification.password or ""),
- group = groupnumbers[specification.group] or groupnumbers.guest,
- enabled = booleanstring(specification.enabled) and "1" or "0",
- email = specification.email,
- address = specification.address,
- theme = specification.theme,
- data = type(data) == "table" and db.serialize(data,"return") or "",
- },
- }
-
-end
-
-local template =[[
- SELECT
- %fields%
- FROM
- %basename%
- WHERE
- `name` = '%[name]%' ;
-]]
-
-function users.getbyname(db,name)
-
- local data = db.execute {
- template = template,
- converter = converter,
- variables = {
- basename = db.basename,
- fields = fields,
- name = name,
- },
- }
-
- return data and data[1] or nil
-
-end
-
-local template =[[
- SELECT
- %fields%
- FROM
- %basename%
- WHERE
- `id` = '%id%' ;
-]]
-
-local function getbyid(db,id)
-
- local data = db.execute {
- template = template,
- converter = converter,
- variables = {
- basename = db.basename,
- fields = fields,
- id = id,
- },
- }
-
- return data and data[1] or nil
-
-end
-
-users.getbyid = getbyid
-
-local template =[[
- UPDATE
- %basename%
- SET
- `fullname` = '%[fullname]%',
- `password` = '%[password]%',
- `group` = '%[group]%',
- `enabled` = '%[enabled]%',
- `email` = '%[email]%',
- `address` = '%[address]%',
- `theme` = '%[theme]%',
- `data` = '%[data]%'
- WHERE
- `id` = '%id%'
- ;
-]]
-
-function users.save(db,id,specification)
-
- id = tonumber(id)
-
- if not id then
- return
- end
-
- local user = getbyid(db,id)
-
- if tonumber(user.id) ~= id then
- return
- end
-
- local fullname = specification.fullname == nil and user.fulname or specification.fullname
- local password = specification.password == nil and user.password or specification.password
- local group = specification.group == nil and user.group or specification.group
- local enabled = specification.enabled == nil and user.enabled or specification.enabled
- local email = specification.email == nil and user.email or specification.email
- local address = specification.address == nil and user.address or specification.address
- local theme = specification.theme == nil and user.theme or specification.theme
- local data = specification.data == nil and user.data or specification.data
-
- db.execute {
- template = template,
- variables = {
- basename = db.basename,
- id = id,
- fullname = fullname,
- password = encryptpassword(password),
- group = groupnumbers[group],
- enabled = booleanstring(enabled) and "1" or "0",
- email = email,
- address = address,
- theme = theme,
- data = type(data) == "table" and db.serialize(data,"return") or "",
- },
- }
-
- return getbyid(db,id)
-
-end
-
-local template =[[
- DELETE FROM
- %basename%
- WHERE
- `id` = '%id%' ;
-]]
-
-function users.remove(db,id)
-
- db.execute {
- template = template,
- variables = {
- basename = db.basename,
- id = id,
- },
- }
-
-end
-
-local template =[[
- SELECT
- %fields%
- FROM
- %basename%
- ORDER BY
- `name` ;
-]]
-
-function users.collect(db) -- maybe also an id/name only variant
-
- local records, keys = db.execute {
- template = template,
- converter = converter,
- variables = {
- basename = db.basename,
- fields = fields,
- },
- }
-
- return records, keys
-
-end
+if not modules then modules = { } end modules ['util-sql-users'] = {
+ version = 1.001,
+ comment = "companion to lmx-*",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This is experimental code and currently part of the base installation simply
+-- because it's easier to distribute this way. Eventually it will be documented
+-- and the related scripts will show up as well.
+
+-- local sql = sql or (utilities and utilities.sql) or require("util-sql")
+-- local md5 = md5 or require("md5")
+
+local sql = utilities.sql
+
+local format, upper, find, gsub, topattern = string.format, string.upper, string.find, string.gsub, string.topattern
+local sumhexa = md5.sumhexa
+local booleanstring = string.booleanstring
+
+local sql = utilities.sql
+local users = { }
+sql.users = users
+
+local trace_sql = false trackers.register("sql.users.trace", function(v) trace_sql = v end)
+local report = logs.reporter("sql","users")
+
+local function encryptpassword(str)
+ if not str or str == "" then
+ return ""
+ elseif find(str,"^MD5:") then
+ return str
+ else
+ return upper(format("MD5:%s",sumhexa(str)))
+ end
+end
+
+local function cleanuppassword(str)
+ return (gsub(str,"^MD5:",""))
+end
+
+local function samepasswords(one,two)
+ if not one or not two then
+ return false
+ end
+ if not find(one,"^MD5:") then
+ one = encryptpassword(one)
+ end
+ if not find(two,"^MD5:") then
+ two = encryptpassword(two)
+ end
+ return one == two
+end
+
+local function validaddress(address,addresses)
+ if address and addresses and address ~= "" and addresses ~= "" then
+ if find(address,topattern(addresses,true,true)) then
+ return true, "valid remote address"
+ end
+ return false, "invalid remote address"
+ else
+ return true, "no remote address check"
+ end
+end
+
+
+users.encryptpassword = encryptpassword
+users.cleanuppassword = cleanuppassword
+users.samepasswords = samepasswords
+users.validaddress = validaddress
+
+-- print(users.encryptpassword("test")) -- MD5:098F6BCD4621D373CADE4E832627B4F6
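+
+-- A couple of hedged examples of the helpers above; the address pattern syntax
+-- is whatever string.topattern accepts, so treat that line as an illustration:
+--
+-- print(users.samepasswords("test","MD5:098F6BCD4621D373CADE4E832627B4F6")) -- true
+-- print(users.validaddress("192.168.1.10","192.168.*"))                     -- true (pattern matches)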
+
+local function checkeddb(presets,datatable)
+ return sql.usedatabase(presets,datatable or presets.datatable or "users")
+end
+
+users.usedb = checkeddb
+
+local groupnames = { }
+local groupnumbers = { }
+
+local function registergroup(name)
+ local n = #groupnames + 1
+ groupnames [n] = name
+ groupnames [tostring(n)] = name
+ groupnames [name] = name
+ groupnumbers[n] = n
+ groupnumbers[tostring(n)] = n
+ groupnumbers[name] = n
+ return n
+end
+
+registergroup("superuser")
+registergroup("administrator")
+registergroup("user")
+registergroup("guest")
+
+users.groupnames = groupnames
+users.groupnumbers = groupnumbers
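+
+-- The two tables map back and forth between names and numbers and also accept
+-- numeric strings, for instance:
+--
+-- print(groupnumbers["administrator"])  -- 2
+-- print(groupnames[2], groupnames["2"]) -- administrator  administrator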
+
+-- password 'test':
+--
+-- INSERT INTO users (`name`,`password`,`group`,`enabled`) VALUES ('...','MD5:098F6BCD4621D373CADE4E832627B4F6',1,1) ;
+
+local template =[[
+ CREATE TABLE `users` (
+ `id` int(11) NOT NULL AUTO_INCREMENT,
+ `name` varchar(80) NOT NULL,
+ `fullname` varchar(80) NOT NULL,
+ `password` varchar(50) DEFAULT NULL,
+ `group` int(11) NOT NULL,
+ `enabled` int(11) DEFAULT '1',
+ `email` varchar(80) DEFAULT NULL,
+ `address` varchar(256) DEFAULT NULL,
+ `theme` varchar(50) DEFAULT NULL,
+ `data` longtext,
+ PRIMARY KEY (`id`),
+ UNIQUE KEY `name_unique` (`name`)
+ ) DEFAULT CHARSET = utf8 ;
+]]
+
+local converter, fields = sql.makeconverter {
+ { name = "id", type = "number" },
+ { name = "name", type = "string" },
+ { name = "fullname", type = "string" },
+ { name = "password", type = "string" },
+ { name = "group", type = groupnames },
+ { name = "enabled", type = "boolean" },
+ { name = "email", type = "string" },
+ { name = "address", type = "string" },
+ { name = "theme", type = "string" },
+ { name = "data", type = "deserialize" },
+}
+
+function users.createdb(presets,datatable)
+
+ local db = checkeddb(presets,datatable)
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ },
+ }
+
+ report("datatable %a created in %a",db.name,db.base)
+
+ return db
+
+end
+
+local template =[[
+ SELECT
+ %fields%
+ FROM
+ %basename%
+ WHERE
+ `name` = '%[name]%'
+ AND
+ `password` = '%[password]%'
+ ;
+]]
+
+local template =[[
+ SELECT
+ %fields%
+ FROM
+ %basename%
+ WHERE
+ `name` = '%[name]%'
+ ;
+]]
+
+function users.valid(db,username,password,address)
+
+ local data = db.execute {
+ template = template,
+ converter = converter,
+ variables = {
+ basename = db.basename,
+ fields = fields,
+ name = username,
+ },
+ }
+
+ local data = data and data[1]
+
+ if not data then
+ return false, "unknown user"
+ elseif not data.enabled then
+ return false, "disabled user"
+ elseif data.password ~= encryptpassword(password) then
+ return false, "wrong password"
+ elseif not validaddress(address,data.address) then
+ return false, "invalid address"
+ else
+ data.password = nil
+ return data, "okay"
+ end
+
+end
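+
+-- A login check could look as follows; the credentials and presets are made up
+-- for the sake of the example:
+--
+-- local db = users.usedb { host = "localhost", username = "root", password = "...", database = "test" }
+-- local data, message = users.valid(db,"hans","test")
+-- if data then
+--     print("welcome",data.fullname)  -- the password field has been stripped
+-- else
+--     print("no access:",message)     -- "unknown user", "wrong password", ...
+-- end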
+
+local template =[[
+ INSERT INTO %basename% (
+ `name`,
+ `fullname`,
+ `password`,
+ `group`,
+ `enabled`,
+ `email`,
+ `address`,
+ `theme`,
+ `data`
+ ) VALUES (
+ '%[name]%',
+ '%[fullname]%',
+ '%[password]%',
+ '%[group]%',
+ '%[enabled]%',
+ '%[email]%',
+ '%[address]%',
+ '%[theme]%',
+ '%[data]%'
+ ) ;
+]]
+
+function users.add(db,specification)
+
+ local name = specification.username or specification.name
+
+ if not name or name == "" then
+ return
+ end
+
+ local data = specification.data
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ name = name,
+            fullname   = specification.fullname or name,
+ password = encryptpassword(specification.password or ""),
+ group = groupnumbers[specification.group] or groupnumbers.guest,
+ enabled = booleanstring(specification.enabled) and "1" or "0",
+ email = specification.email,
+ address = specification.address,
+ theme = specification.theme,
+ data = type(data) == "table" and db.serialize(data,"return") or "",
+ },
+ }
+
+end
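+
+-- Adding a guest account might look like this; all values are examples:
+--
+-- users.add(db, {
+--     name     = "guest",
+--     password = "guest",                     -- stored in encrypted (MD5) form
+--     group    = "guest",                     -- mapped to a number via groupnumbers
+--     enabled  = "true",                      -- ends up as 1 in the table
+--     data     = { note = "demo account" },   -- serialized into the data column
+-- })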
+
+local template =[[
+ SELECT
+ %fields%
+ FROM
+ %basename%
+ WHERE
+ `name` = '%[name]%' ;
+]]
+
+function users.getbyname(db,name)
+
+ local data = db.execute {
+ template = template,
+ converter = converter,
+ variables = {
+ basename = db.basename,
+ fields = fields,
+ name = name,
+ },
+ }
+
+ return data and data[1] or nil
+
+end
+
+local template =[[
+ SELECT
+ %fields%
+ FROM
+ %basename%
+ WHERE
+ `id` = '%id%' ;
+]]
+
+local function getbyid(db,id)
+
+ local data = db.execute {
+ template = template,
+ converter = converter,
+ variables = {
+ basename = db.basename,
+ fields = fields,
+ id = id,
+ },
+ }
+
+ return data and data[1] or nil
+
+end
+
+users.getbyid = getbyid
+
+local template =[[
+ UPDATE
+ %basename%
+ SET
+ `fullname` = '%[fullname]%',
+ `password` = '%[password]%',
+ `group` = '%[group]%',
+ `enabled` = '%[enabled]%',
+ `email` = '%[email]%',
+ `address` = '%[address]%',
+ `theme` = '%[theme]%',
+ `data` = '%[data]%'
+ WHERE
+ `id` = '%id%'
+ ;
+]]
+
+function users.save(db,id,specification)
+
+ id = tonumber(id)
+
+ if not id then
+ return
+ end
+
+ local user = getbyid(db,id)
+
+ if tonumber(user.id) ~= id then
+ return
+ end
+
+    local fullname = specification.fullname == nil and user.fullname or specification.fullname
+ local password = specification.password == nil and user.password or specification.password
+ local group = specification.group == nil and user.group or specification.group
+ local enabled = specification.enabled == nil and user.enabled or specification.enabled
+ local email = specification.email == nil and user.email or specification.email
+ local address = specification.address == nil and user.address or specification.address
+ local theme = specification.theme == nil and user.theme or specification.theme
+ local data = specification.data == nil and user.data or specification.data
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ id = id,
+ fullname = fullname,
+ password = encryptpassword(password),
+ group = groupnumbers[group],
+ enabled = booleanstring(enabled) and "1" or "0",
+ email = email,
+ address = address,
+ theme = theme,
+ data = type(data) == "table" and db.serialize(data,"return") or "",
+ },
+ }
+
+ return getbyid(db,id)
+
+end
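+
+-- Only the fields that are present in the specification change, the rest is
+-- kept from the stored record; the id below is of course an example:
+--
+-- users.save(db, 123, { enabled = false })           -- disable an account
+-- users.save(db, 123, { password = "new secret" })   -- stored in encrypted form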
+
+local template =[[
+ DELETE FROM
+ %basename%
+ WHERE
+ `id` = '%id%' ;
+]]
+
+function users.remove(db,id)
+
+ db.execute {
+ template = template,
+ variables = {
+ basename = db.basename,
+ id = id,
+ },
+ }
+
+end
+
+local template =[[
+ SELECT
+ %fields%
+ FROM
+ %basename%
+ ORDER BY
+ `name` ;
+]]
+
+function users.collect(db) -- maybe also an id/name only variant
+
+ local records, keys = db.execute {
+ template = template,
+ converter = converter,
+ variables = {
+ basename = db.basename,
+ fields = fields,
+ },
+ }
+
+ return records, keys
+
+end
diff --git a/tex/context/base/util-sql.lua b/tex/context/base/util-sql.lua
index 1c1766edf..cd2c4c2e2 100644
--- a/tex/context/base/util-sql.lua
+++ b/tex/context/base/util-sql.lua
@@ -1,443 +1,443 @@
-if not modules then modules = { } end modules ['util-sql'] = {
- version = 1.001,
- comment = "companion to m-sql.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- todo: templates as table (saves splitting)
-
--- Of course we could use a library but we don't want another depedency and there is
--- a bit of flux in these libraries. Also, we want the data back in a way that we
--- like.
---
--- This is the first of set of sql related modules that are providing functionality
--- for a web based framework that we use for typesetting (related) services. We're
--- talking of session management, job ticket processing, storage, (xml) file processing
--- and dealing with data from databases (often ambitiously called database publishing).
---
--- There is no generic solution for such services, but from our perspective, as we use
--- context in a regular tds tree (the standard distribution) it makes sense to put shared
--- code in the context distribution. That way we don't need to reinvent wheels every time.
-
--- We use the template mechanism from util-tpl which inturn is just using the dos cq
--- windows convention of %whatever% variables that I've used for ages.
-
--- util-sql-imp-client.lua
--- util-sql-imp-library.lua
--- util-sql-imp-swiglib.lua
--- util-sql-imp-lmxsql.lua
-
--- local sql = require("util-sql")
---
--- local converter = sql.makeconverter {
--- { name = "id", type = "number" },
--- { name = "data",type = "string" },
--- }
---
--- local execute = sql.methods.swiglib.execute
--- -- local execute = sql.methods.library.execute
--- -- local execute = sql.methods.client.execute
--- -- local execute = sql.methods.lmxsql.execute
---
--- result = execute {
--- presets = {
--- host = "localhost",
--- username = "root",
--- password = "test",
--- database = "test",
--- id = "test", -- forces persistent session
--- },
--- template = "select * from `test` where `id` > %criterium% ;",
--- variables = {
--- criterium = 2,
--- },
--- converter = converter
--- }
---
--- inspect(result)
-
-local format, match = string.format, string.match
-local random = math.random
-local rawset, setmetatable, getmetatable, load, type = rawset, setmetatable, getmetatable, load, type
-local P, S, V, C, Cs, Ct, Cc, Cg, Cf, patterns, lpegmatch = lpeg.P, lpeg.S, lpeg.V, lpeg.C, lpeg.Cs, lpeg.Ct, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.patterns, lpeg.match
-local concat = table.concat
-
-local osuuid = os.uuid
-local osclock = os.clock or os.time
-local ostime = os.time
-local setmetatableindex = table.setmetatableindex
-
-local trace_sql = false trackers.register("sql.trace", function(v) trace_sql = v end)
-local trace_queries = false trackers.register("sql.queries",function(v) trace_queries = v end)
-local report_state = logs.reporter("sql")
-
--- trace_sql = true
--- trace_queries = true
-
-utilities.sql = utilities.sql or { }
-local sql = utilities.sql
-
-local replacetemplate = utilities.templates.replace
-local loadtemplate = utilities.templates.load
-
-local methods = { }
-sql.methods = methods
-
-local helpers = { }
-sql.helpers = helpers
-
-local serialize = table.fastserialize
-local deserialize = table.deserialize
-
-sql.serialize = serialize
-sql.deserialize = deserialize
-
-helpers.serialize = serialize -- bonus
-helpers.deserialize = deserialize -- bonus
-
-local defaults = { __index =
- {
- resultfile = "result.dat",
- templatefile = "template.sql",
- queryfile = "query.sql",
- variables = { },
- username = "default",
- password = "default",
- host = "localhost",
- port = 3306,
- database = "default",
- },
-}
-
-setmetatableindex(sql.methods,function(t,k)
- report_state("start loading method %a",k)
- require("util-sql-imp-"..k)
- report_state("loading method %a done",k)
- return rawget(t,k)
-end)
-
--- converters
-
-local converters = { }
-sql.converters = converters
-
-local function makeconverter(entries,celltemplate,wraptemplate)
- local shortcuts = { }
- local assignments = { }
- local key = false
- for i=1,#entries do
- local entry = entries[i]
- local name = entry.name
- local kind = entry.type or entry.kind
- local value = format(celltemplate,i,i)
- if kind == "boolean" then
- assignments[#assignments+1] = format("[%q] = booleanstring(%s),",name,value)
- elseif kind == "number" then
- assignments[#assignments+1] = format("[%q] = tonumber(%s),",name,value)
- elseif type(kind) == "function" then
- local c = #converters + 1
- converters[c] = kind
- shortcuts[#shortcuts+1] = format("local fun_%s = converters[%s]",c,c)
- assignments[#assignments+1] = format("[%q] = fun_%s(%s),",name,c,value)
- elseif type(kind) == "table" then
- local c = #converters + 1
- converters[c] = kind
- shortcuts[#shortcuts+1] = format("local tab_%s = converters[%s]",c,c)
- assignments[#assignments+1] = format("[%q] = tab_%s[%s],",name,#converters,value)
- elseif kind == "deserialize" then
- assignments[#assignments+1] = format("[%q] = deserialize(%s),",name,value)
- elseif kind == "key" then
- -- hashed instead of indexed
- key = value
- elseif kind == "entry" then
- -- so we can (efficiently) extend the hashed table
- local default = entry.default or ""
- if type(default) == "string" then
- assignments[#assignments+1] = format("[%q] = %q,",name,default)
- else
- assignments[#assignments+1] = format("[%q] = %s,",name,tostring(default))
- end
- else
- assignments[#assignments+1] = format("[%q] = %s,",name,value)
- end
- end
- local code = format(wraptemplate,concat(shortcuts,"\n"),key and "{ }" or "data",key or "i",concat(assignments,"\n "))
- -- print(code)
- local func = load(code)
- return func and func()
-end
-
-function sql.makeconverter(entries)
- local fields = { }
- for i=1,#entries do
- fields[i] = format("`%s`",entries[i].name)
- end
- fields = concat(fields, ", ")
- local converter = {
- fields = fields
- }
- setmetatableindex(converter, function(t,k)
- local sqlmethod = methods[k]
- local v = makeconverter(entries,sqlmethod.celltemplate,sqlmethod.wraptemplate)
- t[k] = v
- return v
- end)
- return converter, fields
-end
-
--- helper for libraries:
-
-local function validspecification(specification)
- local presets = specification.presets
- if type(presets) == "string" then
- presets = dofile(presets)
- end
- if type(presets) == "table" then
- setmetatable(presets,defaults)
- setmetatable(specification,{ __index = presets })
- else
- setmetatable(specification,defaults)
- end
- return true
-end
-
-helpers.validspecification = validspecification
-
-local whitespace = patterns.whitespace^0
-local eol = patterns.eol
-local separator = P(";")
-local escaped = patterns.escaped
-local dquote = patterns.dquote
-local squote = patterns.squote
-local dsquote = squote * squote
----- quoted = patterns.quoted
-local quoted = dquote * (escaped + (1-dquote))^0 * dquote
- + squote * (escaped + dsquote + (1-squote))^0 * squote
-local comment = P("--") * (1-eol) / ""
-local query = whitespace
- * Cs((quoted + comment + 1 - separator)^1 * Cc(";"))
- * whitespace
-local splitter = Ct(query * (separator * query)^0)
-
-helpers.querysplitter = splitter
-
--- I will add a bit more checking.
-
-local function validspecification(specification)
- local presets = specification.presets
- if type(presets) == "string" then
- presets = dofile(presets)
- end
- if type(presets) == "table" then
- local m = getmetatable(presets)
- if m then
- setmetatable(m,defaults)
- else
- setmetatable(presets,defaults)
- end
- setmetatable(specification,{ __index = presets })
- else
- setmetatable(specification,defaults)
- end
- local templatefile = specification.templatefile or "query"
- local queryfile = specification.queryfile or presets.queryfile or file.nameonly(templatefile) .. "-temp.sql"
- local resultfile = specification.resultfile or presets.resultfile or file.nameonly(templatefile) .. "-temp.dat"
- specification.queryfile = queryfile
- specification.resultfile = resultfile
- if trace_sql then
- report_state("template file: %s",templatefile or "")
- report_state("query file: %s",queryfile)
- report_state("result file: %s",resultfile)
- end
- return true
-end
-
-local function preparetemplate(specification)
- local template = specification.template
- if template then
- local query = replacetemplate(template,specification.variables,'sql')
- if not query then
- report_state("error in template: %s",template)
- elseif trace_queries then
- report_state("query from template: %s",query)
- end
- return query
- end
- local templatefile = specification.templatefile
- if templatefile then
- local query = loadtemplate(templatefile,specification.variables,'sql')
- if not query then
- report_state("error in template file %a",templatefile)
- elseif trace_queries then
- report_state("query from template file %a: %s",templatefile,query)
- end
- return query
- end
- report_state("no query template or templatefile")
-end
-
-helpers.preparetemplate = preparetemplate
-
--- -- -- we delay setting this -- -- --
-
-local currentmethod
-
-local function firstexecute(...)
- local execute = methods[currentmethod].execute
- sql.execute = execute
- return execute(...)
-end
-
-function sql.setmethod(method)
- currentmethod = method
- sql.execute = firstexecute
-end
-
-sql.setmethod("library")
-
--- helper:
-
-function sql.usedatabase(presets,datatable)
- local name = datatable or presets.datatable
- if name then
- local method = presets.method and sql.methods[presets.method] or sql.methods.client
- local base = presets.database or "test"
- local basename = format("`%s`.`%s`",base,name)
- local execute = nil
- local m_execute = method.execute
- if method.usesfiles then
- local queryfile = presets.queryfile or format("%s-temp.sql",name)
- local resultfile = presets.resultfile or format("%s-temp.dat",name)
- execute = function(specification) -- variables template
- if not specification.presets then specification.presets = presets end
- if not specification.queryfile then specification.queryfile = queryfile end
- if not specification.resultfile then specification.resultfile = queryfile end
- return m_execute(specification)
- end
- else
- execute = function(specification) -- variables template
- if not specification.presets then specification.presets = presets end
- return m_execute(specification)
- end
- end
- local function unpackdata(records,name)
- if records then
- name = name or "data"
- for i=1,#records do
- local record = records[i]
- local data = record[name]
- if data then
- record[name] = deserialize(data)
- end
- end
- end
- end
- return {
- presets = preset,
- base = base,
- name = name,
- basename = basename,
- execute = execute,
- serialize = serialize,
- deserialize = deserialize,
- unpackdata = unpackdata,
- }
- else
- report_state("missing name in usedatabase specification")
- end
-end
-
--- local data = utilities.sql.prepare {
--- templatefile = "test.sql",
--- variables = { },
--- host = "...",
--- username = "...",
--- password = "...",
--- database = "...",
--- }
-
--- local presets = {
--- host = "...",
--- username = "...",
--- password = "...",
--- database = "...",
--- }
---
--- local data = utilities.sql.prepare {
--- templatefile = "test.sql",
--- variables = { },
--- presets = presets,
--- }
-
--- local data = utilities.sql.prepare {
--- templatefile = "test.sql",
--- variables = { },
--- presets = dofile(...),
--- }
-
--- local data = utilities.sql.prepare {
--- templatefile = "test.sql",
--- variables = { },
--- presets = "...",
--- }
-
--- for i=1,10 do
--- local dummy = uuid() -- else same every time, don't ask
--- end
-
-sql.tokens = {
- length = 42, -- but in practice we will reserve some 50 characters
- new = function()
- return format("%s-%x06",osuuid(),random(0xFFFFF)) -- 36 + 1 + 6 = 42
- end,
-}
-
--- -- --
-
--- local func, code = sql.makeconverter {
--- { name = "a", type = "number" },
--- { name = "b", type = "string" },
--- { name = "c", type = "boolean" },
--- { name = "d", type = { x = "1" } },
--- { name = "e", type = os.fulltime },
--- }
---
--- print(code)
-
--- -- --
-
-if tex and tex.systemmodes then
-
- local droptable = table.drop
- local threshold = 16 * 1024 -- use slower but less memory hungry variant
-
- function sql.prepare(specification,tag)
- -- could go into tuc if needed
- -- todo: serialize per column
- local tag = tag or specification.tag or "last"
- local filename = format("%s-sql-result-%s.tuc",tex.jobname,tag)
- if tex.systemmodes["first"] then
- local data, keys = sql.execute(specification)
- if not data then
- data = { }
- end
- if not keys then
- keys = { }
- end
- io.savedata(filename,droptable({ data = data, keys = keys },#keys*#data>threshold))
- return data, keys
- else
- local result = table.load(filename)
- return result.data, result.keys
- end
- end
-
-else
-
- sql.prepare = sql.execute
-
-end
-
-return sql
+if not modules then modules = { } end modules ['util-sql'] = {
+ version = 1.001,
+ comment = "companion to m-sql.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- todo: templates as table (saves splitting)
+
+-- Of course we could use a library but we don't want another dependency and there is
+-- a bit of flux in these libraries. Also, we want the data back in a way that we
+-- like.
+--
+-- This is the first of a set of sql related modules that provide functionality
+-- for a web based framework that we use for typesetting (related) services. We're
+-- talking of session management, job ticket processing, storage, (xml) file processing
+-- and dealing with data from databases (often ambitiously called database publishing).
+--
+-- There is no generic solution for such services, but from our perspective, as we use
+-- context in a regular tds tree (the standard distribution) it makes sense to put shared
+-- code in the context distribution. That way we don't need to reinvent wheels every time.
+
+-- We use the template mechanism from util-tpl which in turn is just using the dos cq
+-- windows convention of %whatever% variables that I've used for ages.
+
+-- util-sql-imp-client.lua
+-- util-sql-imp-library.lua
+-- util-sql-imp-swiglib.lua
+-- util-sql-imp-lmxsql.lua
+
+-- local sql = require("util-sql")
+--
+-- local converter = sql.makeconverter {
+-- { name = "id", type = "number" },
+-- { name = "data",type = "string" },
+-- }
+--
+-- local execute = sql.methods.swiglib.execute
+-- -- local execute = sql.methods.library.execute
+-- -- local execute = sql.methods.client.execute
+-- -- local execute = sql.methods.lmxsql.execute
+--
+-- result = execute {
+-- presets = {
+-- host = "localhost",
+-- username = "root",
+-- password = "test",
+-- database = "test",
+-- id = "test", -- forces persistent session
+-- },
+-- template = "select * from `test` where `id` > %criterium% ;",
+-- variables = {
+-- criterium = 2,
+-- },
+-- converter = converter
+-- }
+--
+-- inspect(result)
+
+local format, match = string.format, string.match
+local random = math.random
+local rawset, setmetatable, getmetatable, load, type = rawset, setmetatable, getmetatable, load, type
+local P, S, V, C, Cs, Ct, Cc, Cg, Cf, patterns, lpegmatch = lpeg.P, lpeg.S, lpeg.V, lpeg.C, lpeg.Cs, lpeg.Ct, lpeg.Cc, lpeg.Cg, lpeg.Cf, lpeg.patterns, lpeg.match
+local concat = table.concat
+
+local osuuid = os.uuid
+local osclock = os.clock or os.time
+local ostime = os.time
+local setmetatableindex = table.setmetatableindex
+
+local trace_sql = false trackers.register("sql.trace", function(v) trace_sql = v end)
+local trace_queries = false trackers.register("sql.queries",function(v) trace_queries = v end)
+local report_state = logs.reporter("sql")
+
+-- trace_sql = true
+-- trace_queries = true
+
+utilities.sql = utilities.sql or { }
+local sql = utilities.sql
+
+local replacetemplate = utilities.templates.replace
+local loadtemplate = utilities.templates.load
+
+local methods = { }
+sql.methods = methods
+
+local helpers = { }
+sql.helpers = helpers
+
+local serialize = table.fastserialize
+local deserialize = table.deserialize
+
+sql.serialize = serialize
+sql.deserialize = deserialize
+
+helpers.serialize = serialize -- bonus
+helpers.deserialize = deserialize -- bonus
+
+local defaults = { __index =
+ {
+ resultfile = "result.dat",
+ templatefile = "template.sql",
+ queryfile = "query.sql",
+ variables = { },
+ username = "default",
+ password = "default",
+ host = "localhost",
+ port = 3306,
+ database = "default",
+ },
+}
+
+setmetatableindex(sql.methods,function(t,k)
+ report_state("start loading method %a",k)
+ require("util-sql-imp-"..k)
+ report_state("loading method %a done",k)
+ return rawget(t,k)
+end)
+
+-- converters
+
+local converters = { }
+sql.converters = converters
+
+local function makeconverter(entries,celltemplate,wraptemplate)
+ local shortcuts = { }
+ local assignments = { }
+ local key = false
+ for i=1,#entries do
+ local entry = entries[i]
+ local name = entry.name
+ local kind = entry.type or entry.kind
+ local value = format(celltemplate,i,i)
+ if kind == "boolean" then
+ assignments[#assignments+1] = format("[%q] = booleanstring(%s),",name,value)
+ elseif kind == "number" then
+ assignments[#assignments+1] = format("[%q] = tonumber(%s),",name,value)
+ elseif type(kind) == "function" then
+ local c = #converters + 1
+ converters[c] = kind
+ shortcuts[#shortcuts+1] = format("local fun_%s = converters[%s]",c,c)
+ assignments[#assignments+1] = format("[%q] = fun_%s(%s),",name,c,value)
+ elseif type(kind) == "table" then
+ local c = #converters + 1
+ converters[c] = kind
+ shortcuts[#shortcuts+1] = format("local tab_%s = converters[%s]",c,c)
+ assignments[#assignments+1] = format("[%q] = tab_%s[%s],",name,#converters,value)
+ elseif kind == "deserialize" then
+ assignments[#assignments+1] = format("[%q] = deserialize(%s),",name,value)
+ elseif kind == "key" then
+ -- hashed instead of indexed
+ key = value
+ elseif kind == "entry" then
+ -- so we can (efficiently) extend the hashed table
+ local default = entry.default or ""
+ if type(default) == "string" then
+ assignments[#assignments+1] = format("[%q] = %q,",name,default)
+ else
+ assignments[#assignments+1] = format("[%q] = %s,",name,tostring(default))
+ end
+ else
+ assignments[#assignments+1] = format("[%q] = %s,",name,value)
+ end
+ end
+ local code = format(wraptemplate,concat(shortcuts,"\n"),key and "{ }" or "data",key or "i",concat(assignments,"\n "))
+ -- print(code)
+ local func = load(code)
+ return func and func()
+end
+
+function sql.makeconverter(entries)
+ local fields = { }
+ for i=1,#entries do
+ fields[i] = format("`%s`",entries[i].name)
+ end
+ fields = concat(fields, ", ")
+ local converter = {
+ fields = fields
+ }
+ setmetatableindex(converter, function(t,k)
+ local sqlmethod = methods[k]
+ local v = makeconverter(entries,sqlmethod.celltemplate,sqlmethod.wraptemplate)
+ t[k] = v
+ return v
+ end)
+ return converter, fields
+end
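+
+-- Besides the plain types, "key" hashes the rows by that column and "entry"
+-- presets a field that is not delivered by the query; the column names below
+-- are just an illustration:
+--
+-- local converter, fields = sql.makeconverter {
+--     { name = "name",   type = "key" },                        -- result[row.name] = row
+--     { name = "count",  type = "number" },
+--     { name = "status", type = "entry", default = "pending" }, -- preset, cheap to overwrite later
+-- }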
+
+-- helper for libraries:
+
+local function validspecification(specification)
+ local presets = specification.presets
+ if type(presets) == "string" then
+ presets = dofile(presets)
+ end
+ if type(presets) == "table" then
+ setmetatable(presets,defaults)
+ setmetatable(specification,{ __index = presets })
+ else
+ setmetatable(specification,defaults)
+ end
+ return true
+end
+
+helpers.validspecification = validspecification
+
+local whitespace = patterns.whitespace^0
+local eol = patterns.eol
+local separator = P(";")
+local escaped = patterns.escaped
+local dquote = patterns.dquote
+local squote = patterns.squote
+local dsquote = squote * squote
+---- quoted = patterns.quoted
+local quoted = dquote * (escaped + (1-dquote))^0 * dquote
+ + squote * (escaped + dsquote + (1-squote))^0 * squote
+local comment = P("--") * (1-eol) / ""
+local query = whitespace
+ * Cs((quoted + comment + 1 - separator)^1 * Cc(";"))
+ * whitespace
+local splitter = Ct(query * (separator * query)^0)
+
+helpers.querysplitter = splitter
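+
+-- A quick check of the splitter; lpegmatch is the alias for lpeg.match defined
+-- at the top of this file:
+--
+-- local t = lpegmatch(splitter,"select * from `a` ; update `a` set `x` = ';' ;")
+-- -- t[1] and t[2] now hold the two statements; the quoted ';' does not split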
+
+-- I will add a bit more checking.
+
+local function validspecification(specification)
+ local presets = specification.presets
+ if type(presets) == "string" then
+ presets = dofile(presets)
+ end
+ if type(presets) == "table" then
+ local m = getmetatable(presets)
+ if m then
+ setmetatable(m,defaults)
+ else
+ setmetatable(presets,defaults)
+ end
+ setmetatable(specification,{ __index = presets })
+ else
+ setmetatable(specification,defaults)
+ end
+ local templatefile = specification.templatefile or "query"
+ local queryfile = specification.queryfile or presets.queryfile or file.nameonly(templatefile) .. "-temp.sql"
+ local resultfile = specification.resultfile or presets.resultfile or file.nameonly(templatefile) .. "-temp.dat"
+ specification.queryfile = queryfile
+ specification.resultfile = resultfile
+ if trace_sql then
+ report_state("template file: %s",templatefile or "")
+ report_state("query file: %s",queryfile)
+ report_state("result file: %s",resultfile)
+ end
+ return true
+end
+
+local function preparetemplate(specification)
+ local template = specification.template
+ if template then
+ local query = replacetemplate(template,specification.variables,'sql')
+ if not query then
+ report_state("error in template: %s",template)
+ elseif trace_queries then
+ report_state("query from template: %s",query)
+ end
+ return query
+ end
+ local templatefile = specification.templatefile
+ if templatefile then
+ local query = loadtemplate(templatefile,specification.variables,'sql')
+ if not query then
+ report_state("error in template file %a",templatefile)
+ elseif trace_queries then
+ report_state("query from template file %a: %s",templatefile,query)
+ end
+ return query
+ end
+ report_state("no query template or templatefile")
+end
+
+helpers.preparetemplate = preparetemplate
+
+-- -- -- we delay setting this -- -- --
+
+local currentmethod
+
+local function firstexecute(...)
+ local execute = methods[currentmethod].execute
+ sql.execute = execute
+ return execute(...)
+end
+
+function sql.setmethod(method)
+ currentmethod = method
+ sql.execute = firstexecute
+end
+
+sql.setmethod("library")
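+
+-- The implementation is only require'd when the first query runs, so another
+-- backend can still be selected before that point, for instance:
+--
+-- sql.setmethod("client") -- or "swiglib" or "lmxsql", depending on what is available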
+
+-- helper:
+
+function sql.usedatabase(presets,datatable)
+ local name = datatable or presets.datatable
+ if name then
+ local method = presets.method and sql.methods[presets.method] or sql.methods.client
+ local base = presets.database or "test"
+ local basename = format("`%s`.`%s`",base,name)
+ local execute = nil
+ local m_execute = method.execute
+ if method.usesfiles then
+ local queryfile = presets.queryfile or format("%s-temp.sql",name)
+ local resultfile = presets.resultfile or format("%s-temp.dat",name)
+ execute = function(specification) -- variables template
+ if not specification.presets then specification.presets = presets end
+ if not specification.queryfile then specification.queryfile = queryfile end
+                if not specification.resultfile then specification.resultfile = resultfile end
+ return m_execute(specification)
+ end
+ else
+ execute = function(specification) -- variables template
+ if not specification.presets then specification.presets = presets end
+ return m_execute(specification)
+ end
+ end
+ local function unpackdata(records,name)
+ if records then
+ name = name or "data"
+ for i=1,#records do
+ local record = records[i]
+ local data = record[name]
+ if data then
+ record[name] = deserialize(data)
+ end
+ end
+ end
+ end
+ return {
+            presets      = presets,
+ base = base,
+ name = name,
+ basename = basename,
+ execute = execute,
+ serialize = serialize,
+ deserialize = deserialize,
+ unpackdata = unpackdata,
+ }
+ else
+ report_state("missing name in usedatabase specification")
+ end
+end
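+
+-- A usage sketch; the presets values and the datatable name are examples:
+--
+-- local db = sql.usedatabase (
+--     { host = "localhost", username = "root", password = "...", database = "test" },
+--     "tickets"
+-- )
+--
+-- local records = db.execute {
+--     template  = "SELECT * FROM %basename% LIMIT 10 ;",
+--     variables = { basename = db.basename },
+-- }
+--
+-- db.unpackdata(records) -- deserializes the data column of each record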
+
+-- local data = utilities.sql.prepare {
+-- templatefile = "test.sql",
+-- variables = { },
+-- host = "...",
+-- username = "...",
+-- password = "...",
+-- database = "...",
+-- }
+
+-- local presets = {
+-- host = "...",
+-- username = "...",
+-- password = "...",
+-- database = "...",
+-- }
+--
+-- local data = utilities.sql.prepare {
+-- templatefile = "test.sql",
+-- variables = { },
+-- presets = presets,
+-- }
+
+-- local data = utilities.sql.prepare {
+-- templatefile = "test.sql",
+-- variables = { },
+-- presets = dofile(...),
+-- }
+
+-- local data = utilities.sql.prepare {
+-- templatefile = "test.sql",
+-- variables = { },
+-- presets = "...",
+-- }
+
+-- for i=1,10 do
+-- local dummy = uuid() -- else same every time, don't ask
+-- end
+
+sql.tokens = {
+ length = 42, -- but in practice we will reserve some 50 characters
+ new = function()
+        return format("%s-%06x",osuuid(),random(0xFFFFF)) -- 36 + 1 + 6 = 42
+ end,
+}
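+
+-- Such a token is a uuid plus a short random hex suffix, for instance:
+--
+-- local token = sql.tokens.new() -- some 42 characters, good enough for job tickets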
+
+-- -- --
+
+-- local func, code = sql.makeconverter {
+-- { name = "a", type = "number" },
+-- { name = "b", type = "string" },
+-- { name = "c", type = "boolean" },
+-- { name = "d", type = { x = "1" } },
+-- { name = "e", type = os.fulltime },
+-- }
+--
+-- print(code)
+
+-- -- --
+
+if tex and tex.systemmodes then
+
+ local droptable = table.drop
+ local threshold = 16 * 1024 -- use slower but less memory hungry variant
+
+ function sql.prepare(specification,tag)
+ -- could go into tuc if needed
+ -- todo: serialize per column
+ local tag = tag or specification.tag or "last"
+ local filename = format("%s-sql-result-%s.tuc",tex.jobname,tag)
+ if tex.systemmodes["first"] then
+ local data, keys = sql.execute(specification)
+ if not data then
+ data = { }
+ end
+ if not keys then
+ keys = { }
+ end
+ io.savedata(filename,droptable({ data = data, keys = keys },#keys*#data>threshold))
+ return data, keys
+ else
+ local result = table.load(filename)
+ return result.data, result.keys
+ end
+ end
+
+else
+
+ sql.prepare = sql.execute
+
+end
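+
+-- In a two pass run the query result is saved to disk on the first pass so the
+-- second pass sees exactly the same data; a sketch, with made up presets:
+--
+-- local data, keys = sql.prepare ({
+--     presets  = presets,
+--     template = "SELECT * FROM `test` ;",
+-- }, "mytag")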
+
+return sql
diff --git a/tex/context/base/util-sta.lua b/tex/context/base/util-sta.lua
index 1a61ec4e6..1ea713a76 100644
--- a/tex/context/base/util-sta.lua
+++ b/tex/context/base/util-sta.lua
@@ -1,342 +1,342 @@
-if not modules then modules = { } end modules ['util-sta'] = {
- version = 1.001,
- comment = "companion to util-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
-local format = string.format
-local select, tostring = select, tostring
-
-local trace_stacker = false trackers.register("stacker.resolve", function(v) trace_stacker = v end)
-
-local stacker = stacker or { }
-
-utilities.stacker = stacker
-
-local function start(s,t,first,last)
- if s.mode == "switch" then
- local n = tostring(t[last])
- if trace_stacker then
- s.report("start: %s",n)
- end
- return n
- else
- local r = { }
- for i=first,last do
- r[#r+1] = tostring(t[i])
- end
- local n = concat(r," ")
- if trace_stacker then
- s.report("start: %s",n)
- end
- return n
- end
-end
-
-local function stop(s,t,first,last)
- if s.mode == "switch" then
- local n = tostring(false)
- if trace_stacker then
- s.report("stop: %s",n)
- end
- return n
- else
- local r = { }
- for i=last,first,-1 do
- r[#r+1] = tostring(false)
- end
- local n = concat(r," ")
- if trace_stacker then
- s.report("stop: %s",n)
- end
- return n
- end
-end
-
-local function change(s,t1,first1,last1,t2,first2,last2)
- if s.mode == "switch" then
- local n = tostring(t2[last2])
- if trace_stacker then
- s.report("change: %s",n)
- end
- return n
- else
- local r = { }
- for i=last1,first1,-1 do
- r[#r+1] = tostring(false)
- end
- local n = concat(r," ")
- for i=first2,last2 do
- r[#r+1] = tostring(t2[i])
- end
- if trace_stacker then
- s.report("change: %s",n)
- end
- return n
- end
-end
-
-function stacker.new(name)
-
- local s
-
- local stack = { }
- local list = { }
- local ids = { }
- local hash = { }
-
- local hashing = true
-
- local function push(...)
- for i=1,select("#",...) do
- insert(stack,(select(i,...))) -- watch the ()
- end
- if hashing then
- local c = concat(stack,"|")
- local n = hash[c]
- if not n then
- n = #list+1
- hash[c] = n
- list[n] = fastcopy(stack)
- end
- insert(ids,n)
- return n
- else
- local n = #list+1
- list[n] = fastcopy(stack)
- insert(ids,n)
- return n
- end
- end
-
- local function pop()
- remove(stack)
- remove(ids)
- return ids[#ids] or s.unset or -1
- end
-
- local function clean()
- if #stack == 0 then
- if trace_stacker then
- s.report("%s list entries, %s stack entries",#list,#stack)
- end
- end
- end
-
- local tops = { }
- local top, switch
-
- local function resolve_begin(mode)
- if mode then
- switch = mode == "switch"
- else
- switch = s.mode == "switch"
- end
- top = { switch = switch }
- insert(tops,top)
- end
-
- local function resolve_step(ti) -- keep track of changes outside function !
- -- todo: optimize for n=1 etc
- local result = nil
- local noftop = #top
- if ti > 0 then
- local current = list[ti]
- if current then
- local noflist = #current
- local nofsame = 0
- if noflist > noftop then
- for i=1,noflist do
- if current[i] == top[i] then
- nofsame = i
- else
- break
- end
- end
- else
- for i=1,noflist do
- if current[i] == top[i] then
- nofsame = i
- else
- break
- end
- end
- end
- local plus = nofsame + 1
- if plus <= noftop then
- if plus <= noflist then
- if switch then
- result = s.change(s,top,plus,noftop,current,nofsame,noflist)
- else
- result = s.change(s,top,plus,noftop,current,plus,noflist)
- end
- else
- if switch then
- result = s.change(s,top,plus,noftop,current,nofsame,noflist)
- else
- result = s.stop(s,top,plus,noftop)
- end
- end
- elseif plus <= noflist then
- if switch then
- result = s.start(s,current,nofsame,noflist)
- else
- result = s.start(s,current,plus,noflist)
- end
- end
- top = current
- else
- if 1 <= noftop then
- result = s.stop(s,top,1,noftop)
- end
- top = { }
- end
- return result
- else
- if 1 <= noftop then
- result = s.stop(s,top,1,noftop)
- end
- top = { }
- return result
- end
- end
-
- local function resolve_end()
- -- resolve_step(s.unset)
- local noftop = #top
- if noftop > 0 then
- local result = s.stop(s,top,1,#top)
- remove(tops)
- top = tops[#tops]
- switch = top and top.switch
- return result
- end
- end
-
- local function resolve(t)
- resolve_begin()
- for i=1,#t do
- resolve_step(t[i])
- end
- resolve_end()
- end
-
- local report = logs.reporter("stacker",name or nil)
-
- s = {
- name = name or "unknown",
- unset = -1,
- report = report,
- start = start,
- stop = stop,
- change = change,
- push = push,
- pop = pop,
- clean = clean,
- resolve = resolve,
- resolve_begin = resolve_begin,
- resolve_step = resolve_step,
- resolve_end = resolve_end,
- }
-
- return s -- we can overload functions
-
-end
-
--- local s = utilities.stacker.new("demo")
---
--- local unset = s.unset
--- local push = s.push
--- local pop = s.pop
---
--- local t = {
--- unset,
--- unset,
--- push("a"), -- a
--- push("b","c"), -- a b c
--- pop(), -- a b
--- push("d"), -- a b d
--- pop(), -- a b
--- unset,
--- pop(), -- a
--- pop(), -- b
--- unset,
--- unset,
--- }
---
--- s.resolve(t)
-
--- demostacker = utilities.stacker.new("demos")
---
--- local whatever = {
--- one = "1 0 0 RG 1 0 0 rg",
--- two = "1 1 0 RG 1 1 0 rg",
--- [false] = "0 G 0 g",
--- }
---
--- local concat = table.concat
---
--- local pdfliteral = nodes.pool.pdfliteral
---
--- function demostacker.start(s,t,first,last)
--- local n = whatever[t[last]]
--- -- s.report("start: %s",n)
--- return pdfliteral(n)
--- end
---
--- function demostacker.stop(s,t,first,last)
--- local n = whatever[false]
--- -- s.report("stop: %s",n)
--- return pdfliteral(n)
--- end
---
--- function demostacker.change(s,t1,first1,last1,t2,first2,last2)
--- local n = whatever[t2[last2]]
--- -- s.report("change: %s",n)
--- return pdfliteral(n)
--- end
---
--- demostacker.mode = "switch"
---
--- local whatever = {
--- one = "/OC /test1 BDC",
--- two = "/OC /test2 BDC",
--- [false] = "EMC",
--- }
---
--- demostacker = utilities.stacker.new("demos")
---
--- function demostacker.start(s,t,first,last)
--- local r = { }
--- for i=first,last do
--- r[#r+1] = whatever[t[i]]
--- end
--- -- s.report("start: %s",concat(r," "))
--- return pdfliteral(concat(r," "))
--- end
---
--- function demostacker.stop(s,t,first,last)
--- local r = { }
--- for i=last,first,-1 do
--- r[#r+1] = whatever[false]
--- end
--- -- s.report("stop: %s",concat(r," "))
--- return pdfliteral(concat(r," "))
--- end
---
--- function demostacker.change(s,t1,first1,last1,t2,first2,last2)
--- local r = { }
--- for i=last1,first1,-1 do
--- r[#r+1] = whatever[false]
--- end
--- for i=first2,last2 do
--- r[#r+1] = whatever[t2[i]]
--- end
--- -- s.report("change: %s",concat(r," "))
--- return pdfliteral(concat(r," "))
--- end
---
--- demostacker.mode = "stack"
+if not modules then modules = { } end modules ['util-sta'] = {
+ version = 1.001,
+ comment = "companion to util-ini.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local insert, remove, fastcopy, concat = table.insert, table.remove, table.fastcopy, table.concat
+local format = string.format
+local select, tostring = select, tostring
+
+local trace_stacker = false trackers.register("stacker.resolve", function(v) trace_stacker = v end)
+
+local stacker = stacker or { }
+
+utilities.stacker = stacker
+
+local function start(s,t,first,last)
+ if s.mode == "switch" then
+ local n = tostring(t[last])
+ if trace_stacker then
+ s.report("start: %s",n)
+ end
+ return n
+ else
+ local r = { }
+ for i=first,last do
+ r[#r+1] = tostring(t[i])
+ end
+ local n = concat(r," ")
+ if trace_stacker then
+ s.report("start: %s",n)
+ end
+ return n
+ end
+end
+
+local function stop(s,t,first,last)
+ if s.mode == "switch" then
+ local n = tostring(false)
+ if trace_stacker then
+ s.report("stop: %s",n)
+ end
+ return n
+ else
+ local r = { }
+ for i=last,first,-1 do
+ r[#r+1] = tostring(false)
+ end
+ local n = concat(r," ")
+ if trace_stacker then
+ s.report("stop: %s",n)
+ end
+ return n
+ end
+end
+
+local function change(s,t1,first1,last1,t2,first2,last2)
+ if s.mode == "switch" then
+ local n = tostring(t2[last2])
+ if trace_stacker then
+ s.report("change: %s",n)
+ end
+ return n
+ else
+ local r = { }
+ for i=last1,first1,-1 do
+ r[#r+1] = tostring(false)
+ end
+ local n = concat(r," ")
+ for i=first2,last2 do
+ r[#r+1] = tostring(t2[i])
+ end
+ if trace_stacker then
+ s.report("change: %s",n)
+ end
+ return n
+ end
+end
+
+function stacker.new(name)
+
+ local s
+
+ local stack = { }
+ local list = { }
+ local ids = { }
+ local hash = { }
+
+ local hashing = true
+
+ local function push(...)
+ for i=1,select("#",...) do
+ insert(stack,(select(i,...))) -- watch the ()
+ end
+ if hashing then
+ local c = concat(stack,"|")
+ local n = hash[c]
+ if not n then
+ n = #list+1
+ hash[c] = n
+ list[n] = fastcopy(stack)
+ end
+ insert(ids,n)
+ return n
+ else
+ local n = #list+1
+ list[n] = fastcopy(stack)
+ insert(ids,n)
+ return n
+ end
+ end
+
+ local function pop()
+ remove(stack)
+ remove(ids)
+ return ids[#ids] or s.unset or -1
+ end
+
+ local function clean()
+ if #stack == 0 then
+ if trace_stacker then
+ s.report("%s list entries, %s stack entries",#list,#stack)
+ end
+ end
+ end
+
+ local tops = { }
+ local top, switch
+
+ local function resolve_begin(mode)
+ if mode then
+ switch = mode == "switch"
+ else
+ switch = s.mode == "switch"
+ end
+ top = { switch = switch }
+ insert(tops,top)
+ end
+
+ local function resolve_step(ti) -- keep track of changes outside function !
+ -- todo: optimize for n=1 etc
+ local result = nil
+ local noftop = #top
+ if ti > 0 then
+ local current = list[ti]
+ if current then
+ local noflist = #current
+ local nofsame = 0
+ if noflist > noftop then
+ for i=1,noflist do
+ if current[i] == top[i] then
+ nofsame = i
+ else
+ break
+ end
+ end
+ else
+ for i=1,noflist do
+ if current[i] == top[i] then
+ nofsame = i
+ else
+ break
+ end
+ end
+ end
+ local plus = nofsame + 1
+ if plus <= noftop then
+ if plus <= noflist then
+ if switch then
+ result = s.change(s,top,plus,noftop,current,nofsame,noflist)
+ else
+ result = s.change(s,top,plus,noftop,current,plus,noflist)
+ end
+ else
+ if switch then
+ result = s.change(s,top,plus,noftop,current,nofsame,noflist)
+ else
+ result = s.stop(s,top,plus,noftop)
+ end
+ end
+ elseif plus <= noflist then
+ if switch then
+ result = s.start(s,current,nofsame,noflist)
+ else
+ result = s.start(s,current,plus,noflist)
+ end
+ end
+ top = current
+ else
+ if 1 <= noftop then
+ result = s.stop(s,top,1,noftop)
+ end
+ top = { }
+ end
+ return result
+ else
+ if 1 <= noftop then
+ result = s.stop(s,top,1,noftop)
+ end
+ top = { }
+ return result
+ end
+ end
+
+ local function resolve_end()
+ -- resolve_step(s.unset)
+ local noftop = #top
+ if noftop > 0 then
+ local result = s.stop(s,top,1,#top)
+ remove(tops)
+ top = tops[#tops]
+ switch = top and top.switch
+ return result
+ end
+ end
+
+ local function resolve(t)
+ resolve_begin()
+ for i=1,#t do
+ resolve_step(t[i])
+ end
+ resolve_end()
+ end
+
+ local report = logs.reporter("stacker",name or nil)
+
+ s = {
+ name = name or "unknown",
+ unset = -1,
+ report = report,
+ start = start,
+ stop = stop,
+ change = change,
+ push = push,
+ pop = pop,
+ clean = clean,
+ resolve = resolve,
+ resolve_begin = resolve_begin,
+ resolve_step = resolve_step,
+ resolve_end = resolve_end,
+ }
+
+ return s -- we can overload functions
+
+end
+
+-- local s = utilities.stacker.new("demo")
+--
+-- local unset = s.unset
+-- local push = s.push
+-- local pop = s.pop
+--
+-- local t = {
+-- unset,
+-- unset,
+-- push("a"), -- a
+-- push("b","c"), -- a b c
+-- pop(), -- a b
+-- push("d"), -- a b d
+-- pop(), -- a b
+-- unset,
+-- pop(), -- a
+-- pop(), -- b
+-- unset,
+-- unset,
+-- }
+--
+-- s.resolve(t)
+
+-- demostacker = utilities.stacker.new("demos")
+--
+-- local whatever = {
+-- one = "1 0 0 RG 1 0 0 rg",
+-- two = "1 1 0 RG 1 1 0 rg",
+-- [false] = "0 G 0 g",
+-- }
+--
+-- local concat = table.concat
+--
+-- local pdfliteral = nodes.pool.pdfliteral
+--
+-- function demostacker.start(s,t,first,last)
+-- local n = whatever[t[last]]
+-- -- s.report("start: %s",n)
+-- return pdfliteral(n)
+-- end
+--
+-- function demostacker.stop(s,t,first,last)
+-- local n = whatever[false]
+-- -- s.report("stop: %s",n)
+-- return pdfliteral(n)
+-- end
+--
+-- function demostacker.change(s,t1,first1,last1,t2,first2,last2)
+-- local n = whatever[t2[last2]]
+-- -- s.report("change: %s",n)
+-- return pdfliteral(n)
+-- end
+--
+-- demostacker.mode = "switch"
+--
+-- local whatever = {
+-- one = "/OC /test1 BDC",
+-- two = "/OC /test2 BDC",
+-- [false] = "EMC",
+-- }
+--
+-- demostacker = utilities.stacker.new("demos")
+--
+-- function demostacker.start(s,t,first,last)
+-- local r = { }
+-- for i=first,last do
+-- r[#r+1] = whatever[t[i]]
+-- end
+-- -- s.report("start: %s",concat(r," "))
+-- return pdfliteral(concat(r," "))
+-- end
+--
+-- function demostacker.stop(s,t,first,last)
+-- local r = { }
+-- for i=last,first,-1 do
+-- r[#r+1] = whatever[false]
+-- end
+-- -- s.report("stop: %s",concat(r," "))
+-- return pdfliteral(concat(r," "))
+-- end
+--
+-- function demostacker.change(s,t1,first1,last1,t2,first2,last2)
+-- local r = { }
+-- for i=last1,first1,-1 do
+-- r[#r+1] = whatever[false]
+-- end
+-- for i=first2,last2 do
+-- r[#r+1] = whatever[t2[i]]
+-- end
+-- -- s.report("change: %s",concat(r," "))
+-- return pdfliteral(concat(r," "))
+-- end
+--
+-- demostacker.mode = "stack"
diff --git a/tex/context/base/util-sto.lua b/tex/context/base/util-sto.lua
index 191d6cd73..355f0ecd3 100644
--- a/tex/context/base/util-sto.lua
+++ b/tex/context/base/util-sto.lua
@@ -1,189 +1,189 @@
-if not modules then modules = { } end modules ['util-sto'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local setmetatable, getmetatable, type = setmetatable, getmetatable, type
-
-utilities = utilities or { }
-utilities.storage = utilities.storage or { }
-local storage = utilities.storage
-
-function storage.mark(t)
- if not t then
- print("\nfatal error: storage cannot be marked\n")
- os.exit()
- return
- end
- local m = getmetatable(t)
- if not m then
- m = { }
- setmetatable(t,m)
- end
- m.__storage__ = true
- return t
-end
-
-function storage.allocate(t)
- t = t or { }
- local m = getmetatable(t)
- if not m then
- m = { }
- setmetatable(t,m)
- end
- m.__storage__ = true
- return t
-end
-
-function storage.marked(t)
- local m = getmetatable(t)
- return m and m.__storage__
-end
-
-function storage.checked(t)
- if not t then
- report("\nfatal error: storage has not been allocated\n")
- os.exit()
- return
- end
- return t
-end
-
--- function utilities.storage.delay(parent,name,filename)
--- local m = getmetatable(parent)
--- m.__list[name] = filename
--- end
---
--- function utilities.storage.predefine(parent)
--- local list = { }
--- local m = getmetatable(parent) or {
--- __list = list,
--- __index = function(t,k)
--- local l = require(list[k])
--- t[k] = l
--- return l
--- end
--- }
--- setmetatable(parent,m)
--- end
---
--- bla = { }
--- utilities.storage.predefine(bla)
--- utilities.storage.delay(bla,"test","oepsoeps")
--- local t = bla.test
--- table.print(t)
--- print(t.a)
-
-function storage.setinitializer(data,initialize)
- local m = getmetatable(data) or { }
- m.__index = function(data,k)
- m.__index = nil -- so that we can access the entries during initializing
- initialize()
- return data[k]
- end
- setmetatable(data, m)
-end
-
-local keyisvalue = { __index = function(t,k)
- t[k] = k
- return k
-end }
-
-function storage.sparse(t)
- t = t or { }
- setmetatable(t,keyisvalue)
- return t
-end
-
--- table namespace ?
-
-local function f_empty () return "" end -- t,k
-local function f_self (t,k) t[k] = k return k end
-local function f_table (t,k) local v = { } t[k] = v return v end
-local function f_ignore() end -- t,k,v
-
-local t_empty = { __index = f_empty }
-local t_self = { __index = f_self }
-local t_table = { __index = f_table }
-local t_ignore = { __newindex = f_ignore }
-
-function table.setmetatableindex(t,f)
- if type(t) ~= "table" then
- f, t = t, { }
- end
- local m = getmetatable(t)
- if m then
- if f == "empty" then
- m.__index = f_empty
- elseif f == "key" then
- m.__index = f_self
- elseif f == "table" then
- m.__index = f_table
- else
- m.__index = f
- end
- else
- if f == "empty" then
- setmetatable(t, t_empty)
- elseif f == "key" then
- setmetatable(t, t_self)
- elseif f == "table" then
- setmetatable(t, t_table)
- else
- setmetatable(t,{ __index = f })
- end
- end
- return t
-end
-
-function table.setmetatablenewindex(t,f)
- if type(t) ~= "table" then
- f, t = t, { }
- end
- local m = getmetatable(t)
- if m then
- if f == "ignore" then
- m.__newindex = f_ignore
- else
- m.__newindex = f
- end
- else
- if f == "ignore" then
- setmetatable(t, t_ignore)
- else
- setmetatable(t,{ __newindex = f })
- end
- end
- return t
-end
-
-function table.setmetatablecall(t,f)
- if type(t) ~= "table" then
- f, t = t, { }
- end
- local m = getmetatable(t)
- if m then
- m.__call = f
- else
- setmetatable(t,{ __call = f })
- end
- return t
-end
-
-function table.setmetatablekey(t,key,value)
- local m = getmetatable(t)
- if not m then
- m = { }
- setmetatable(t,m)
- end
- m[key] = value
- return t
-end
-
-function table.getmetatablekey(t,key,value)
- local m = getmetatable(t)
- return m and m[key]
-end
+if not modules then modules = { } end modules ['util-sto'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+local setmetatable, getmetatable, type = setmetatable, getmetatable, type
+
+utilities = utilities or { }
+utilities.storage = utilities.storage or { }
+local storage = utilities.storage
+
+function storage.mark(t)
+ if not t then
+ print("\nfatal error: storage cannot be marked\n")
+ os.exit()
+ return
+ end
+ local m = getmetatable(t)
+ if not m then
+ m = { }
+ setmetatable(t,m)
+ end
+ m.__storage__ = true
+ return t
+end
+
+function storage.allocate(t)
+ t = t or { }
+ local m = getmetatable(t)
+ if not m then
+ m = { }
+ setmetatable(t,m)
+ end
+ m.__storage__ = true
+ return t
+end
+
+function storage.marked(t)
+ local m = getmetatable(t)
+ return m and m.__storage__
+end
+
+function storage.checked(t)
+ if not t then
+ report("\nfatal error: storage has not been allocated\n")
+ os.exit()
+ return
+ end
+ return t
+end
+
+-- function utilities.storage.delay(parent,name,filename)
+-- local m = getmetatable(parent)
+-- m.__list[name] = filename
+-- end
+--
+-- function utilities.storage.predefine(parent)
+-- local list = { }
+-- local m = getmetatable(parent) or {
+-- __list = list,
+-- __index = function(t,k)
+-- local l = require(list[k])
+-- t[k] = l
+-- return l
+-- end
+-- }
+-- setmetatable(parent,m)
+-- end
+--
+-- bla = { }
+-- utilities.storage.predefine(bla)
+-- utilities.storage.delay(bla,"test","oepsoeps")
+-- local t = bla.test
+-- table.print(t)
+-- print(t.a)
+
+function storage.setinitializer(data,initialize)
+ local m = getmetatable(data) or { }
+ m.__index = function(data,k)
+ m.__index = nil -- so that we can access the entries during initializing
+ initialize()
+ return data[k]
+ end
+ setmetatable(data, m)
+end
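+
+-- A quick usage sketch (illustration only; the field name is made up): the first
+-- access to a missing key runs the initializer once and then reads from the now
+-- filled table.
+--
+-- local data = { }
+-- storage.setinitializer(data, function()
+-- data.answer = 42 -- pretend this loads the stored content
+-- end)
+-- print(data.answer) -- triggers the initializer and prints 42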
+
+local keyisvalue = { __index = function(t,k)
+ t[k] = k
+ return k
+end }
+
+function storage.sparse(t)
+ t = t or { }
+ setmetatable(t,keyisvalue)
+ return t
+end
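+
+-- For illustration (made-up keys): missing entries yield, and cache, the key
+-- itself, which is handy for identity-like mappings.
+--
+-- local map = storage.sparse { foo = "bar" }
+-- print(map.foo) -- "bar" (explicitly stored)
+-- print(map.oof) -- "oof" (unknown keys map onto themselves)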
+
+-- table namespace ?
+
+local function f_empty () return "" end -- t,k
+local function f_self (t,k) t[k] = k return k end
+local function f_table (t,k) local v = { } t[k] = v return v end
+local function f_ignore() end -- t,k,v
+
+local t_empty = { __index = f_empty }
+local t_self = { __index = f_self }
+local t_table = { __index = f_table }
+local t_ignore = { __newindex = f_ignore }
+
+function table.setmetatableindex(t,f)
+ if type(t) ~= "table" then
+ f, t = t, { }
+ end
+ local m = getmetatable(t)
+ if m then
+ if f == "empty" then
+ m.__index = f_empty
+ elseif f == "key" then
+ m.__index = f_self
+ elseif f == "table" then
+ m.__index = f_table
+ else
+ m.__index = f
+ end
+ else
+ if f == "empty" then
+ setmetatable(t, t_empty)
+ elseif f == "key" then
+ setmetatable(t, t_self)
+ elseif f == "table" then
+ setmetatable(t, t_table)
+ else
+ setmetatable(t,{ __index = f })
+ end
+ end
+ return t
+end
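+
+-- A small sketch of the string shortcuts (illustrative keys): "table" gives
+-- autovivifying subtables, "key" echoes the key, "empty" returns "".
+--
+-- local t = table.setmetatableindex({ },"table")
+-- t.foo.bar = 1 -- t.foo springs into existence
+-- local s = table.setmetatableindex({ },"key")
+-- print(s.whatever) -- "whatever"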
+
+function table.setmetatablenewindex(t,f)
+ if type(t) ~= "table" then
+ f, t = t, { }
+ end
+ local m = getmetatable(t)
+ if m then
+ if f == "ignore" then
+ m.__newindex = f_ignore
+ else
+ m.__newindex = f
+ end
+ else
+ if f == "ignore" then
+ setmetatable(t, t_ignore)
+ else
+ setmetatable(t,{ __newindex = f })
+ end
+ end
+ return t
+end
+
+function table.setmetatablecall(t,f)
+ if type(t) ~= "table" then
+ f, t = t, { }
+ end
+ local m = getmetatable(t)
+ if m then
+ m.__call = f
+ else
+ setmetatable(t,{ __call = f })
+ end
+ return t
+end
+
+function table.setmetatablekey(t,key,value)
+ local m = getmetatable(t)
+ if not m then
+ m = { }
+ setmetatable(t,m)
+ end
+ m[key] = value
+ return t
+end
+
+function table.getmetatablekey(t,key,value)
+ local m = getmetatable(t)
+ return m and m[key]
+end
diff --git a/tex/context/base/util-str.lua b/tex/context/base/util-str.lua
index 4890a11d6..f671b0012 100644
--- a/tex/context/base/util-str.lua
+++ b/tex/context/base/util-str.lua
@@ -1,766 +1,766 @@
-if not modules then modules = { } end modules ['util-str'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-utilities = utilities or {}
-utilities.strings = utilities.strings or { }
-local strings = utilities.strings
-
-local format, gsub, rep, sub = string.format, string.gsub, string.rep, string.sub
-local load, dump = load, string.dump
-local tonumber, type, tostring = tonumber, type, tostring
-local unpack, concat = table.unpack, table.concat
-local P, V, C, S, R, Ct, Cs, Cp, Carg, Cc = lpeg.P, lpeg.V, lpeg.C, lpeg.S, lpeg.R, lpeg.Ct, lpeg.Cs, lpeg.Cp, lpeg.Carg, lpeg.Cc
-local patterns, lpegmatch = lpeg.patterns, lpeg.match
-local utfchar, utfbyte = utf.char, utf.byte
------ loadstripped = utilities.lua.loadstripped
------ setmetatableindex = table.setmetatableindex
-
-local loadstripped = _LUAVERSION < 5.2 and load or function(str)
- return load(dump(load(str),true)) -- it only makes sense in luajit and luatex where we have a stripped load
-end
-
--- todo: make a special namespace for the formatter
-
-if not number then number = { } end -- temp hack for luatex-fonts
-
-local stripper = patterns.stripzeros
-
-local function points(n)
- return (not n or n == 0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536))
-end
-
-local function basepoints(n)
- return (not n or n == 0) and "0bp" or lpegmatch(stripper,format("%.5fbp", n*(7200/7227)/65536))
-end
-
-number.points = points
-number.basepoints = basepoints
-
--- str = " \n \ntest \n test\ntest "
--- print("["..string.gsub(string.collapsecrlf(str),"\n","+").."]")
-
-local rubish = patterns.spaceortab^0 * patterns.newline
-local anyrubish = patterns.spaceortab + patterns.newline
-local anything = patterns.anything
-local stripped = (patterns.spaceortab^1 / "") * patterns.newline
-local leading = rubish^0 / ""
-local trailing = (anyrubish^1 * patterns.endofstring) / ""
-local redundant = rubish^3 / "\n"
-
-local pattern = Cs(leading * (trailing + redundant + stripped + anything)^0)
-
-function strings.collapsecrlf(str)
- return lpegmatch(pattern,str)
-end
-
--- The following functions might end up in another namespace.
-
-local repeaters = { } -- watch how we also moved the -1 in depth-1 to the creator
-
-function strings.newrepeater(str,offset)
- offset = offset or 0
- local s = repeaters[str]
- if not s then
- s = { }
- repeaters[str] = s
- end
- local t = s[offset]
- if t then
- return t
- end
- t = { }
- setmetatable(t, { __index = function(t,k)
- if not k then
- return ""
- end
- local n = k + offset
- local s = n > 0 and rep(str,n) or ""
- t[k] = s
- return s
- end })
- s[offset] = t
- return t
-end
-
--- local dashes = strings.newrepeater("--",-1)
--- print(dashes[2],dashes[3],dashes[1])
-
-local extra, tab, start = 0, 0, 4, 0
-
-local nspaces = strings.newrepeater(" ")
-
-string.nspaces = nspaces
-
-local pattern =
- Carg(1) / function(t)
- extra, tab, start = 0, t or 7, 1
- end
- * Cs((
- Cp() * patterns.tab / function(position)
- local current = (position - start + 1) + extra
- local spaces = tab-(current-1) % tab
- if spaces > 0 then
- extra = extra + spaces - 1
- return nspaces[spaces] -- rep(" ",spaces)
- else
- return ""
- end
- end
- + patterns.newline * Cp() / function(position)
- extra, start = 0, position
- end
- + patterns.anything
- )^1)
-
-function strings.tabtospace(str,tab)
- return lpegmatch(pattern,str,1,tab or 7)
-end
-
--- local t = {
--- "1234567123456712345671234567",
--- "\tb\tc",
--- "a\tb\tc",
--- "aa\tbb\tcc",
--- "aaa\tbbb\tccc",
--- "aaaa\tbbbb\tcccc",
--- "aaaaa\tbbbbb\tccccc",
--- "aaaaaa\tbbbbbb\tcccccc\n aaaaaa\tbbbbbb\tcccccc",
--- "one\n two\nxxx three\nxx four\nx five\nsix",
--- }
--- for k=1,#t do
--- print(strings.tabtospace(t[k]))
--- end
-
-function strings.striplong(str) -- strips all leading spaces
- str = gsub(str,"^%s*","")
- str = gsub(str,"[\n\r]+ *","\n")
- return str
-end
-
--- local template = string.striplong([[
--- aaaa
--- bb
--- cccccc
--- ]])
-
-function strings.nice(str)
- str = gsub(str,"[:%-+_]+"," ") -- maybe more
- return str
-end
-
--- Work in progress. Interestingly, compared to the built-in format this is faster in
--- luatex than in luajittex, where the speed is comparable. It only makes sense to use
--- the formatter when a (somewhat) complex format is used a lot. Each formatter is a
--- function, so there is some overhead, and not all formatted output is worth that
--- overhead. Keep in mind that there is an extra function call involved. In principle
--- we end up with a string concatenation, so one could inline such a sequence, but often
--- at the cost of less readability. So, it's a sort of (visual) compromise. Of course
--- there is the benefit of more variants. (Concerning the speed: a simple format like
--- %05fpt is better off with format than with a formatter, but as soon as you put
--- something in front, formatters become faster. Passing the pt as an extra argument
--- makes formatters behave better. Of course this is rather implementation dependent.
--- Also, when a specific format is only used a few times, the overhead in creating it
--- is not compensated by the speed gain.)
---
--- More info can be found in cld-mkiv.pdf so here I stick to a simple list.
---
--- integer %...i number
--- integer %...d number
--- unsigned %...u number
--- character %...c number
--- hexadecimal %...x number
--- HEXADECIMAL %...X number
--- octal %...o number
--- string %...s string number
--- float %...f number
--- exponential %...e number
--- exponential %...E number
--- autofloat %...g number
--- autofloat %...G number
--- utf character %...c number
--- force tostring %...S any
--- force tostring %Q any
--- force tonumber %N number (strip leading zeros)
--- signed number %I number
--- rounded number %r number
--- 0xhexadecimal %...h character number
--- 0xHEXADECIMAL %...H character number
--- U+hexadecimal %...u character number
--- U+HEXADECIMAL %...U character number
--- points %p number (scaled points)
--- basepoints %b number (scaled points)
--- table concat %...t table
--- serialize %...T sequenced (no nested tables)
--- boolean (logic) %l boolean
--- BOOLEAN %L boolean
--- whitespace %...w
--- automatic %...a 'whatever' (string, table, ...)
--- automatic %...a "whatever" (string, table, ...)
-
-local n = 0
-
--- we are somewhat sloppy in parsing prefixes as it's not that critical
-
--- hard to avoid but we can collect them in a private namespace if needed
-
--- inline the next two makes no sense as we only use this in logging
-
-local sequenced = table.sequenced
-
-function string.autodouble(s,sep)
- if s == nil then
- return '""'
- end
- local t = type(s)
- if t == "number" then
- return tostring(s) -- tostring not really needed
- end
- if t == "table" then
- return ('"' .. sequenced(s,sep or ",") .. '"')
- end
- return ('"' .. tostring(s) .. '"')
-end
-
-function string.autosingle(s,sep)
- if s == nil then
- return "''"
- end
- local t = type(s)
- if t == "number" then
- return tostring(s) -- tostring not really needed
- end
- if t == "table" then
- return ("'" .. sequenced(s,sep or ",") .. "'")
- end
- return ("'" .. tostring(s) .. "'")
-end
-
-local tracedchars = { }
-string.tracedchars = tracedchars
-strings.tracers = tracedchars
-
-function string.tracedchar(b)
- -- todo: table
- if type(b) == "number" then
- return tracedchars[b] or (utfchar(b) .. " (U+" .. format('%05X',b) .. ")")
- else
- local c = utfbyte(b)
- return tracedchars[c] or (b .. " (U+" .. format('%05X',c) .. ")")
- end
-end
-
-function number.signed(i)
- if i > 0 then
- return "+", i
- else
- return "-", -i
- end
-end
-
-local preamble = [[
-local type = type
-local tostring = tostring
-local tonumber = tonumber
-local format = string.format
-local concat = table.concat
-local signed = number.signed
-local points = number.points
-local basepoints = number.basepoints
-local utfchar = utf.char
-local utfbyte = utf.byte
-local lpegmatch = lpeg.match
-local nspaces = string.nspaces
-local tracedchar = string.tracedchar
-local autosingle = string.autosingle
-local autodouble = string.autodouble
-local sequenced = table.sequenced
-]]
-
-local template = [[
-%s
-%s
-return function(%s) return %s end
-]]
-
-local arguments = { "a1" } -- faster than previously used (select(n,...))
-
-setmetatable(arguments, { __index =
- function(t,k)
- local v = t[k-1] .. ",a" .. k
- t[k] = v
- return v
- end
-})
-
-local prefix_any = C((S("+- .") + R("09"))^0)
-local prefix_tab = C((1-R("az","AZ","09","%%"))^0)
-
--- we've split all cases as then we can optimize them (let's omit the fuzzy u)
-
--- todo: replace outer formats in next by ..
-
-local format_s = function(f)
- n = n + 1
- if f and f ~= "" then
- return format("format('%%%ss',a%s)",f,n)
- else -- best no tostring in order to stay compatible (.. does a selective tostring too)
- return format("(a%s or '')",n) -- goodie: nil check
- end
-end
-
-local format_S = function(f) -- can be optimized
- n = n + 1
- if f and f ~= "" then
- return format("format('%%%ss',tostring(a%s))",f,n)
- else
- return format("tostring(a%s)",n)
- end
-end
-
-local format_q = function()
- n = n + 1
- return format("(a%s and format('%%q',a%s) or '')",n,n) -- goodie: nil check (maybe separate lpeg, not faster)
-end
-
-local format_Q = function() -- can be optimized
- n = n + 1
- return format("format('%%q',tostring(a%s))",n)
-end
-
-local format_i = function(f)
- n = n + 1
- if f and f ~= "" then
- return format("format('%%%si',a%s)",f,n)
- else
- return format("a%s",n)
- end
-end
-
-local format_d = format_i
-
-local format_I = function(f)
- n = n + 1
- return format("format('%%s%%%si',signed(a%s))",f,n)
-end
-
-local format_f = function(f)
- n = n + 1
- return format("format('%%%sf',a%s)",f,n)
-end
-
-local format_g = function(f)
- n = n + 1
- return format("format('%%%sg',a%s)",f,n)
-end
-
-local format_G = function(f)
- n = n + 1
- return format("format('%%%sG',a%s)",f,n)
-end
-
-local format_e = function(f)
- n = n + 1
- return format("format('%%%se',a%s)",f,n)
-end
-
-local format_E = function(f)
- n = n + 1
- return format("format('%%%sE',a%s)",f,n)
-end
-
-local format_x = function(f)
- n = n + 1
- return format("format('%%%sx',a%s)",f,n)
-end
-
-local format_X = function(f)
- n = n + 1
- return format("format('%%%sX',a%s)",f,n)
-end
-
-local format_o = function(f)
- n = n + 1
- return format("format('%%%so',a%s)",f,n)
-end
-
-local format_c = function()
- n = n + 1
- return format("utfchar(a%s)",n)
-end
-
-local format_C = function()
- n = n + 1
- return format("tracedchar(a%s)",n)
-end
-
-local format_r = function(f)
- n = n + 1
- return format("format('%%%s.0f',a%s)",f,n)
-end
-
-local format_h = function(f)
- n = n + 1
- if f == "-" then
- f = sub(f,2)
- return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
- else
- return format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
- end
-end
-
-local format_H = function(f)
- n = n + 1
- if f == "-" then
- f = sub(f,2)
- return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
- else
- return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
- end
-end
-
-local format_u = function(f)
- n = n + 1
- if f == "-" then
- f = sub(f,2)
- return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
- else
- return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
- end
-end
-
-local format_U = function(f)
- n = n + 1
- if f == "-" then
- f = sub(f,2)
- return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
- else
- return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
- end
-end
-
-local format_p = function()
- n = n + 1
- return format("points(a%s)",n)
-end
-
-local format_b = function()
- n = n + 1
- return format("basepoints(a%s)",n)
-end
-
-local format_t = function(f)
- n = n + 1
- if f and f ~= "" then
- return format("concat(a%s,%q)",n,f)
- else
- return format("concat(a%s)",n)
- end
-end
-
-local format_T = function(f)
- n = n + 1
- if f and f ~= "" then
- return format("sequenced(a%s,%q)",n,f)
- else
- return format("sequenced(a%s)",n)
- end
-end
-
-local format_l = function()
- n = n + 1
- return format("(a%s and 'true' or 'false')",n)
-end
-
-local format_L = function()
- n = n + 1
- return format("(a%s and 'TRUE' or 'FALSE')",n)
-end
-
-local format_N = function() -- strips leading zeros
- n = n + 1
- return format("tostring(tonumber(a%s) or a%s)",n,n)
-end
-
-local format_a = function(f)
- n = n + 1
- if f and f ~= "" then
- return format("autosingle(a%s,%q)",n,f)
- else
- return format("autosingle(a%s)",n)
- end
-end
-
-local format_A = function(f)
- n = n + 1
- if f and f ~= "" then
- return format("autodouble(a%s,%q)",n,f)
- else
- return format("autodouble(a%s)",n)
- end
-end
-
-local format_w = function(f) -- handy when doing depth related indent
- n = n + 1
- f = tonumber(f)
- if f then -- not that useful
- return format("nspaces[%s+a%s]",f,n) -- no real need for tonumber
- else
- return format("nspaces[a%s]",n) -- no real need for tonumber
- end
-end
-
-local format_W = function(f) -- handy when doing depth related indent
- return format("nspaces[%s]",tonumber(f) or 0)
-end
-
-local format_rest = function(s)
- return format("%q",s) -- catches " and \n and such
-end
-
-local format_extension = function(extensions,f,name)
- local extension = extensions[name] or "tostring(%s)"
- local f = tonumber(f) or 1
- if f == 0 then
- return extension
- elseif f == 1 then
- n = n + 1
- local a = "a" .. n
- return format(extension,a,a) -- maybe more times?
- elseif f < 0 then
- local a = "a" .. (n + f + 1)
- return format(extension,a,a)
- else
- local t = { }
- for i=1,f do
- n = n + 1
- t[#t+1] = "a" .. n
- end
- return format(extension,unpack(t))
- end
-end
-
-local builder = Cs { "start",
- start = (
- (
- P("%") / ""
- * (
- V("!") -- new
- + V("s") + V("q")
- + V("i") + V("d")
- + V("f") + V("g") + V("G") + V("e") + V("E")
- + V("x") + V("X") + V("o")
- --
- + V("c")
- + V("C")
- + V("S") -- new
- + V("Q") -- new
- + V("N") -- new
- --
- + V("r")
- + V("h") + V("H") + V("u") + V("U")
- + V("p") + V("b")
- + V("t") + V("T")
- + V("l") + V("L")
- + V("I")
- + V("h") -- new
- + V("w") -- new
- + V("W") -- new
- + V("a") -- new
- + V("A") -- new
- --
- + V("*") -- ignores probably messed up %
- )
- + V("*")
- )
- * (P(-1) + Carg(1))
- )^0,
- --
- ["s"] = (prefix_any * P("s")) / format_s, -- %s => regular %s (string)
- ["q"] = (prefix_any * P("q")) / format_q, -- %q => regular %q (quoted string)
- ["i"] = (prefix_any * P("i")) / format_i, -- %i => regular %i (integer)
- ["d"] = (prefix_any * P("d")) / format_d, -- %d => regular %d (integer)
- ["f"] = (prefix_any * P("f")) / format_f, -- %f => regular %f (float)
- ["g"] = (prefix_any * P("g")) / format_g, -- %g => regular %g (float)
- ["G"] = (prefix_any * P("G")) / format_G, -- %G => regular %G (float)
- ["e"] = (prefix_any * P("e")) / format_e, -- %e => regular %e (float)
- ["E"] = (prefix_any * P("E")) / format_E, -- %E => regular %E (float)
- ["x"] = (prefix_any * P("x")) / format_x, -- %x => regular %x (hexadecimal)
- ["X"] = (prefix_any * P("X")) / format_X, -- %X => regular %X (HEXADECIMAL)
- ["o"] = (prefix_any * P("o")) / format_o, -- %o => regular %o (octal)
- --
- ["S"] = (prefix_any * P("S")) / format_S, -- %S => %s (tostring)
- ["Q"] = (prefix_any * P("Q")) / format_S, -- %Q => %q (tostring)
- ["N"] = (prefix_any * P("N")) / format_N, -- %N => tonumber (strips leading zeros)
- ["c"] = (prefix_any * P("c")) / format_c, -- %c => utf character (extension to regular)
- ["C"] = (prefix_any * P("C")) / format_C, -- %c => U+.... utf character
- --
- ["r"] = (prefix_any * P("r")) / format_r, -- %r => round
- ["h"] = (prefix_any * P("h")) / format_h, -- %h => 0x0a1b2 (when - no 0x) was v
- ["H"] = (prefix_any * P("H")) / format_H, -- %H => 0x0A1B2 (when - no 0x) was V
- ["u"] = (prefix_any * P("u")) / format_u, -- %u => u+0a1b2 (when - no u+)
- ["U"] = (prefix_any * P("U")) / format_U, -- %U => U+0A1B2 (when - no U+)
- ["p"] = (prefix_any * P("p")) / format_p, -- %p => 12.345pt / maybe: P (and more units)
- ["b"] = (prefix_any * P("b")) / format_b, -- %b => 12.342bp / maybe: B (and more units)
- ["t"] = (prefix_tab * P("t")) / format_t, -- %t => concat
- ["T"] = (prefix_tab * P("T")) / format_T, -- %t => sequenced
- ["l"] = (prefix_tab * P("l")) / format_l, -- %l => boolean
- ["L"] = (prefix_tab * P("L")) / format_L, -- %L => BOOLEAN
- ["I"] = (prefix_any * P("I")) / format_I, -- %I => signed integer
- --
- ["w"] = (prefix_any * P("w")) / format_w, -- %w => n spaces (optional prefix is added)
- ["W"] = (prefix_any * P("W")) / format_W, -- %W => mandate prefix, no specifier
- --
- ["a"] = (prefix_any * P("a")) / format_a, -- %a => '...' (forces tostring)
- ["A"] = (prefix_any * P("A")) / format_A, -- %A => "..." (forces tostring)
- --
- ["*"] = Cs(((1-P("%"))^1 + P("%%")/"%%%%")^1) / format_rest, -- rest (including %%)
- --
- ["!"] = Carg(2) * prefix_any * P("!") * C((1-P("!"))^1) * P("!") / format_extension,
-}
-
--- we can be clever and only alias what is needed
-
-local direct = Cs (
- P("%")/""
- * Cc([[local format = string.format return function(str) return format("%]])
- * (S("+- .") + R("09"))^0
- * S("sqidfgGeExXo")
- * Cc([[",str) end]])
- * P(-1)
- )
-
-local function make(t,str)
- local f
- local p
- local p = lpegmatch(direct,str)
- if p then
- f = loadstripped(p)()
- else
- n = 0
- p = lpegmatch(builder,str,1,"..",t._extensions_) -- after this we know n
- if n > 0 then
- p = format(template,preamble,t._preamble_,arguments[n],p)
--- print("builder>",p)
- f = loadstripped(p)()
- else
- f = function() return str end
- end
- end
- t[str] = f
- return f
-end
-
--- -- collect periodically
---
--- local threshold = 1000 -- max nof cached formats
---
--- local function make(t,str)
--- local f = rawget(t,str)
--- if f then
--- return f
--- end
--- local parent = t._t_
--- if parent._n_ > threshold then
--- local m = { _t_ = parent }
--- getmetatable(parent).__index = m
--- setmetatable(m, { __index = make })
--- else
--- parent._n_ = parent._n_ + 1
--- end
--- local f
--- local p = lpegmatch(direct,str)
--- if p then
--- f = loadstripped(p)()
--- else
--- n = 0
--- p = lpegmatch(builder,str,1,"..",parent._extensions_) -- after this we know n
--- if n > 0 then
--- p = format(template,preamble,parent._preamble_,arguments[n],p)
--- -- print("builder>",p)
--- f = loadstripped(p)()
--- else
--- f = function() return str end
--- end
--- end
--- t[str] = f
--- return f
--- end
-
-local function use(t,fmt,...)
- return t[fmt](...)
-end
-
-strings.formatters = { }
-
--- we cannot make these tables weak, unless we start using an indirect
--- table (metatable) in which case we could better keep a count and
--- clear that table when a threshold is reached
-
-function strings.formatters.new()
- local t = { _extensions_ = { }, _preamble_ = "", _type_ = "formatter" }
- setmetatable(t, { __index = make, __call = use })
- return t
-end
-
--- function strings.formatters.new()
--- local t = { _extensions_ = { }, _preamble_ = "", _type_ = "formatter", _n_ = 0 }
--- local m = { _t_ = t }
--- setmetatable(t, { __index = m, __call = use })
--- setmetatable(m, { __index = make })
--- return t
--- end
-
-local formatters = strings.formatters.new() -- the default instance
-
-string.formatters = formatters -- in the main string namespace
-string.formatter = function(str,...) return formatters[str](...) end -- sometimes nicer name
-
-local function add(t,name,template,preamble)
- if type(t) == "table" and t._type_ == "formatter" then
- t._extensions_[name] = template or "%s"
- if preamble then
- t._preamble_ = preamble .. "\n" .. t._preamble_ -- so no overload !
- end
- end
-end
-
-strings.formatters.add = add
-
--- registered in the default instance (should we fall back on this one?)
-
-lpeg.patterns.xmlescape = Cs((P("<")/"<" + P(">")/">" + P("&")/"&" + P('"')/""" + P(1))^0)
-lpeg.patterns.texescape = Cs((C(S("#$%\\{}"))/"\\%1" + P(1))^0)
-
-add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
-add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
-
--- -- yes or no:
---
--- local function make(t,str)
--- local f
--- local p = lpegmatch(direct,str)
--- if p then
--- f = loadstripped(p)()
--- else
--- n = 0
--- p = lpegmatch(builder,str,1,",") -- after this we know n
--- if n > 0 then
--- p = format(template,template_shortcuts,arguments[n],p)
--- f = loadstripped(p)()
--- else
--- f = function() return str end
--- end
--- end
--- t[str] = f
--- return f
--- end
---
--- local formatteds = string.formatteds or { }
--- string.formatteds = formatteds
---
--- setmetatable(formatteds, { __index = make, __call = use })
+if not modules then modules = { } end modules ['util-str'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+utilities = utilities or {}
+utilities.strings = utilities.strings or { }
+local strings = utilities.strings
+
+local format, gsub, rep, sub = string.format, string.gsub, string.rep, string.sub
+local load, dump = load, string.dump
+local tonumber, type, tostring = tonumber, type, tostring
+local unpack, concat = table.unpack, table.concat
+local P, V, C, S, R, Ct, Cs, Cp, Carg, Cc = lpeg.P, lpeg.V, lpeg.C, lpeg.S, lpeg.R, lpeg.Ct, lpeg.Cs, lpeg.Cp, lpeg.Carg, lpeg.Cc
+local patterns, lpegmatch = lpeg.patterns, lpeg.match
+local utfchar, utfbyte = utf.char, utf.byte
+----- loadstripped = utilities.lua.loadstripped
+----- setmetatableindex = table.setmetatableindex
+
+local loadstripped = _LUAVERSION < 5.2 and load or function(str)
+ return load(dump(load(str),true)) -- it only makes sense in luajit and luatex where we have a stripped load
+end
+
+-- todo: make a special namespace for the formatter
+
+if not number then number = { } end -- temp hack for luatex-fonts
+
+local stripper = patterns.stripzeros
+
+local function points(n)
+ return (not n or n == 0) and "0pt" or lpegmatch(stripper,format("%.5fpt",n/65536))
+end
+
+local function basepoints(n)
+ return (not n or n == 0) and "0bp" or lpegmatch(stripper,format("%.5fbp", n*(7200/7227)/65536))
+end
+
+number.points = points
+number.basepoints = basepoints
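+
+-- For example (65536 scaled points equal one TeX point; values are illustrative):
+--
+-- print(points(65536)) -- "1pt"
+-- print(points(32768)) -- "0.5pt"
+-- print(basepoints(65536)) -- roughly "0.99626bp"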
+
+-- str = " \n \ntest \n test\ntest "
+-- print("["..string.gsub(string.collapsecrlf(str),"\n","+").."]")
+
+local rubish = patterns.spaceortab^0 * patterns.newline
+local anyrubish = patterns.spaceortab + patterns.newline
+local anything = patterns.anything
+local stripped = (patterns.spaceortab^1 / "") * patterns.newline
+local leading = rubish^0 / ""
+local trailing = (anyrubish^1 * patterns.endofstring) / ""
+local redundant = rubish^3 / "\n"
+
+local pattern = Cs(leading * (trailing + redundant + stripped + anything)^0)
+
+function strings.collapsecrlf(str)
+ return lpegmatch(pattern,str)
+end
+
+-- The following functions might end up in another namespace.
+
+local repeaters = { } -- watch how we also moved the -1 in depth-1 to the creator
+
+function strings.newrepeater(str,offset)
+ offset = offset or 0
+ local s = repeaters[str]
+ if not s then
+ s = { }
+ repeaters[str] = s
+ end
+ local t = s[offset]
+ if t then
+ return t
+ end
+ t = { }
+ setmetatable(t, { __index = function(t,k)
+ if not k then
+ return ""
+ end
+ local n = k + offset
+ local s = n > 0 and rep(str,n) or ""
+ t[k] = s
+ return s
+ end })
+ s[offset] = t
+ return t
+end
+
+-- local dashes = strings.newrepeater("--",-1)
+-- print(dashes[2],dashes[3],dashes[1])
+
+local extra, tab, start = 0, 0, 4, 0
+
+local nspaces = strings.newrepeater(" ")
+
+string.nspaces = nspaces
+
+local pattern =
+ Carg(1) / function(t)
+ extra, tab, start = 0, t or 7, 1
+ end
+ * Cs((
+ Cp() * patterns.tab / function(position)
+ local current = (position - start + 1) + extra
+ local spaces = tab-(current-1) % tab
+ if spaces > 0 then
+ extra = extra + spaces - 1
+ return nspaces[spaces] -- rep(" ",spaces)
+ else
+ return ""
+ end
+ end
+ + patterns.newline * Cp() / function(position)
+ extra, start = 0, position
+ end
+ + patterns.anything
+ )^1)
+
+function strings.tabtospace(str,tab)
+ return lpegmatch(pattern,str,1,tab or 7)
+end
+
+-- local t = {
+-- "1234567123456712345671234567",
+-- "\tb\tc",
+-- "a\tb\tc",
+-- "aa\tbb\tcc",
+-- "aaa\tbbb\tccc",
+-- "aaaa\tbbbb\tcccc",
+-- "aaaaa\tbbbbb\tccccc",
+-- "aaaaaa\tbbbbbb\tcccccc\n aaaaaa\tbbbbbb\tcccccc",
+-- "one\n two\nxxx three\nxx four\nx five\nsix",
+-- }
+-- for k=1,#t do
+-- print(strings.tabtospace(t[k]))
+-- end
+
+function strings.striplong(str) -- strips all leading spaces
+ str = gsub(str,"^%s*","")
+ str = gsub(str,"[\n\r]+ *","\n")
+ return str
+end
+
+-- local template = string.striplong([[
+-- aaaa
+-- bb
+-- cccccc
+-- ]])
+
+function strings.nice(str)
+ str = gsub(str,"[:%-+_]+"," ") -- maybe more
+ return str
+end
+
+-- Work in progress. Interestingly, compared to the built-in format this is faster in
+-- luatex than in luajittex, where the speed is comparable. It only makes sense to use
+-- the formatter when a (somewhat) complex format is used a lot. Each formatter is a
+-- function, so there is some overhead, and not all formatted output is worth that
+-- overhead. Keep in mind that there is an extra function call involved. In principle
+-- we end up with a string concatenation, so one could inline such a sequence, but often
+-- at the cost of less readability. So, it's a sort of (visual) compromise. Of course
+-- there is the benefit of more variants. (Concerning the speed: a simple format like
+-- %05fpt is better off with format than with a formatter, but as soon as you put
+-- something in front, formatters become faster. Passing the pt as an extra argument
+-- makes formatters behave better. Of course this is rather implementation dependent.
+-- Also, when a specific format is only used a few times, the overhead in creating it
+-- is not compensated by the speed gain.)
+--
+-- More info can be found in cld-mkiv.pdf so here I stick to a simple list.
+--
+-- integer %...i number
+-- integer %...d number
+-- unsigned %...u number
+-- character %...c number
+-- hexadecimal %...x number
+-- HEXADECIMAL %...X number
+-- octal %...o number
+-- string %...s string number
+-- float %...f number
+-- exponential %...e number
+-- exponential %...E number
+-- autofloat %...g number
+-- autofloat %...G number
+-- utf character %...c number
+-- force tostring %...S any
+-- force tostring %Q any
+-- force tonumber %N number (strip leading zeros)
+-- signed number %I number
+-- rounded number %r number
+-- 0xhexadecimal %...h character number
+-- 0xHEXADECIMAL %...H character number
+-- U+hexadecimal %...u character number
+-- U+HEXADECIMAL %...U character number
+-- points %p number (scaled points)
+-- basepoints %b number (scaled points)
+-- table concat %...t table
+-- serialize %...T sequenced (no nested tables)
+-- boolean (logic) %l boolean
+-- BOOLEAN %L boolean
+-- whitespace %...w
+-- automatic %...a 'whatever' (string, table, ...)
+-- automatic %...a "whatever" (string, table, ...)
+
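+-- As an illustration (the values are made up and the default instance used here is
+-- only created further down in this file): a template is compiled once into a
+-- function and then simply called.
+--
+-- local formatters = string.formatters
+-- local f = formatters["%4i: %s %p"]
+-- print(f(1,"width",65536)) -- "   1: width 1pt"
+-- print(formatters["%,t"]({ "a", "b", "c" })) -- "a,b,c"
+-- print(string.formatter("%p",10*65536)) -- "10pt"
+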
+local n = 0
+
+-- we are somewhat sloppy in parsing prefixes as it's not that critical
+
+-- hard to avoid but we can collect them in a private namespace if needed
+
+-- inline the next two makes no sense as we only use this in logging
+
+local sequenced = table.sequenced
+
+function string.autodouble(s,sep)
+ if s == nil then
+ return '""'
+ end
+ local t = type(s)
+ if t == "number" then
+ return tostring(s) -- tostring not really needed
+ end
+ if t == "table" then
+ return ('"' .. sequenced(s,sep or ",") .. '"')
+ end
+ return ('"' .. tostring(s) .. '"')
+end
+
+function string.autosingle(s,sep)
+ if s == nil then
+ return "''"
+ end
+ local t = type(s)
+ if t == "number" then
+ return tostring(s) -- tostring not really needed
+ end
+ if t == "table" then
+ return ("'" .. sequenced(s,sep or ",") .. "'")
+ end
+ return ("'" .. tostring(s) .. "'")
+end
+
+local tracedchars = { }
+string.tracedchars = tracedchars
+strings.tracers = tracedchars
+
+function string.tracedchar(b)
+ -- todo: table
+ if type(b) == "number" then
+ return tracedchars[b] or (utfchar(b) .. " (U+" .. format('%05X',b) .. ")")
+ else
+ local c = utfbyte(b)
+ return tracedchars[c] or (b .. " (U+" .. format('%05X',c) .. ")")
+ end
+end
+
+function number.signed(i)
+ if i > 0 then
+ return "+", i
+ else
+ return "-", -i
+ end
+end
+
+local preamble = [[
+local type = type
+local tostring = tostring
+local tonumber = tonumber
+local format = string.format
+local concat = table.concat
+local signed = number.signed
+local points = number.points
+local basepoints = number.basepoints
+local utfchar = utf.char
+local utfbyte = utf.byte
+local lpegmatch = lpeg.match
+local nspaces = string.nspaces
+local tracedchar = string.tracedchar
+local autosingle = string.autosingle
+local autodouble = string.autodouble
+local sequenced = table.sequenced
+]]
+
+local template = [[
+%s
+%s
+return function(%s) return %s end
+]]
+
+local arguments = { "a1" } -- faster than previously used (select(n,...))
+
+setmetatable(arguments, { __index =
+ function(t,k)
+ local v = t[k-1] .. ",a" .. k
+ t[k] = v
+ return v
+ end
+})
+
+local prefix_any = C((S("+- .") + R("09"))^0)
+local prefix_tab = C((1-R("az","AZ","09","%%"))^0)
+
+-- we've split all cases as then we can optimize them (let's omit the fuzzy u)
+
+-- todo: replace outer formats in next by ..
+
+local format_s = function(f)
+ n = n + 1
+ if f and f ~= "" then
+ return format("format('%%%ss',a%s)",f,n)
+ else -- best no tostring in order to stay compatible (.. does a selective tostring too)
+ return format("(a%s or '')",n) -- goodie: nil check
+ end
+end
+
+local format_S = function(f) -- can be optimized
+ n = n + 1
+ if f and f ~= "" then
+ return format("format('%%%ss',tostring(a%s))",f,n)
+ else
+ return format("tostring(a%s)",n)
+ end
+end
+
+local format_q = function()
+ n = n + 1
+ return format("(a%s and format('%%q',a%s) or '')",n,n) -- goodie: nil check (maybe separate lpeg, not faster)
+end
+
+local format_Q = function() -- can be optimized
+ n = n + 1
+ return format("format('%%q',tostring(a%s))",n)
+end
+
+local format_i = function(f)
+ n = n + 1
+ if f and f ~= "" then
+ return format("format('%%%si',a%s)",f,n)
+ else
+ return format("a%s",n)
+ end
+end
+
+local format_d = format_i
+
+local format_I = function(f)
+ n = n + 1
+ return format("format('%%s%%%si',signed(a%s))",f,n)
+end
+
+local format_f = function(f)
+ n = n + 1
+ return format("format('%%%sf',a%s)",f,n)
+end
+
+local format_g = function(f)
+ n = n + 1
+ return format("format('%%%sg',a%s)",f,n)
+end
+
+local format_G = function(f)
+ n = n + 1
+ return format("format('%%%sG',a%s)",f,n)
+end
+
+local format_e = function(f)
+ n = n + 1
+ return format("format('%%%se',a%s)",f,n)
+end
+
+local format_E = function(f)
+ n = n + 1
+ return format("format('%%%sE',a%s)",f,n)
+end
+
+local format_x = function(f)
+ n = n + 1
+ return format("format('%%%sx',a%s)",f,n)
+end
+
+local format_X = function(f)
+ n = n + 1
+ return format("format('%%%sX',a%s)",f,n)
+end
+
+local format_o = function(f)
+ n = n + 1
+ return format("format('%%%so',a%s)",f,n)
+end
+
+local format_c = function()
+ n = n + 1
+ return format("utfchar(a%s)",n)
+end
+
+local format_C = function()
+ n = n + 1
+ return format("tracedchar(a%s)",n)
+end
+
+local format_r = function(f)
+ n = n + 1
+ return format("format('%%%s.0f',a%s)",f,n)
+end
+
+local format_h = function(f)
+ n = n + 1
+ if f == "-" then
+ f = sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
+ end
+end
+
+local format_H = function(f)
+ n = n + 1
+ if f == "-" then
+ f = sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
+ else
+ return format("format('0x%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
+ end
+end
+
+local format_u = function(f)
+ n = n + 1
+ if f == "-" then
+ f = sub(f,2)
+ return format("format('%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
+ else
+ return format("format('u+%%%sx',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
+ end
+end
+
+local format_U = function(f)
+ n = n + 1
+ if f == "-" then
+ f = sub(f,2)
+ return format("format('%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
+ else
+ return format("format('U+%%%sX',type(a%s) == 'number' and a%s or utfbyte(a%s))",f == "" and "05" or f,n,n,n)
+ end
+end
+
+local format_p = function()
+ n = n + 1
+ return format("points(a%s)",n)
+end
+
+local format_b = function()
+ n = n + 1
+ return format("basepoints(a%s)",n)
+end
+
+local format_t = function(f)
+ n = n + 1
+ if f and f ~= "" then
+ return format("concat(a%s,%q)",n,f)
+ else
+ return format("concat(a%s)",n)
+ end
+end
+
+local format_T = function(f)
+ n = n + 1
+ if f and f ~= "" then
+ return format("sequenced(a%s,%q)",n,f)
+ else
+ return format("sequenced(a%s)",n)
+ end
+end
+
+local format_l = function()
+ n = n + 1
+ return format("(a%s and 'true' or 'false')",n)
+end
+
+local format_L = function()
+ n = n + 1
+ return format("(a%s and 'TRUE' or 'FALSE')",n)
+end
+
+local format_N = function() -- strips leading zeros
+ n = n + 1
+ return format("tostring(tonumber(a%s) or a%s)",n,n)
+end
+
+local format_a = function(f)
+ n = n + 1
+ if f and f ~= "" then
+ return format("autosingle(a%s,%q)",n,f)
+ else
+ return format("autosingle(a%s)",n)
+ end
+end
+
+local format_A = function(f)
+ n = n + 1
+ if f and f ~= "" then
+ return format("autodouble(a%s,%q)",n,f)
+ else
+ return format("autodouble(a%s)",n)
+ end
+end
+
+local format_w = function(f) -- handy when doing depth related indent
+ n = n + 1
+ f = tonumber(f)
+ if f then -- not that useful
+ return format("nspaces[%s+a%s]",f,n) -- no real need for tonumber
+ else
+ return format("nspaces[a%s]",n) -- no real need for tonumber
+ end
+end
+
+local format_W = function(f) -- handy when doing depth related indent
+ return format("nspaces[%s]",tonumber(f) or 0)
+end
+
+local format_rest = function(s)
+ return format("%q",s) -- catches " and \n and such
+end
+
+local format_extension = function(extensions,f,name)
+ local extension = extensions[name] or "tostring(%s)"
+ local f = tonumber(f) or 1
+ if f == 0 then
+ return extension
+ elseif f == 1 then
+ n = n + 1
+ local a = "a" .. n
+ return format(extension,a,a) -- maybe more times?
+ elseif f < 0 then
+ local a = "a" .. (n + f + 1)
+ return format(extension,a,a)
+ else
+ local t = { }
+ for i=1,f do
+ n = n + 1
+ t[#t+1] = "a" .. n
+ end
+ return format(extension,unpack(t))
+ end
+end
+
+local builder = Cs { "start",
+ start = (
+ (
+ P("%") / ""
+ * (
+ V("!") -- new
+ + V("s") + V("q")
+ + V("i") + V("d")
+ + V("f") + V("g") + V("G") + V("e") + V("E")
+ + V("x") + V("X") + V("o")
+ --
+ + V("c")
+ + V("C")
+ + V("S") -- new
+ + V("Q") -- new
+ + V("N") -- new
+ --
+ + V("r")
+ + V("h") + V("H") + V("u") + V("U")
+ + V("p") + V("b")
+ + V("t") + V("T")
+ + V("l") + V("L")
+ + V("I")
+ + V("h") -- new
+ + V("w") -- new
+ + V("W") -- new
+ + V("a") -- new
+ + V("A") -- new
+ --
+ + V("*") -- ignores probably messed up %
+ )
+ + V("*")
+ )
+ * (P(-1) + Carg(1))
+ )^0,
+ --
+ ["s"] = (prefix_any * P("s")) / format_s, -- %s => regular %s (string)
+ ["q"] = (prefix_any * P("q")) / format_q, -- %q => regular %q (quoted string)
+ ["i"] = (prefix_any * P("i")) / format_i, -- %i => regular %i (integer)
+ ["d"] = (prefix_any * P("d")) / format_d, -- %d => regular %d (integer)
+ ["f"] = (prefix_any * P("f")) / format_f, -- %f => regular %f (float)
+ ["g"] = (prefix_any * P("g")) / format_g, -- %g => regular %g (float)
+ ["G"] = (prefix_any * P("G")) / format_G, -- %G => regular %G (float)
+ ["e"] = (prefix_any * P("e")) / format_e, -- %e => regular %e (float)
+ ["E"] = (prefix_any * P("E")) / format_E, -- %E => regular %E (float)
+ ["x"] = (prefix_any * P("x")) / format_x, -- %x => regular %x (hexadecimal)
+ ["X"] = (prefix_any * P("X")) / format_X, -- %X => regular %X (HEXADECIMAL)
+ ["o"] = (prefix_any * P("o")) / format_o, -- %o => regular %o (octal)
+ --
+ ["S"] = (prefix_any * P("S")) / format_S, -- %S => %s (tostring)
+ ["Q"] = (prefix_any * P("Q")) / format_S, -- %Q => %q (tostring)
+ ["N"] = (prefix_any * P("N")) / format_N, -- %N => tonumber (strips leading zeros)
+ ["c"] = (prefix_any * P("c")) / format_c, -- %c => utf character (extension to regular)
+ ["C"] = (prefix_any * P("C")) / format_C, -- %c => U+.... utf character
+ --
+ ["r"] = (prefix_any * P("r")) / format_r, -- %r => round
+ ["h"] = (prefix_any * P("h")) / format_h, -- %h => 0x0a1b2 (when - no 0x) was v
+ ["H"] = (prefix_any * P("H")) / format_H, -- %H => 0x0A1B2 (when - no 0x) was V
+ ["u"] = (prefix_any * P("u")) / format_u, -- %u => u+0a1b2 (when - no u+)
+ ["U"] = (prefix_any * P("U")) / format_U, -- %U => U+0A1B2 (when - no U+)
+ ["p"] = (prefix_any * P("p")) / format_p, -- %p => 12.345pt / maybe: P (and more units)
+ ["b"] = (prefix_any * P("b")) / format_b, -- %b => 12.342bp / maybe: B (and more units)
+ ["t"] = (prefix_tab * P("t")) / format_t, -- %t => concat
+ ["T"] = (prefix_tab * P("T")) / format_T, -- %t => sequenced
+ ["l"] = (prefix_tab * P("l")) / format_l, -- %l => boolean
+ ["L"] = (prefix_tab * P("L")) / format_L, -- %L => BOOLEAN
+ ["I"] = (prefix_any * P("I")) / format_I, -- %I => signed integer
+ --
+ ["w"] = (prefix_any * P("w")) / format_w, -- %w => n spaces (optional prefix is added)
+ ["W"] = (prefix_any * P("W")) / format_W, -- %W => mandate prefix, no specifier
+ --
+ ["a"] = (prefix_any * P("a")) / format_a, -- %a => '...' (forces tostring)
+ ["A"] = (prefix_any * P("A")) / format_A, -- %A => "..." (forces tostring)
+ --
+ ["*"] = Cs(((1-P("%"))^1 + P("%%")/"%%%%")^1) / format_rest, -- rest (including %%)
+ --
+ ["!"] = Carg(2) * prefix_any * P("!") * C((1-P("!"))^1) * P("!") / format_extension,
+}
+
+-- we can be clever and only alias what is needed
+
+local direct = Cs (
+ P("%")/""
+ * Cc([[local format = string.format return function(str) return format("%]])
+ * (S("+- .") + R("09"))^0
+ * S("sqidfgGeExXo")
+ * Cc([[",str) end]])
+ * P(-1)
+ )
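+
+-- For instance, a single-specifier template like "%05f" takes this fast path; the
+-- pattern above rewrites it into (roughly) the following chunk, which is then
+-- loaded and called directly:
+--
+-- local format = string.format return function(str) return format("%05f",str) end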
+
+local function make(t,str)
+ local f
+ local p
+ local p = lpegmatch(direct,str)
+ if p then
+ f = loadstripped(p)()
+ else
+ n = 0
+ p = lpegmatch(builder,str,1,"..",t._extensions_) -- after this we know n
+ if n > 0 then
+ p = format(template,preamble,t._preamble_,arguments[n],p)
+-- print("builder>",p)
+ f = loadstripped(p)()
+ else
+ f = function() return str end
+ end
+ end
+ t[str] = f
+ return f
+end
+
+-- -- collect periodically
+--
+-- local threshold = 1000 -- max nof cached formats
+--
+-- local function make(t,str)
+-- local f = rawget(t,str)
+-- if f then
+-- return f
+-- end
+-- local parent = t._t_
+-- if parent._n_ > threshold then
+-- local m = { _t_ = parent }
+-- getmetatable(parent).__index = m
+-- setmetatable(m, { __index = make })
+-- else
+-- parent._n_ = parent._n_ + 1
+-- end
+-- local f
+-- local p = lpegmatch(direct,str)
+-- if p then
+-- f = loadstripped(p)()
+-- else
+-- n = 0
+-- p = lpegmatch(builder,str,1,"..",parent._extensions_) -- after this we know n
+-- if n > 0 then
+-- p = format(template,preamble,parent._preamble_,arguments[n],p)
+-- -- print("builder>",p)
+-- f = loadstripped(p)()
+-- else
+-- f = function() return str end
+-- end
+-- end
+-- t[str] = f
+-- return f
+-- end
+
+local function use(t,fmt,...)
+ return t[fmt](...)
+end
+
+strings.formatters = { }
+
+-- we cannot make these tables weak, unless we start using an indirect
+-- table (metatable) in which case we could better keep a count and
+-- clear that table when a threshold is reached
+
+function strings.formatters.new()
+ local t = { _extensions_ = { }, _preamble_ = "", _type_ = "formatter" }
+ setmetatable(t, { __index = make, __call = use })
+ return t
+end
+
+-- function strings.formatters.new()
+-- local t = { _extensions_ = { }, _preamble_ = "", _type_ = "formatter", _n_ = 0 }
+-- local m = { _t_ = t }
+-- setmetatable(t, { __index = m, __call = use })
+-- setmetatable(m, { __index = make })
+-- return t
+-- end
+
+local formatters = strings.formatters.new() -- the default instance
+
+string.formatters = formatters -- in the main string namespace
+string.formatter = function(str,...) return formatters[str](...) end -- sometimes nicer name
+
+local function add(t,name,template,preamble)
+ if type(t) == "table" and t._type_ == "formatter" then
+ t._extensions_[name] = template or "%s"
+ if preamble then
+ t._preamble_ = preamble .. "\n" .. t._preamble_ -- so no overload !
+ end
+ end
+end
+
+strings.formatters.add = add
+
+-- registered in the default instance (should we fall back on this one?)
+
+lpeg.patterns.xmlescape = Cs((P("<")/"<" + P(">")/">" + P("&")/"&" + P('"')/""" + P(1))^0)
+lpeg.patterns.texescape = Cs((C(S("#$%\\{}"))/"\\%1" + P(1))^0)
+
+add(formatters,"xml",[[lpegmatch(xmlescape,%s)]],[[local xmlescape = lpeg.patterns.xmlescape]])
+add(formatters,"tex",[[lpegmatch(texescape,%s)]],[[local texescape = lpeg.patterns.texescape]])
+
+-- -- yes or no:
+--
+-- local function make(t,str)
+-- local f
+-- local p = lpegmatch(direct,str)
+-- if p then
+-- f = loadstripped(p)()
+-- else
+-- n = 0
+-- p = lpegmatch(builder,str,1,",") -- after this we know n
+-- if n > 0 then
+-- p = format(template,template_shortcuts,arguments[n],p)
+-- f = loadstripped(p)()
+-- else
+-- f = function() return str end
+-- end
+-- end
+-- t[str] = f
+-- return f
+-- end
+--
+-- local formatteds = string.formatteds or { }
+-- string.formatteds = formatteds
+--
+-- setmetatable(formatteds, { __index = make, __call = use })
diff --git a/tex/context/base/util-tab.lua b/tex/context/base/util-tab.lua
index ecf36b137..30554015b 100644
--- a/tex/context/base/util-tab.lua
+++ b/tex/context/base/util-tab.lua
@@ -1,493 +1,493 @@
-if not modules then modules = { } end modules ['util-tab'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-utilities = utilities or {}
-utilities.tables = utilities.tables or { }
-local tables = utilities.tables
-
-local format, gmatch, gsub = string.format, string.gmatch, string.gsub
-local concat, insert, remove = table.concat, table.insert, table.remove
-local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
-local type, next, rawset, tonumber, tostring, load, select = type, next, rawset, tonumber, tostring, load, select
-local lpegmatch, P, Cs, Cc = lpeg.match, lpeg.P, lpeg.Cs, lpeg.Cc
-local serialize, sortedkeys, sortedpairs = table.serialize, table.sortedkeys, table.sortedpairs
-local formatters = string.formatters
-
-local splitter = lpeg.tsplitat(".")
-
-function tables.definetable(target,nofirst,nolast) -- defines undefined tables
- local composed, shortcut, t = nil, nil, { }
- local snippets = lpegmatch(splitter,target)
- for i=1,#snippets - (nolast and 1 or 0) do
- local name = snippets[i]
- if composed then
- composed = shortcut .. "." .. name
- shortcut = shortcut .. "_" .. name
- t[#t+1] = formatters["local %s = %s if not %s then %s = { } %s = %s end"](shortcut,composed,shortcut,shortcut,composed,shortcut)
- else
- composed = name
- shortcut = name
- if not nofirst then
- t[#t+1] = formatters["%s = %s or { }"](composed,composed)
- end
- end
- end
- if nolast then
- composed = shortcut .. "." .. snippets[#snippets]
- end
- return concat(t,"\n"), composed
-end
-
--- local t = tables.definedtable("a","b","c","d")
-
-function tables.definedtable(...)
- local t = _G
- for i=1,select("#",...) do
- local li = select(i,...)
- local tl = t[li]
- if not tl then
- tl = { }
- t[li] = tl
- end
- t = tl
- end
- return t
-end
-
-function tables.accesstable(target,root)
- local t = root or _G
- for name in gmatch(target,"([^%.]+)") do
- t = t[name]
- if not t then
- return
- end
- end
- return t
-end
-
-function tables.migratetable(target,v,root)
- local t = root or _G
- local names = string.split(target,".")
- for i=1,#names-1 do
- local name = names[i]
- t[name] = t[name] or { }
- t = t[name]
- if not t then
- return
- end
- end
- t[names[#names]] = v
-end
-
-function tables.removevalue(t,value) -- todo: n
- if value then
- for i=1,#t do
- if t[i] == value then
- remove(t,i)
- -- remove all, so no: return
- end
- end
- end
-end
-
-function tables.insertbeforevalue(t,value,extra)
- for i=1,#t do
- if t[i] == extra then
- remove(t,i)
- end
- end
- for i=1,#t do
- if t[i] == value then
- insert(t,i,extra)
- return
- end
- end
- insert(t,1,extra)
-end
-
-function tables.insertaftervalue(t,value,extra)
- for i=1,#t do
- if t[i] == extra then
- remove(t,i)
- end
- end
- for i=1,#t do
- if t[i] == value then
- insert(t,i+1,extra)
- return
- end
- end
- insert(t,#t+1,extra)
-end
-
--- experimental
-
-local escape = Cs(Cc('"') * ((P('"')/'""' + P(1))^0) * Cc('"'))
-
-function table.tocsv(t,specification)
- if t and #t > 0 then
- local result = { }
- local r = { }
- specification = specification or { }
- local fields = specification.fields
- if type(fields) ~= "string" then
- fields = sortedkeys(t[1])
- end
- local separator = specification.separator or ","
- if specification.preamble == true then
- for f=1,#fields do
- r[f] = lpegmatch(escape,tostring(fields[f]))
- end
- result[1] = concat(r,separator)
- end
- for i=1,#t do
- local ti = t[i]
- for f=1,#fields do
- local field = ti[fields[f]]
- if type(field) == "string" then
- r[f] = lpegmatch(escape,field)
- else
- r[f] = tostring(field)
- end
- end
- result[#result+1] = concat(r,separator)
- end
- return concat(result,"\n")
- else
- return ""
- end
-end
-
--- local nspaces = utilities.strings.newrepeater(" ")
--- local escape = Cs((P("<")/"<" + P(">")/">" + P("&")/"&" + P(1))^0)
---
--- local function toxml(t,d,result,step)
--- for k, v in sortedpairs(t) do
--- local s = nspaces[d]
--- local tk = type(k)
--- local tv = type(v)
--- if tv == "table" then
--- if tk == "number" then
--- result[#result+1] = format("%s",s,k)
--- toxml(v,d+step,result,step)
--- result[#result+1] = format("%s",s,k)
--- else
--- result[#result+1] = format("%s<%s>",s,k)
--- toxml(v,d+step,result,step)
--- result[#result+1] = format("%s%s>",s,k)
--- end
--- elseif tv == "string" then
--- if tk == "number" then
--- result[#result+1] = format("%s%s",s,k,lpegmatch(escape,v),k)
--- else
--- result[#result+1] = format("%s<%s>%s%s>",s,k,lpegmatch(escape,v),k)
--- end
--- elseif tk == "number" then
--- result[#result+1] = format("%s%s",s,k,tostring(v),k)
--- else
--- result[#result+1] = format("%s<%s>%s%s>",s,k,tostring(v),k)
--- end
--- end
--- end
---
--- much faster
-
-local nspaces = utilities.strings.newrepeater(" ")
-
-local function toxml(t,d,result,step)
- for k, v in sortedpairs(t) do
- local s = nspaces[d] -- inlining this is somewhat faster but gives more formatters
- local tk = type(k)
- local tv = type(v)
- if tv == "table" then
- if tk == "number" then
- result[#result+1] = formatters["%s"](s,k)
- toxml(v,d+step,result,step)
- result[#result+1] = formatters["%s"](s,k)
- else
- result[#result+1] = formatters["%s<%s>"](s,k)
- toxml(v,d+step,result,step)
- result[#result+1] = formatters["%s%s>"](s,k)
- end
- elseif tv == "string" then
- if tk == "number" then
- result[#result+1] = formatters["%s%!xml!"](s,k,v,k)
- else
- result[#result+1] = formatters["%s<%s>%!xml!%s>"](s,k,v,k)
- end
- elseif tk == "number" then
- result[#result+1] = formatters["%s%S"](s,k,v,k)
- else
- result[#result+1] = formatters["%s<%s>%S%s>"](s,k,v,k)
- end
- end
-end
-
--- function table.toxml(t,name,nobanner,indent,spaces)
--- local noroot = name == false
--- local result = (nobanner or noroot) and { } or { "" }
--- local indent = rep(" ",indent or 0)
--- local spaces = rep(" ",spaces or 1)
--- if noroot then
--- toxml( t, indent, result, spaces)
--- else
--- toxml( { [name or "root"] = t }, indent, result, spaces)
--- end
--- return concat(result,"\n")
--- end
-
-function table.toxml(t,specification)
- specification = specification or { }
- local name = specification.name
- local noroot = name == false
- local result = (specification.nobanner or noroot) and { } or { "" }
- local indent = specification.indent or 0
- local spaces = specification.spaces or 1
- if noroot then
- toxml( t, indent, result, spaces)
- else
- toxml( { [name or "data"] = t }, indent, result, spaces)
- end
- return concat(result,"\n")
-end
-
--- also experimental
-
--- encapsulate(table,utilities.tables)
--- encapsulate(table,utilities.tables,true)
--- encapsulate(table,true)
-
-function tables.encapsulate(core,capsule,protect)
- if type(capsule) ~= "table" then
- protect = true
- capsule = { }
- end
- for key, value in next, core do
- if capsule[key] then
- print(formatters["\ninvalid %s %a in %a"]("inheritance",key,core))
- os.exit()
- else
- capsule[key] = value
- end
- end
- if protect then
- for key, value in next, core do
- core[key] = nil
- end
- setmetatable(core, {
- __index = capsule,
- __newindex = function(t,key,value)
- if capsule[key] then
- print(formatters["\ninvalid %s %a in %a"]("overload",key,core))
- os.exit()
- else
- rawset(t,key,value)
- end
- end
- } )
- end
-end
-
-local function fastserialize(t,r,outer) -- no mixes
- r[#r+1] = "{"
- local n = #t
- if n > 0 then
- for i=1,n do
- local v = t[i]
- local tv = type(v)
- if tv == "string" then
- r[#r+1] = formatters["%q,"](v)
- elseif tv == "number" then
- r[#r+1] = formatters["%s,"](v)
- elseif tv == "table" then
- fastserialize(v,r)
- elseif tv == "boolean" then
- r[#r+1] = formatters["%S,"](v)
- end
- end
- else
- for k, v in next, t do
- local tv = type(v)
- if tv == "string" then
- r[#r+1] = formatters["[%q]=%q,"](k,v)
- elseif tv == "number" then
- r[#r+1] = formatters["[%q]=%s,"](k,v)
- elseif tv == "table" then
- r[#r+1] = formatters["[%q]="](k)
- fastserialize(v,r)
- elseif tv == "boolean" then
- r[#r+1] = formatters["[%q]=%S,"](k,v)
- end
- end
- end
- if outer then
- r[#r+1] = "}"
- else
- r[#r+1] = "},"
- end
- return r
-end
-
--- local f_hashed_string = formatters["[%q]=%q,"]
--- local f_hashed_number = formatters["[%q]=%s,"]
--- local f_hashed_table = formatters["[%q]="]
--- local f_hashed_true = formatters["[%q]=true,"]
--- local f_hashed_false = formatters["[%q]=false,"]
---
--- local f_indexed_string = formatters["%q,"]
--- local f_indexed_number = formatters["%s,"]
--- ----- f_indexed_true = formatters["true,"]
--- ----- f_indexed_false = formatters["false,"]
---
--- local function fastserialize(t,r,outer) -- no mixes
--- r[#r+1] = "{"
--- local n = #t
--- if n > 0 then
--- for i=1,n do
--- local v = t[i]
--- local tv = type(v)
--- if tv == "string" then
--- r[#r+1] = f_indexed_string(v)
--- elseif tv == "number" then
--- r[#r+1] = f_indexed_number(v)
--- elseif tv == "table" then
--- fastserialize(v,r)
--- elseif tv == "boolean" then
--- -- r[#r+1] = v and f_indexed_true(k) or f_indexed_false(k)
--- r[#r+1] = v and "true," or "false,"
--- end
--- end
--- else
--- for k, v in next, t do
--- local tv = type(v)
--- if tv == "string" then
--- r[#r+1] = f_hashed_string(k,v)
--- elseif tv == "number" then
--- r[#r+1] = f_hashed_number(k,v)
--- elseif tv == "table" then
--- r[#r+1] = f_hashed_table(k)
--- fastserialize(v,r)
--- elseif tv == "boolean" then
--- r[#r+1] = v and f_hashed_true(k) or f_hashed_false(k)
--- end
--- end
--- end
--- if outer then
--- r[#r+1] = "}"
--- else
--- r[#r+1] = "},"
--- end
--- return r
--- end
-
-function table.fastserialize(t,prefix) -- so prefix should contain the =
- return concat(fastserialize(t,{ prefix or "return" },true))
-end
-
-function table.deserialize(str)
- if not str or str == "" then
- return
- end
- local code = load(str)
- if not code then
- return
- end
- code = code()
- if not code then
- return
- end
- return code
-end
-
--- inspect(table.fastserialize { a = 1, b = { 4, { 5, 6 } }, c = { d = 7, e = 'f"g\nh' } })
-
-function table.load(filename)
- if filename then
- local t = io.loaddata(filename)
- if t and t ~= "" then
- t = load(t)
- if type(t) == "function" then
- t = t()
- if type(t) == "table" then
- return t
- end
- end
- end
- end
-end
-
-function table.save(filename,t,n,...)
- io.savedata(filename,serialize(t,n == nil and true or n,...))
-end
-
-local function slowdrop(t)
- local r = { }
- local l = { }
- for i=1,#t do
- local ti = t[i]
- local j = 0
- for k, v in next, ti do
- j = j + 1
- l[j] = formatters["%s=%q"](k,v)
- end
- r[i] = formatters[" {%t},\n"](l)
- end
- return formatters["return {\n%st}"](r)
-end
-
-local function fastdrop(t)
- local r = { "return {\n" }
- for i=1,#t do
- local ti = t[i]
- r[#r+1] = " {"
- for k, v in next, ti do
- r[#r+1] = formatters["%s=%q"](k,v)
- end
- r[#r+1] = "},\n"
- end
- r[#r+1] = "}"
- return concat(r)
-end
-
-function table.drop(t,slow) -- only { { a=2 }, {a=3} }
- if #t == 0 then
- return "return { }"
- elseif slow == true then
- return slowdrop(t) -- less memory
- else
- return fastdrop(t) -- some 15% faster
- end
-end
-
-function table.autokey(t,k)
- local v = { }
- t[k] = v
- return v
-end
-
-local selfmapper = { __index = function(t,k) t[k] = k return k end }
-
-function table.twowaymapper(t)
- if not t then
- t = { }
- else
- for i=0,#t do
- local ti = t[i] -- t[1] = "one"
- if ti then
- local i = tostring(i)
- t[i] = ti -- t["1"] = "one"
- t[ti] = i -- t["one"] = "1"
- end
- end
- t[""] = t[0] or ""
- end
- -- setmetatableindex(t,"key")
- setmetatable(t,selfmapper)
- return t
-end
-
+if not modules then modules = { } end modules ['util-tab'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+utilities = utilities or {}
+utilities.tables = utilities.tables or { }
+local tables = utilities.tables
+
+local format, gmatch, gsub = string.format, string.gmatch, string.gsub
+local concat, insert, remove = table.concat, table.insert, table.remove
+local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
+local type, next, rawset, tonumber, tostring, load, select = type, next, rawset, tonumber, tostring, load, select
+local lpegmatch, P, Cs, Cc = lpeg.match, lpeg.P, lpeg.Cs, lpeg.Cc
+local serialize, sortedkeys, sortedpairs = table.serialize, table.sortedkeys, table.sortedpairs
+local formatters = string.formatters
+
+local splitter = lpeg.tsplitat(".")
+
+function tables.definetable(target,nofirst,nolast) -- defines undefined tables
+ local composed, shortcut, t = nil, nil, { }
+ local snippets = lpegmatch(splitter,target)
+ for i=1,#snippets - (nolast and 1 or 0) do
+ local name = snippets[i]
+ if composed then
+ composed = shortcut .. "." .. name
+ shortcut = shortcut .. "_" .. name
+ t[#t+1] = formatters["local %s = %s if not %s then %s = { } %s = %s end"](shortcut,composed,shortcut,shortcut,composed,shortcut)
+ else
+ composed = name
+ shortcut = name
+ if not nofirst then
+ t[#t+1] = formatters["%s = %s or { }"](composed,composed)
+ end
+ end
+ end
+ if nolast then
+ composed = shortcut .. "." .. snippets[#snippets]
+ end
+ return concat(t,"\n"), composed
+end
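+
+-- A rough usage sketch; the first returned string is code that can be loaded or pasted elsewhere:
+--
+-- local code, composed = tables.definetable("a.b.c")
+--
+-- code     : a = a or { }
+--            local a_b = a.b if not a_b then a_b = { } a.b = a_b end
+--            local a_b_c = a_b.c if not a_b_c then a_b_c = { } a_b.c = a_b_c end
+-- composed : a_b.c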
+
+-- local t = tables.definedtable("a","b","c","d")
+
+function tables.definedtable(...)
+ local t = _G
+ for i=1,select("#",...) do
+ local li = select(i,...)
+ local tl = t[li]
+ if not tl then
+ tl = { }
+ t[li] = tl
+ end
+ t = tl
+ end
+ return t
+end
+
+function tables.accesstable(target,root)
+ local t = root or _G
+ for name in gmatch(target,"([^%.]+)") do
+ t = t[name]
+ if not t then
+ return
+ end
+ end
+ return t
+end
+
+function tables.migratetable(target,v,root)
+ local t = root or _G
+ local names = string.split(target,".")
+ for i=1,#names-1 do
+ local name = names[i]
+ t[name] = t[name] or { }
+ t = t[name]
+ if not t then
+ return
+ end
+ end
+ t[names[#names]] = v
+end
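+
+-- A quick sketch of the two accessors above (throwaway global names):
+--
+-- tables.migratetable("foo.bar.baz", { x = 1 })   -- creates foo.bar and sets foo.bar.baz
+-- print(tables.accesstable("foo.bar.baz").x)      -- 1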
+
+function tables.removevalue(t,value) -- todo: n
+ if value then
+ for i=1,#t do
+ if t[i] == value then
+ remove(t,i)
+ -- remove all, so no: return
+ end
+ end
+ end
+end
+
+function tables.insertbeforevalue(t,value,extra)
+ for i=1,#t do
+ if t[i] == extra then
+ remove(t,i)
+ end
+ end
+ for i=1,#t do
+ if t[i] == value then
+ insert(t,i,extra)
+ return
+ end
+ end
+ insert(t,1,extra)
+end
+
+function tables.insertaftervalue(t,value,extra)
+ for i=1,#t do
+ if t[i] == extra then
+ remove(t,i)
+ end
+ end
+ for i=1,#t do
+ if t[i] == value then
+ insert(t,i+1,extra)
+ return
+ end
+ end
+ insert(t,#t+1,extra)
+end
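+
+-- For instance (sketch):
+--
+-- local t = { "a", "b", "d" }
+-- tables.insertaftervalue(t,"b","c")    -- t is now { "a", "b", "c", "d" }
+-- tables.insertbeforevalue(t,"a","z")   -- t is now { "z", "a", "b", "c", "d" }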
+
+-- experimental
+
+local escape = Cs(Cc('"') * ((P('"')/'""' + P(1))^0) * Cc('"'))
+
+function table.tocsv(t,specification)
+ if t and #t > 0 then
+ local result = { }
+ local r = { }
+ specification = specification or { }
+ local fields = specification.fields
+ if type(fields) ~= "string" then
+ fields = sortedkeys(t[1])
+ end
+ local separator = specification.separator or ","
+ if specification.preamble == true then
+ for f=1,#fields do
+ r[f] = lpegmatch(escape,tostring(fields[f]))
+ end
+ result[1] = concat(r,separator)
+ end
+ for i=1,#t do
+ local ti = t[i]
+ for f=1,#fields do
+ local field = ti[fields[f]]
+ if type(field) == "string" then
+ r[f] = lpegmatch(escape,field)
+ else
+ r[f] = tostring(field)
+ end
+ end
+ result[#result+1] = concat(r,separator)
+ end
+ return concat(result,"\n")
+ else
+ return ""
+ end
+end
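+
+-- A small sketch with made-up records; string fields get quoted, other values pass through tostring:
+--
+-- print(table.tocsv( {
+--     { name = "foo", value = 1 },
+--     { name = "bar", value = 2 },
+-- }, { preamble = true } ))
+--
+-- "name","value"
+-- "foo",1
+-- "bar",2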
+
+-- local nspaces = utilities.strings.newrepeater(" ")
+-- local escape = Cs((P("<")/"&lt;" + P(">")/"&gt;" + P("&")/"&amp;" + P(1))^0)
+--
+-- local function toxml(t,d,result,step)
+-- for k, v in sortedpairs(t) do
+-- local s = nspaces[d]
+-- local tk = type(k)
+-- local tv = type(v)
+-- if tv == "table" then
+-- if tk == "number" then
+-- result[#result+1] = format("%s<entry n='%s'>",s,k)
+-- toxml(v,d+step,result,step)
+-- result[#result+1] = format("%s</entry>",s,k)
+-- else
+-- result[#result+1] = format("%s<%s>",s,k)
+-- toxml(v,d+step,result,step)
+-- result[#result+1] = format("%s</%s>",s,k)
+-- end
+-- elseif tv == "string" then
+-- if tk == "number" then
+-- result[#result+1] = format("%s<entry n='%s'>%s</entry>",s,k,lpegmatch(escape,v),k)
+-- else
+-- result[#result+1] = format("%s<%s>%s</%s>",s,k,lpegmatch(escape,v),k)
+-- end
+-- elseif tk == "number" then
+-- result[#result+1] = format("%s<entry n='%s'>%s</entry>",s,k,tostring(v),k)
+-- else
+-- result[#result+1] = format("%s<%s>%s</%s>",s,k,tostring(v),k)
+-- end
+-- end
+-- end
+--
+-- much faster
+
+local nspaces = utilities.strings.newrepeater(" ")
+
+local function toxml(t,d,result,step)
+ for k, v in sortedpairs(t) do
+ local s = nspaces[d] -- inlining this is somewhat faster but gives more formatters
+ local tk = type(k)
+ local tv = type(v)
+ if tv == "table" then
+ if tk == "number" then
+ result[#result+1] = formatters["%s<entry n='%s'>"](s,k)
+ toxml(v,d+step,result,step)
+ result[#result+1] = formatters["%s</entry>"](s,k)
+ else
+ result[#result+1] = formatters["%s<%s>"](s,k)
+ toxml(v,d+step,result,step)
+ result[#result+1] = formatters["%s</%s>"](s,k)
+ end
+ elseif tv == "string" then
+ if tk == "number" then
+ result[#result+1] = formatters["%s<entry n='%s'>%!xml!</entry>"](s,k,v,k)
+ else
+ result[#result+1] = formatters["%s<%s>%!xml!</%s>"](s,k,v,k)
+ end
+ elseif tk == "number" then
+ result[#result+1] = formatters["%s<entry n='%s'>%S</entry>"](s,k,v,k)
+ else
+ result[#result+1] = formatters["%s<%s>%S</%s>"](s,k,v,k)
+ end
+ end
+end
+
+-- function table.toxml(t,name,nobanner,indent,spaces)
+-- local noroot = name == false
+-- local result = (nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
+-- local indent = rep(" ",indent or 0)
+-- local spaces = rep(" ",spaces or 1)
+-- if noroot then
+-- toxml( t, indent, result, spaces)
+-- else
+-- toxml( { [name or "root"] = t }, indent, result, spaces)
+-- end
+-- return concat(result,"\n")
+-- end
+
+function table.toxml(t,specification)
+ specification = specification or { }
+ local name = specification.name
+ local noroot = name == false
+ local result = (specification.nobanner or noroot) and { } or { "<?xml version='1.0' standalone='yes' ?>" }
+ local indent = specification.indent or 0
+ local spaces = specification.spaces or 1
+ if noroot then
+ toxml( t, indent, result, spaces)
+ else
+ toxml( { [name or "data"] = t }, indent, result, spaces)
+ end
+ return concat(result,"\n")
+end
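+
+-- A minimal sketch (made-up data); numeric keys end up as entry elements, string keys as tags:
+--
+-- print(table.toxml({ title = "test", list = { "a", "b" } }, { name = "demo", nobanner = true }))
+--
+-- which gives roughly:
+--
+-- <demo>
+--  <list>
+--   <entry n='1'>a</entry>
+--   <entry n='2'>b</entry>
+--  </list>
+--  <title>test</title>
+-- </demo>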
+
+-- also experimental
+
+-- encapsulate(table,utilities.tables)
+-- encapsulate(table,utilities.tables,true)
+-- encapsulate(table,true)
+
+function tables.encapsulate(core,capsule,protect)
+ if type(capsule) ~= "table" then
+ protect = true
+ capsule = { }
+ end
+ for key, value in next, core do
+ if capsule[key] then
+ print(formatters["\ninvalid %s %a in %a"]("inheritance",key,core))
+ os.exit()
+ else
+ capsule[key] = value
+ end
+ end
+ if protect then
+ for key, value in next, core do
+ core[key] = nil
+ end
+ setmetatable(core, {
+ __index = capsule,
+ __newindex = function(t,key,value)
+ if capsule[key] then
+ print(formatters["\ninvalid %s %a in %a"]("overload",key,core))
+ os.exit()
+ else
+ rawset(t,key,value)
+ end
+ end
+ } )
+ end
+end
+
+local function fastserialize(t,r,outer) -- no mixes
+ r[#r+1] = "{"
+ local n = #t
+ if n > 0 then
+ for i=1,n do
+ local v = t[i]
+ local tv = type(v)
+ if tv == "string" then
+ r[#r+1] = formatters["%q,"](v)
+ elseif tv == "number" then
+ r[#r+1] = formatters["%s,"](v)
+ elseif tv == "table" then
+ fastserialize(v,r)
+ elseif tv == "boolean" then
+ r[#r+1] = formatters["%S,"](v)
+ end
+ end
+ else
+ for k, v in next, t do
+ local tv = type(v)
+ if tv == "string" then
+ r[#r+1] = formatters["[%q]=%q,"](k,v)
+ elseif tv == "number" then
+ r[#r+1] = formatters["[%q]=%s,"](k,v)
+ elseif tv == "table" then
+ r[#r+1] = formatters["[%q]="](k)
+ fastserialize(v,r)
+ elseif tv == "boolean" then
+ r[#r+1] = formatters["[%q]=%S,"](k,v)
+ end
+ end
+ end
+ if outer then
+ r[#r+1] = "}"
+ else
+ r[#r+1] = "},"
+ end
+ return r
+end
+
+-- local f_hashed_string = formatters["[%q]=%q,"]
+-- local f_hashed_number = formatters["[%q]=%s,"]
+-- local f_hashed_table = formatters["[%q]="]
+-- local f_hashed_true = formatters["[%q]=true,"]
+-- local f_hashed_false = formatters["[%q]=false,"]
+--
+-- local f_indexed_string = formatters["%q,"]
+-- local f_indexed_number = formatters["%s,"]
+-- ----- f_indexed_true = formatters["true,"]
+-- ----- f_indexed_false = formatters["false,"]
+--
+-- local function fastserialize(t,r,outer) -- no mixes
+-- r[#r+1] = "{"
+-- local n = #t
+-- if n > 0 then
+-- for i=1,n do
+-- local v = t[i]
+-- local tv = type(v)
+-- if tv == "string" then
+-- r[#r+1] = f_indexed_string(v)
+-- elseif tv == "number" then
+-- r[#r+1] = f_indexed_number(v)
+-- elseif tv == "table" then
+-- fastserialize(v,r)
+-- elseif tv == "boolean" then
+-- -- r[#r+1] = v and f_indexed_true(k) or f_indexed_false(k)
+-- r[#r+1] = v and "true," or "false,"
+-- end
+-- end
+-- else
+-- for k, v in next, t do
+-- local tv = type(v)
+-- if tv == "string" then
+-- r[#r+1] = f_hashed_string(k,v)
+-- elseif tv == "number" then
+-- r[#r+1] = f_hashed_number(k,v)
+-- elseif tv == "table" then
+-- r[#r+1] = f_hashed_table(k)
+-- fastserialize(v,r)
+-- elseif tv == "boolean" then
+-- r[#r+1] = v and f_hashed_true(k) or f_hashed_false(k)
+-- end
+-- end
+-- end
+-- if outer then
+-- r[#r+1] = "}"
+-- else
+-- r[#r+1] = "},"
+-- end
+-- return r
+-- end
+
+function table.fastserialize(t,prefix) -- so prefix should contain the =
+ return concat(fastserialize(t,{ prefix or "return" },true))
+end
+
+function table.deserialize(str)
+ if not str or str == "" then
+ return
+ end
+ local code = load(str)
+ if not code then
+ return
+ end
+ code = code()
+ if not code then
+ return
+ end
+ return code
+end
+
+-- inspect(table.fastserialize { a = 1, b = { 4, { 5, 6 } }, c = { d = 7, e = 'f"g\nh' } })
+
+function table.load(filename)
+ if filename then
+ local t = io.loaddata(filename)
+ if t and t ~= "" then
+ t = load(t)
+ if type(t) == "function" then
+ t = t()
+ if type(t) == "table" then
+ return t
+ end
+ end
+ end
+ end
+end
+
+function table.save(filename,t,n,...)
+ io.savedata(filename,serialize(t,n == nil and true or n,...))
+end
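+
+-- A small sketch ("demo.lua" is just a made-up filename):
+--
+-- table.save("demo.lua", { x = 1, y = { "a", "b" } })
+-- local t = table.load("demo.lua")   -- t.x == 1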
+
+local function slowdrop(t)
+ local r = { }
+ local l = { }
+ for i=1,#t do
+ local ti = t[i]
+ local j = 0
+ for k, v in next, ti do
+ j = j + 1
+ l[j] = formatters["%s=%q"](k,v)
+ end
+ r[i] = formatters[" {%t},\n"](l)
+ end
+ return formatters["return {\n%st}"](r)
+end
+
+local function fastdrop(t)
+ local r = { "return {\n" }
+ for i=1,#t do
+ local ti = t[i]
+ r[#r+1] = " {"
+ for k, v in next, ti do
+ r[#r+1] = formatters["%s=%q"](k,v)
+ end
+ r[#r+1] = "},\n"
+ end
+ r[#r+1] = "}"
+ return concat(r)
+end
+
+function table.drop(t,slow) -- only { { a=2 }, {a=3} }
+ if #t == 0 then
+ return "return { }"
+ elseif slow == true then
+ return slowdrop(t) -- less memory
+ else
+ return fastdrop(t) -- some 15% faster
+ end
+end
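+
+-- For example (sketch):
+--
+-- print(table.drop { { a = "2" }, { a = "3" } })
+--
+-- return {
+--  {a="2"},
+--  {a="3"},
+-- }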
+
+function table.autokey(t,k)
+ local v = { }
+ t[k] = v
+ return v
+end
+
+local selfmapper = { __index = function(t,k) t[k] = k return k end }
+
+function table.twowaymapper(t)
+ if not t then
+ t = { }
+ else
+ for i=0,#t do
+ local ti = t[i] -- t[1] = "one"
+ if ti then
+ local i = tostring(i)
+ t[i] = ti -- t["1"] = "one"
+ t[ti] = i -- t["one"] = "1"
+ end
+ end
+ t[""] = t[0] or ""
+ end
+ -- setmetatableindex(t,"key")
+ setmetatable(t,selfmapper)
+ return t
+end
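+
+-- A quick sketch: indices and values map onto each other, unknown keys map onto themselves:
+--
+-- local m = table.twowaymapper { [0] = "zero", "one", "two" }
+-- print(m["1"], m["one"], m["whatever"])   -- one   1   whatever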
+
diff --git a/tex/context/base/util-tpl.lua b/tex/context/base/util-tpl.lua
index 7a6abefd6..045faf1d0 100644
--- a/tex/context/base/util-tpl.lua
+++ b/tex/context/base/util-tpl.lua
@@ -1,174 +1,174 @@
-if not modules then modules = { } end modules ['util-tpl'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
--- This is experimental code. Coming from dos and windows, I've always used %whatever%
--- as template variables so let's stick to it. After all, it's easy to parse and stands
--- out well. A double %% is turned into a regular %.
-
-utilities.templates = utilities.templates or { }
-local templates = utilities.templates
-
-local trace_template = false trackers.register("templates.trace",function(v) trace_template = v end)
-local report_template = logs.reporter("template")
-
-local tostring = tostring
-local format, sub = string.format, string.sub
-local P, C, Cs, Carg, lpegmatch = lpeg.P, lpeg.C, lpeg.Cs, lpeg.Carg, lpeg.match
-
--- todo: make installable template.new
-
-local replacer
-
-local function replacekey(k,t,how,recursive)
- local v = t[k]
- if not v then
- if trace_template then
- report_template("unknown key %a",k)
- end
- return ""
- else
- v = tostring(v)
- if trace_template then
- report_template("setting key %a to value %a",k,v)
- end
- if recursive then
- return lpegmatch(replacer,v,1,t,how,recursive)
- else
- return v
- end
- end
-end
-
-local sqlescape = lpeg.replacer {
- { "'", "''" },
- { "\\", "\\\\" },
- { "\r\n", "\\n" },
- { "\r", "\\n" },
- -- { "\t", "\\t" },
-}
-
-local sqlquotedescape = lpeg.Cs(lpeg.Cc("'") * sqlescape * lpeg.Cc("'"))
-
--- escapeset : \0\1\2\3\4\5\6\7\8\9\10\11\12\13\14\15\16\17\18\19\20\21\22\23\24\25\26\27\28\29\30\31\"\\\127
--- test string: [[1\0\31test23"\\]] .. string.char(19) .. "23"
---
--- slow:
---
--- local luaescape = lpeg.replacer {
--- { '"', [[\"]] },
--- { '\\', [[\\]] },
--- { R("\0\9") * #R("09"), function(s) return "\\00" .. byte(s) end },
--- { R("\10\31") * #R("09"), function(s) return "\\0" .. byte(s) end },
--- { R("\0\31") , function(s) return "\\" .. byte(s) end },
--- }
---
--- slightly faster:
---
--- local luaescape = Cs ((
--- P('"' ) / [[\"]] +
--- P('\\') / [[\\]] +
--- Cc("\\00") * (R("\0\9") / byte) * #R("09") +
--- Cc("\\0") * (R("\10\31") / byte) * #R("09") +
--- Cc("\\") * (R("\0\31") / byte) +
--- P(1)
--- )^0)
-
-local escapers = {
- lua = function(s)
- return sub(format("%q",s),2,-2)
- end,
- sql = function(s)
- return lpegmatch(sqlescape,s)
- end,
-}
-
-local quotedescapers = {
- lua = function(s)
- return format("%q",s)
- end,
- sql = function(s)
- return lpegmatch(sqlquotedescape,s)
- end,
-}
-
-lpeg.patterns.sqlescape = sqlescape
-lpeg.patterns.sqlquotedescape = sqlquotedescape
-
-local luaescaper = escapers.lua
-local quotedluaescaper = quotedescapers.lua
-
-local function replacekeyunquoted(s,t,how,recurse) -- ".. \" "
- local escaper = how and escapers[how] or luaescaper
- return escaper(replacekey(s,t,how,recurse))
-end
-
-local function replacekeyquoted(s,t,how,recurse) -- ".. \" "
- local escaper = how and quotedescapers[how] or quotedluaescaper
- return escaper(replacekey(s,t,how,recurse))
-end
-
-local single = P("%") -- test %test% test : resolves test
-local double = P("%%") -- test 10%% test : %% becomes %
-local lquoted = P("%[") -- test '%[test]%' test : resolves to test with escaped "'s
-local rquoted = P("]%") --
-local lquotedq = P("%(") -- test %(test)% test : resolves to 'test' with escaped "'s
-local rquotedq = P(")%") --
-
-local escape = double / '%%'
-local nosingle = single / ''
-local nodouble = double / ''
-local nolquoted = lquoted / ''
-local norquoted = rquoted / ''
-local nolquotedq = lquotedq / ''
-local norquotedq = rquotedq / ''
-
-local key = nosingle * ((C((1-nosingle )^1) * Carg(1) * Carg(2) * Carg(3)) / replacekey ) * nosingle
-local quoted = nolquotedq * ((C((1-norquotedq)^1) * Carg(1) * Carg(2) * Carg(3)) / replacekeyquoted ) * norquotedq
-local unquoted = nolquoted * ((C((1-norquoted )^1) * Carg(1) * Carg(2) * Carg(3)) / replacekeyunquoted) * norquoted
-local any = P(1)
-
- replacer = Cs((unquoted + quoted + escape + key + any)^0)
-
-local function replace(str,mapping,how,recurse)
- if mapping and str then
- return lpegmatch(replacer,str,1,mapping,how or "lua",recurse or false) or str
- else
- return str
- end
-end
-
--- print(replace("test '%[x]%' test",{ x = [[a 'x' a]] }))
--- print(replace("test '%[x]%' test",{ x = true }))
--- print(replace("test '%[x]%' test",{ x = [[a 'x' a]], y = "oeps" },'sql'))
--- print(replace("test '%[x]%' test",{ x = [[a '%y%' a]], y = "oeps" },'sql',true))
--- print(replace([[test %[x]% test]],{ x = [[a "x" a]]}))
--- print(replace([[test %(x)% test]],{ x = [[a "x" a]]}))
-
-templates.replace = replace
-
-function templates.load(filename,mapping,how,recurse)
- local data = io.loaddata(filename) or ""
- if mapping and next(mapping) then
- return replace(data,mapping,how,recurse)
- else
- return data
- end
-end
-
-function templates.resolve(t,mapping,how,recurse)
- if not mapping then
- mapping = t
- end
- for k, v in next, t do
- t[k] = replace(v,mapping,how,recurse)
- end
- return t
-end
-
--- inspect(utilities.templates.replace("test %one% test", { one = "%two%", two = "two" }))
--- inspect(utilities.templates.resolve({ one = "%two%", two = "two", three = "%three%" }))
+if not modules then modules = { } end modules ['util-tpl'] = {
+ version = 1.001,
+ comment = "companion to luat-lib.mkiv",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files"
+}
+
+-- This is experimental code. Coming from dos and windows, I've always used %whatever%
+-- as template variables so let's stick to it. After all, it's easy to parse and stands
+-- out well. A double %% is turned into a regular %.
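+--
+-- A quick sketch of the intended use (see also the commented tests near the end of this file):
+--
+--   utilities.templates.replace("hello %name%, 100%% sure", { name = "world" })
+--   -- gives: hello world, 100% sure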
+
+utilities.templates = utilities.templates or { }
+local templates = utilities.templates
+
+local trace_template = false trackers.register("templates.trace",function(v) trace_template = v end)
+local report_template = logs.reporter("template")
+
+local tostring = tostring
+local format, sub = string.format, string.sub
+local P, C, Cs, Carg, lpegmatch = lpeg.P, lpeg.C, lpeg.Cs, lpeg.Carg, lpeg.match
+
+-- todo: make installable template.new
+
+local replacer
+
+local function replacekey(k,t,how,recursive)
+ local v = t[k]
+ if not v then
+ if trace_template then
+ report_template("unknown key %a",k)
+ end
+ return ""
+ else
+ v = tostring(v)
+ if trace_template then
+ report_template("setting key %a to value %a",k,v)
+ end
+ if recursive then
+ return lpegmatch(replacer,v,1,t,how,recursive)
+ else
+ return v
+ end
+ end
+end
+
+local sqlescape = lpeg.replacer {
+ { "'", "''" },
+ { "\\", "\\\\" },
+ { "\r\n", "\\n" },
+ { "\r", "\\n" },
+ -- { "\t", "\\t" },
+}
+
+local sqlquotedescape = lpeg.Cs(lpeg.Cc("'") * sqlescape * lpeg.Cc("'"))
+
+-- escapeset : \0\1\2\3\4\5\6\7\8\9\10\11\12\13\14\15\16\17\18\19\20\21\22\23\24\25\26\27\28\29\30\31\"\\\127
+-- test string: [[1\0\31test23"\\]] .. string.char(19) .. "23"
+--
+-- slow:
+--
+-- local luaescape = lpeg.replacer {
+-- { '"', [[\"]] },
+-- { '\\', [[\\]] },
+-- { R("\0\9") * #R("09"), function(s) return "\\00" .. byte(s) end },
+-- { R("\10\31") * #R("09"), function(s) return "\\0" .. byte(s) end },
+-- { R("\0\31") , function(s) return "\\" .. byte(s) end },
+-- }
+--
+-- slightly faster:
+--
+-- local luaescape = Cs ((
+-- P('"' ) / [[\"]] +
+-- P('\\') / [[\\]] +
+-- Cc("\\00") * (R("\0\9") / byte) * #R("09") +
+-- Cc("\\0") * (R("\10\31") / byte) * #R("09") +
+-- Cc("\\") * (R("\0\31") / byte) +
+-- P(1)
+-- )^0)
+
+local escapers = {
+ lua = function(s)
+ return sub(format("%q",s),2,-2)
+ end,
+ sql = function(s)
+ return lpegmatch(sqlescape,s)
+ end,
+}
+
+local quotedescapers = {
+ lua = function(s)
+ return format("%q",s)
+ end,
+ sql = function(s)
+ return lpegmatch(sqlquotedescape,s)
+ end,
+}
+
+lpeg.patterns.sqlescape = sqlescape
+lpeg.patterns.sqlquotedescape = sqlquotedescape
+
+local luaescaper = escapers.lua
+local quotedluaescaper = quotedescapers.lua
+
+local function replacekeyunquoted(s,t,how,recurse) -- ".. \" "
+ local escaper = how and escapers[how] or luaescaper
+ return escaper(replacekey(s,t,how,recurse))
+end
+
+local function replacekeyquoted(s,t,how,recurse) -- ".. \" "
+ local escaper = how and quotedescapers[how] or quotedluaescaper
+ return escaper(replacekey(s,t,how,recurse))
+end
+
+local single = P("%") -- test %test% test : resolves test
+local double = P("%%") -- test 10%% test : %% becomes %
+local lquoted = P("%[") -- test '%[test]%' test : resolves to test with escaped "'s
+local rquoted = P("]%") --
+local lquotedq = P("%(") -- test %(test)% test : resolves to 'test' with escaped "'s
+local rquotedq = P(")%") --
+
+local escape = double / '%%'
+local nosingle = single / ''
+local nodouble = double / ''
+local nolquoted = lquoted / ''
+local norquoted = rquoted / ''
+local nolquotedq = lquotedq / ''
+local norquotedq = rquotedq / ''
+
+local key = nosingle * ((C((1-nosingle )^1) * Carg(1) * Carg(2) * Carg(3)) / replacekey ) * nosingle
+local quoted = nolquotedq * ((C((1-norquotedq)^1) * Carg(1) * Carg(2) * Carg(3)) / replacekeyquoted ) * norquotedq
+local unquoted = nolquoted * ((C((1-norquoted )^1) * Carg(1) * Carg(2) * Carg(3)) / replacekeyunquoted) * norquoted
+local any = P(1)
+
+ replacer = Cs((unquoted + quoted + escape + key + any)^0)
+
+local function replace(str,mapping,how,recurse)
+ if mapping and str then
+ return lpegmatch(replacer,str,1,mapping,how or "lua",recurse or false) or str
+ else
+ return str
+ end
+end
+
+-- print(replace("test '%[x]%' test",{ x = [[a 'x' a]] }))
+-- print(replace("test '%[x]%' test",{ x = true }))
+-- print(replace("test '%[x]%' test",{ x = [[a 'x' a]], y = "oeps" },'sql'))
+-- print(replace("test '%[x]%' test",{ x = [[a '%y%' a]], y = "oeps" },'sql',true))
+-- print(replace([[test %[x]% test]],{ x = [[a "x" a]]}))
+-- print(replace([[test %(x)% test]],{ x = [[a "x" a]]}))
+
+templates.replace = replace
+
+function templates.load(filename,mapping,how,recurse)
+ local data = io.loaddata(filename) or ""
+ if mapping and next(mapping) then
+ return replace(data,mapping,how,recurse)
+ else
+ return data
+ end
+end
+
+function templates.resolve(t,mapping,how,recurse)
+ if not mapping then
+ mapping = t
+ end
+ for k, v in next, t do
+ t[k] = replace(v,mapping,how,recurse)
+ end
+ return t
+end
+
+-- inspect(utilities.templates.replace("test %one% test", { one = "%two%", two = "two" }))
+-- inspect(utilities.templates.resolve({ one = "%two%", two = "two", three = "%three%" }))
diff --git a/tex/context/base/x-asciimath.lua b/tex/context/base/x-asciimath.lua
index 5ef741ce3..9f4021212 100644
--- a/tex/context/base/x-asciimath.lua
+++ b/tex/context/base/x-asciimath.lua
@@ -1,270 +1,270 @@
-if not modules then modules = { } end modules ['x-asciimath'] = {
- version = 1.001,
- comment = "companion to x-asciimath.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
---[[ldx--
-
-Some backgrounds are discussed in x-asciimath.mkiv.