author     Hans Hagen <pragma@wxs.nl>  2013-02-05 13:35:00 +0100
committer  Hans Hagen <pragma@wxs.nl>  2013-02-05 13:35:00 +0100
commit     baae751ef69bcc2c205ec969dbd5788fe0ac26e8 (patch)
tree       48d78cbe66786b60e549f4e21be3bcf2b47262aa
parent     e2f3737b9e45d1cf4d553b3f6cce71d5428bfe51 (diff)
download   context-baae751ef69bcc2c205ec969dbd5788fe0ac26e8.tar.gz
beta 2013.02.05 13:35
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-cld.lua  1
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-lua-longstring.lua  8
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-lua.lua  25
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-mps.lua  4
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-tex.lua  37
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-txt.lua  4
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-web.lua  57
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-xml-comment.lua  5
-rw-r--r--  context/data/scite/lexers/scite-context-lexer-xml.lua  19
-rw-r--r--  context/data/scite/lexers/scite-context-lexer.lua  589
-rw-r--r--  context/data/scite/lexers/themes/scite-context-theme.lua  1
-rw-r--r--  context/data/scite/scite-context-data-interfaces.properties  1570
-rw-r--r--  context/data/scite/scite-context-data-metapost.properties  14
-rw-r--r--  context/data/scite/scite-context-data-tex.properties  98
-rw-r--r--  context/data/scite/scite-context-external.properties  12
-rw-r--r--  context/data/scite/scite-context-internal.properties  4
-rw-r--r--  context/data/scite/scite-context.properties  20
-rw-r--r--  metapost/context/base/mp-grap.mpiv  173
-rw-r--r--  scripts/context/lua/mtx-package.lua  22
-rw-r--r--  scripts/context/lua/mtxrun.lua  132
-rw-r--r--  scripts/context/stubs/mswin/mtxrun.lua  132
-rwxr-xr-x  scripts/context/stubs/unix/mtxrun  132
-rw-r--r--  tex/context/base/cont-new.mkii  2
-rw-r--r--  tex/context/base/cont-new.mkiv  2
-rw-r--r--  tex/context/base/context-version.pdf  bin 4124 -> 4107 bytes
-rw-r--r--  tex/context/base/context-version.png  bin 40232 -> 40160 bytes
-rw-r--r--  tex/context/base/context.mkii  2
-rw-r--r--  tex/context/base/context.mkiv  2
-rw-r--r--  tex/context/base/grph-inc.lua  87
-rw-r--r--  tex/context/base/l-os.lua  12
-rw-r--r--  tex/context/base/luat-env.lua  234
-rw-r--r--  tex/context/base/luat-lib.mkiv  1
-rw-r--r--  tex/context/base/m-graph.mkiv  22
-rw-r--r--  tex/context/base/status-files.pdf  bin 24744 -> 24719 bytes
-rw-r--r--  tex/context/base/status-lua.pdf  bin 208557 -> 208685 bytes
-rw-r--r--  tex/context/base/strc-flt.mkvi  8
-rw-r--r--  tex/context/base/trac-lmx.lua  9
-rw-r--r--  tex/context/base/util-mrg.lua  4
-rw-r--r--  tex/context/base/util-sql-tickets.lua  2
-rw-r--r--  tex/context/base/util-sql.lua  4
-rw-r--r--  tex/generic/context/luatex/luatex-basics-nod.lua  9
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-merged.lua  21803
-rw-r--r--  tex/generic/context/luatex/luatex-fonts.lua  126
43 files changed, 11306 insertions, 14082 deletions
diff --git a/context/data/scite/lexers/scite-context-lexer-cld.lua b/context/data/scite/lexers/scite-context-lexer-cld.lua
index 4aa2901d2..1e30c18a2 100644
--- a/context/data/scite/lexers/scite-context-lexer-cld.lua
+++ b/context/data/scite/lexers/scite-context-lexer-cld.lua
@@ -7,7 +7,6 @@ local info = {
}
local lexer = lexer
-local token = lexer.token
local cldlexer = { _NAME = "cld", _FILENAME = "scite-context-lexer-cld" }
local whitespace = lexer.WHITESPACE -- maybe we need to fix this
diff --git a/context/data/scite/lexers/scite-context-lexer-lua-longstring.lua b/context/data/scite/lexers/scite-context-lexer-lua-longstring.lua
index 26bdb8dbc..fdec301be 100644
--- a/context/data/scite/lexers/scite-context-lexer-lua-longstring.lua
+++ b/context/data/scite/lexers/scite-context-lexer-lua-longstring.lua
@@ -1,3 +1,11 @@
+local info = {
+ version = 1.002,
+ comment = "scintilla lpeg lexer for lua",
+ author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright = "PRAGMA ADE / ConTeXt Development Team",
+ license = "see context related readme files",
+}
+
local lexer = lexer
local token = lexer.token
local P = lpeg.P
diff --git a/context/data/scite/lexers/scite-context-lexer-lua.lua b/context/data/scite/lexers/scite-context-lexer-lua.lua
index e8a52c991..e95d90977 100644
--- a/context/data/scite/lexers/scite-context-lexer-lua.lua
+++ b/context/data/scite/lexers/scite-context-lexer-lua.lua
@@ -189,7 +189,6 @@ lualexer._rules = {
{ 'number', number },
{ 'longcomment', longcomment },
{ 'shortcomment', shortcomment },
--- { 'number', number },
{ 'label', gotolabel },
{ 'operator', operator },
{ 'rest', rest },
@@ -243,28 +242,30 @@ lualexer._rules = {
lualexer._tokenstyles = context.styleset
+-- lualexer._foldpattern = R("az")^2 + S("{}[]") -- separate entry else interference
+
+lualexer._foldpattern = (P("end") + P("if") + P("do") + P("function") + P("repeat") + P("until")) * P(#(1 - R("az")))
+ + S("{}[]")
+
lualexer._foldsymbols = {
_patterns = {
- -- '%l+', -- costly
- -- '%l%l+',
'[a-z][a-z]+',
- -- '[%({%)}%[%]]',
'[{}%[%]]',
},
['keyword'] = { -- challenge: if=0 then=1 else=-1 elseif=-1
- ['if'] = 1,
- ['end'] = -1,
- ['do'] = 1,
- ['function'] = 1,
- ['repeat'] = 1,
+ ['if'] = 1, -- if .. [then|else] .. end
+ ['do'] = 1, -- [while] do .. end
+ ['function'] = 1, -- function .. end
+ ['repeat'] = 1, -- repeat .. until
['until'] = -1,
+ ['end'] = -1,
},
['comment'] = {
['['] = 1, [']'] = -1,
},
- ['quote'] = { -- to be tested
- ['['] = 1, [']'] = -1,
- },
+ -- ['quote'] = { -- confusing
+ -- ['['] = 1, [']'] = -1,
+ -- },
['special'] = {
-- ['('] = 1, [')'] = -1,
['{'] = 1, ['}'] = -1,
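
A minimal illustration (not part of the patch) of how the keyword weights above are meant to add up: +1 for if/do/function/repeat and -1 for end/until, summed per line, gives the nesting depth the folder reports. The real folder only counts matches whose style is 'keyword' (via get_style_at), so occurrences inside strings or comments do not count; this sketch skips that check.

local weights = {
    ["if"] = 1, ["do"] = 1, ["function"] = 1, ["repeat"] = 1,
    ["until"] = -1, ["end"] = -1,
}

local chunk = [[
local function f(n)
    if n > 0 then
        repeat n = n - 1 until n == 0
    end
end
]]

local level = 0
for line in chunk:gmatch("[^\n]+") do
    for word in line:gmatch("%a+") do
        level = level + (weights[word] or 0) -- no style check here, unlike the lexer
    end
    print(level, line) -- levels per line: 1, 2, 2, 1, 0 (balanced chunk)
end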
diff --git a/context/data/scite/lexers/scite-context-lexer-mps.lua b/context/data/scite/lexers/scite-context-lexer-mps.lua
index 27a2e2979..96c5e9c3c 100644
--- a/context/data/scite/lexers/scite-context-lexer-mps.lua
+++ b/context/data/scite/lexers/scite-context-lexer-mps.lua
@@ -129,9 +129,11 @@ metafunlexer._rules = {
metafunlexer._tokenstyles = context.styleset
+metafunlexer._foldpattern = R("az")^2 -- separate entry else interference
+
metafunlexer._foldsymbols = {
_patterns = {
- "%l+",
+ '[a-z][a-z]+',
},
["primitive"] = {
["beginfig"] = 1,
diff --git a/context/data/scite/lexers/scite-context-lexer-tex.lua b/context/data/scite/lexers/scite-context-lexer-tex.lua
index 4e8fba8e7..898602f03 100644
--- a/context/data/scite/lexers/scite-context-lexer-tex.lua
+++ b/context/data/scite/lexers/scite-context-lexer-tex.lua
@@ -241,13 +241,15 @@ local p_unit = P("pt") + P("bp") + P("sp") + P("mm") + P("cm") +
-- if validwords then
-- return checkedword(validwords,validminimum,s,i)
-- else
--- return true, { "text", i }
+-- -- return true, { "text", i }
+-- return true, "text", i
-- end
-- end)
--
-- So we use this one instead:
-local p_word = Ct( iwordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() ) -- the function can be inlined
+----- p_word = Ct( iwordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() ) -- the function can be inlined
+local p_word = iwordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() -- the function can be inlined
----- p_text = (1 - p_grouping - p_special - p_extra - backslash - space + hspace)^1
@@ -460,22 +462,33 @@ contextlexer._tokenstyles = context.styleset
-- contextlexer._tokenstyles[#contextlexer._tokenstyles + 1] = { cldlexer._NAME..'_whitespace', lexer.style_whitespace }
-- contextlexer._tokenstyles[#contextlexer._tokenstyles + 1] = { mpslexer._NAME..'_whitespace', lexer.style_whitespace }
+local environment = {
+ ["\\start"] = 1, ["\\stop"] = -1,
+ -- ["\\begin"] = 1, ["\\end" ] = -1,
+}
+
+-- local block = {
+-- ["\\begin"] = 1, ["\\end" ] = -1,
+-- }
-local folds = {
- ["\\start"] = 1, ["\\stop" ] = -1,
- ["\\begin"] = 1, ["\\end" ] = -1,
+local group = {
+ ["{"] = 1, ["}"] = -1,
}
-contextlexer._foldsymbols = {
+contextlexer._foldpattern = P("\\" ) * (P("start") + P("stop")) + S("{}") -- separate entry else interference
+
+contextlexer._foldsymbols = { -- these need to be style references
_patterns = {
"\\start", "\\stop", -- regular environments
- "\\begin", "\\end", -- (moveable) blocks
+ -- "\\begin", "\\end", -- (moveable) blocks
+ "[{}]",
},
- ["helper"] = folds,
- ["data"] = folds,
- ["command"] = folds,
- ["user"] = folds, -- csname
- ["grouping"] = folds,
+ ["command"] = environment,
+ ["constant"] = environment,
+ ["data"] = environment,
+ ["user"] = environment,
+ ["embedded"] = environment,
+ ["grouping"] = group,
}
return contextlexer
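
A hedged sketch (not from the patch) of how the new _foldpattern cooperates with the level tables above: the LPeg pattern locates candidate symbols, and an environment/group style table decides whether each one opens (+1) or closes (-1) a fold. The real fold_by_parsing additionally filters every match through _foldsymbols by the style at the match position (command, grouping, ...); that lookup is left out here.

local lpeg = require("lpeg")
local P, S, C = lpeg.P, lpeg.S, lpeg.C

local foldpattern = P("\\") * (P("start") + P("stop")) + S("{}") -- as in the lexer
local levels = {
    ["\\start"] = 1, ["\\stop"] = -1,
    ["{"]       = 1, ["}"]      = -1,
}

local level  = 0
local finder = (C(foldpattern) / function(s)
    level = level + (levels[s] or 0)
end + P(1))^0

lpeg.match(finder, [[\startitemize {foo} \stopitemize]])
print(level) -- 0: every opener is balanced by a closer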
diff --git a/context/data/scite/lexers/scite-context-lexer-txt.lua b/context/data/scite/lexers/scite-context-lexer-txt.lua
index 4c4742d54..fe062fb94 100644
--- a/context/data/scite/lexers/scite-context-lexer-txt.lua
+++ b/context/data/scite/lexers/scite-context-lexer-txt.lua
@@ -54,7 +54,8 @@ local t_preamble =
token("preamble", p_preamble)
local t_word =
- Ct( wordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() ) -- the function can be inlined
+-- Ct( wordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() ) -- the function can be inlined
+ wordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() -- the function can be inlined
local t_text =
token("default", wordtoken^1)
@@ -73,6 +74,7 @@ textlexer._rules = {
{ "rest", t_rest },
}
+textlexer._LEXBYLINE = true -- new (needs testing, not yet as the system changed in 3.24)
textlexer._tokenstyles = context.styleset
return textlexer
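
For reference, a small sketch (not in the patch) of what the un-Ct'ed word token now yields: the style name chosen by the word checker followed by the end position, i.e. the two flat values the 3.24 token stream expects. The styleofword stand-in and the validwords table below are simplified placeholders for the real helpers.

local lpeg = require("lpeg")
local R, Cp = lpeg.R, lpeg.Cp

local validwords = { context = "ConTeXt" } -- hypothetical word list, keys lowercased

local function styleofword(words, minimum, s) -- simplified stand-in
    if not words or #s < minimum then
        return "text"
    elseif words[s:lower()] then
        return "okay"
    else
        return "error"
    end
end

local wordpattern = R("az","AZ")^3
local t_word = wordpattern / function(s)
    return styleofword(validwords, 3, s)
end * Cp()

print(lpeg.match(t_word, "ConTeXt")) --> okay   8
print(lpeg.match(t_word, "tpyo"))    --> error  5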
diff --git a/context/data/scite/lexers/scite-context-lexer-web.lua b/context/data/scite/lexers/scite-context-lexer-web.lua
index 72068ccb2..f59a3205d 100644
--- a/context/data/scite/lexers/scite-context-lexer-web.lua
+++ b/context/data/scite/lexers/scite-context-lexer-web.lua
@@ -99,7 +99,7 @@ local endweb = P("@c")
local webcomment = token("comment", #beginweb * startofline * beginweb * (1-endweb)^0 * endweb)
-local texlexer = lexer.load('scite-context-lexer-tex')
+local texlexer = lexer.load('scite-context-lexer-tex')
lexer.embed_lexer(weblexer, texlexer, #beginweb * startofline * token("comment",beginweb), token("comment",endweb))
@@ -121,30 +121,35 @@ weblexer._rules = {
weblexer._tokenstyles = context.styleset
--- weblexer._foldsymbols = {
--- _patterns = {
--- -- '%l+', -- costly
--- '[{}]',
--- '/%*',
--- '%*/',
--- -- '//',
--- },
--- ["macro"] = {
--- ['region'] = 1,
--- ['endregion'] = -1,
--- ['if'] = 1,
--- ['ifdef'] = 1,
--- ['ifndef'] = 1,
--- ['endif'] = -1,
--- },
--- ["operator"] = {
--- ['{'] = 1,
--- ['}'] = -1,
--- },
--- ["comment"] = {
--- ['/*'] = 1,
--- ['*/'] = -1,
--- }
--- }
+weblexer._foldpattern = P("/*") + P("*/") + S("{}") -- separate entry else interference
+
+weblexer._foldsymbols = {
+ _patterns = {
+ '[{}]',
+ '/%*',
+ '%*/',
+ },
+ -- ["data"] = { -- macro
+ -- ['region'] = 1,
+ -- ['endregion'] = -1,
+ -- ['if'] = 1,
+ -- ['ifdef'] = 1,
+ -- ['ifndef'] = 1,
+ -- ['endif'] = -1,
+ -- },
+ ["special"] = { -- operator
+ ['{'] = 1,
+ ['}'] = -1,
+ },
+ ["comment"] = {
+ ['/*'] = 1,
+ ['*/'] = -1,
+ }
+}
+
+-- -- by indentation:
+--
+weblexer._foldpatterns = nil
+weblexer._foldsymbols = nil
return weblexer
diff --git a/context/data/scite/lexers/scite-context-lexer-xml-comment.lua b/context/data/scite/lexers/scite-context-lexer-xml-comment.lua
index eab3b2a61..104310f94 100644
--- a/context/data/scite/lexers/scite-context-lexer-xml-comment.lua
+++ b/context/data/scite/lexers/scite-context-lexer-xml-comment.lua
@@ -27,12 +27,15 @@ xmlcommentlexer._rules = {
xmlcommentlexer._tokenstyles = context.styleset
+xmlcommentlexer._foldpattern = P("<!--") + P("-->")
+
xmlcommentlexer._foldsymbols = {
_patterns = {
"<%!%-%-", "%-%->", -- comments
},
["comment"] = {
- ["<!--"] = 1, ["-->" ] = -1,
+ ["<!--"] = 1,
+ ["-->" ] = -1,
}
}
diff --git a/context/data/scite/lexers/scite-context-lexer-xml.lua b/context/data/scite/lexers/scite-context-lexer-xml.lua
index 40c46f20a..9d5199a64 100644
--- a/context/data/scite/lexers/scite-context-lexer-xml.lua
+++ b/context/data/scite/lexers/scite-context-lexer-xml.lua
@@ -95,7 +95,8 @@ local p_preamble = Cmt(#P("<?xml "), function(input,i,_) -- todo: utf bomb
end)
local p_word =
- Ct( iwordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() ) -- the function can be inlined
+-- Ct( iwordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() ) -- the function can be inlined
+ iwordpattern / function(s) return styleofword(validwords,validminimum,s) end * Cp() -- the function can be inlined
local p_rest =
token("default", any)
@@ -303,9 +304,9 @@ xmllexer._rules = {
{ "whitespace", p_spacing },
{ "preamble", p_preamble },
{ "word", p_word },
--- { "text", p_text },
--- { "comment", p_comment },
--- { "cdata", p_cdata },
+ -- { "text", p_text },
+ -- { "comment", p_comment },
+ -- { "cdata", p_cdata },
{ "doctype", p_doctype },
{ "instruction", p_instruction },
{ "close", p_close },
@@ -317,12 +318,18 @@ xmllexer._rules = {
xmllexer._tokenstyles = context.styleset
+xmllexer._foldpattern = P("</") + P("<") + P("/>") -- separate entry else interference
+
xmllexer._foldsymbols = { -- somehow doesn't work yet
_patterns = {
- "[<>]",
+ "</",
+ "/>",
+ "<",
},
["keyword"] = {
- ["<"] = 1, [">"] = -1,
+ ["</"] = -1,
+ ["/>"] = -1,
+ ["<"] = 1,
},
}
diff --git a/context/data/scite/lexers/scite-context-lexer.lua b/context/data/scite/lexers/scite-context-lexer.lua
index 27d7bd37e..7883177b4 100644
--- a/context/data/scite/lexers/scite-context-lexer.lua
+++ b/context/data/scite/lexers/scite-context-lexer.lua
@@ -1,5 +1,5 @@
local info = {
- version = 1.002,
+ version = 1.324,
comment = "basics for scintilla lpeg lexer for context/metafun",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright = "PRAGMA ADE / ConTeXt Development Team",
@@ -8,6 +8,12 @@ local info = {
}
+-- todo: move all code here
+-- todo: explore adapted dll ... properties + init
+
+-- The fold and lex functions are copied and patched from original code by Mitchell (see
+-- lexer.lua). All errors are mine.
+--
-- Starting with SciTE version 3.20 there is an issue with coloring. As we still lack
-- a connection with scite itself (properties as well as printing to the log pane) we
-- cannot trace this (on windows). As far as I can see, there are no fundamental
@@ -16,9 +22,11 @@ local info = {
-- Lua lexer and no brace highlighting either. Interesting is that it does work ok in
-- the cld lexer (so the Lua code is okay). Also the fact that char-def.lua lexes fast
-- is a signal that the lexer quits somewhere halfway.
-
--- The fold and lex functions are copied and patched from original code by Mitchell (see
--- lexer.lua). All errors are mine.
+--
+-- After checking 3.24 and adapting to the new lexer tables things are okay again. So,
+-- this version assumes 3.24 or higher. In 3.24 we have a different token result, i.e. no
+-- longer a { tag, pattern } but just two return values. I didn't check other changes but
+-- will do that when I run into issues.
--
-- I've considered making a whole copy and patch the other functions too as we need
-- an extra nesting model. However, I don't want to maintain too much. An unfortunate
@@ -38,7 +46,8 @@ local info = {
-- and the cursor is at the last line of a 200K line file. Optimizing the fold function
-- brought down loading of char-def.lua from 14 sec => 8 sec. Replacing the word_match
-- function and optimizing the lex function gained another 2+ seconds. A 6 second load
--- is quite ok for me.
+-- is quite ok for me. The changed lexer table structure (no subtables) brings loading
+-- down to a few seconds.
--
-- When the lexer path is copied to the textadept lexer path, and the theme definition to
-- theme path (as lexer.lua), the lexer works there as well. When I have time and motive
@@ -61,7 +70,7 @@ local info = {
--
-- Eventually it might be safer to copy the other methods from lexer.lua here as well so
-- that we have no dependencies, apart from the c library (for which at some point the api
--- will be stable I guess).
+-- will be stable I hope).
--
-- It's a pitty that there is no scintillua library for the OSX version of scite. Even
-- better would be to have the scintillua library as integral part of scite as that way I
@@ -73,7 +82,7 @@ local info = {
local lpeg = require 'lpeg'
-local R, P, S, C, V, Cp, Cs, Ct, Cmt, Cc, Cf, Cg = lpeg.R, lpeg.P, lpeg.S, lpeg.C, lpeg.V, lpeg.Cp, lpeg.Cs, lpeg.Ct, lpeg.Cmt, lpeg.Cc, lpeg.Cf, lpeg.Cg
+local R, P, S, C, V, Cp, Cs, Ct, Cmt, Cc, Cf, Cg, Carg = lpeg.R, lpeg.P, lpeg.S, lpeg.C, lpeg.V, lpeg.Cp, lpeg.Cs, lpeg.Ct, lpeg.Cmt, lpeg.Cc, lpeg.Cf, lpeg.Cg, lpeg.Carg
local lpegmatch = lpeg.match
local find, gmatch, match, lower, upper, gsub = string.find, string.gmatch, string.match, string.lower, string.upper, string.gsub
local concat = table.concat
@@ -259,26 +268,53 @@ end
patterns.wordtoken = R("az","AZ","\127\255")
patterns.wordpattern = patterns.wordtoken^3 -- todo: if limit and #s < limit then
+-- -- pre 3.24:
+--
+-- function context.checkedword(validwords,validminimum,s,i) -- ,limit
+-- if not validwords then -- or #s < validminimum then
+-- return true, { "text", i } -- { "default", i }
+-- else
+-- -- keys are lower
+-- local word = validwords[s]
+-- if word == s then
+-- return true, { "okay", i } -- exact match
+-- elseif word then
+-- return true, { "warning", i } -- case issue
+-- else
+-- local word = validwords[lower(s)]
+-- if word == s then
+-- return true, { "okay", i } -- exact match
+-- elseif word then
+-- return true, { "warning", i } -- case issue
+-- elseif upper(s) == s then
+-- return true, { "warning", i } -- probably a logo or acronym
+-- else
+-- return true, { "error", i }
+-- end
+-- end
+-- end
+-- end
+
function context.checkedword(validwords,validminimum,s,i) -- ,limit
if not validwords then -- or #s < validminimum then
- return true, { "text", i } -- { "default", i }
+ return true, "text", i -- { "default", i }
else
-- keys are lower
local word = validwords[s]
if word == s then
- return true, { "okay", i } -- exact match
+ return true, "okay", i -- exact match
elseif word then
- return true, { "warning", i } -- case issue
+ return true, "warning", i -- case issue
else
local word = validwords[lower(s)]
if word == s then
- return true, { "okay", i } -- exact match
+ return true, "okay", i -- exact match
elseif word then
- return true, { "warning", i } -- case issue
+ return true, "warning", i -- case issue
elseif upper(s) == s then
- return true, { "warning", i } -- probably a logo or acronym
+ return true, "warning", i -- probably a logo or acronym
else
- return true, { "error", i }
+ return true, "error", i
end
end
end
@@ -325,29 +361,57 @@ setmetatable(h_table, { __index = function(t,level) local v = { level, FOLD_HEAD
setmetatable(b_table, { __index = function(t,level) local v = { level, FOLD_BLANK } t[level] = v return v end })
setmetatable(n_table, { __index = function(t,level) local v = { level } t[level] = v return v end })
--- local newline = P("\r\n") + S("\r\n")
--- local splitlines = Ct( ( Ct ( (Cp() * Cs((1-newline)^1) * newline^-1) + (Cp() * Cc("") * newline) ) )^0)
+-- -- todo: move the local functions outside (see below) .. old variant < 3.24
--
--- local lines = lpegmatch(splitlines,text) -- iterating over lines is faster
--- for i=1, #lines do
--- local li = lines[i]
--- local line = li[2]
--- if line ~= "" then
--- local pos = li[1]
+-- local newline = P("\r\n") + S("\r\n")
+-- local p_yes = Cp() * Cs((1-newline)^1) * newline^-1
+-- local p_nop = newline
+--
+-- local function fold_by_parsing(text,start_pos,start_line,start_level,lexer)
+-- local foldsymbols = lexer._foldsymbols
+-- if not foldsymbols then
+-- return { }
+-- end
+-- local patterns = foldsymbols._patterns
+-- if not patterns then
+-- return { }
+-- end
+-- local nofpatterns = #patterns
+-- if nofpatterns == 0 then
+-- return { }
+-- end
+-- local folds = { }
+-- local line_num = start_line
+-- local prev_level = start_level
+-- local current_level = prev_level
+-- local validmatches = foldsymbols._validmatches
+-- if not validmatches then
+-- validmatches = { }
+-- for symbol, matches in next, foldsymbols do -- whatever = { start = 1, stop = -1 }
+-- if not find(symbol,"^_") then -- brrr
+-- for s, _ in next, matches do
+-- validmatches[s] = true
+-- end
+-- end
+-- end
+-- foldsymbols._validmatches = validmatches
+-- end
+-- -- of course we could instead build a nice lpeg checker .. something for
+-- -- a rainy day with a stack of new cd's at hand
+-- local function action_y(pos,line)
-- for i=1,nofpatterns do
-- for s, m in gmatch(line,patterns[i]) do
--- if hash[m] then
--- local symbols = fold_symbols[get_style_at(start_pos + pos + s - 1)]
+-- if validmatches[m] then
+-- local symbols = foldsymbols[get_style_at(start_pos + pos + s - 1)]
-- if symbols then
--- local l = symbols[m]
--- if l then
--- local t = type(l)
--- if t == 'number' then
--- current_level = current_level + l
--- elseif t == 'function' then
--- current_level = current_level + l(text, pos, line, s, match)
+-- local action = symbols[m]
+-- if action then
+-- if type(action) == 'number' then -- we could store this in validmatches if there was only one symbol category
+-- current_level = current_level + action
+-- else
+-- current_level = current_level + action(text,pos,line,s,m)
-- end
--- if current_level < FOLD_BASE then -- integrate in previous
+-- if current_level < FOLD_BASE then
-- current_level = FOLD_BASE
-- end
-- end
@@ -361,128 +425,298 @@ setmetatable(n_table, { __index = function(t,level) local v = { level
-- folds[line_num] = n_table[prev_level] -- { prev_level }
-- end
-- prev_level = current_level
--- else
+-- line_num = line_num + 1
+-- end
+-- local function action_n()
-- folds[line_num] = b_table[prev_level] -- { prev_level, FOLD_BLANK }
+-- line_num = line_num + 1
+-- end
+-- if lexer._reset_parser then
+-- lexer._reset_parser()
-- end
--- line_num = line_num + 1
+-- local lpegpattern = (p_yes/action_y + p_nop/action_n)^0 -- not too efficient but indirect function calls are neither but
+-- lpegmatch(lpegpattern,text) -- keys are not pressed that fast ... large files are slow anyway
+-- return folds
-- end
+-- The 3.24 variant; no longer subtable optimization is needed:
+
local newline = P("\r\n") + S("\r\n")
local p_yes = Cp() * Cs((1-newline)^1) * newline^-1
local p_nop = newline
+local folders = { }
+
local function fold_by_parsing(text,start_pos,start_line,start_level,lexer)
- local foldsymbols = lexer._foldsymbols
- if not foldsymbols then
- return { }
- end
- local patterns = foldsymbols._patterns
- if not patterns then
- return { }
- end
- local nofpatterns = #patterns
- if nofpatterns == 0 then
- return { }
- end
- local folds = { }
- local line_num = start_line
- local prev_level = start_level
- local current_level = prev_level
- local validmatches = foldsymbols._validmatches
- if not validmatches then
- validmatches = { }
- for symbol, matches in next, foldsymbols do -- whatever = { start = 1, stop = -1 }
- if not find(symbol,"^_") then -- brrr
- for s, _ in next, matches do
- validmatches[s] = true
+ local folder = folders[lexer]
+ if not folder then
+ --
+ local pattern, folds, text, start_pos, line_num, prev_level, current_level
+ --
+ local fold_symbols = lexer._foldsymbols
+ local fold_pattern = lexer._foldpattern -- use lpeg instead (context extension)
+ --
+ if fold_pattern then
+ -- if no functions are found then we could have a faster one
+
+ -- fold_pattern = Cp() * C(fold_pattern) * Carg(1) / function(s,match,pos)
+ -- local symbols = fold_symbols[get_style_at(start_pos + pos + s - 1)]
+ -- local l = symbols and symbols[match]
+ -- if l then
+ -- local t = type(l)
+ -- if t == 'number' then
+ -- current_level = current_level + l
+ -- elseif t == 'function' then
+ -- current_level = current_level + l(text, pos, line, s, match)
+ -- end
+ -- end
+ -- end
+ -- fold_pattern = (fold_pattern + P(1))^0
+ -- local action_y = function(pos,line)
+ -- lpegmatch(fold_pattern,line,1,pos)
+ -- folds[line_num] = prev_level
+ -- if current_level > prev_level then
+ -- folds[line_num] = prev_level + FOLD_HEADER
+ -- end
+ -- if current_level < FOLD_BASE then
+ -- current_level = FOLD_BASE
+ -- end
+ -- prev_level = current_level
+ -- line_num = line_num + 1
+ -- end
+ -- local action_n = function()
+ -- folds[line_num] = prev_level + FOLD_BLANK
+ -- line_num = line_num + 1
+ -- end
+ -- pattern = (p_yes/action_y + p_nop/action_n)^0
+
+ fold_pattern = Cp() * C(fold_pattern) / function(s,match)
+ local symbols = fold_symbols[get_style_at(start_pos + s)]
+ if symbols then
+ local l = symbols[match]
+ if l then
+ current_level = current_level + l
+ end
end
end
- end
- foldsymbols._validmatches = validmatches
- end
- local function action_y(pos,line) -- we can consider moving the local functions outside (drawback: folds is kept)
- for i=1,nofpatterns do
- for s, m in gmatch(line,patterns[i]) do
- if validmatches[m] then
- local symbols = foldsymbols[get_style_at(start_pos + pos + s - 1)]
- if symbols then
- local action = symbols[m]
- if action then
- if type(action) == 'number' then -- we could store this in validmatches if there was only one symbol category
- current_level = current_level + action
- else
- current_level = current_level + action(text,pos,line,s,m)
- end
- if current_level < FOLD_BASE then
- current_level = FOLD_BASE
- end
+ local action_y = function()
+ folds[line_num] = prev_level
+ if current_level > prev_level then
+ folds[line_num] = prev_level + FOLD_HEADER
+ end
+ if current_level < FOLD_BASE then
+ current_level = FOLD_BASE
+ end
+ prev_level = current_level
+ line_num = line_num + 1
+ end
+ local action_n = function()
+ folds[line_num] = prev_level + FOLD_BLANK
+ line_num = line_num + 1
+ end
+ pattern = ((fold_pattern + (1-newline))^1 * newline / action_y + newline/action_n)^0
+
+ else
+ -- the traditional one but a bit optimized
+ local fold_symbols_patterns = fold_symbols._patterns
+ local action_y = function(pos,line)
+ for j = 1, #fold_symbols_patterns do
+ for s, match in gmatch(line,fold_symbols_patterns[j]) do -- '()('..patterns[i]..')'
+ local symbols = fold_symbols[get_style_at(start_pos + pos + s - 1)]
+ local l = symbols and symbols[match]
+ local t = type(l)
+ if t == 'number' then
+ current_level = current_level + l
+ elseif t == 'function' then
+ current_level = current_level + l(text, pos, line, s, match)
end
end
end
+ folds[line_num] = prev_level
+ if current_level > prev_level then
+ folds[line_num] = prev_level + FOLD_HEADER
+ end
+ if current_level < FOLD_BASE then
+ current_level = FOLD_BASE
+ end
+ prev_level = current_level
+ line_num = line_num + 1
+ end
+ local action_n = function()
+ folds[line_num] = prev_level + FOLD_BLANK
+ line_num = line_num + 1
end
+ pattern = (p_yes/action_y + p_nop/action_n)^0
end
- if current_level > prev_level then
- folds[line_num] = h_table[prev_level] -- { prev_level, FOLD_HEADER }
- else
- folds[line_num] = n_table[prev_level] -- { prev_level }
+ --
+ local reset_parser = lexer._reset_parser
+ --
+ folder = function(_text_,_start_pos_,_start_line_,_start_level_)
+ if reset_parser then
+ reset_parser()
+ end
+ folds = { }
+ text = _text_
+ start_pos = _start_pos_
+ line_num = _start_line_
+ prev_level = _start_level_
+ current_level = prev_level
+ lpegmatch(pattern,text)
+-- return folds
+local t = folds
+folds = nil
+return t -- so folds can be collected
end
- prev_level = current_level
- line_num = line_num + 1
- end
- local function action_n()
- folds[line_num] = b_table[prev_level] -- { prev_level, FOLD_BLANK }
- line_num = line_num + 1
- end
- if lexer._reset_parser then
- lexer._reset_parser()
+ folders[lexer] = folder
end
- local lpegpattern = (p_yes/action_y + p_nop/action_n)^0 -- not too efficient but indirect function calls are neither but
- lpegmatch(lpegpattern,text) -- keys are not pressed that fast ... large files are slow anyway
- return folds
+ return folder(text,start_pos,start_line,start_level,lexer)
end
-local function fold_by_indentation(text,start_pos,start_line,start_level)
- local folds = { }
- local current_line = start_line
- local prev_level = start_level
- for _, line in gmatch(text,'([\t ]*)(.-)\r?\n') do
- if line ~= "" then
- local current_level = FOLD_BASE + get_indent_amount(current_line)
- if current_level > prev_level then -- next level
- local i = current_line - 1
- while true do
- local f = folds[i]
- if f and f[2] == FOLD_BLANK then
- i = i - 1
- else
- break
- end
- end
- local f = folds[i]
- if f then
- f[2] = FOLD_HEADER
- end -- low indent
- folds[current_line] = n_table[current_level] -- { current_level } -- high indent
- elseif current_level < prev_level then -- prev level
- local f = folds[current_line - 1]
- if f then
- f[1] = prev_level -- high indent
- end
- folds[current_line] = n_table[current_level] -- { current_level } -- low indent
- else -- same level
- folds[current_line] = n_table[prev_level] -- { prev_level }
+-- local function fold_by_indentation(text,start_pos,start_line,start_level)
+-- local folds = { }
+-- local current_line = start_line
+-- local prev_level = start_level
+-- for line in gmatch(text,'[\t ]*(.-)\r?\n') do
+-- if line ~= "" then
+-- local current_level = FOLD_BASE + get_indent_amount(current_line)
+-- if current_level > prev_level then -- next level
+-- local i = current_line - 1
+-- while true do
+-- local f = folds[i]
+-- if f and f[2] == FOLD_BLANK then
+-- i = i - 1
+-- else
+-- break
+-- end
+-- end
+-- local f = folds[i]
+-- if f then
+-- f[2] = FOLD_HEADER
+-- end -- low indent
+-- folds[current_line] = n_table[current_level] -- { current_level } -- high indent
+-- elseif current_level < prev_level then -- prev level
+-- local f = folds[current_line - 1]
+-- if f then
+-- f[1] = prev_level -- high indent
+-- end
+-- folds[current_line] = n_table[current_level] -- { current_level } -- low indent
+-- else -- same level
+-- folds[current_line] = n_table[prev_level] -- { prev_level }
+-- end
+-- prev_level = current_level
+-- else
+-- folds[current_line] = b_table[prev_level] -- { prev_level, FOLD_BLANK }
+-- end
+-- current_line = current_line + 1
+-- end
+-- return folds
+-- end
+
+-- local function fold_by_indentation(text,start_pos,start_line,start_level)
+-- local folds = { }
+-- local current_line = start_line
+-- local prev_level = start_level
+-- for line in gmatch(text,'[\t ]*(.-)\r?\n') do
+-- if line ~= '' then
+-- local current_level = FOLD_BASE + get_indent_amount(current_line)
+-- if current_level > prev_level then -- next level
+-- local i = current_line - 1
+-- local f
+-- while true do
+-- f = folds[i]
+-- if not f then
+-- break
+-- elseif f[2] == FOLD_BLANK then
+-- i = i - 1
+-- else
+-- f[2] = FOLD_HEADER -- low indent
+-- break
+-- end
+-- end
+-- folds[current_line] = { current_level } -- high indent
+-- elseif current_level < prev_level then -- prev level
+-- local f = folds[current_line - 1]
+-- if f then
+-- f[1] = prev_level -- high indent
+-- end
+-- folds[current_line] = { current_level } -- low indent
+-- else -- same level
+-- folds[current_line] = { prev_level }
+-- end
+-- prev_level = current_level
+-- else
+-- folds[current_line] = { prev_level, FOLD_BLANK }
+-- end
+-- current_line = current_line + 1
+-- end
+-- for line, level in next, folds do
+-- folds[line] = level[1] + (level[2] or 0)
+-- end
+-- return folds
+-- end
+
+local folds, current_line, prev_level
+
+local function action_y()
+ local current_level = FOLD_BASE + get_indent_amount(current_line)
+ if current_level > prev_level then -- next level
+ local i = current_line - 1
+ local f
+ while true do
+ f = folds[i]
+ if not f then
+ break
+ elseif f[2] == FOLD_BLANK then
+ i = i - 1
+ else
+ f[2] = FOLD_HEADER -- low indent
+ break
end
- prev_level = current_level
- else
- folds[current_line] = b_table[prev_level] -- { prev_level, FOLD_BLANK }
end
- current_line = current_line + 1
+ folds[current_line] = { current_level } -- high indent
+ elseif current_level < prev_level then -- prev level
+ local f = folds[current_line - 1]
+ if f then
+ f[1] = prev_level -- high indent
+ end
+ folds[current_line] = { current_level } -- low indent
+ else -- same level
+ folds[current_line] = { prev_level }
end
- return folds
+ prev_level = current_level
+ current_line = current_line + 1
+end
+
+local function action_n()
+ folds[current_line] = { prev_level, FOLD_BLANK }
+ current_line = current_line + 1
+end
+
+local pattern = ( S("\t ")^0 * ( (1-S("\n\r"))^1 / action_y + P(true) / action_n) * newline )^0
+
+local function fold_by_indentation(text,start_pos,start_line,start_level)
+ -- initialize
+ folds = { }
+ current_line = start_line
+ prev_level = start_level
+ -- define
+ -- -- not here .. pattern binds and local functions are not frozen
+ -- analyze
+ lpegmatch(pattern,text)
+ -- flatten
+ for line, level in next, folds do
+ folds[line] = level[1] + (level[2] or 0)
+ end
+ -- done
+-- return folds
+local t = folds
+folds = nil
+return t -- so folds can be collected
end
local function fold_by_line(text,start_pos,start_line,start_level)
local folds = { }
+ -- can also be lpeg'd
for _ in gmatch(text,".-\r?\n") do
folds[start_line] = n_table[start_level] -- { start_level }
start_line = start_line + 1
@@ -507,7 +741,7 @@ function context.fold(text,start_pos,start_line,start_level) -- hm, we had size
if filesize <= threshold_by_lexer then
return fold_by_lexer(text,start_pos,start_line,start_level,lexer)
end
- elseif fold_by_symbols and get_property('fold.by.parsing',1) > 0 then
+ elseif fold_by_symbols then -- and get_property('fold.by.parsing',1) > 0 then
if filesize <= threshold_by_parsing then
return fold_by_parsing(text,start_pos,start_line,start_level,lexer)
end
@@ -595,6 +829,10 @@ local function build_grammar(lexer, initial_rule)
end
-- so far. We need these local functions in the next one.
+--
+-- Before 3.24 we had tokens[..] = { category, position }, now it's a two values.
+
+local lineparsers = { }
function context.lex(text,init_style)
local lexer = global._LEXER
@@ -605,50 +843,75 @@ function context.lex(text,init_style)
local tokens = { }
local offset = 0
local noftokens = 0
- if true then
- for line in gmatch(text,'[^\r\n]*\r?\n?') do -- could be an lpeg
- local line_tokens = lpegmatch(grammar,line)
+ -- -- pre 3.24
+ --
+ -- for line in gmatch(text,'[^\r\n]*\r?\n?') do -- could be an lpeg
+ -- local line_tokens = lpegmatch(grammar,line)
+ -- if line_tokens then
+ -- for i=1,#line_tokens do
+ -- local token = line_tokens[i]
+ -- token[2] = token[2] + offset
+ -- noftokens = noftokens + 1
+ -- tokens[noftokens] = token
+ -- end
+ -- end
+ -- offset = offset + #line
+ -- if noftokens > 0 and tokens[noftokens][2] ~= offset then
+ -- noftokens = noftokens + 1
+ -- tokens[noftokens] = { 'default', offset + 1 }
+ -- end
+ -- end
+
+ -- for line in gmatch(text,'[^\r\n]*\r?\n?') do
+ -- local line_tokens = lpegmatch(grammar,line)
+ -- if line_tokens then
+ -- for i=1,#line_tokens,2 do
+ -- noftokens = noftokens + 1
+ -- tokens[noftokens] = line_tokens[i]
+ -- noftokens = noftokens + 1
+ -- tokens[noftokens] = line_tokens[i + 1] + offset
+ -- end
+ -- end
+ -- offset = offset + #line
+ -- if noftokens > 0 and tokens[noftokens] ~= offset then
+ -- noftokens = noftokens + 1
+ -- tokens[noftokens] = 'default'
+ -- noftokens = noftokens + 1
+ -- tokens[noftokens] = offset + 1
+ -- end
+ -- end
+
+ local lineparser = lineparsers[lexer]
+ if not lineparser then -- probably a cmt is more efficient
+ lineparser = C((1-newline)^0 * newline) / function(line)
+ local length = #line
+ local line_tokens = length > 0 and lpegmatch(grammar,line)
if line_tokens then
- for i=1,#line_tokens do
- local token = line_tokens[i]
- token[2] = token[2] + offset
+ for i=1,#line_tokens,2 do
noftokens = noftokens + 1
- tokens[noftokens] = token
- end
- end
- offset = offset + #line
- if noftokens > 0 and tokens[noftokens][2] ~= offset then
- noftokens = noftokens + 1
- tokens[noftokens] = { 'default', offset + 1 }
- end
- end
- else -- alternative
- local lasttoken, lastoffset
- for line in gmatch(text,'[^\r\n]*\r?\n?') do -- could be an lpeg
- local line_tokens = lpegmatch(grammar,line)
- if line_tokens then
- for i=1,#line_tokens do
- lasttoken = line_tokens[i]
- lastoffset = lasttoken[2] + offset
- lasttoken[2] = lastoffset
+ tokens[noftokens] = line_tokens[i]
noftokens = noftokens + 1
- tokens[noftokens] = lasttoken
+ tokens[noftokens] = line_tokens[i + 1] + offset
end
end
- offset = offset + #line
- if lastoffset ~= offset then
- lastoffset = offset + 1
- lasttoken = { 'default', lastoffset }
+ offset = offset + length
+ if noftokens > 0 and tokens[noftokens] ~= offset then
+ noftokens = noftokens + 1
+ tokens[noftokens] = 'default'
noftokens = noftokens + 1
- tokens[noftokens] = lasttoken
+ tokens[noftokens] = offset + 1
end
end
+ lineparser = lineparser^0
+ lineparsers[lexer] = lineparser
end
+ lpegmatch(lineparser,text)
return tokens
+
elseif lexer._CHILDREN then
-- as we cannot print, tracing is not possible ... this might change as we can as well
-- generate them all in one go (sharing as much as possible)
- local _hash = lexer._HASH
+ local hash = lexer._HASH -- hm, was _hash
if not hash then
hash = { }
lexer._HASH = hash
@@ -684,8 +947,14 @@ end
-- todo: keywords: one lookup and multiple matches
+-- function context.token(name, patt)
+-- return Ct(patt * Cc(name) * Cp())
+-- end
+--
+-- -- hm, changed in 3.24 .. no longer a table
+
function context.token(name, patt)
- return Ct(patt * Cc(name) * Cp())
+ return patt * Cc(name) * Cp()
end
lexer.fold = context.fold
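
A minimal sketch (not part of the patch) of the token shape the rewritten context.token and context.lex rely on: with 3.24 a token is two flat captures, a style name and an end position, rather than a { name, position } table. The real context.lex runs its grammar line by line and adds the line offset plus a trailing 'default' entry; this example only shows the flat pairs.

local lpeg = require("lpeg")
local P, R, Cc, Cp, Ct = lpeg.P, lpeg.R, lpeg.Cc, lpeg.Cp, lpeg.Ct

local function token(name, patt) -- same shape as the new context.token
    return patt * Cc(name) * Cp()
end

local whitespace = token("whitespace", P(" ")^1)
local word       = token("word", R("az","AZ")^1)
local grammar    = Ct((word + whitespace)^0)

local tokens = lpeg.match(grammar, "foo bar")
-- tokens == { "word", 4, "whitespace", 5, "word", 8 }
for i = 1, #tokens, 2 do
    print(tokens[i], tokens[i + 1])
end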
diff --git a/context/data/scite/lexers/themes/scite-context-theme.lua b/context/data/scite/lexers/themes/scite-context-theme.lua
index 7b305d3e5..e32fe9dee 100644
--- a/context/data/scite/lexers/themes/scite-context-theme.lua
+++ b/context/data/scite/lexers/themes/scite-context-theme.lua
@@ -107,7 +107,6 @@ local style_tag = style { fore = colors.cyan }
----- style_standout = style { fore = colors.orange, bold = true }
local style_command = style { fore = colors.green, bold = true }
local style_internal = style { fore = colors.orange, bold = true }
-local style_internal = style { fore = colors.orange, bold = true }
local style_preamble = style { fore = colors.yellow }
local style_grouping = style { fore = colors.red }
diff --git a/context/data/scite/scite-context-data-interfaces.properties b/context/data/scite/scite-context-data-interfaces.properties
index e3ca49319..e698ce6c4 100644
--- a/context/data/scite/scite-context-data-interfaces.properties
+++ b/context/data/scite/scite-context-data-interfaces.properties
@@ -1,161 +1,315 @@
-keywordclass.context.de=\
-Buchstabe Buchstaben CAP Cap \
-Caps KAP Kap Kaps MONAT \
-Roemischezahlen WOCHENTAG WOERTER WORT Woerter \
-Wort Ziffern abstandlinkerrand abstandoben abstandrechterrand \
-abstandunten amgitterausrichten amgitterneuausrichten appendix arg \
-atleftmargin atrightmargin aufseite ausfuellfeld ausfuelltext \
-ausschnitt bearbeitebloecke bearbeiteseite bedeutung behaltebloecke \
-bei bemerkung benutzekodierung benutzespezielles benutzeverzeichnis \
-beschrifteversion beschriftung bestimmekopfnummer bestimmelistencharakeristika bestimmeregistercharakteristika \
-bildschirm blanko bookmark bottomspace breitelinkerrand \
-breiterechterrand bruch buchstabe buchstaben but \
-bypassblocks cap chapter chem comment \
-completecombinedlist completelistoffloats completelistofsorts completelistofsynonyms completepagenumber \
-completeregister coupledregister crlf cutspace datum \
-decrementnumber definebodyfontDEF definebodyfontREF definecolumnbreak definecolumnset \
-definecombination definedfont definefontfeature definefonthandling defineindentedtext \
-defineinmargin defineitemgroup definelayer definelayout definemathalignment \
-defineoutput definepagebreak defineplacement definerawfont definerule \
-defineschriftsynonym definetextposition definetextvariable definetype definetypeface \
-definiereabbsymbol definiereabsaetze definiereabschnitt definiereabschnittsblock definiereakzent \
-definierebefehl definierebeschreibung definierebeschreibungen definierebeschriftung definiereblanko \
-definiereblock definierefarbe definierefarbengruppe definierefeld definierefeldstapel \
-definierefliesstext definierefliesstextumgebung definieregleitobjekt definierehauptfeld definierehbox \
-definiereinteraktionsmenue definierekonversion definierelabel definiereliste definierelogo \
-definieren definierenummerierung definiereoverlay definierepalette definierepapierformat \
-definiereprofil definiereprogramme definierepuffer definierereferenz definierereferenzformat \
-definierereferenzliste definiereregister definiereschrift definiereschriftstil definieresortieren \
-definierestartstop definierestil definieresubfeld definieresymbol definieresynonyme \
-definieretabellenvorlage definieretabulator definieretext definieretippen definiereueberschrift \
-definiereumbruch definiereumrahmt definiereumrahmtertext definiereversion definierezeichen \
-definierezusammengestellteliste description dimension doppelseite doppelseitigespapier \
-drehen duennelinie duennerumriss einezeile einstellungen \
-einziehen emptylines entknuepfebeschriftung enumeration externeabbildung \
-farbbalken farbe farbewert feld feldstapel \
-festesspatium folgeprofil folgeprofilversion folgeversion footnotetext \
-forceblocks format formelnummer framedtext fussnote \
-fusszeileabstand fusszeilenhoehe gefuelltesrechteck gefuelltezeile geg \
-gesamtseitenanzahl gestreckt getnumber gitter graufarbe \
-grauwert haarlinie hauptsprache headsym heutigesdatum \
-heutigeskopfnummer hintergrund hl hoch hoeheoben \
-hoeheunten holebeschriftung holepuffer imlinken imlinkenrand \
-immaumrise immediatebetweenlist immediatetolist imrechten imrechtenrand \
-imumriss in inaktiviereinteraktionsmenue inanderermarginale indentation \
-ininner inlinkermarginale inmarginalie inneredgedistance inneredgewidth \
-innermargindistance innermarginwidth inouter inrechtermarginale installieresprache \
-interaktionsbalken interaktionsknopfe interaktionsmenue inzeile irgendwo \
-its kap keindimension keinebeschriftung keinebloeckemehr \
-keinedateienmehr keinekopfundfusszeilen keineliste keinspatium keintest \
-keinzeilenobenundunten keinzwischenraum kleinerdurchschuss klonierefeld knopf \
-komponente konvertierezahl kopf kopfniveau kopfnummer \
-kopfweite kopfzeilenabstand kopfzeilenhoehe kopierefeld korrigierezwischenraum \
-label labeling labels labeltext leg \
-liniendicke linkemarginalafstand linkemarginalbreite linksbuendig listenbreite \
-listenhoehe listenlaenge listsymbol loadsorts loadsynonyms \
-mapfontsize mar marginalafstand marginalbreite marginallinie \
-marginaltext marginaltitel marginalwort mathematik maumrise \
-mediaeval menueknopf monat moveformula movesidefloat \
-nachunten name navigating nextsection nichteinziehen \
-nocap nokap nop notiz numberofsubpages \
-nummererhoehen outeredgedistance outeredgewidth outermargindistance outermarginwidth \
-overbar overbars overstrike overstrikes pagedepth \
-pageoffset papierbreite papierhoehe paragraph part \
-passelayoutan passendfeld placefloat placeheadnumber placeheadtext \
-placelistoffloats placelistofsorts placelistofsynonyms placepagenumber placerawlist \
-placereferencelist placerule placetextvariable platzierebookmarks platziereformel \
-platzierefussnoten platzierelegende platziereliste platzierelogo platzierelokalefussnoten \
-platzierenebeneinander platziereregister platziereuntereinander platziereunterformel platzierezusammengestellteliste \
-pos position positiontext posten printpapierbreite \
-printpapierhoehe produkt programm projekt publikation \
-punkt ran randabstand randbreite rechteck \
-rechtecke rechtemarginalafstand rechtemarginalbreite rechtsbuendig ref \
-referenz referieren register registrierefelder reservefloat \
-resetnumber resettextcontent roemischezahlen ruecksetzten ruecksetztenbeschriftung \
-rumpfweite satzbreite satzhoehe schreibezumregister schreibezurliste \
-schreibezurreferenzliste schreibezwischenliste section seeregister seite \
-seitenreferenz seitenummer setnumber settext settextvariable \
-setupanswerarea setupcolumnset setupcolumnsetlines setupcolumnsetstart setupfonthandling \
-setupfontsynonym setupforms setupindentedtext setupinterlinespace2 setupitemgroup \
-setuplistalternative setupmathalignment setupnumber setuppaper setupplacement \
-setuprule setupstartstop setupstrut setuptextposition setuptextvariable \
-showsymbolset sort spalte spatium spiegeln \
-sprache startabbildung startalignment startausrichtung startbuffer \
-startcolumnmakeup startcolumns startcolumnset startcombination startcomment \
-startdescription startdocument startdokument startenger startenumeration \
-startfarbe startfigure startfloattext startformula startframedtext \
-startgeg startgegenueber startglobal startgrosserdurchschuss starthiding \
-starthintergrund startinteraktionsmenue startitemgroup startkleinerdurchschuss startkodierung \
-startkombination startkomponente startlegend startline startlinecorrection \
-startlinenumbering startlines startlocal startlocalenvironment startlocalfootnotes \
-startlokal startlokalefussnoten startmakeup startmarginalblock startmarginallinie \
-startmarginblock startnamemakeup startnarrower startopposite startoverlay \
-startoverview startparagraph startpositionieren startpositioning startpostponing \
-startprodukt startprofil startprofile startprojekt startraster \
-startregister startspalten startsymbolset startsynchronisation startsynchronization \
-starttabelle starttabellen starttable starttables starttabulate \
-starttext starttextlinie starttyping startueberblick startumbruch \
-startumgebung startunpacked startversion startzeile startzeilen \
-startzeilenkorrektur startzeilennumerierung startzitat stelleabsaetzeein stelleabsatznummerierungein \
-stelleabschnittein stelleabschnittsblockein stelleanordnenein stelleaufzaehlungenein stelleausgabeein \
-stelleausrichtungein stelleausschnittein stellebeschreibungein stellebeschriftungein stellebilderunterschriftein \
-stellebildunterschriftein stellebindestrichein stelleblankoein stelleblockein stelledrehenein \
-stelleduennerumrissein stelleeinziehenein stelleengerein stelleexterneabbildungenein stellefarbeein \
-stellefarbenein stellefeldein stellefelderin stellefliesstextein stellefliesstextumgebungein \
-stelleformelnein stellefussnotendefinitionein stellefussnotenein stellefusszeileein stellefusszeilentextein \
-stellegefuelltesrechteckein stellegefuelltezeileein stellegegenueberplatzierenein stellegleitobjekteein stellegleitobjektein \
-stellehintergruendeein stellehintergrundein stelleinmarginalieein stelleinteraktionein stelleinteraktionsbalkenein \
-stelleinteraktionsbildschirmein stelleinteraktionsmenueein stelleknopfein stellekombinationein stellekommentarein \
-stellekopfzahlein stellekopfzeileein stellekopfzeilentextein stellelabeltextein stellelayoutein \
-stellelegendeein stellelinienbreiteein stellelisteein stellemarginalblockein stellemarginallinieein \
-stellenobenein stellenummerierungein stellepaletteein stellepapierformatein stelleplatziegeteiltegleitobjekt \
-stellepositionierenein stellepostenein stelleprofilein stelleprogrammein stellepublikationein \
-stellepufferein stellerasterein stellerechteckein stellereferenzierenein stellereferenzlisteein \
-stelleregisterein stelleseitenkommentarein stelleseitennummerein stelleseitennummeriernungein stelleseitenuebergangein \
-stellesortierenein stellespaltenein stellespatiumein stellespracheein stellesymbolsetein \
-stellesynchronisationein stellesynchronisationsbalkenein stellesynonymein stellesystemein stelletabein \
-stelletabellenein stelletabulatorein stelletextein stelletextobenein stelletexttexteein \
-stelletextumrissein stelletextuntenein stelletipein stelletippenein stelletoleranzein \
-stelleueberschriftein stelleueberschriftenein stelleueberschrifttextein stelleumbruchein stelleumrahmtein \
-stelleumrahmtetexteein stelleuntenein stelleunterseitennummerein stelleunterstreichenein stelleurlein \
-stelleversalienein stelleversionein stellezeilenabstandein stellezeilenein stellezeilennumerierungein \
-stellezitierenein stellezusammengestelltelisteein stellezwischenraumein stopalignment stopausrichtung \
-stopbuffer stopcolumnmakeup stopcolumns stopcolumnset stopcombination \
-stopcomment stopdescription stopdocument stopdokument stopenger \
-stopenumeration stopfarbe stopfigure stopfloattext stopformula \
-stopframedtext stopgeg stopgegenueber stopglobal stopgrosserdurchschuss \
-stophiding stophintergrund stopinteraktionsmenue stopitemgroup stopkleinerdurchschuss \
-stopkodierung stopkombination stopkomponente stoplegend stopline \
-stoplinecorrection stoplinenumbering stoplines stoplocal stoplocalenvironment \
-stoplocalfootnotes stoplokal stoplokalefussnoten stopmakeup stopmarginalblock \
-stopmarginallinie stopmarginblock stopnamemakeup stopnarrower stopopposite \
-stopoverlay stopoverview stopparagraph stoppositionieren stoppositioning \
-stoppostponing stopprodukt stopprofil stopprofile stopprojekt \
-stopraster stopspalten stopsymbolset stopsynchronisation stopsynchronization \
-stoptabelle stoptabellen stoptable stoptables stoptabulate \
-stoptext stoptextlinie stoptyping stopueberblick stopumbruch \
-stopumgebung stopunpacked stopversion stopzeile stopzeilen \
-stopzeilenkorrektur stopzeilennumerierung stopzitat sub subject \
-subpagenumber subsection subsubject subsubsection subsubsubject \
-switchtorawfont sym symbol synchronisationsbalken synchronisieren \
-synonym tab teilegleitobjekt testcolumn testpage \
-tex textbreite texthoehe textlinie textreferenz \
-textvariable tief tiho tip tippedatei \
-tippen tippepuffer title tooltip txt \
-ueber ueberschrifttext uebersetzten umgebung umrahmt \
-unbekant underbar underbars unterformelnummer useXMLfilter \
-usedirectory usetypescript usetypescriptfile verbergebloecke vergleichefarbengruppe \
-vergleichepalette verknuepfebeschriftung verknuepfedokument verknuepfregister version \
-verweis verweisdatum verwendeJSscript verwendeURL verwendebefehl \
-verwendebloecke verwendeexteresdokument verwendeexterneabbildung verwendeexternedatei verwendeexternedateien \
-verwendeexternestonstueck verwendemodul verwendemodule verwendereferenzen verwendesymbole \
-verwendeurl vl volleswort von waehlebloeckeaus \
-waehlepapieraus waehleversionaus wechselezumfliesstext wiederholen wochentag \
-wohnort wortrechts zeigedruck zeigeeinstellungen zeigeexterneabbildungen \
-zeigefarbe zeigefarbengruppe zeigefelder zeigefliesstext zeigefliesstextumgebung \
-zeigegitter zeigelayout zeigepalette zeigerahmen zeigestruts \
-zeigeumbruch zentriert ziffern zitat zitieren \
-zu zurbox zurseite zwischenraum
+keywordclass.context.it=\
+CAP Cap Caps GIORNOSETTIMANA \
+Lettera Lettere MESE Numeri Numeriromani \
+PAROLA PAROLE Parola Parole accoppiacarta \
+accoppiadocumento accoppiamarcatura accoppiapagina accoppiaregistro adattacampo \
+adattalayout al allineacentro allineadestra allineasinistra \
+altezzacarta altezzacartastampa altezzacima altezzaelenco altezzafondo \
+altezzaintestazione altezzamakeup altezzapdp altezzatesto ambiente \
+ampiezzabordo ampiezzabordodestro ampiezzabordosinistro ampiezzacarta ampiezzacartastampa \
+ampiezzaelenco ampiezzamakeup ampiezzamargine ampiezzamarginedestro ampiezzamarginesinistro \
+ampiezzatesto ap apagina appendix arg \
+atleftmargin atrightmargin barracolori barrainterazione barrasincronizzazione \
+bastablocchi bastafile cambiaafontdeltesto campi camporiempimento \
+cap capello chapter chim circondato \
+citazione clip clonacampo colonna colore \
+coloregrigio comment commento completecombinedlist completelistoffloats \
+completelistofsorts completelistofsynonyms completeregister componenet confrontagruppocolori \
+confrontatavolozza convertinumero copiacampo correggispaziobianco coupledregister \
+crlf cutspace da daqualcheparte data \
+datadioggi datareferral decrementnumber definebodyfontDEF definebodyfontREF \
+definecolumnbreak definecombination definedfont definefontfeature definefonthandling \
+defineindentedtext defineinmargin defineitemgroup definelayer definemathalignment \
+definepagebreak defineplacement definetypeface definisci definisciaccento \
+definisciambientefontdeltesto definisciblocco definiscibloccosezione definiscibuffer definiscicampo \
+definiscicampoprincipale definiscicapoversi definiscicarattere definiscicolore definiscicomando \
+definisciconversione definiscidescrizione definiscidimensionicarta definiscielenco definiscielencocombinato \
+definiscienumerazione definiscietichetta definiscifigurasimbolo definiscifont definiscifontdeltesto \
+definiscifontgrezzo definisciformatoriferimento definiscigruppocolonne definiscigruppocolori definiscihbox \
+definisciincorniciato definisciiniziatermina definiscilayout definiscilinea definiscilistariferimenti \
+definiscilogo definiscimakeup definiscimarcatura definiscimenuinterazione definiscimodellotabella \
+definiscioggettomobile definisciordinamento definiscioutput definisciposizionetesto definisciprofilo \
+definisciprogramma definisciregistro definisciriferimento definiscirigovuoto definiscisezione \
+definiscisimbolo definiscisinonimi definiscisinonimofont definiscisottocampo definiscisovrapposizione \
+definiscistackcampi definiscistile definiscistilefont definiscitabulato definiscitavolozza \
+definiscitesta definiscitesto definiscitestoincorniciato definiscitype definiscityping \
+definiscivariabiletesto definisciversion description determinacaratteristicheregistro determinacarattersticheelenco \
+determinanumerotesta dimensione disabilitamenuinterazione distanzabordo distanzabordodestro \
+distanzabordosinistro distanzacima distanzafondo distanzaintestazione distanzamargine \
+distanzamarginedestro distanzamarginesinistro distanzapdp domicilio el \
+elaborablocchi elaborapagina elementi elemento emptylines \
+enumeration etichetta etichette fatto figuraesterna \
+fondo forzablocchi framedtext frazione getnumber \
+giornosettimana griglia headsym hl ignoto \
+immediatebetweenlist immediatetolist impaccato impostaallineamento impostaambientefontdeltesto \
+impostaampiezzariga impostabarrainterazione impostabarrasincronizzazione impostablocchimargine impostablocco \
+impostabloccosezione impostabuffer impostacampi impostacampo impostacapoversi \
+impostacaption impostacaptions impostacima impostaclippling impostacolonne \
+impostacolore impostacolori impostacombinazioni impostacommento impostacommentopagina \
+impostadefinizionenotepdp impostadescrizioni impostadimensionicarta impostaelementi impostaelencazioni \
+impostaelenco impostaelencocombinato impostaenumerazioni impostafigureesterne impostafondo \
+impostafontdeltesto impostaforms impostaformule impostagruppocolonne impostaincorniciato \
+impostainiziatermina impostainmargine impostainstestazione impostainterazione impostainterlinea \
+impostalayout impostalegenda impostalinea impostalineemargine impostalineenere \
+impostalineeriempimento impostalineesottili impostalineetesto impostalingua impostalistariferimenti \
+impostamaiuscole impostamakeup impostamarcatura impostamenuinterazione impostamenzione \
+impostanotepdp impostanumerazione impostanumerazionecapoversi impostanumerazionepagina impostanumerazionerighe \
+impostanumeropagina impostanumerosottopagina impostanumerotesta impostaoggettimobili impostaoggettomobile \
+impostaordinamento impostaoutput impostaparranging impostapdp impostapiustretto \
+impostaposizionamento impostaposizionamentoopposti impostaposizionetesto impostaprofili impostaprogrammi \
+impostapubblicazioni impostapulsanti impostaregistro impostarientro impostariferimento \
+impostarighe impostarigheriempimento impostarigovuoto impostarotazione impostaschermi \
+impostaschermointerazione impostasegnosillabazione impostasetsimboli impostasezione impostasfondi \
+impostasfondo impostasincronizzazione impostasinonimi impostasistema impostasottolinea \
+impostaspaziatura impostaspaziobianco impostaspezzamentooggettomobile impostastrut impostatab \
+impostatabelle impostatabulato impostatavolozza impostatesta impostateste \
+impostatesticima impostatestifondo impostatestiincorniciati impostatestiintestazioni impostatestipdp \
+impostatesto impostatestoetichette impostatestointestazioni impostatestotesti impostatolleranza \
+impostatransizionepagina impostatype impostatyping impostaurl impostavariabiletesto \
+impostaversioni impostazioni in inaltromargine incorniciato \
+incrementanumero indentation indestra ininner iniziaallineamento \
+iniziaambiente iniziabloccomargine iniziacitazione iniziacodifica iniziacolonne \
+iniziacolore iniziacombinazione iniziacomponente iniziacorrezioneriga iniziadocumento \
+iniziafigura iniziaglobale iniziagruppocolonne iniziaimpaccato inizialineamargine \
+inizialineatesto inizialocale iniziamakeup inizianotepdplocali inizianumerazionerighe \
+iniziaopposto iniziaoverview iniziapiustretto iniziaposizionamento iniziaprodotto \
+iniziaprofilo iniziaprogetto iniziaraster iniziariga iniziarighe \
+iniziasetsimboli iniziasfondo iniziasincronizzazione iniziasovrapposizione iniziatabella \
+iniziatabelle iniziatesto iniziaunpacked iniziaversione inlatodestro \
+inlatosinistro inmaframed inmargine inmarginedestro inmarginesinistro \
+inneredgedistance inneredgewidth innermargindistance innermarginwidth inouter \
+inriga insinistra installalingua intorno labeling \
+leg lettera lettere lineamargine lineanera \
+lineasottile lineatesto lineenere lineeriempimento lineesottili \
+lingua linguaprincipale listsymbol livellotesta loadsorts \
+loadsynonyms logcampi lunghezzaelenco maframed mapfontsize \
+mar marcatura marcaversione matematica mediaeval \
+menuinterattivo menzione mese mettielenco mettielencocombinato \
+mettifiancoafianco mettiformula mettiingriglia mettilegenda mettilinea \
+mettiloghi mettinotepdp mettinotepdplocali mettinumeropagina mettiregistro \
+mettisegnalibro mettisottoformula mettiunosullaltro mettivariabiletesto mostraambientefontdeltesto \
+mostracampi mostracolore mostracornice mostrafiguresterne mostrafontdeltesto \
+mostragriglia mostragruppocolori mostraimpostazioni mostralyout mostramakeup \
+mostrasetsimboli mostrastampa mostrastruts mostratavolozza movesidefloat \
+name nascondiblocchi navigating nextsection nientedimensioni \
+nienteelenco nientelineecimafondo nientelineintestazionepdp nientemarcatura nienterientro \
+nientespazio nientespaziobianco nocap nome nomeunita \
+nop nota notapdp notest numberofsubpages \
+numeri numeriromani numeroformula numeropagina numeropaginacompleto \
+numerosottoformula numerotesta numerotestacorrente numerototaledipagine outeredgedistance \
+outeredgewidth outermargindistance outermarginwidth overbar overbars \
+overstrike overstrikes pagedepth pageoffset pagina \
+paragraph paroladestra parolainmargine part passaafontgrezzo \
+ped pedap perlungo placefloat placelistoffloats \
+placelistofsorts placelistofsynonyms placerawlist placereferencelist posizionanumerotesta \
+posizionatesto posizionatestotesta posizione prendibuffer prendimarcatura \
+prodotto progetto programma pubblicazione pulsante \
+pulsantemenu pulsantinterazione punti qualcheriga ran \
+referral referring register reimposta reimpostamarcatura \
+reservefloat resetnumber resettextcontent rientro rif \
+rifai riferimento riferimentopagina riferimentotesto riflessione \
+rigariempimento rigovuoto ruota saltablocchi scala \
+schermo scrividentroelenco scriviinelenco scriviinlistariferimenti scriviinregistro \
+section seeregister segnalibro seguiprofilo seguiversione \
+seguiversioneprofilo selezionablocchi selezionacarta selezionaversione separamarcatura \
+setnumber settext setupanswerarea setupcolumnsetlines setupcolumnsetstart \
+setupfonthandling setupfontsynonym setupindentedtext setupinterlinespace2 setupitemgroup \
+setuplistalternative setupmathalignment setuppaper setupplacement setvariabiletesto \
+sfondo sim simbolo sincronizza sort \
+spazifissi spazio spaziobianco spaziocima spaziodietro \
+spaziofisso spaziofondo spessoreriga spezzaoggettomobile spostaagriglia \
+spostaformula stackcampi startalignment startambiente startbuffer \
+startcitazione startcolore startcolumnmakeup startcolumns startcombination \
+startcomment startcomponenet startdescription startdocument startenumeration \
+startfatto startfigure startfloattext startformula startframedtext \
+starthiding startimpaccato startitemgroup startlegend startline \
+startlineamargine startlineatesto startlinecorrection startlinenumbering startlines \
+startlocal startlocalenvironment startlocalfootnotes startmakeup startmarginblock \
+startmenuinterattivo startnamemakeup startnarrower startopposite startoverlay \
+startoverview startparagraph startpositioning startpostponing startprodotto \
+startprofile startprogetto startregister startsfondo startsymbolset \
+startsynchronization starttable starttables starttabulate starttyping \
+startunpacked startversione stirato stopalignment stopambiente \
+stopbuffer stopcitazione stopcolore stopcolumnmakeup stopcolumns \
+stopcombination stopcomment stopcomponenet stopdescription stopdocument \
+stopenumeration stopfatto stopfigure stopfloattext stopformula \
+stopframedtext stophiding stopimpaccato stopitemgroup stoplegend \
+stopline stoplineamargine stoplineatesto stoplinecorrection stoplinenumbering \
+stoplines stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup \
+stopmarginblock stopmenuinterattivo stopnamemakeup stopnarrower stopopposite \
+stopoverlay stopoverview stopparagraph stoppositioning stoppostponing \
+stopprodotto stopprofile stopprogetto stopsfondo stopsymbolset \
+stopsynchronization stoptable stoptables stoptabulate stoptyping \
+stopunpacked stopversione sub subject subpagenumber \
+subsection subsubject subsubsection subsubsubject synonym \
+tab terminaallineamento terminaambiente terminabloccomargine terminacitazione \
+terminacodifica terminacolonne terminacolore terminacombinazione terminacomponente \
+terminacorrezioneriga terminadocumento terminaglobale terminagruppocolonne terminaimpaccato \
+terminalineamargine terminalineatesto terminalocale terminamakeup terminanotepdplocali \
+terminanumerazionerighe terminaopposto terminaoverview terminapiustretto terminaposizionamento \
+terminaprodotto terminaprofili terminaprogetto terminaraster terminariga \
+terminarighe terminasfondo terminasincronizzazione terminasovrapposizione terminatabella \
+terminatabelle terminatesto terminaunpacked terminaversioni testa \
+testcolumn testoetichetta testoinmargine testoinstestazioni testonotapdp \
+testoriempimento testpage tex tieniblocchi title \
+titoloinmargine tooltip traduci txt typ \
+type typebuffer typefile underbar underbars \
+usaJSscripts usaURL usablocco usacartella usacodifica \
+usacolonnasonoraesterna usacomandi usadocumentoesterno usafiguraesterna usafileesterni \
+usafileesterno usamoduli usamodulo usariferimenti usasimboli \
+usaspecialita usaurl useXMLfilter usedirectory usetypescript \
+usetypescriptfile vaia vaiabox vaiapagina vaigiu \
+valorecolore valoregrigio variabiletesto versione vl
+
+keywordclass.context.en=\
+CAP Cap Caps Character \
+Characters MONTH Numbers Romannumerals WEEKDAY \
+WORD WORDS Word Words about \
+adaptlayout adding appendix arg at \
+atleftmargin atpage atrightmargin background backspace \
+blackrule blackrules blank bookmark bottomdistance \
+bottomheight bottomspace but button bypassblocks \
+cap chapter character characters chem \
+clip clonefield color colorbar colorvalue \
+column comment comparecolorgroup comparepalet completecombinedlist \
+completelistoffloats completelistofsorts completelistofsynonyms completepagenumber completeregister \
+component convertnumber copyfield correctwhitespace coupledocument \
+coupledregister couplemarking couplepage couplepaper coupleregister \
+crlf currentdate currentheadnumber cutspace date \
+decouplemarking decrementnumber define defineaccent defineblank \
+defineblock definebodyfont definebodyfontDEF definebodyfontREF definebodyfontenvironment \
+definebuffer definecharacter definecolor definecolorgroup definecolumnbreak \
+definecolumnset definecombination definecombinedlist definecommand defineconversion \
+definedescription definedfont defineenumeration definefield definefieldstack \
+definefiguresymbol definefloat definefont definefontfeature definefonthandling \
+definefontstyle definefontsynonym defineframed defineframedtext definehbox \
+definehead defineindentedtext defineinmargin defineinteractionmenu defineitemgroup \
+definelabel definelayer definelayout definelist definelogo \
+definemainfield definemakeup definemarking definemathalignment defineoutput \
+defineoverlay definepagebreak definepalet definepapersize defineparagraphs \
+defineplacement defineprofile defineprogram definerawfont definereference \
+definereferenceformat definereferencelist defineregister definerule definesection \
+definesectionblock definesorting definestartstop definestyle definesubfield \
+definesymbol definesynonyms definetabletemplate definetabulate definetext \
+definetextposition definetextvariable definetype definetypeface definetyping \
+defineversion description determineheadnumber determinelistcharacteristics determineregistercharacteristics \
+dimension disableinteractionmenu domicile donttest edgedistance \
+edgewidth emptylines enumeration environment externalfigure \
+fact field fieldstack fillinfield fillinline \
+fillinrules fillintext fitfield fixedspace fixedspaces \
+followprofile followprofileversion followversion footerdistance footerheight \
+footnote footnotetext forceblocks formulanumber fraction \
+framed framedtext from getbuffer getmarking \
+getnumber godown goto gotobox gotopage \
+graycolor greyvalue grid hairline head \
+headerdistance headerheight headlevel headnumber headsym \
+headtext hideblocks high hl immediatebetweenlist \
+immediatetolist in incrementnumber indentation indenting \
+inframed infull ininner inleft inleftedge \
+inleftmargin inline inmaframed inmargin inneredgedistance \
+inneredgewidth innermargindistance innermarginwidth inothermargin inouter \
+inright inrightedge inrightmargin installlanguage interactionbar \
+interactionbuttons interactionmenu item items its \
+keepblocks label labeling labels labeltext \
+language leftaligned leftedgedistance leftedgewidth leftmargindistance \
+leftmarginwidth leg linethickness listheight listlength \
+listsymbol listwidth loadsorts loadsynonyms logfields \
+lohi low maframed mainlanguage makeupheight \
+makeupwidth mapfontsize mar margindistance marginrule \
+margintext margintitle marginwidth marginword marking \
+markversion mathematics mediaeval menubutton midaligned \
+mirror month moveformula moveongrid movesidefloat \
+name navigating nextsection nocap nodimension \
+noheaderandfooterlines noindenting nolist nomarking nomoreblocks \
+nomorefiles nop nospace note notopandbottomlines \
+nowhitespace numberofsubpages numbers outeredgedistance outeredgewidth \
+outermargindistance outermarginwidth overbar overbars overstrike \
+overstrikes packed page pagedepth pagenumber \
+pageoffset pagereference paperheight paperwidth paragraph \
+part periods placebookmarks placecombinedlist placefloat \
+placefootnotes placeformula placeheadnumber placeheadtext placelegend \
+placelist placelistoffloats placelistofsorts placelistofsynonyms placelocalfootnotes \
+placelogos placeongrid placeontopofeachother placepagenumber placerawlist \
+placereferencelist placeregister placerule placesidebyside placesubformula \
+placetextvariable position positiontext printpaperheight printpaperwidth \
+processblocks processpage product program project \
+publication quotation quote ran redo \
+ref reference referral referraldate referring \
+register remark reservefloat reset resetmarking \
+resetnumber resettextcontent rightaligned rightedgedistance rightedgewidth \
+rightmargindistance rightmarginwidth romannumerals rotate scale \
+screen section seeregister selectblocks selectpaper \
+selectversion setnumber settextcontent settextvariable setupalign \
+setupanswerarea setuparranging setupbackground setupbackgrounds setupblackrules \
+setupblank setupblock setupbodyfont setupbodyfontenvironment setupbottom \
+setupbottomtexts setupbuffer setupbuttons setupcapitals setupcaption \
+setupcaptions setupclipping setupcolor setupcolors setupcolumns \
+setupcolumnset setupcolumnsetlines setupcolumnsetstart setupcombinations setupcombinedlist \
+setupcomment setupdescriptions setupenumerations setupexternalfigures setupfield \
+setupfields setupfillinlines setupfillinrules setupfloat setupfloats \
+setupfloatsplitting setupfonthandling setupfontsynonym setupfooter setupfootertexts \
+setupfootnotedefinition setupfootnotes setupforms setupformulae setupframed \
+setupframedtexts setuphead setupheader setupheadertexts setupheadnumber \
+setupheads setupheadtext setuphyphenmark setupindentedtext setupindenting \
+setupinmargin setupinteraction setupinteractionbar setupinteractionmenu setupinteractionscreen \
+setupinterlinespace setupinterlinespace2 setupitemgroup setupitemizations setupitems \
+setuplabeltext setuplanguage setuplayout setuplegend setuplinenumbering \
+setuplines setuplinewidth setuplist setuplistalternative setupmakeup \
+setupmarginblocks setupmarginrules setupmarking setupmathalignment setupnarrower \
+setupnumber setupnumbering setupoppositeplacing setupoutput setuppagecomment \
+setuppagenumber setuppagenumbering setuppagetransitions setuppalet setuppaper \
+setuppapersize setupparagraphnumbering setupparagraphs setupplacement setuppositioning \
+setupprofiles setupprograms setuppublications setupquote setupreferencelist \
+setupreferencing setupregister setuprotate setuprule setups \
+setupscreens setupsection setupsectionblock setupsorting setupspacing \
+setupstartstop setupstrut setupsubpagenumber setupsymbolset setupsynchronization \
+setupsynchronizationbar setupsynonyms setupsystem setuptab setuptables \
+setuptabulate setuptext setuptextposition setuptextrules setuptexttexts \
+setuptextvariable setupthinrules setuptolerance setuptop setuptoptexts \
+setuptype setuptyping setupunderbar setupurl setupversions \
+setupwhitespace showbodyfont showbodyfontenvironment showcolor showcolorgroup \
+showexternalfigures showfields showframe showgrid showlayout \
+showmakeup showpalet showprint showsetups showstruts \
+showsymbolset someline somewhere sort space \
+splitfloat startalignment startbackground startbuffer startcoding \
+startcolor startcolumnmakeup startcolumns startcolumnset startcombination \
+startcomment startcomponent startdescription startdocument startenumeration \
+startenvironment startfact startfigure startfloattext startformula \
+startframedtext startglobal starthiding startinteractionmenu startitemgroup \
+startlegend startline startlinecorrection startlinenumbering startlines \
+startlocal startlocalenvironment startlocalfootnotes startmakeup startmarginblock \
+startmarginrule startnamemakeup startnarrower startopposite startoverlay \
+startoverview startpacked startparagraph startpositioning startpostponing \
+startproduct startprofile startproject startquotation startraster \
+startregister startsymbolset startsynchronization starttable starttables \
+starttabulate starttext starttextrule starttyping startunpacked \
+startversion stopalignment stopbackground stopbuffer stopcoding \
+stopcolor stopcolumnmakeup stopcolumns stopcolumnset stopcombination \
+stopcomment stopcomponent stopdescription stopdocument stopenumeration \
+stopenvironment stopfact stopfigure stopfloattext stopformula \
+stopframedtext stopglobal stophiding stopinteractionmenu stopitemgroup \
+stoplegend stopline stoplinecorrection stoplinenumbering stoplines \
+stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup stopmarginblock \
+stopmarginrule stopnamemakeup stopnarrower stopopposite stopoverlay \
+stopoverview stoppacked stopparagraph stoppositioning stoppostponing \
+stopproduct stopprofile stopproject stopquotation stopraster \
+stopsymbolset stopsynchronization stoptable stoptables stoptabulate \
+stoptext stoptextrule stoptyping stopunpacked stopversion \
+stretched sub subformulanumber subject subpagenumber \
+subsection subsubject subsubsection subsubsubject switchtobodyfont \
+switchtorawfont sym symbol symoffset synchronizationbar \
+synchronize synonym tab testcolumn testpage \
+tex textheight textreference textrule textvariable \
+textwidth thinrule thinrules title tooltip \
+topdistance topheight topspace totalnumberofpages translate \
+txt typ type typebuffer typefile \
+underbar underbars unitmeaning unknown useJSscripts \
+useURL useXMLfilter useblocks usecommands usedirectory \
+useencoding useexternaldocument useexternalfigure useexternalfile useexternalfiles \
+useexternalsoundtrack usemodule usemodules usereferences usespecials \
+usesymbols usetypescript usetypescriptfile useurl version \
+vl weekday whitespace wordright writebetweenlist \
+writetolist writetoreferencelist writetoregister
keywordclass.context.cs=\
CAP Cap Caps Cisla \
@@ -317,6 +471,325 @@ ziskejbuffer ziskejznaceni zlomek znaceni znak \
znaky zpracujbloky zpracujstranu zrcadlit zref \
zvysujicicislo
+keywordclass.context.nl=\
+CAP Cap Caps Cijfers \
+KAP Kap Kaps Letter Letters \
+MAAND Romeins WEEKDAG WOORD WOORDEN \
+Woord Woorden aantalsubpaginas about achtergrond \
+appendix arg bepaalkopnummer bepaallijstkenmerken bepaalregisterkenmerken \
+betekenis binnenmargeafstand binnenmargebreedte binnenrandafstand binnenrandbreedte \
+blanko blokje blokjes blokkeerinteractiemenu bodemwit \
+bookmark bovenafstand bovenhoogte breuk buitenmargeafstand \
+buitenmargebreedte buitenrandafstand buitenrandbreedte but button \
+cap chapter chem cijfers citaat \
+citeer clip comment completecombinedlist completelistoffloats \
+completelistofsorts completelistofsynonyms converteernummer copieerveld corrigeerwitruimte \
+coupledregister crlf datum definebodyfontDEF definebodyfontREF \
+definedfont definefontfeature definefonthandling definerawfont definetypeface \
+definieer definieeraccent definieeralineas definieerbeeldmerk definieerblanko \
+definieerblok definieerbuffer definieercombinatie definieercommando definieerconversie \
+definieerfiguursymbool definieerfont definieerfontstijl definieerfontsynoniem definieerhbox \
+definieerhoofdveld definieeringesprongentext definieerinmarge definieerinteractiemenu definieeritemgroep \
+definieerkadertekst definieerkarakter definieerkleur definieerkleurgroep definieerkolomgroep \
+definieerkolomovergang definieerkop definieerkorps definieerkorpsomgeving definieerlayer \
+definieerlayout definieerletter definieerlijn definieerlijst definieermarkering \
+definieeromlijnd definieeropmaak definieeroverlay definieerpaginaovergang definieerpalet \
+definieerpapierformaat definieerplaats definieerplaatsblok definieerprofiel definieerprogramma \
+definieerreferentie definieerreferentieformaat definieerreferentielijst definieerregister definieersamengesteldelijst \
+definieersectie definieersectieblok definieersorteren definieerstartstop definieersubveld \
+definieersymbool definieersynoniemen definieertabelvorm definieertabulatie definieertekst \
+definieertekstpositie definieertekstvariabele definieertype definieertypen definieeruitvoer \
+definieerveld definieerveldstapel definieerversie definieerwiskundeuitlijnen description \
+dimensie directnaarlijst directtussenlijst doordefinieren doorlabelen \
+doornummeren dunnelijn dunnelijnen eenregel enumeration \
+ergens externfiguur forceerblokken formulenummer framedtext \
+gebruikJSscripts gebruikURL gebruikXMLfilter gebruikblokken gebruikcommandos \
+gebruikexterndocument gebruikexternefile gebruikexternefiles gebruikexternfiguur gebruikexterngeluidsfragment \
+gebruikgebied gebruikmodule gebruikmodules gebruikreferenties gebruikspecials \
+gebruiksymbolen gebruiktypescript gebruiktypescriptfile gebruikurl geenblokkenmeer \
+geenbovenenonderregels geendimensie geenfilesmeer geenhoofdenvoetregels geenlijst \
+geenmarkering geenspatie geentest geenwitruimte geg \
+grijskleur grijswaarde haalbuffer haalmarkering haalnummer \
+haarlijn handhaafblokken herhaal hl hoofdafstand \
+hoofdhoogte hoofdtaal hoog huidigedatum huidigekopnummer \
+in inanderemarge inbinnen inbuiten indentation \
+inlijnd inlinker inlinkermarge inlinkerrand inmarge \
+inrechter inrechtermarge inrechterrand inregel inspringen \
+installeertaal instellingen interactiebalk interactiebuttons interactiemenu \
+invullijnen invulregel invultekst invulveld inwilijnd \
+items its kantlijn kap kenmerk \
+kenmerkdatum kentekstvariabeletoe kleur kleurenbalk kleurwaarde \
+kloonveld kolom kop kopniveau kopnummer \
+koppeldocument koppelmarkering koppelpagina koppelpapier koppelregister \
+kopsym koptekst kopwit laag label \
+labeling labels labeltekst laho leg \
+legeregels letter letters lijndikte lijstbreedte \
+lijsthoogte lijstlengte lijstsymbool linkermargeafstand linkermargebreedte \
+linkerrandafstand linkerrandbreedte loadsorts loadsynonyms maand \
+mapfontsize mar margeafstand margebreedte margetekst \
+margetitel margewoord markeer markeerversie mediaeval \
+menubutton naam naar naarbox naarpagina \
+name navigerend nextsection nietinspringen nocap \
+nokap noot nop omgeving omlaag \
+omlijnd onbekend onderafstand onderdeel onderhoogte \
+ontkoppelmarkering op opelkaar oplinkermarge oppagina \
+oprechtermarge overbar overbars overstrike overstrikes \
+pagina paginadiepte paginanummer paginaoffset paginareferentie \
+papierbreedte papierhoogte paragraph part paslayoutaan \
+passeerblokken passendveld plaatsbeeldmerken plaatsbookmarks plaatsformule \
+plaatskopnummer plaatskoptekst plaatslegenda plaatslijn plaatslijst \
+plaatslokalevoetnoten plaatsnaastelkaar plaatsonderelkaar plaatsopgrid plaatspaginanummer \
+plaatsplaatsblok plaatsreferentielijst plaatsregister plaatsruwelijst plaatssamengesteldelijst \
+plaatssubformule plaatstekstvariabele plaatsvoetnoten placelistoffloats placelistofsorts \
+placelistofsynonyms positioneer positioneertekst printpapierbreedte printpapierhoogte \
+produkt programma projekt publicatie punten \
+ran randafstand randbreedte rechtermargeafstand rechtermargebreedte \
+rechterrandafstand rechterrandbreedte ref refereer referentie \
+regellinks regelmidden regelrechts register registreervelden \
+reservefloat reset resetmarkering resetnummer resettekstinhoud \
+resettextcontent romeins rooster roteer rugwit \
+schaal scherm schrijfnaarlijst schrijfnaarreferentielijst schrijfnaarregister \
+schrijftussenlijst section seeregister selecteerblokken selecteerpapier \
+selecteerversie setnummer setupfonthandling setupfontsynonym setupinterlinespace2 \
+setuplistalternative snijwit som sort spatie \
+spiegel splitsplaatsblok startachtergrond startalignment startbuffer \
+startcitaat startcodering startcolumns startcombinatie startcombination \
+startcomment startdescription startdocument startenumeration startfigure \
+startfiguur startfloattext startformula startframedtext startgeg \
+startglobaal starthiding startinteractiemenu startitemgroup startkantlijn \
+startkleur startkolomgroep startkolommen startkolomopmaak startlegend \
+startline startlinecorrection startlinenumbering startlines startlocal \
+startlocalenvironment startlocalfootnotes startlokaal startlokalevoetnoten startmakeup \
+startmargeblok startmarginblock startnaast startnamemakeup startnarrower \
+startomgeving startonderdeel startopelkaar startopmaak startopposite \
+startoverlay startoverview startoverzicht startparagraph startpositioneren \
+startpositioning startpostponing startprodukt startprofiel startprofile \
+startprojekt startraster startregel startregelcorrectie startregelnummeren \
+startregels startregister startsmaller startsymbolset startsymboolset \
+startsynchronisatie startsynchronization starttabel starttabellen starttable \
+starttables starttabulate starttekst starttekstlijn starttyping \
+startuitlijnen startunpacked startvanelkaar startversie stelachtergrondenin \
+stelachtergrondin stelalineasin stelantwoordgebiedin stelarrangerenin stelblankoin \
+stelblokin stelblokjesin stelblokkopjein stelblokkopjesin stelbovenin \
+stelboventekstenin stelbufferin stelbuttonsin stelciterenin stelclipin \
+stelcombinatiesin stelcommentaarin steldoordefinierenin steldoornummerenin steldunnelijnenin \
+stelexternefigurenin stelformulesin stelformulierenin stelhoofdin stelhoofdtekstenin \
+stelingesprongentextin stelinmargein stelinspringenin stelinteractiebalkin stelinteractiein \
+stelinteractiemenuin stelinteractieschermin stelinterliniein stelinvullijnenin stelinvulregelsin \
+stelitemgroepin stelitemsin stelkadertekstenin stelkantlijnin stelkapitalenin \
+stelkleurenin stelkleurin stelkolomgroepin stelkolomgroepregelsin stelkolomgroepstartin \
+stelkolommenin stelkopin stelkopnummerin stelkoppeltekenin stelkoppenin \
+stelkoptekstin stelkorpsin stelkorpsomgevingin stellabeltekstin stellayoutin \
+stellegendain stellijndiktein stellijnin stellijstin stelmargeblokkenin \
+stelmarkeringin stelnaastplaatsenin stelnummerenin stelnummerin stelomlijndin \
+stelonderin stelonderstrepenin stelondertekstenin stelopmaakin stelopsommingenin \
+stelpaginacommentaarin stelpaginanummerin stelpaginanummeringin stelpaginaovergangenin stelpaletin \
+stelpapierformaatin stelpapierin stelparagraafnummerenin stelplaatsblokin stelplaatsblokkenin \
+stelplaatsbloksplitsenin stelplaatsin stelpositionerenin stelprofielenin stelprogrammasin \
+stelpublicatiesin stelrastersin stelreferentielijstin stelrefererenin stelregelnummerenin \
+stelregelsin stelregisterin stelroterenin stelsamengesteldelijstin stelsectieblokin \
+stelsectiein stelsmallerin stelsorterenin stelspatieringin stelstartstopin \
+stelstrutin stelsubpaginanummerin stelsymboolsetin stelsynchronisatiebalkin stelsynchronisatiein \
+stelsynoniemenin stelsysteemin steltaalin steltabellenin steltabin \
+steltabulatiein steltekstin steltekstinhoudin steltekstlijnenin steltekstpositiein \
+stelteksttekstenin steltekstvariabelein steltolerantiein steltypein steltypenin \
+steluitlijnenin steluitvoerin stelurlin stelveldenin stelveldin \
+stelversiesin stelvoetin stelvoetnootdefinitiein stelvoetnotenin stelvoettekstenin \
+stelwiskundeuitlijnenin stelwitruimtein stopachtergrond stopalignment stopbuffer \
+stopcitaat stopcodering stopcolumns stopcombinatie stopcombination \
+stopcomment stopdescription stopdocument stopenumeration stopfigure \
+stopfloattext stopformula stopframedtext stopgeg stopglobaal \
+stophiding stopinteractiemenu stopitemgroup stopkantlijn stopkleur \
+stopkolomgroep stopkolommen stopkolomopmaak stoplegend stopline \
+stoplinecorrection stoplinenumbering stoplines stoplocal stoplocalenvironment \
+stoplocalfootnotes stoplokaal stoplokalevoetnoten stopmakeup stopmargeblok \
+stopmarginblock stopnaast stopnamemakeup stopnarrower stopomgeving \
+stoponderdeel stopopelkaar stopopmaak stopopposite stopoverlay \
+stopoverview stopoverzicht stopparagraph stoppositioneren stoppositioning \
+stoppostponing stopprodukt stopprofiel stopprofile stopprojekt \
+stopraster stopregel stopregelcorrectie stopregelnummeren stopregels \
+stopsmaller stopsymbolset stopsynchronisatie stopsynchronization stoptabel \
+stoptabellen stoptable stoptables stoptabulate stoptekst \
+stoptekstlijn stoptyping stopuitlijnen stopunpacked stopvanelkaar \
+stopversie sub subformulenummer subject subpaginanummer \
+subsection subsubject subsubsection subsubsubject suggestie \
+switchnaarkorps switchtorawfont sym symbool symoffset \
+synchronisatiebalk synchroniseer synonym taal tab \
+tekstbreedte teksthoogte tekstlijn tekstreferentie tekstvariabele \
+testkolom testpagina tex title toelichting \
+toonexternefiguren toongrid tooninstellingen toonkader toonkleur \
+toonkleurgroep toonkorps toonkorpsomgeving toonlayout toonopmaak \
+toonpalet toonprint toonstruts toonsymboolset toonvelden \
+totaalaantalpaginas txt typ type typebuffer \
+typefile uit uitgerekt underbar underbars \
+usecodering usedirectory vastespatie vastespaties veld \
+veldstapel verbergblokken vergelijkkleurgroep vergelijkpalet verhoognummer \
+verlaagnummer verplaatsformule verplaatsopgrid verplaatszijblok versie \
+vertaal verwerkblokken verwerkpagina vl voetafstand \
+voethoogte voetnoot voetnoottekst volgprofiel volgprofielversie \
+volgversie volledigepaginanummer volledigregister voluit weekdag \
+wilijnd wiskunde witruimte woonplaats woordrechts \
+zetbreedte zethoogte
+
+keywordclass.context.de=\
+Buchstabe Buchstaben CAP Cap \
+Caps KAP Kap Kaps MONAT \
+Roemischezahlen WOCHENTAG WOERTER WORT Woerter \
+Wort Ziffern abstandlinkerrand abstandoben abstandrechterrand \
+abstandunten amgitterausrichten amgitterneuausrichten appendix arg \
+atleftmargin atrightmargin aufseite ausfuellfeld ausfuelltext \
+ausschnitt bearbeitebloecke bearbeiteseite bedeutung behaltebloecke \
+bei bemerkung benutzekodierung benutzespezielles benutzeverzeichnis \
+beschrifteversion beschriftung bestimmekopfnummer bestimmelistencharakeristika bestimmeregistercharakteristika \
+bildschirm blanko bookmark bottomspace breitelinkerrand \
+breiterechterrand bruch buchstabe buchstaben but \
+bypassblocks cap chapter chem comment \
+completecombinedlist completelistoffloats completelistofsorts completelistofsynonyms completepagenumber \
+completeregister coupledregister crlf cutspace datum \
+decrementnumber definebodyfontDEF definebodyfontREF definecolumnbreak definecolumnset \
+definecombination definedfont definefontfeature definefonthandling defineindentedtext \
+defineinmargin defineitemgroup definelayer definelayout definemathalignment \
+defineoutput definepagebreak defineplacement definerawfont definerule \
+defineschriftsynonym definetextposition definetextvariable definetype definetypeface \
+definiereabbsymbol definiereabsaetze definiereabschnitt definiereabschnittsblock definiereakzent \
+definierebefehl definierebeschreibung definierebeschreibungen definierebeschriftung definiereblanko \
+definiereblock definierefarbe definierefarbengruppe definierefeld definierefeldstapel \
+definierefliesstext definierefliesstextumgebung definieregleitobjekt definierehauptfeld definierehbox \
+definiereinteraktionsmenue definierekonversion definierelabel definiereliste definierelogo \
+definieren definierenummerierung definiereoverlay definierepalette definierepapierformat \
+definiereprofil definiereprogramme definierepuffer definierereferenz definierereferenzformat \
+definierereferenzliste definiereregister definiereschrift definiereschriftstil definieresortieren \
+definierestartstop definierestil definieresubfeld definieresymbol definieresynonyme \
+definieretabellenvorlage definieretabulator definieretext definieretippen definiereueberschrift \
+definiereumbruch definiereumrahmt definiereumrahmtertext definiereversion definierezeichen \
+definierezusammengestellteliste description dimension doppelseite doppelseitigespapier \
+drehen duennelinie duennerumriss einezeile einstellungen \
+einziehen emptylines entknuepfebeschriftung enumeration externeabbildung \
+farbbalken farbe farbewert feld feldstapel \
+festesspatium folgeprofil folgeprofilversion folgeversion footnotetext \
+forceblocks format formelnummer framedtext fussnote \
+fusszeileabstand fusszeilenhoehe gefuelltesrechteck gefuelltezeile geg \
+gesamtseitenanzahl gestreckt getnumber gitter graufarbe \
+grauwert haarlinie hauptsprache headsym heutigesdatum \
+heutigeskopfnummer hintergrund hl hoch hoeheoben \
+hoeheunten holebeschriftung holepuffer imlinken imlinkenrand \
+immaumrise immediatebetweenlist immediatetolist imrechten imrechtenrand \
+imumriss in inaktiviereinteraktionsmenue inanderermarginale indentation \
+ininner inlinkermarginale inmarginalie inneredgedistance inneredgewidth \
+innermargindistance innermarginwidth inouter inrechtermarginale installieresprache \
+interaktionsbalken interaktionsknopfe interaktionsmenue inzeile irgendwo \
+its kap keindimension keinebeschriftung keinebloeckemehr \
+keinedateienmehr keinekopfundfusszeilen keineliste keinspatium keintest \
+keinzeilenobenundunten keinzwischenraum kleinerdurchschuss klonierefeld knopf \
+komponente konvertierezahl kopf kopfniveau kopfnummer \
+kopfweite kopfzeilenabstand kopfzeilenhoehe kopierefeld korrigierezwischenraum \
+label labeling labels labeltext leg \
+liniendicke linkemarginalafstand linkemarginalbreite linksbuendig listenbreite \
+listenhoehe listenlaenge listsymbol loadsorts loadsynonyms \
+mapfontsize mar marginalafstand marginalbreite marginallinie \
+marginaltext marginaltitel marginalwort mathematik maumrise \
+mediaeval menueknopf monat moveformula movesidefloat \
+nachunten name navigating nextsection nichteinziehen \
+nocap nokap nop notiz numberofsubpages \
+nummererhoehen outeredgedistance outeredgewidth outermargindistance outermarginwidth \
+overbar overbars overstrike overstrikes pagedepth \
+pageoffset papierbreite papierhoehe paragraph part \
+passelayoutan passendfeld placefloat placeheadnumber placeheadtext \
+placelistoffloats placelistofsorts placelistofsynonyms placepagenumber placerawlist \
+placereferencelist placerule placetextvariable platzierebookmarks platziereformel \
+platzierefussnoten platzierelegende platziereliste platzierelogo platzierelokalefussnoten \
+platzierenebeneinander platziereregister platziereuntereinander platziereunterformel platzierezusammengestellteliste \
+pos position positiontext posten printpapierbreite \
+printpapierhoehe produkt programm projekt publikation \
+punkt ran randabstand randbreite rechteck \
+rechtecke rechtemarginalafstand rechtemarginalbreite rechtsbuendig ref \
+referenz referieren register registrierefelder reservefloat \
+resetnumber resettextcontent roemischezahlen ruecksetzten ruecksetztenbeschriftung \
+rumpfweite satzbreite satzhoehe schreibezumregister schreibezurliste \
+schreibezurreferenzliste schreibezwischenliste section seeregister seite \
+seitenreferenz seitenummer setnumber settext settextvariable \
+setupanswerarea setupcolumnset setupcolumnsetlines setupcolumnsetstart setupfonthandling \
+setupfontsynonym setupforms setupindentedtext setupinterlinespace2 setupitemgroup \
+setuplistalternative setupmathalignment setupnumber setuppaper setupplacement \
+setuprule setupstartstop setupstrut setuptextposition setuptextvariable \
+showsymbolset sort spalte spatium spiegeln \
+sprache startabbildung startalignment startausrichtung startbuffer \
+startcolumnmakeup startcolumns startcolumnset startcombination startcomment \
+startdescription startdocument startdokument startenger startenumeration \
+startfarbe startfigure startfloattext startformula startframedtext \
+startgeg startgegenueber startglobal startgrosserdurchschuss starthiding \
+starthintergrund startinteraktionsmenue startitemgroup startkleinerdurchschuss startkodierung \
+startkombination startkomponente startlegend startline startlinecorrection \
+startlinenumbering startlines startlocal startlocalenvironment startlocalfootnotes \
+startlokal startlokalefussnoten startmakeup startmarginalblock startmarginallinie \
+startmarginblock startnamemakeup startnarrower startopposite startoverlay \
+startoverview startparagraph startpositionieren startpositioning startpostponing \
+startprodukt startprofil startprofile startprojekt startraster \
+startregister startspalten startsymbolset startsynchronisation startsynchronization \
+starttabelle starttabellen starttable starttables starttabulate \
+starttext starttextlinie starttyping startueberblick startumbruch \
+startumgebung startunpacked startversion startzeile startzeilen \
+startzeilenkorrektur startzeilennumerierung startzitat stelleabsaetzeein stelleabsatznummerierungein \
+stelleabschnittein stelleabschnittsblockein stelleanordnenein stelleaufzaehlungenein stelleausgabeein \
+stelleausrichtungein stelleausschnittein stellebeschreibungein stellebeschriftungein stellebilderunterschriftein \
+stellebildunterschriftein stellebindestrichein stelleblankoein stelleblockein stelledrehenein \
+stelleduennerumrissein stelleeinziehenein stelleengerein stelleexterneabbildungenein stellefarbeein \
+stellefarbenein stellefeldein stellefelderin stellefliesstextein stellefliesstextumgebungein \
+stelleformelnein stellefussnotendefinitionein stellefussnotenein stellefusszeileein stellefusszeilentextein \
+stellegefuelltesrechteckein stellegefuelltezeileein stellegegenueberplatzierenein stellegleitobjekteein stellegleitobjektein \
+stellehintergruendeein stellehintergrundein stelleinmarginalieein stelleinteraktionein stelleinteraktionsbalkenein \
+stelleinteraktionsbildschirmein stelleinteraktionsmenueein stelleknopfein stellekombinationein stellekommentarein \
+stellekopfzahlein stellekopfzeileein stellekopfzeilentextein stellelabeltextein stellelayoutein \
+stellelegendeein stellelinienbreiteein stellelisteein stellemarginalblockein stellemarginallinieein \
+stellenobenein stellenummerierungein stellepaletteein stellepapierformatein stelleplatziegeteiltegleitobjekt \
+stellepositionierenein stellepostenein stelleprofilein stelleprogrammein stellepublikationein \
+stellepufferein stellerasterein stellerechteckein stellereferenzierenein stellereferenzlisteein \
+stelleregisterein stelleseitenkommentarein stelleseitennummerein stelleseitennummeriernungein stelleseitenuebergangein \
+stellesortierenein stellespaltenein stellespatiumein stellespracheein stellesymbolsetein \
+stellesynchronisationein stellesynchronisationsbalkenein stellesynonymein stellesystemein stelletabein \
+stelletabellenein stelletabulatorein stelletextein stelletextobenein stelletexttexteein \
+stelletextumrissein stelletextuntenein stelletipein stelletippenein stelletoleranzein \
+stelleueberschriftein stelleueberschriftenein stelleueberschrifttextein stelleumbruchein stelleumrahmtein \
+stelleumrahmtetexteein stelleuntenein stelleunterseitennummerein stelleunterstreichenein stelleurlein \
+stelleversalienein stelleversionein stellezeilenabstandein stellezeilenein stellezeilennumerierungein \
+stellezitierenein stellezusammengestelltelisteein stellezwischenraumein stopalignment stopausrichtung \
+stopbuffer stopcolumnmakeup stopcolumns stopcolumnset stopcombination \
+stopcomment stopdescription stopdocument stopdokument stopenger \
+stopenumeration stopfarbe stopfigure stopfloattext stopformula \
+stopframedtext stopgeg stopgegenueber stopglobal stopgrosserdurchschuss \
+stophiding stophintergrund stopinteraktionsmenue stopitemgroup stopkleinerdurchschuss \
+stopkodierung stopkombination stopkomponente stoplegend stopline \
+stoplinecorrection stoplinenumbering stoplines stoplocal stoplocalenvironment \
+stoplocalfootnotes stoplokal stoplokalefussnoten stopmakeup stopmarginalblock \
+stopmarginallinie stopmarginblock stopnamemakeup stopnarrower stopopposite \
+stopoverlay stopoverview stopparagraph stoppositionieren stoppositioning \
+stoppostponing stopprodukt stopprofil stopprofile stopprojekt \
+stopraster stopspalten stopsymbolset stopsynchronisation stopsynchronization \
+stoptabelle stoptabellen stoptable stoptables stoptabulate \
+stoptext stoptextlinie stoptyping stopueberblick stopumbruch \
+stopumgebung stopunpacked stopversion stopzeile stopzeilen \
+stopzeilenkorrektur stopzeilennumerierung stopzitat sub subject \
+subpagenumber subsection subsubject subsubsection subsubsubject \
+switchtorawfont sym symbol synchronisationsbalken synchronisieren \
+synonym tab teilegleitobjekt testcolumn testpage \
+tex textbreite texthoehe textlinie textreferenz \
+textvariable tief tiho tip tippedatei \
+tippen tippepuffer title tooltip txt \
+ueber ueberschrifttext uebersetzten umgebung umrahmt \
+unbekant underbar underbars unterformelnummer useXMLfilter \
+usedirectory usetypescript usetypescriptfile verbergebloecke vergleichefarbengruppe \
+vergleichepalette verknuepfebeschriftung verknuepfedokument verknuepfregister version \
+verweis verweisdatum verwendeJSscript verwendeURL verwendebefehl \
+verwendebloecke verwendeexteresdokument verwendeexterneabbildung verwendeexternedatei verwendeexternedateien \
+verwendeexternestonstueck verwendemodul verwendemodule verwendereferenzen verwendesymbole \
+verwendeurl vl volleswort von waehlebloeckeaus \
+waehlepapieraus waehleversionaus wechselezumfliesstext wiederholen wochentag \
+wohnort wortrechts zeigedruck zeigeeinstellungen zeigeexterneabbildungen \
+zeigefarbe zeigefarbengruppe zeigefelder zeigefliesstext zeigefliesstextumgebung \
+zeigegitter zeigelayout zeigepalette zeigerahmen zeigestruts \
+zeigeumbruch zentriert ziffern zitat zitieren \
+zu zurbox zurseite zwischenraum
+
keywordclass.context.fr=\
CAP Cap Caps Caractere \
Caracteres Chiffresromains JOURSEMAINE MOIS MOT \
@@ -480,332 +953,6 @@ utilisepsiteaudioexterne utilisereferences utilisespecialites utilisesymboles ut
va vaalaboite vaalapage vaenbas valeurcouleur \
valeurgris variabletexte version vide vl
-keywordclass.context.it=\
-CAP Cap Caps GIORNOSETTIMANA \
-Lettera Lettere MESE Numeri Numeriromani \
-PAROLA PAROLE Parola Parole accoppiacarta \
-accoppiadocumento accoppiamarcatura accoppiapagina accoppiaregistro adattacampo \
-adattalayout al allineacentro allineadestra allineasinistra \
-altezzacarta altezzacartastampa altezzacima altezzaelenco altezzafondo \
-altezzaintestazione altezzamakeup altezzapdp altezzatesto ambiente \
-ampiezzabordo ampiezzabordodestro ampiezzabordosinistro ampiezzacarta ampiezzacartastampa \
-ampiezzaelenco ampiezzamakeup ampiezzamargine ampiezzamarginedestro ampiezzamarginesinistro \
-ampiezzatesto ap apagina appendix arg \
-atleftmargin atrightmargin barracolori barrainterazione barrasincronizzazione \
-bastablocchi bastafile cambiaafontdeltesto campi camporiempimento \
-cap capello chapter chim circondato \
-citazione clip clonacampo colonna colore \
-coloregrigio comment commento completecombinedlist completelistoffloats \
-completelistofsorts completelistofsynonyms completeregister componenet confrontagruppocolori \
-confrontatavolozza convertinumero copiacampo correggispaziobianco coupledregister \
-crlf cutspace da daqualcheparte data \
-datadioggi datareferral decrementnumber definebodyfontDEF definebodyfontREF \
-definecolumnbreak definecombination definedfont definefontfeature definefonthandling \
-defineindentedtext defineinmargin defineitemgroup definelayer definemathalignment \
-definepagebreak defineplacement definetypeface definisci definisciaccento \
-definisciambientefontdeltesto definisciblocco definiscibloccosezione definiscibuffer definiscicampo \
-definiscicampoprincipale definiscicapoversi definiscicarattere definiscicolore definiscicomando \
-definisciconversione definiscidescrizione definiscidimensionicarta definiscielenco definiscielencocombinato \
-definiscienumerazione definiscietichetta definiscifigurasimbolo definiscifont definiscifontdeltesto \
-definiscifontgrezzo definisciformatoriferimento definiscigruppocolonne definiscigruppocolori definiscihbox \
-definisciincorniciato definisciiniziatermina definiscilayout definiscilinea definiscilistariferimenti \
-definiscilogo definiscimakeup definiscimarcatura definiscimenuinterazione definiscimodellotabella \
-definiscioggettomobile definisciordinamento definiscioutput definisciposizionetesto definisciprofilo \
-definisciprogramma definisciregistro definisciriferimento definiscirigovuoto definiscisezione \
-definiscisimbolo definiscisinonimi definiscisinonimofont definiscisottocampo definiscisovrapposizione \
-definiscistackcampi definiscistile definiscistilefont definiscitabulato definiscitavolozza \
-definiscitesta definiscitesto definiscitestoincorniciato definiscitype definiscityping \
-definiscivariabiletesto definisciversion description determinacaratteristicheregistro determinacarattersticheelenco \
-determinanumerotesta dimensione disabilitamenuinterazione distanzabordo distanzabordodestro \
-distanzabordosinistro distanzacima distanzafondo distanzaintestazione distanzamargine \
-distanzamarginedestro distanzamarginesinistro distanzapdp domicilio el \
-elaborablocchi elaborapagina elementi elemento emptylines \
-enumeration etichetta etichette fatto figuraesterna \
-fondo forzablocchi framedtext frazione getnumber \
-giornosettimana griglia headsym hl ignoto \
-immediatebetweenlist immediatetolist impaccato impostaallineamento impostaambientefontdeltesto \
-impostaampiezzariga impostabarrainterazione impostabarrasincronizzazione impostablocchimargine impostablocco \
-impostabloccosezione impostabuffer impostacampi impostacampo impostacapoversi \
-impostacaption impostacaptions impostacima impostaclippling impostacolonne \
-impostacolore impostacolori impostacombinazioni impostacommento impostacommentopagina \
-impostadefinizionenotepdp impostadescrizioni impostadimensionicarta impostaelementi impostaelencazioni \
-impostaelenco impostaelencocombinato impostaenumerazioni impostafigureesterne impostafondo \
-impostafontdeltesto impostaforms impostaformule impostagruppocolonne impostaincorniciato \
-impostainiziatermina impostainmargine impostainstestazione impostainterazione impostainterlinea \
-impostalayout impostalegenda impostalinea impostalineemargine impostalineenere \
-impostalineeriempimento impostalineesottili impostalineetesto impostalingua impostalistariferimenti \
-impostamaiuscole impostamakeup impostamarcatura impostamenuinterazione impostamenzione \
-impostanotepdp impostanumerazione impostanumerazionecapoversi impostanumerazionepagina impostanumerazionerighe \
-impostanumeropagina impostanumerosottopagina impostanumerotesta impostaoggettimobili impostaoggettomobile \
-impostaordinamento impostaoutput impostaparranging impostapdp impostapiustretto \
-impostaposizionamento impostaposizionamentoopposti impostaposizionetesto impostaprofili impostaprogrammi \
-impostapubblicazioni impostapulsanti impostaregistro impostarientro impostariferimento \
-impostarighe impostarigheriempimento impostarigovuoto impostarotazione impostaschermi \
-impostaschermointerazione impostasegnosillabazione impostasetsimboli impostasezione impostasfondi \
-impostasfondo impostasincronizzazione impostasinonimi impostasistema impostasottolinea \
-impostaspaziatura impostaspaziobianco impostaspezzamentooggettomobile impostastrut impostatab \
-impostatabelle impostatabulato impostatavolozza impostatesta impostateste \
-impostatesticima impostatestifondo impostatestiincorniciati impostatestiintestazioni impostatestipdp \
-impostatesto impostatestoetichette impostatestointestazioni impostatestotesti impostatolleranza \
-impostatransizionepagina impostatype impostatyping impostaurl impostavariabiletesto \
-impostaversioni impostazioni in inaltromargine incorniciato \
-incrementanumero indentation indestra ininner iniziaallineamento \
-iniziaambiente iniziabloccomargine iniziacitazione iniziacodifica iniziacolonne \
-iniziacolore iniziacombinazione iniziacomponente iniziacorrezioneriga iniziadocumento \
-iniziafigura iniziaglobale iniziagruppocolonne iniziaimpaccato inizialineamargine \
-inizialineatesto inizialocale iniziamakeup inizianotepdplocali inizianumerazionerighe \
-iniziaopposto iniziaoverview iniziapiustretto iniziaposizionamento iniziaprodotto \
-iniziaprofilo iniziaprogetto iniziaraster iniziariga iniziarighe \
-iniziasetsimboli iniziasfondo iniziasincronizzazione iniziasovrapposizione iniziatabella \
-iniziatabelle iniziatesto iniziaunpacked iniziaversione inlatodestro \
-inlatosinistro inmaframed inmargine inmarginedestro inmarginesinistro \
-inneredgedistance inneredgewidth innermargindistance innermarginwidth inouter \
-inriga insinistra installalingua intorno labeling \
-leg lettera lettere lineamargine lineanera \
-lineasottile lineatesto lineenere lineeriempimento lineesottili \
-lingua linguaprincipale listsymbol livellotesta loadsorts \
-loadsynonyms logcampi lunghezzaelenco maframed mapfontsize \
-mar marcatura marcaversione matematica mediaeval \
-menuinterattivo menzione mese mettielenco mettielencocombinato \
-mettifiancoafianco mettiformula mettiingriglia mettilegenda mettilinea \
-mettiloghi mettinotepdp mettinotepdplocali mettinumeropagina mettiregistro \
-mettisegnalibro mettisottoformula mettiunosullaltro mettivariabiletesto mostraambientefontdeltesto \
-mostracampi mostracolore mostracornice mostrafiguresterne mostrafontdeltesto \
-mostragriglia mostragruppocolori mostraimpostazioni mostralyout mostramakeup \
-mostrasetsimboli mostrastampa mostrastruts mostratavolozza movesidefloat \
-name nascondiblocchi navigating nextsection nientedimensioni \
-nienteelenco nientelineecimafondo nientelineintestazionepdp nientemarcatura nienterientro \
-nientespazio nientespaziobianco nocap nome nomeunita \
-nop nota notapdp notest numberofsubpages \
-numeri numeriromani numeroformula numeropagina numeropaginacompleto \
-numerosottoformula numerotesta numerotestacorrente numerototaledipagine outeredgedistance \
-outeredgewidth outermargindistance outermarginwidth overbar overbars \
-overstrike overstrikes pagedepth pageoffset pagina \
-paragraph paroladestra parolainmargine part passaafontgrezzo \
-ped pedap perlungo placefloat placelistoffloats \
-placelistofsorts placelistofsynonyms placerawlist placereferencelist posizionanumerotesta \
-posizionatesto posizionatestotesta posizione prendibuffer prendimarcatura \
-prodotto progetto programma pubblicazione pulsante \
-pulsantemenu pulsantinterazione punti qualcheriga ran \
-referral referring register reimposta reimpostamarcatura \
-reservefloat resetnumber resettextcontent rientro rif \
-rifai riferimento riferimentopagina riferimentotesto riflessione \
-rigariempimento rigovuoto ruota saltablocchi scala \
-schermo scrividentroelenco scriviinelenco scriviinlistariferimenti scriviinregistro \
-section seeregister segnalibro seguiprofilo seguiversione \
-seguiversioneprofilo selezionablocchi selezionacarta selezionaversione separamarcatura \
-setnumber settext setupanswerarea setupcolumnsetlines setupcolumnsetstart \
-setupfonthandling setupfontsynonym setupindentedtext setupinterlinespace2 setupitemgroup \
-setuplistalternative setupmathalignment setuppaper setupplacement setvariabiletesto \
-sfondo sim simbolo sincronizza sort \
-spazifissi spazio spaziobianco spaziocima spaziodietro \
-spaziofisso spaziofondo spessoreriga spezzaoggettomobile spostaagriglia \
-spostaformula stackcampi startalignment startambiente startbuffer \
-startcitazione startcolore startcolumnmakeup startcolumns startcombination \
-startcomment startcomponenet startdescription startdocument startenumeration \
-startfatto startfigure startfloattext startformula startframedtext \
-starthiding startimpaccato startitemgroup startlegend startline \
-startlineamargine startlineatesto startlinecorrection startlinenumbering startlines \
-startlocal startlocalenvironment startlocalfootnotes startmakeup startmarginblock \
-startmenuinterattivo startnamemakeup startnarrower startopposite startoverlay \
-startoverview startparagraph startpositioning startpostponing startprodotto \
-startprofile startprogetto startregister startsfondo startsymbolset \
-startsynchronization starttable starttables starttabulate starttyping \
-startunpacked startversione stirato stopalignment stopambiente \
-stopbuffer stopcitazione stopcolore stopcolumnmakeup stopcolumns \
-stopcombination stopcomment stopcomponenet stopdescription stopdocument \
-stopenumeration stopfatto stopfigure stopfloattext stopformula \
-stopframedtext stophiding stopimpaccato stopitemgroup stoplegend \
-stopline stoplineamargine stoplineatesto stoplinecorrection stoplinenumbering \
-stoplines stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup \
-stopmarginblock stopmenuinterattivo stopnamemakeup stopnarrower stopopposite \
-stopoverlay stopoverview stopparagraph stoppositioning stoppostponing \
-stopprodotto stopprofile stopprogetto stopsfondo stopsymbolset \
-stopsynchronization stoptable stoptables stoptabulate stoptyping \
-stopunpacked stopversione sub subject subpagenumber \
-subsection subsubject subsubsection subsubsubject synonym \
-tab terminaallineamento terminaambiente terminabloccomargine terminacitazione \
-terminacodifica terminacolonne terminacolore terminacombinazione terminacomponente \
-terminacorrezioneriga terminadocumento terminaglobale terminagruppocolonne terminaimpaccato \
-terminalineamargine terminalineatesto terminalocale terminamakeup terminanotepdplocali \
-terminanumerazionerighe terminaopposto terminaoverview terminapiustretto terminaposizionamento \
-terminaprodotto terminaprofili terminaprogetto terminaraster terminariga \
-terminarighe terminasfondo terminasincronizzazione terminasovrapposizione terminatabella \
-terminatabelle terminatesto terminaunpacked terminaversioni testa \
-testcolumn testoetichetta testoinmargine testoinstestazioni testonotapdp \
-testoriempimento testpage tex tieniblocchi title \
-titoloinmargine tooltip traduci txt typ \
-type typebuffer typefile underbar underbars \
-usaJSscripts usaURL usablocco usacartella usacodifica \
-usacolonnasonoraesterna usacomandi usadocumentoesterno usafiguraesterna usafileesterni \
-usafileesterno usamoduli usamodulo usariferimenti usasimboli \
-usaspecialita usaurl useXMLfilter usedirectory usetypescript \
-usetypescriptfile vaia vaiabox vaiapagina vaigiu \
-valorecolore valoregrigio variabiletesto versione vl
-
-keywordclass.context.pe=\
-CAP Cap Caps Character \
-Characters MONTH Numbers Romannumerals WEEKDAY \
-WORD WORDS Word Words appendix \
-cap chapter chem comment completecombinedlist \
-completelistoffloats completelistofsorts completelistofsynonyms coupledregister crlf \
-definebodyfontDEF definebodyfontREF definedfont definefontfeature definefonthandling \
-defineindentedtext definetypeface description enumeration framedtext \
-indentation inmframed its labeling loadsorts \
-loadsynonyms mapfontsize mediaeval mframed name \
-nextsection nocap overbar overbars overstrike \
-overstrikes paragraph part placelistoffloats placelistofsorts \
-placelistofsynonyms ran register reservefloat resettextcontent \
-section seeregister setupanswerarea setupcapitals setupfonthandling \
-setupfontsynonym setupindentedtext setupinterlinespace2 setuplistalternative setupurl \
-sort startalignment startbuffer startcolumns startcombination \
-startcomment startdescription startdocument startenumeration startfigure \
-startfloattext startformula startframedtext starthiding startitemgroup \
-startlegend startline startlinecorrection startlinenumbering startlines \
-startlocal startlocalenvironment startlocalfootnotes startmakeup startmarginblock \
-startnamemakeup startnarrower startopposite startoverlay startoverview \
-startparagraph startpositioning startpostponing startprofile startraster \
-startregister startsymbolset startsynchronization starttable starttables \
-starttabulate starttyping startunpacked startتولید startحقیقت \
-startخط‌حاشیه startخط‌متن startرنگ startفشرده startمحیط \
-startمنوی‌پانل startمولفه startنسخه startنقل‌قول startپروژه \
-startپس‌زمینه stopalignment stopbuffer stopcolumns stopcombination \
-stopcomment stopdescription stopdocument stopenumeration stopfigure \
-stopfloattext stopformula stopframedtext stophiding stopitemgroup \
-stoplegend stopline stoplinecorrection stoplinenumbering stoplines \
-stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup stopmarginblock \
-stopnamemakeup stopnarrower stopopposite stopoverlay stopoverview \
-stopparagraph stoppositioning stoppostponing stopprofile stopraster \
-stopsymbolset stopsynchronization stoptable stoptables stoptabulate \
-stoptyping stopunpacked stopتولید stopحقیقت stopخط‌حاشیه \
-stopخط‌متن stopرنگ stopفشرده stopمحیط stopمنوی‌پانل \
-stopمولفه stopنسخه stopنقل‌قول stopپروژه stopپس‌زمینه \
-sub subject subsection subsubject subsubsection \
-subsubsubject synonym title tooltip txt \
-typ underbar underbars useJSscripts useURL \
-useXMLfilter usedirectory useurl آفست‌صفحه آیتم \
-آیتمها آینه اجباربلوکها ارتفاع‌آرایش ارتفاع‌بالا \
-ارتفاع‌برگ ارتفاع‌ته‌برگ ارتفاع‌خط ارتفاع‌سربرگ ارتفاع‌متن \
-ارتفاع‌پایین از ازکارانداختن‌منوی‌پانل استفاده‌بلوکها استفاده‌دستخط‌تایپ \
-استفاده‌رمزینه استفاده‌شکل‌خارجی استفاده‌فرمانها استفاده‌قطعه‌موزیک‌خارجی استفاده‌مدول \
-استفاده‌مدولها استفاده‌مرجعها استفاده‌مسیر استفاده‌نمادها استفاده‌نوشتارخارجی \
-استفاده‌ویژگیها استفاده‌پرونده‌خارجی استفاده‌پرونده‌دستخط‌تایپ استفاده‌پرونده‌های‌خارجی اعدادلاتین \
-افزودن اما امتحان‌نکن انتخاب‌برگ انتخاب‌بلوکها \
-انتخاب‌نسخه انتقال‌به‌توری انتقال‌فرمول انتقال‌کنار‌شناور انجام‌دوباره \
-بارگذاریها بارگذاری‌آرایش بارگذاری‌آیتمها بارگذاری‌ارجاع بارگذاری‌اندازه‌برگ \
-بارگذاری‌باریکتر بارگذاری‌بافر بارگذاری‌بالا بارگذاری‌بخش بارگذاری‌بردباری \
-بارگذاری‌برنامه‌ها بارگذاری‌برگ بارگذاری‌بست بارگذاری‌بلوک بارگذاری‌بلوکهای‌حاشیه \
-بارگذاری‌بلوک‌بخش بارگذاری‌تایپ بارگذاری‌تایپ‌کردن بارگذاری‌تب بارگذاری‌ترتیب \
-بارگذاری‌ترکیب‌ها بارگذاری‌تطابق بارگذاری‌تعریف‌پانوشت بارگذاری‌تنظیم بارگذاری‌تنظیم‌ریاضی \
-بارگذاری‌ته‌برگ بارگذاری‌تورفتگی بارگذاری‌توضیح بارگذاری‌توضیح‌صفحه بارگذاری‌ثبت \
-بارگذاری‌جانشانی بارگذاری‌جدولها بارگذاری‌جدول‌بندی بارگذاری‌خالی بارگذاری‌خروجی \
-بارگذاری‌خط بارگذاری‌خطها بارگذاری‌خطهای‌حاشیه بارگذاری‌خطهای‌سیاه بارگذاری‌خطهای‌متن \
-بارگذاری‌خطهای‌مجموعه‌ستون بارگذاری‌خطها‌ی‌نازک بارگذاری‌درج‌درخطها بارگذاری‌درج‌مخالف بارگذاری‌درون‌حاشیه \
-بارگذاری‌دوران بارگذاری‌دکمه‌ها بارگذاری‌راهنما بارگذاری‌رنگ بارگذاری‌رنگها \
-بارگذاری‌زبان بارگذاری‌ستونها بارگذاری‌سر بارگذاری‌سربرگ بارگذاری‌سرها \
-بارگذاری‌سیستم بارگذاری‌شرح بارگذاری‌شرحها بارگذاری‌شروع‌مجموعه‌ستون بارگذاری‌شروع‌پایان \
-بارگذاری‌شماره بارگذاری‌شماره‌زیرصفحه بارگذاری‌شماره‌سر بارگذاری‌شماره‌صفحه بارگذاری‌شماره‌گذاری \
-بارگذاری‌شماره‌گذاریها بارگذاری‌شماره‌گذاری‌صفحه بارگذاری‌شماره‌گذاری‌پاراگراف بارگذاری‌شماره‌‌گذاری‌خط بارگذاری‌شناور \
-بارگذاری‌شناورها بارگذاری‌شکافتن‌شناورها بارگذاری‌شکلهای‌خارجی بارگذاری‌طرح بارگذاری‌طرح‌بندی \
-بارگذاری‌عرض‌خط بارگذاری‌فاصله‌بین‌خط بارگذاری‌فرمولها بارگذاری‌فضای‌سفید بارگذاری‌فضا‌گذاری \
-بارگذاری‌قالبی بارگذاری‌قلم‌متن بارگذاری‌لوح بارگذاری‌لیست بارگذاری‌لیست‌ترکیبی \
-بارگذاری‌لیست‌مرجع بارگذاری‌مترادفها بارگذاری‌متغیر‌متن بارگذاری‌متن بارگذاری‌متنهای‌بالا \
-بارگذاری‌متن‌سر بارگذاری‌متن‌سربرگ بارگذاری‌متن‌قالبی بارگذاری‌متن‌متنها بارگذاری‌متن‌پانوشت \
-بارگذاری‌متن‌پایین بارگذاری‌مجموعه‌ستون بارگذاری‌مجموعه‌نماد بارگذاری‌محیط‌قلم‌متن بارگذاری‌منوی‌پانل \
-بارگذاری‌مکان‌متن بارگذاری‌مکان‌گذاری بارگذاری‌میدان بارگذاری‌میدانها بارگذاری‌میله‌تطابق \
-بارگذاری‌میله‌زیر بارگذاری‌میله‌پانل بارگذاری‌نسخه‌ها بارگذاری‌نشانه‌شکستن بارگذاری‌نشانه‌گذاری \
-بارگذاری‌نشرها بارگذاری‌نقل بارگذاری‌پاراگرافها بارگذاری‌پانل بارگذاری‌پانوشتها \
-بارگذاری‌پایین بارگذاری‌پرده‌ها بارگذاری‌پرده‌پانل بارگذاری‌پروفایلها بارگذاری‌پرکردن‌خطها \
-بارگذاری‌پس‌زمینه بارگذاری‌پس‌زمینه‌ها بارگذاری‌چیدن بارگذاری‌گذارصفحه بارگذاری‌گروههای‌آیتم \
-بارگذاری‌گروه‌آیتم بازنشانی بازنشانی‌شماره بازنشانی‌متن بازنشانی‌نشانه‌گذاری \
-باگذاری‌متن‌برچسب بدون‌بعد بدون‌بلوکهای‌بیشتر بدون‌تورفتگی بدون‌خط‌بالاوپایین \
-بدون‌خط‌سروته‌برگ بدون‌فایلهای‌بیشتر بدون‌فضا بدون‌فضای‌سفید بدون‌لیست \
-بدون‌نشانه‌گذاری برنامه بروبه بروبه‌جعبه بروبه‌صفحه \
-بروپایین برچسب برچسبها بعد بلند \
-بلوکهای‌پردازش بلوکها‌پنهان بنویس‌بین‌لیست بنویس‌درثبت بنویس‌درلیست‌مرجع \
-بنویس‌در‌لیست تاریخ تاریخ‌جاری تاریخ‌رجوع تایپ \
-تایپ‌بافر تایپ‌پرونده تب ترجمه تطابق \
-تعریف تعریف‌آرایش تعریف‌آرم تعریف‌الگوی‌جدول تعریف‌اندازه‌برگ \
-تعریف‌بافر تعریف‌بخش تعریف‌برنامه تعریف‌برچسب تعریف‌بلوک \
-تعریف‌بلوک‌بخش تعریف‌تایپ تعریف‌تایپ‌کردن تعریف‌تبدیل تعریف‌ترتیب \
-تعریف‌ترکیب تعریف‌تنظیم‌ریاضی تعریف‌توده‌میدان تعریف‌ثبت تعریف‌جانشانی \
-تعریف‌جدول‌بندی تعریف‌جعبه‌‌افقی تعریف‌حرف تعریف‌خالی تعریف‌خروجی \
-تعریف‌خط‌حائل تعریف‌درون‌حاشیه تعریف‌رنگ تعریف‌زیرمیدان تعریف‌سبک \
-تعریف‌سبک‌قلم تعریف‌سر تعریف‌شرح تعریف‌شروع‌پایان تعریف‌شماره‌بندی \
-تعریف‌شمایل‌مرجع تعریف‌شناور تعریف‌شکستن‌ستون تعریف‌شکست‌صفحه تعریف‌طرح‌بندی \
-تعریف‌فرمان تعریف‌قالبی تعریف‌قلم تعریف‌قلم‌خام تعریف‌قلم‌متن \
-تعریف‌لایه تعریف‌لهجه تعریف‌لوح تعریف‌لیست تعریف‌لیست‌ترکیبی \
-تعریف‌لیست‌مرجع تعریف‌مترادفها تعریف‌مترادف‌قلم تعریف‌متغیرمتن تعریف‌متن \
-تعریف‌متن‌قالبی تعریف‌مجموعه‌ستون تعریف‌محیط‌قلم‌بدنه تعریف‌مرجع تعریف‌منوی‌پانل \
-تعریف‌مکان‌متن تعریف‌میدان تعریف‌میدان‌اصلی تعریف‌نسخه تعریف‌نشانه‌گذاری \
-تعریف‌نماد تعریف‌نمادشکل تعریف‌پاراگرافها تعریف‌پروفایل تعریف‌پوشش \
-تعریف‌گروه‌آیتم تعریف‌گروه‌رنگ تعیین‌شماره تعیین‌شماره‌سر تعیین‌متغیر‌متن \
-تعیین‌محتوای‌متن تعیین‌مشخصات‌ثبت تعیین‌مشخصات‌لیست تغییربه‌قلم‌بدنه تغییربه‌قلم‌خام \
-تنظیم‌راست تنظیم‌طرح‌بندی تنظیم‌وسط توجه تورفتگی \
-توری تولید تک ثبت‌زوج ثبت‌کامل \
-جداسازی‌نشانه‌گذاری حاش حرف حرفها حفظ‌بلوکها \
-حقیقت خالی خطهای‌سیاه خطهای‌نازک خطها‌خالی \
-خط‌حاشیه خط‌سیاه خط‌متن خط‌مو خط‌نازک \
-خ‌ا خ‌ع در درج‌آرمها درج‌ثبت \
-درج‌خط درج‌درخط درج‌درخطها درج‌درمتن درج‌درمیدان \
-درج‌در‌بالای‌یکدیگر درج‌در‌توری درج‌راهنما درج‌زیرفرمول درج‌شماره‌سر \
-درج‌شماره‌صفحه درج‌شناور درج‌فرمول درج‌لیست درج‌لیست‌خام \
-درج‌لیست‌مختلط درج‌لیست‌مرجع درج‌متغیرمتن درج‌متن‌سر درج‌پانوشتها \
-درج‌پانوشتهای‌موضعی درج‌چوب‌خط درج‌کنار‌به‌کنار درحاشیه درحاشیه‌دیگر \
-درحاشیه‌راست درحاشیه‌چپ درخارجی درخط درداخلی \
-درراست درصفحه درقالبی درلبه‌راست درلبه‌چپ \
-درمورد درون درپر درچپ دریافت‌بافر \
-دریافت‌شماره دریافت‌نشانه دوران دکمه دکمه‌منو \
-دکمه‌پانل رج رجوع رنگ رنگ‌خاکستری \
-روزهفته ریاضی زبان زبان‌اصلی ستون \
-ستون‌امتحان سر سرپوش‌کوچک‌نه شروع‌آرایش شروع‌آرایش‌ستون \
-شروع‌باریکتر شروع‌بازبینی شروع‌بلوک‌حاشیه شروع‌ترکیب شروع‌تصحیح‌خط \
-شروع‌تطابق شروع‌تنظیم شروع‌تولید شروع‌جدول شروع‌جدولها \
-شروع‌خط شروع‌خطها شروع‌خط‌حاشیه شروع‌خط‌متن شروع‌رنگ \
-شروع‌ستونها شروع‌سراسری شروع‌شماره‌گذاری‌خط شروع‌شکل شروع‌غیر‌فشرده \
-شروع‌فشرده شروع‌متن شروع‌مجموعه‌ستون شروع‌مجموعه‌نماد شروع‌محیط \
-شروع‌مخالف شروع‌موضعی شروع‌مولفه شروع‌مکان‌گذاری شروع‌نسخه \
-شروع‌نقل‌قول شروع‌نوشتار شروع‌پانوشتهای‌موضعی شروع‌پروفایل شروع‌پروژه \
-شروع‌پس‌زمینه شروع‌پوشش شروع‌کد شماره‌افزایش شماره‌زیرصفحه \
-شماره‌زیرفرمول شماره‌سر شماره‌سرجاری شماره‌صفحه شماره‌صفحه‌کامل \
-شماره‌فرمول شماره‌مبدل شماره‌ها شماره‌کاهش شماره‌کل‌صفحه‌ها \
-شکافتن‌شناور شکل‌خارجی صفحه صفحه‌تست صفحه‌زوج \
-صفحه‌پردازش طول‌لیست عبوربلوکها عرض‌آرایش عرض‌برگ \
-عرض‌حاشیه عرض‌حاشیه‌خارجی عرض‌حاشیه‌داخلی عرض‌حاشیه‌راست عرض‌حاشیه‌چپ \
-عرض‌خط عرض‌لبه عرض‌لبه‌خارجی عرض‌لبه‌داخلی عرض‌لبه‌راست \
-عرض‌لبه‌چپ عرض‌لیست عرض‌متن عمق‌صفحه عنوان‌حاشیه \
-فاصله‌بالا فاصله‌ته‌برگ فاصله‌حاشیه فاصله‌حاشیه‌خارجی فاصله‌حاشیه‌داخلی \
-فاصله‌حاشیه‌راست فاصله‌حاشیه‌چپ فاصله‌سربرگ فاصله‌لبه فاصله‌لبه‌خارجی \
-فاصله‌لبه‌داخلی فاصله‌لبه‌راست فاصله‌لبه‌چپ فاصله‌پایین فاصله‌پشت \
-فشرده فضا فضاهای‌ثابت فضای‌بالا فضای‌برش \
-فضای‌ثابت فضای‌سفید فضای‌سفیدصحیح فضای‌پایین فوری‌به‌لیست \
-فوری‌بین‌لیست قالبی لوح‌مقایسه ماه متغیر متن \
-متن‌برچسب متن‌حاشیه متن‌سر متن‌پانوشت محیط \
-مراجعه مرجع مرجع‌صفحه مرجع‌متن مرحله‌سر \
-مسکن معنی‌واحد مقایسه‌گروه‌رنگ مقدارخاکستری مقداررنگ \
-مقیاس منفی منوی‌پانل مولفه مکان \
-مکان‌متن میدان میدانهای‌گزارش میدان‌شبیه‌سازی میدان‌پشته \
-میدان‌کپی میله‌تطابق میله‌رنگ میله‌پانل ناشناس \
-نام‌ماکرو نسخه نسخه‌نشانه نشانه‌گذاری نشانه‌گذاری‌زوج \
-نشر نصب‌زبان نقطه‌ها نقل نقل‌قول \
-نم نماد نمادسر نمادلیست نمایش‌آرایش \
-نمایش‌بارگذاریها نمایش‌بستها نمایش‌توری نمایش‌رنگ نمایش‌شکلهای‌خارجی \
-نمایش‌طرح‌بندی نمایش‌قالب نمایش‌قلم‌بدنه نمایش‌لوح نمایش‌مجموعه‌علامت \
-نمایش‌محیط‌قلم‌بدنه نمایش‌میدانها نمایش‌چاپ نمایش‌گروه‌رنگ نوشتارزوج \
-هدایت پا پابا پانوشت پایان‌آرایش \
-پایان‌آرایش‌ستون پایان‌بازبینی پایان‌بلوک‌حاشیه پایان‌ترکیب پایان‌تصحیح‌خط \
-پایان‌تطابق پایان‌تنظیم پایان‌تولید پایان‌جدول پایان‌جدولها \
-پایان‌خط پایان‌خطها پایان‌خط‌حاشیه پایان‌خط‌متن پایان‌رنگ \
-پایان‌ستونها پایان‌سراسری پایان‌شماره‌گذاری‌خط پایان‌غیرفشرده پایان‌فشرده \
-پایان‌متن پایان‌مجموعه‌ستون پایان‌محیط پایان‌مخالف پایان‌موضعی \
-پایان‌مولفه پایان‌مکان‌گذاری پایان‌نازکتر پایان‌نسخه پایان‌نقل‌قول \
-پایان‌نوشتار پایان‌پانوشتهای‌موضعی پایان‌پروفایل پایان‌پروژه پایان‌پس‌زمینه \
-پایان‌پوشش پایان‌کد پایین پرده پروژه \
-پرکردن‌میدان پس‌زمینه پیروی‌نسخه پیروی‌نسخه‌پروفایل پیروی‌پروفایل \
-چاپ‌ارتفاع‌برگ چاپ‌عرض‌برگ چوبخط چپ‌چین کاغذزوج \
-کسر کشیده کلمه‌حاشیه کلمه‌راست گیره \
-یادداشت یک‌جا یک‌خط
-
keywordclass.context.ro=\
CAP CUVANT CUVINTE Cap \
Caps Cuvant Cuvinte KAP Kap \
@@ -964,313 +1111,166 @@ urmeazaversiune urmeazaversiuneprofil useXMLfilter usedirectory usetypescript \
usetypescriptfile valoareculoare valoaregri variabilatext versiune \
vl zidinsaptamana
-keywordclass.context.en=\
+keywordclass.context.pe=\
CAP Cap Caps Character \
Characters MONTH Numbers Romannumerals WEEKDAY \
-WORD WORDS Word Words about \
-adaptlayout adding appendix arg at \
-atleftmargin atpage atrightmargin background backspace \
-blackrule blackrules blank bookmark bottomdistance \
-bottomheight bottomspace but button bypassblocks \
-cap chapter character characters chem \
-clip clonefield color colorbar colorvalue \
-column comment comparecolorgroup comparepalet completecombinedlist \
-completelistoffloats completelistofsorts completelistofsynonyms completepagenumber completeregister \
-component convertnumber copyfield correctwhitespace coupledocument \
-coupledregister couplemarking couplepage couplepaper coupleregister \
-crlf currentdate currentheadnumber cutspace date \
-decouplemarking decrementnumber define defineaccent defineblank \
-defineblock definebodyfont definebodyfontDEF definebodyfontREF definebodyfontenvironment \
-definebuffer definecharacter definecolor definecolorgroup definecolumnbreak \
-definecolumnset definecombination definecombinedlist definecommand defineconversion \
-definedescription definedfont defineenumeration definefield definefieldstack \
-definefiguresymbol definefloat definefont definefontfeature definefonthandling \
-definefontstyle definefontsynonym defineframed defineframedtext definehbox \
-definehead defineindentedtext defineinmargin defineinteractionmenu defineitemgroup \
-definelabel definelayer definelayout definelist definelogo \
-definemainfield definemakeup definemarking definemathalignment defineoutput \
-defineoverlay definepagebreak definepalet definepapersize defineparagraphs \
-defineplacement defineprofile defineprogram definerawfont definereference \
-definereferenceformat definereferencelist defineregister definerule definesection \
-definesectionblock definesorting definestartstop definestyle definesubfield \
-definesymbol definesynonyms definetabletemplate definetabulate definetext \
-definetextposition definetextvariable definetype definetypeface definetyping \
-defineversion description determineheadnumber determinelistcharacteristics determineregistercharacteristics \
-dimension disableinteractionmenu domicile donttest edgedistance \
-edgewidth emptylines enumeration environment externalfigure \
-fact field fieldstack fillinfield fillinline \
-fillinrules fillintext fitfield fixedspace fixedspaces \
-followprofile followprofileversion followversion footerdistance footerheight \
-footnote footnotetext forceblocks formulanumber fraction \
-framed framedtext from getbuffer getmarking \
-getnumber godown goto gotobox gotopage \
-graycolor greyvalue grid hairline head \
-headerdistance headerheight headlevel headnumber headsym \
-headtext hideblocks high hl immediatebetweenlist \
-immediatetolist in incrementnumber indentation indenting \
-inframed infull ininner inleft inleftedge \
-inleftmargin inline inmaframed inmargin inneredgedistance \
-inneredgewidth innermargindistance innermarginwidth inothermargin inouter \
-inright inrightedge inrightmargin installlanguage interactionbar \
-interactionbuttons interactionmenu item items its \
-keepblocks label labeling labels labeltext \
-language leftaligned leftedgedistance leftedgewidth leftmargindistance \
-leftmarginwidth leg linethickness listheight listlength \
-listsymbol listwidth loadsorts loadsynonyms logfields \
-lohi low maframed mainlanguage makeupheight \
-makeupwidth mapfontsize mar margindistance marginrule \
-margintext margintitle marginwidth marginword marking \
-markversion mathematics mediaeval menubutton midaligned \
-mirror month moveformula moveongrid movesidefloat \
-name navigating nextsection nocap nodimension \
-noheaderandfooterlines noindenting nolist nomarking nomoreblocks \
-nomorefiles nop nospace note notopandbottomlines \
-nowhitespace numberofsubpages numbers outeredgedistance outeredgewidth \
-outermargindistance outermarginwidth overbar overbars overstrike \
-overstrikes packed page pagedepth pagenumber \
-pageoffset pagereference paperheight paperwidth paragraph \
-part periods placebookmarks placecombinedlist placefloat \
-placefootnotes placeformula placeheadnumber placeheadtext placelegend \
-placelist placelistoffloats placelistofsorts placelistofsynonyms placelocalfootnotes \
-placelogos placeongrid placeontopofeachother placepagenumber placerawlist \
-placereferencelist placeregister placerule placesidebyside placesubformula \
-placetextvariable position positiontext printpaperheight printpaperwidth \
-processblocks processpage product program project \
-publication quotation quote ran redo \
-ref reference referral referraldate referring \
-register remark reservefloat reset resetmarking \
-resetnumber resettextcontent rightaligned rightedgedistance rightedgewidth \
-rightmargindistance rightmarginwidth romannumerals rotate scale \
-screen section seeregister selectblocks selectpaper \
-selectversion setnumber settextcontent settextvariable setupalign \
-setupanswerarea setuparranging setupbackground setupbackgrounds setupblackrules \
-setupblank setupblock setupbodyfont setupbodyfontenvironment setupbottom \
-setupbottomtexts setupbuffer setupbuttons setupcapitals setupcaption \
-setupcaptions setupclipping setupcolor setupcolors setupcolumns \
-setupcolumnset setupcolumnsetlines setupcolumnsetstart setupcombinations setupcombinedlist \
-setupcomment setupdescriptions setupenumerations setupexternalfigures setupfield \
-setupfields setupfillinlines setupfillinrules setupfloat setupfloats \
-setupfloatsplitting setupfonthandling setupfontsynonym setupfooter setupfootertexts \
-setupfootnotedefinition setupfootnotes setupforms setupformulae setupframed \
-setupframedtexts setuphead setupheader setupheadertexts setupheadnumber \
-setupheads setupheadtext setuphyphenmark setupindentedtext setupindenting \
-setupinmargin setupinteraction setupinteractionbar setupinteractionmenu setupinteractionscreen \
-setupinterlinespace setupinterlinespace2 setupitemgroup setupitemizations setupitems \
-setuplabeltext setuplanguage setuplayout setuplegend setuplinenumbering \
-setuplines setuplinewidth setuplist setuplistalternative setupmakeup \
-setupmarginblocks setupmarginrules setupmarking setupmathalignment setupnarrower \
-setupnumber setupnumbering setupoppositeplacing setupoutput setuppagecomment \
-setuppagenumber setuppagenumbering setuppagetransitions setuppalet setuppaper \
-setuppapersize setupparagraphnumbering setupparagraphs setupplacement setuppositioning \
-setupprofiles setupprograms setuppublications setupquote setupreferencelist \
-setupreferencing setupregister setuprotate setuprule setups \
-setupscreens setupsection setupsectionblock setupsorting setupspacing \
-setupstartstop setupstrut setupsubpagenumber setupsymbolset setupsynchronization \
-setupsynchronizationbar setupsynonyms setupsystem setuptab setuptables \
-setuptabulate setuptext setuptextposition setuptextrules setuptexttexts \
-setuptextvariable setupthinrules setuptolerance setuptop setuptoptexts \
-setuptype setuptyping setupunderbar setupurl setupversions \
-setupwhitespace showbodyfont showbodyfontenvironment showcolor showcolorgroup \
-showexternalfigures showfields showframe showgrid showlayout \
-showmakeup showpalet showprint showsetups showstruts \
-showsymbolset someline somewhere sort space \
-splitfloat startalignment startbackground startbuffer startcoding \
-startcolor startcolumnmakeup startcolumns startcolumnset startcombination \
-startcomment startcomponent startdescription startdocument startenumeration \
-startenvironment startfact startfigure startfloattext startformula \
-startframedtext startglobal starthiding startinteractionmenu startitemgroup \
+WORD WORDS Word Words appendix \
+cap chapter chem comment completecombinedlist \
+completelistoffloats completelistofsorts completelistofsynonyms coupledregister crlf \
+definebodyfontDEF definebodyfontREF definedfont definefontfeature definefonthandling \
+defineindentedtext definetypeface description enumeration framedtext \
+indentation inmframed its labeling loadsorts \
+loadsynonyms mapfontsize mediaeval mframed name \
+nextsection nocap overbar overbars overstrike \
+overstrikes paragraph part placelistoffloats placelistofsorts \
+placelistofsynonyms ran register reservefloat resettextcontent \
+section seeregister setupanswerarea setupcapitals setupfonthandling \
+setupfontsynonym setupindentedtext setupinterlinespace2 setuplistalternative setupurl \
+sort startalignment startbuffer startcolumns startcombination \
+startcomment startdescription startdocument startenumeration startfigure \
+startfloattext startformula startframedtext starthiding startitemgroup \
startlegend startline startlinecorrection startlinenumbering startlines \
startlocal startlocalenvironment startlocalfootnotes startmakeup startmarginblock \
-startmarginrule startnamemakeup startnarrower startopposite startoverlay \
-startoverview startpacked startparagraph startpositioning startpostponing \
-startproduct startprofile startproject startquotation startraster \
+startnamemakeup startnarrower startopposite startoverlay startoverview \
+startparagraph startpositioning startpostponing startprofile startraster \
startregister startsymbolset startsynchronization starttable starttables \
-starttabulate starttext starttextrule starttyping startunpacked \
-startversion stopalignment stopbackground stopbuffer stopcoding \
-stopcolor stopcolumnmakeup stopcolumns stopcolumnset stopcombination \
-stopcomment stopcomponent stopdescription stopdocument stopenumeration \
-stopenvironment stopfact stopfigure stopfloattext stopformula \
-stopframedtext stopglobal stophiding stopinteractionmenu stopitemgroup \
+starttabulate starttyping startunpacked startتولید startحقیقت \
+startخط‌حاشیه startخط‌متن startرنگ startفشرده startمحیط \
+startمنوی‌پانل startمولفه startنسخه startنقل‌قول startپروژه \
+startپس‌زمینه stopalignment stopbuffer stopcolumns stopcombination \
+stopcomment stopdescription stopdocument stopenumeration stopfigure \
+stopfloattext stopformula stopframedtext stophiding stopitemgroup \
stoplegend stopline stoplinecorrection stoplinenumbering stoplines \
stoplocal stoplocalenvironment stoplocalfootnotes stopmakeup stopmarginblock \
-stopmarginrule stopnamemakeup stopnarrower stopopposite stopoverlay \
-stopoverview stoppacked stopparagraph stoppositioning stoppostponing \
-stopproduct stopprofile stopproject stopquotation stopraster \
+stopnamemakeup stopnarrower stopopposite stopoverlay stopoverview \
+stopparagraph stoppositioning stoppostponing stopprofile stopraster \
stopsymbolset stopsynchronization stoptable stoptables stoptabulate \
-stoptext stoptextrule stoptyping stopunpacked stopversion \
-stretched sub subformulanumber subject subpagenumber \
-subsection subsubject subsubsection subsubsubject switchtobodyfont \
-switchtorawfont sym symbol symoffset synchronizationbar \
-synchronize synonym tab testcolumn testpage \
-tex textheight textreference textrule textvariable \
-textwidth thinrule thinrules title tooltip \
-topdistance topheight topspace totalnumberofpages translate \
-txt typ type typebuffer typefile \
-underbar underbars unitmeaning unknown useJSscripts \
-useURL useXMLfilter useblocks usecommands usedirectory \
-useencoding useexternaldocument useexternalfigure useexternalfile useexternalfiles \
-useexternalsoundtrack usemodule usemodules usereferences usespecials \
-usesymbols usetypescript usetypescriptfile useurl version \
-vl weekday whitespace wordright writebetweenlist \
-writetolist writetoreferencelist writetoregister
-
-keywordclass.context.nl=\
-CAP Cap Caps Cijfers \
-KAP Kap Kaps Letter Letters \
-MAAND Romeins WEEKDAG WOORD WOORDEN \
-Woord Woorden aantalsubpaginas about achtergrond \
-appendix arg bepaalkopnummer bepaallijstkenmerken bepaalregisterkenmerken \
-betekenis binnenmargeafstand binnenmargebreedte binnenrandafstand binnenrandbreedte \
-blanko blokje blokjes blokkeerinteractiemenu bodemwit \
-bookmark bovenafstand bovenhoogte breuk buitenmargeafstand \
-buitenmargebreedte buitenrandafstand buitenrandbreedte but button \
-cap chapter chem cijfers citaat \
-citeer clip comment completecombinedlist completelistoffloats \
-completelistofsorts completelistofsynonyms converteernummer copieerveld corrigeerwitruimte \
-coupledregister crlf datum definebodyfontDEF definebodyfontREF \
-definedfont definefontfeature definefonthandling definerawfont definetypeface \
-definieer definieeraccent definieeralineas definieerbeeldmerk definieerblanko \
-definieerblok definieerbuffer definieercombinatie definieercommando definieerconversie \
-definieerfiguursymbool definieerfont definieerfontstijl definieerfontsynoniem definieerhbox \
-definieerhoofdveld definieeringesprongentext definieerinmarge definieerinteractiemenu definieeritemgroep \
-definieerkadertekst definieerkarakter definieerkleur definieerkleurgroep definieerkolomgroep \
-definieerkolomovergang definieerkop definieerkorps definieerkorpsomgeving definieerlayer \
-definieerlayout definieerletter definieerlijn definieerlijst definieermarkering \
-definieeromlijnd definieeropmaak definieeroverlay definieerpaginaovergang definieerpalet \
-definieerpapierformaat definieerplaats definieerplaatsblok definieerprofiel definieerprogramma \
-definieerreferentie definieerreferentieformaat definieerreferentielijst definieerregister definieersamengesteldelijst \
-definieersectie definieersectieblok definieersorteren definieerstartstop definieersubveld \
-definieersymbool definieersynoniemen definieertabelvorm definieertabulatie definieertekst \
-definieertekstpositie definieertekstvariabele definieertype definieertypen definieeruitvoer \
-definieerveld definieerveldstapel definieerversie definieerwiskundeuitlijnen description \
-dimensie directnaarlijst directtussenlijst doordefinieren doorlabelen \
-doornummeren dunnelijn dunnelijnen eenregel enumeration \
-ergens externfiguur forceerblokken formulenummer framedtext \
-gebruikJSscripts gebruikURL gebruikXMLfilter gebruikblokken gebruikcommandos \
-gebruikexterndocument gebruikexternefile gebruikexternefiles gebruikexternfiguur gebruikexterngeluidsfragment \
-gebruikgebied gebruikmodule gebruikmodules gebruikreferenties gebruikspecials \
-gebruiksymbolen gebruiktypescript gebruiktypescriptfile gebruikurl geenblokkenmeer \
-geenbovenenonderregels geendimensie geenfilesmeer geenhoofdenvoetregels geenlijst \
-geenmarkering geenspatie geentest geenwitruimte geg \
-grijskleur grijswaarde haalbuffer haalmarkering haalnummer \
-haarlijn handhaafblokken herhaal hl hoofdafstand \
-hoofdhoogte hoofdtaal hoog huidigedatum huidigekopnummer \
-in inanderemarge inbinnen inbuiten indentation \
-inlijnd inlinker inlinkermarge inlinkerrand inmarge \
-inrechter inrechtermarge inrechterrand inregel inspringen \
-installeertaal instellingen interactiebalk interactiebuttons interactiemenu \
-invullijnen invulregel invultekst invulveld inwilijnd \
-items its kantlijn kap kenmerk \
-kenmerkdatum kentekstvariabeletoe kleur kleurenbalk kleurwaarde \
-kloonveld kolom kop kopniveau kopnummer \
-koppeldocument koppelmarkering koppelpagina koppelpapier koppelregister \
-kopsym koptekst kopwit laag label \
-labeling labels labeltekst laho leg \
-legeregels letter letters lijndikte lijstbreedte \
-lijsthoogte lijstlengte lijstsymbool linkermargeafstand linkermargebreedte \
-linkerrandafstand linkerrandbreedte loadsorts loadsynonyms maand \
-mapfontsize mar margeafstand margebreedte margetekst \
-margetitel margewoord markeer markeerversie mediaeval \
-menubutton naam naar naarbox naarpagina \
-name navigerend nextsection nietinspringen nocap \
-nokap noot nop omgeving omlaag \
-omlijnd onbekend onderafstand onderdeel onderhoogte \
-ontkoppelmarkering op opelkaar oplinkermarge oppagina \
-oprechtermarge overbar overbars overstrike overstrikes \
-pagina paginadiepte paginanummer paginaoffset paginareferentie \
-papierbreedte papierhoogte paragraph part paslayoutaan \
-passeerblokken passendveld plaatsbeeldmerken plaatsbookmarks plaatsformule \
-plaatskopnummer plaatskoptekst plaatslegenda plaatslijn plaatslijst \
-plaatslokalevoetnoten plaatsnaastelkaar plaatsonderelkaar plaatsopgrid plaatspaginanummer \
-plaatsplaatsblok plaatsreferentielijst plaatsregister plaatsruwelijst plaatssamengesteldelijst \
-plaatssubformule plaatstekstvariabele plaatsvoetnoten placelistoffloats placelistofsorts \
-placelistofsynonyms positioneer positioneertekst printpapierbreedte printpapierhoogte \
-produkt programma projekt publicatie punten \
-ran randafstand randbreedte rechtermargeafstand rechtermargebreedte \
-rechterrandafstand rechterrandbreedte ref refereer referentie \
-regellinks regelmidden regelrechts register registreervelden \
-reservefloat reset resetmarkering resetnummer resettekstinhoud \
-resettextcontent romeins rooster roteer rugwit \
-schaal scherm schrijfnaarlijst schrijfnaarreferentielijst schrijfnaarregister \
-schrijftussenlijst section seeregister selecteerblokken selecteerpapier \
-selecteerversie setnummer setupfonthandling setupfontsynonym setupinterlinespace2 \
-setuplistalternative snijwit som sort spatie \
-spiegel splitsplaatsblok startachtergrond startalignment startbuffer \
-startcitaat startcodering startcolumns startcombinatie startcombination \
-startcomment startdescription startdocument startenumeration startfigure \
-startfiguur startfloattext startformula startframedtext startgeg \
-startglobaal starthiding startinteractiemenu startitemgroup startkantlijn \
-startkleur startkolomgroep startkolommen startkolomopmaak startlegend \
-startline startlinecorrection startlinenumbering startlines startlocal \
-startlocalenvironment startlocalfootnotes startlokaal startlokalevoetnoten startmakeup \
-startmargeblok startmarginblock startnaast startnamemakeup startnarrower \
-startomgeving startonderdeel startopelkaar startopmaak startopposite \
-startoverlay startoverview startoverzicht startparagraph startpositioneren \
-startpositioning startpostponing startprodukt startprofiel startprofile \
-startprojekt startraster startregel startregelcorrectie startregelnummeren \
-startregels startregister startsmaller startsymbolset startsymboolset \
-startsynchronisatie startsynchronization starttabel starttabellen starttable \
-starttables starttabulate starttekst starttekstlijn starttyping \
-startuitlijnen startunpacked startvanelkaar startversie stelachtergrondenin \
-stelachtergrondin stelalineasin stelantwoordgebiedin stelarrangerenin stelblankoin \
-stelblokin stelblokjesin stelblokkopjein stelblokkopjesin stelbovenin \
-stelboventekstenin stelbufferin stelbuttonsin stelciterenin stelclipin \
-stelcombinatiesin stelcommentaarin steldoordefinierenin steldoornummerenin steldunnelijnenin \
-stelexternefigurenin stelformulesin stelformulierenin stelhoofdin stelhoofdtekstenin \
-stelingesprongentextin stelinmargein stelinspringenin stelinteractiebalkin stelinteractiein \
-stelinteractiemenuin stelinteractieschermin stelinterliniein stelinvullijnenin stelinvulregelsin \
-stelitemgroepin stelitemsin stelkadertekstenin stelkantlijnin stelkapitalenin \
-stelkleurenin stelkleurin stelkolomgroepin stelkolomgroepregelsin stelkolomgroepstartin \
-stelkolommenin stelkopin stelkopnummerin stelkoppeltekenin stelkoppenin \
-stelkoptekstin stelkorpsin stelkorpsomgevingin stellabeltekstin stellayoutin \
-stellegendain stellijndiktein stellijnin stellijstin stelmargeblokkenin \
-stelmarkeringin stelnaastplaatsenin stelnummerenin stelnummerin stelomlijndin \
-stelonderin stelonderstrepenin stelondertekstenin stelopmaakin stelopsommingenin \
-stelpaginacommentaarin stelpaginanummerin stelpaginanummeringin stelpaginaovergangenin stelpaletin \
-stelpapierformaatin stelpapierin stelparagraafnummerenin stelplaatsblokin stelplaatsblokkenin \
-stelplaatsbloksplitsenin stelplaatsin stelpositionerenin stelprofielenin stelprogrammasin \
-stelpublicatiesin stelrastersin stelreferentielijstin stelrefererenin stelregelnummerenin \
-stelregelsin stelregisterin stelroterenin stelsamengesteldelijstin stelsectieblokin \
-stelsectiein stelsmallerin stelsorterenin stelspatieringin stelstartstopin \
-stelstrutin stelsubpaginanummerin stelsymboolsetin stelsynchronisatiebalkin stelsynchronisatiein \
-stelsynoniemenin stelsysteemin steltaalin steltabellenin steltabin \
-steltabulatiein steltekstin steltekstinhoudin steltekstlijnenin steltekstpositiein \
-stelteksttekstenin steltekstvariabelein steltolerantiein steltypein steltypenin \
-steluitlijnenin steluitvoerin stelurlin stelveldenin stelveldin \
-stelversiesin stelvoetin stelvoetnootdefinitiein stelvoetnotenin stelvoettekstenin \
-stelwiskundeuitlijnenin stelwitruimtein stopachtergrond stopalignment stopbuffer \
-stopcitaat stopcodering stopcolumns stopcombinatie stopcombination \
-stopcomment stopdescription stopdocument stopenumeration stopfigure \
-stopfloattext stopformula stopframedtext stopgeg stopglobaal \
-stophiding stopinteractiemenu stopitemgroup stopkantlijn stopkleur \
-stopkolomgroep stopkolommen stopkolomopmaak stoplegend stopline \
-stoplinecorrection stoplinenumbering stoplines stoplocal stoplocalenvironment \
-stoplocalfootnotes stoplokaal stoplokalevoetnoten stopmakeup stopmargeblok \
-stopmarginblock stopnaast stopnamemakeup stopnarrower stopomgeving \
-stoponderdeel stopopelkaar stopopmaak stopopposite stopoverlay \
-stopoverview stopoverzicht stopparagraph stoppositioneren stoppositioning \
-stoppostponing stopprodukt stopprofiel stopprofile stopprojekt \
-stopraster stopregel stopregelcorrectie stopregelnummeren stopregels \
-stopsmaller stopsymbolset stopsynchronisatie stopsynchronization stoptabel \
-stoptabellen stoptable stoptables stoptabulate stoptekst \
-stoptekstlijn stoptyping stopuitlijnen stopunpacked stopvanelkaar \
-stopversie sub subformulenummer subject subpaginanummer \
-subsection subsubject subsubsection subsubsubject suggestie \
-switchnaarkorps switchtorawfont sym symbool symoffset \
-synchronisatiebalk synchroniseer synonym taal tab \
-tekstbreedte teksthoogte tekstlijn tekstreferentie tekstvariabele \
-testkolom testpagina tex title toelichting \
-toonexternefiguren toongrid tooninstellingen toonkader toonkleur \
-toonkleurgroep toonkorps toonkorpsomgeving toonlayout toonopmaak \
-toonpalet toonprint toonstruts toonsymboolset toonvelden \
-totaalaantalpaginas txt typ type typebuffer \
-typefile uit uitgerekt underbar underbars \
-usecodering usedirectory vastespatie vastespaties veld \
-veldstapel verbergblokken vergelijkkleurgroep vergelijkpalet verhoognummer \
-verlaagnummer verplaatsformule verplaatsopgrid verplaatszijblok versie \
-vertaal verwerkblokken verwerkpagina vl voetafstand \
-voethoogte voetnoot voetnoottekst volgprofiel volgprofielversie \
-volgversie volledigepaginanummer volledigregister voluit weekdag \
-wilijnd wiskunde witruimte woonplaats woordrechts \
-zetbreedte zethoogte
+stoptyping stopunpacked stopتولید stopحقیقت stopخط‌حاشیه \
+stopخط‌متن stopرنگ stopفشرده stopمحیط stopمنوی‌پانل \
+stopمولفه stopنسخه stopنقل‌قول stopپروژه stopپس‌زمینه \
+sub subject subsection subsubject subsubsection \
+subsubsubject synonym title tooltip txt \
+typ underbar underbars useJSscripts useURL \
+useXMLfilter usedirectory useurl آفست‌صفحه آیتم \
+آیتمها آینه اجباربلوکها ارتفاع‌آرایش ارتفاع‌بالا \
+ارتفاع‌برگ ارتفاع‌ته‌برگ ارتفاع‌خط ارتفاع‌سربرگ ارتفاع‌متن \
+ارتفاع‌پایین از ازکارانداختن‌منوی‌پانل استفاده‌بلوکها استفاده‌دستخط‌تایپ \
+استفاده‌رمزینه استفاده‌شکل‌خارجی استفاده‌فرمانها استفاده‌قطعه‌موزیک‌خارجی استفاده‌مدول \
+استفاده‌مدولها استفاده‌مرجعها استفاده‌مسیر استفاده‌نمادها استفاده‌نوشتارخارجی \
+استفاده‌ویژگیها استفاده‌پرونده‌خارجی استفاده‌پرونده‌دستخط‌تایپ استفاده‌پرونده‌های‌خارجی اعدادلاتین \
+افزودن اما امتحان‌نکن انتخاب‌برگ انتخاب‌بلوکها \
+انتخاب‌نسخه انتقال‌به‌توری انتقال‌فرمول انتقال‌کنار‌شناور انجام‌دوباره \
+بارگذاریها بارگذاری‌آرایش بارگذاری‌آیتمها بارگذاری‌ارجاع بارگذاری‌اندازه‌برگ \
+بارگذاری‌باریکتر بارگذاری‌بافر بارگذاری‌بالا بارگذاری‌بخش بارگذاری‌بردباری \
+بارگذاری‌برنامه‌ها بارگذاری‌برگ بارگذاری‌بست بارگذاری‌بلوک بارگذاری‌بلوکهای‌حاشیه \
+بارگذاری‌بلوک‌بخش بارگذاری‌تایپ بارگذاری‌تایپ‌کردن بارگذاری‌تب بارگذاری‌ترتیب \
+بارگذاری‌ترکیب‌ها بارگذاری‌تطابق بارگذاری‌تعریف‌پانوشت بارگذاری‌تنظیم بارگذاری‌تنظیم‌ریاضی \
+بارگذاری‌ته‌برگ بارگذاری‌تورفتگی بارگذاری‌توضیح بارگذاری‌توضیح‌صفحه بارگذاری‌ثبت \
+بارگذاری‌جانشانی بارگذاری‌جدولها بارگذاری‌جدول‌بندی بارگذاری‌خالی بارگذاری‌خروجی \
+بارگذاری‌خط بارگذاری‌خطها بارگذاری‌خطهای‌حاشیه بارگذاری‌خطهای‌سیاه بارگذاری‌خطهای‌متن \
+بارگذاری‌خطهای‌مجموعه‌ستون بارگذاری‌خطها‌ی‌نازک بارگذاری‌درج‌درخطها بارگذاری‌درج‌مخالف بارگذاری‌درون‌حاشیه \
+بارگذاری‌دوران بارگذاری‌دکمه‌ها بارگذاری‌راهنما بارگذاری‌رنگ بارگذاری‌رنگها \
+بارگذاری‌زبان بارگذاری‌ستونها بارگذاری‌سر بارگذاری‌سربرگ بارگذاری‌سرها \
+بارگذاری‌سیستم بارگذاری‌شرح بارگذاری‌شرحها بارگذاری‌شروع‌مجموعه‌ستون بارگذاری‌شروع‌پایان \
+بارگذاری‌شماره بارگذاری‌شماره‌زیرصفحه بارگذاری‌شماره‌سر بارگذاری‌شماره‌صفحه بارگذاری‌شماره‌گذاری \
+بارگذاری‌شماره‌گذاریها بارگذاری‌شماره‌گذاری‌صفحه بارگذاری‌شماره‌گذاری‌پاراگراف بارگذاری‌شماره‌‌گذاری‌خط بارگذاری‌شناور \
+بارگذاری‌شناورها بارگذاری‌شکافتن‌شناورها بارگذاری‌شکلهای‌خارجی بارگذاری‌طرح بارگذاری‌طرح‌بندی \
+بارگذاری‌عرض‌خط بارگذاری‌فاصله‌بین‌خط بارگذاری‌فرمولها بارگذاری‌فضای‌سفید بارگذاری‌فضا‌گذاری \
+بارگذاری‌قالبی بارگذاری‌قلم‌متن بارگذاری‌لوح بارگذاری‌لیست بارگذاری‌لیست‌ترکیبی \
+بارگذاری‌لیست‌مرجع بارگذاری‌مترادفها بارگذاری‌متغیر‌متن بارگذاری‌متن بارگذاری‌متنهای‌بالا \
+بارگذاری‌متن‌سر بارگذاری‌متن‌سربرگ بارگذاری‌متن‌قالبی بارگذاری‌متن‌متنها بارگذاری‌متن‌پانوشت \
+بارگذاری‌متن‌پایین بارگذاری‌مجموعه‌ستون بارگذاری‌مجموعه‌نماد بارگذاری‌محیط‌قلم‌متن بارگذاری‌منوی‌پانل \
+بارگذاری‌مکان‌متن بارگذاری‌مکان‌گذاری بارگذاری‌میدان بارگذاری‌میدانها بارگذاری‌میله‌تطابق \
+بارگذاری‌میله‌زیر بارگذاری‌میله‌پانل بارگذاری‌نسخه‌ها بارگذاری‌نشانه‌شکستن بارگذاری‌نشانه‌گذاری \
+بارگذاری‌نشرها بارگذاری‌نقل بارگذاری‌پاراگرافها بارگذاری‌پانل بارگذاری‌پانوشتها \
+بارگذاری‌پایین بارگذاری‌پرده‌ها بارگذاری‌پرده‌پانل بارگذاری‌پروفایلها بارگذاری‌پرکردن‌خطها \
+بارگذاری‌پس‌زمینه بارگذاری‌پس‌زمینه‌ها بارگذاری‌چیدن بارگذاری‌گذارصفحه بارگذاری‌گروههای‌آیتم \
+بارگذاری‌گروه‌آیتم بازنشانی بازنشانی‌شماره بازنشانی‌متن بازنشانی‌نشانه‌گذاری \
+باگذاری‌متن‌برچسب بدون‌بعد بدون‌بلوکهای‌بیشتر بدون‌تورفتگی بدون‌خط‌بالاوپایین \
+بدون‌خط‌سروته‌برگ بدون‌فایلهای‌بیشتر بدون‌فضا بدون‌فضای‌سفید بدون‌لیست \
+بدون‌نشانه‌گذاری برنامه بروبه بروبه‌جعبه بروبه‌صفحه \
+بروپایین برچسب برچسبها بعد بلند \
+بلوکهای‌پردازش بلوکها‌پنهان بنویس‌بین‌لیست بنویس‌درثبت بنویس‌درلیست‌مرجع \
+بنویس‌در‌لیست تاریخ تاریخ‌جاری تاریخ‌رجوع تایپ \
+تایپ‌بافر تایپ‌پرونده تب ترجمه تطابق \
+تعریف تعریف‌آرایش تعریف‌آرم تعریف‌الگوی‌جدول تعریف‌اندازه‌برگ \
+تعریف‌بافر تعریف‌بخش تعریف‌برنامه تعریف‌برچسب تعریف‌بلوک \
+تعریف‌بلوک‌بخش تعریف‌تایپ تعریف‌تایپ‌کردن تعریف‌تبدیل تعریف‌ترتیب \
+تعریف‌ترکیب تعریف‌تنظیم‌ریاضی تعریف‌توده‌میدان تعریف‌ثبت تعریف‌جانشانی \
+تعریف‌جدول‌بندی تعریف‌جعبه‌‌افقی تعریف‌حرف تعریف‌خالی تعریف‌خروجی \
+تعریف‌خط‌حائل تعریف‌درون‌حاشیه تعریف‌رنگ تعریف‌زیرمیدان تعریف‌سبک \
+تعریف‌سبک‌قلم تعریف‌سر تعریف‌شرح تعریف‌شروع‌پایان تعریف‌شماره‌بندی \
+تعریف‌شمایل‌مرجع تعریف‌شناور تعریف‌شکستن‌ستون تعریف‌شکست‌صفحه تعریف‌طرح‌بندی \
+تعریف‌فرمان تعریف‌قالبی تعریف‌قلم تعریف‌قلم‌خام تعریف‌قلم‌متن \
+تعریف‌لایه تعریف‌لهجه تعریف‌لوح تعریف‌لیست تعریف‌لیست‌ترکیبی \
+تعریف‌لیست‌مرجع تعریف‌مترادفها تعریف‌مترادف‌قلم تعریف‌متغیرمتن تعریف‌متن \
+تعریف‌متن‌قالبی تعریف‌مجموعه‌ستون تعریف‌محیط‌قلم‌بدنه تعریف‌مرجع تعریف‌منوی‌پانل \
+تعریف‌مکان‌متن تعریف‌میدان تعریف‌میدان‌اصلی تعریف‌نسخه تعریف‌نشانه‌گذاری \
+تعریف‌نماد تعریف‌نمادشکل تعریف‌پاراگرافها تعریف‌پروفایل تعریف‌پوشش \
+تعریف‌گروه‌آیتم تعریف‌گروه‌رنگ تعیین‌شماره تعیین‌شماره‌سر تعیین‌متغیر‌متن \
+تعیین‌محتوای‌متن تعیین‌مشخصات‌ثبت تعیین‌مشخصات‌لیست تغییربه‌قلم‌بدنه تغییربه‌قلم‌خام \
+تنظیم‌راست تنظیم‌طرح‌بندی تنظیم‌وسط توجه تورفتگی \
+توری تولید تک ثبت‌زوج ثبت‌کامل \
+جداسازی‌نشانه‌گذاری حاش حرف حرفها حفظ‌بلوکها \
+حقیقت خالی خطهای‌سیاه خطهای‌نازک خطها‌خالی \
+خط‌حاشیه خط‌سیاه خط‌متن خط‌مو خط‌نازک \
+خ‌ا خ‌ع در درج‌آرمها درج‌ثبت \
+درج‌خط درج‌درخط درج‌درخطها درج‌درمتن درج‌درمیدان \
+درج‌در‌بالای‌یکدیگر درج‌در‌توری درج‌راهنما درج‌زیرفرمول درج‌شماره‌سر \
+درج‌شماره‌صفحه درج‌شناور درج‌فرمول درج‌لیست درج‌لیست‌خام \
+درج‌لیست‌مختلط درج‌لیست‌مرجع درج‌متغیرمتن درج‌متن‌سر درج‌پانوشتها \
+درج‌پانوشتهای‌موضعی درج‌چوب‌خط درج‌کنار‌به‌کنار درحاشیه درحاشیه‌دیگر \
+درحاشیه‌راست درحاشیه‌چپ درخارجی درخط درداخلی \
+درراست درصفحه درقالبی درلبه‌راست درلبه‌چپ \
+درمورد درون درپر درچپ دریافت‌بافر \
+دریافت‌شماره دریافت‌نشانه دوران دکمه دکمه‌منو \
+دکمه‌پانل رج رجوع رنگ رنگ‌خاکستری \
+روزهفته ریاضی زبان زبان‌اصلی ستون \
+ستون‌امتحان سر سرپوش‌کوچک‌نه شروع‌آرایش شروع‌آرایش‌ستون \
+شروع‌باریکتر شروع‌بازبینی شروع‌بلوک‌حاشیه شروع‌ترکیب شروع‌تصحیح‌خط \
+شروع‌تطابق شروع‌تنظیم شروع‌تولید شروع‌جدول شروع‌جدولها \
+شروع‌خط شروع‌خطها شروع‌خط‌حاشیه شروع‌خط‌متن شروع‌رنگ \
+شروع‌ستونها شروع‌سراسری شروع‌شماره‌گذاری‌خط شروع‌شکل شروع‌غیر‌فشرده \
+شروع‌فشرده شروع‌متن شروع‌مجموعه‌ستون شروع‌مجموعه‌نماد شروع‌محیط \
+شروع‌مخالف شروع‌موضعی شروع‌مولفه شروع‌مکان‌گذاری شروع‌نسخه \
+شروع‌نقل‌قول شروع‌نوشتار شروع‌پانوشتهای‌موضعی شروع‌پروفایل شروع‌پروژه \
+شروع‌پس‌زمینه شروع‌پوشش شروع‌کد شماره‌افزایش شماره‌زیرصفحه \
+شماره‌زیرفرمول شماره‌سر شماره‌سرجاری شماره‌صفحه شماره‌صفحه‌کامل \
+شماره‌فرمول شماره‌مبدل شماره‌ها شماره‌کاهش شماره‌کل‌صفحه‌ها \
+شکافتن‌شناور شکل‌خارجی صفحه صفحه‌تست صفحه‌زوج \
+صفحه‌پردازش طول‌لیست عبوربلوکها عرض‌آرایش عرض‌برگ \
+عرض‌حاشیه عرض‌حاشیه‌خارجی عرض‌حاشیه‌داخلی عرض‌حاشیه‌راست عرض‌حاشیه‌چپ \
+عرض‌خط عرض‌لبه عرض‌لبه‌خارجی عرض‌لبه‌داخلی عرض‌لبه‌راست \
+عرض‌لبه‌چپ عرض‌لیست عرض‌متن عمق‌صفحه عنوان‌حاشیه \
+فاصله‌بالا فاصله‌ته‌برگ فاصله‌حاشیه فاصله‌حاشیه‌خارجی فاصله‌حاشیه‌داخلی \
+فاصله‌حاشیه‌راست فاصله‌حاشیه‌چپ فاصله‌سربرگ فاصله‌لبه فاصله‌لبه‌خارجی \
+فاصله‌لبه‌داخلی فاصله‌لبه‌راست فاصله‌لبه‌چپ فاصله‌پایین فاصله‌پشت \
+فشرده فضا فضاهای‌ثابت فضای‌بالا فضای‌برش \
+فضای‌ثابت فضای‌سفید فضای‌سفیدصحیح فضای‌پایین فوری‌به‌لیست \
+فوری‌بین‌لیست قالبی لوح‌مقایسه ماه متغیر متن \
+متن‌برچسب متن‌حاشیه متن‌سر متن‌پانوشت محیط \
+مراجعه مرجع مرجع‌صفحه مرجع‌متن مرحله‌سر \
+مسکن معنی‌واحد مقایسه‌گروه‌رنگ مقدارخاکستری مقداررنگ \
+مقیاس منفی منوی‌پانل مولفه مکان \
+مکان‌متن میدان میدانهای‌گزارش میدان‌شبیه‌سازی میدان‌پشته \
+میدان‌کپی میله‌تطابق میله‌رنگ میله‌پانل ناشناس \
+نام‌ماکرو نسخه نسخه‌نشانه نشانه‌گذاری نشانه‌گذاری‌زوج \
+نشر نصب‌زبان نقطه‌ها نقل نقل‌قول \
+نم نماد نمادسر نمادلیست نمایش‌آرایش \
+نمایش‌بارگذاریها نمایش‌بستها نمایش‌توری نمایش‌رنگ نمایش‌شکلهای‌خارجی \
+نمایش‌طرح‌بندی نمایش‌قالب نمایش‌قلم‌بدنه نمایش‌لوح نمایش‌مجموعه‌علامت \
+نمایش‌محیط‌قلم‌بدنه نمایش‌میدانها نمایش‌چاپ نمایش‌گروه‌رنگ نوشتارزوج \
+هدایت پا پابا پانوشت پایان‌آرایش \
+پایان‌آرایش‌ستون پایان‌بازبینی پایان‌بلوک‌حاشیه پایان‌ترکیب پایان‌تصحیح‌خط \
+پایان‌تطابق پایان‌تنظیم پایان‌تولید پایان‌جدول پایان‌جدولها \
+پایان‌خط پایان‌خطها پایان‌خط‌حاشیه پایان‌خط‌متن پایان‌رنگ \
+پایان‌ستونها پایان‌سراسری پایان‌شماره‌گذاری‌خط پایان‌غیرفشرده پایان‌فشرده \
+پایان‌متن پایان‌مجموعه‌ستون پایان‌محیط پایان‌مخالف پایان‌موضعی \
+پایان‌مولفه پایان‌مکان‌گذاری پایان‌نازکتر پایان‌نسخه پایان‌نقل‌قول \
+پایان‌نوشتار پایان‌پانوشتهای‌موضعی پایان‌پروفایل پایان‌پروژه پایان‌پس‌زمینه \
+پایان‌پوشش پایان‌کد پایین پرده پروژه \
+پرکردن‌میدان پس‌زمینه پیروی‌نسخه پیروی‌نسخه‌پروفایل پیروی‌پروفایل \
+چاپ‌ارتفاع‌برگ چاپ‌عرض‌برگ چوبخط چپ‌چین کاغذزوج \
+کسر کشیده کلمه‌حاشیه کلمه‌راست گیره \
+یادداشت یک‌جا یک‌خط
diff --git a/context/data/scite/scite-context-data-metapost.properties b/context/data/scite/scite-context-data-metapost.properties
index c07103324..a51f4d9ad 100644
--- a/context/data/scite/scite-context-data-metapost.properties
+++ b/context/data/scite/scite-context-data-metapost.properties
@@ -1,3 +1,10 @@
+keywordclass.metapost.tex=\
+btex etex verbatimtex
+
+keywordclass.metapost.shortcuts=\
+.. ... -- --- \
+&
+
keywordclass.metapost.primitives=\
charcode day linecap linejoin \
miterlimit month pausing prologues showstopping \
@@ -82,13 +89,6 @@ yellow black white background graypart \
graycolor mm pt dd bp \
cm pc cc in
-keywordclass.metapost.tex=\
-btex etex verbatimtex
-
-keywordclass.metapost.shortcuts=\
-.. ... -- --- \
-&
-
keywordclass.metapost.internals=\
mitered rounded beveled butt \
squared eps epsilon infinity bboxmargin \
diff --git a/context/data/scite/scite-context-data-tex.properties b/context/data/scite/scite-context-data-tex.properties
index cb258795e..c43a5356a 100644
--- a/context/data/scite/scite-context-data-tex.properties
+++ b/context/data/scite/scite-context-data-tex.properties
@@ -1,42 +1,18 @@
+keywordclass.tex.omega=\
+OmegaVersion bodydir chardp charht \
+charit charwd leftghost localbrokenpenalty localinterlinepenalty \
+localleftbox localrightbox mathdir odelcode odelimiter \
+omathaccent omathchar omathchardef omathcode oradical \
+pagedir pageheight pagewidth pardir rightghost \
+textdir
+
keywordclass.tex.xetex=\
XeTeXversion
-keywordclass.tex.luatex=\
-Udelcode Udelcodenum Udelimiter Udelimiterover \
-Udelimiterunder Umathaccent Umathaxis Umathbinbinspacing Umathbinclosespacing \
-Umathbininnerspacing Umathbinopenspacing Umathbinopspacing Umathbinordspacing Umathbinpunctspacing \
-Umathbinrelspacing Umathchar Umathchardef Umathcharnum Umathclosebinspacing \
-Umathcloseclosespacing Umathcloseinnerspacing Umathcloseopenspacing Umathcloseopspacing Umathcloseordspacing \
-Umathclosepunctspacing Umathcloserelspacing Umathcode Umathcodenum Umathconnectoroverlapmin \
-Umathfractiondelsize Umathfractiondenomdown Umathfractiondenomvgap Umathfractionnumup Umathfractionnumvgap \
-Umathfractionrule Umathinnerbinspacing Umathinnerclosespacing Umathinnerinnerspacing Umathinneropenspacing \
-Umathinneropspacing Umathinnerordspacing Umathinnerpunctspacing Umathinnerrelspacing Umathlimitabovebgap \
-Umathlimitabovekern Umathlimitabovevgap Umathlimitbelowbgap Umathlimitbelowkern Umathlimitbelowvgap \
-Umathopbinspacing Umathopclosespacing Umathopenbinspacing Umathopenclosespacing Umathopeninnerspacing \
-Umathopenopenspacing Umathopenopspacing Umathopenordspacing Umathopenpunctspacing Umathopenrelspacing \
-Umathoperatorsize Umathopinnerspacing Umathopopenspacing Umathopopspacing Umathopordspacing \
-Umathoppunctspacing Umathoprelspacing Umathordbinspacing Umathordclosespacing Umathordinnerspacing \
-Umathordopenspacing Umathordopspacing Umathordordspacing Umathordpunctspacing Umathordrelspacing \
-Umathoverbarkern Umathoverbarrule Umathoverbarvgap Umathoverdelimiterbgap Umathoverdelimitervgap \
-Umathpunctbinspacing Umathpunctclosespacing Umathpunctinnerspacing Umathpunctopenspacing Umathpunctopspacing \
-Umathpunctordspacing Umathpunctpunctspacing Umathpunctrelspacing Umathquad Umathradicaldegreeafter \
-Umathradicaldegreebefore Umathradicaldegreeraise Umathradicalkern Umathradicalrule Umathradicalvgap \
-Umathrelbinspacing Umathrelclosespacing Umathrelinnerspacing Umathrelopenspacing Umathrelopspacing \
-Umathrelordspacing Umathrelpunctspacing Umathrelrelspacing Umathspaceafterscript Umathstackdenomdown \
-Umathstacknumup Umathstackvgap Umathsubshiftdown Umathsubshiftdrop Umathsubsupshiftdown \
-Umathsubsupvgap Umathsubtopmax Umathsupbottommin Umathsupshiftdrop Umathsupshiftup \
-Umathsupsubbottommax Umathunderbarkern Umathunderbarrule Umathunderbarvgap Umathunderdelimiterbgap \
-Umathunderdelimitervgap Uoverdelimiter Uradical Uroot Ustack \
-Ustartdisplaymath Ustartmath Ustopdisplaymath Ustopmath Usubscript \
-Usuperscript Uunderdelimiter alignmark aligntab attribute \
-attributedef catcodetable clearmarks crampeddisplaystyle crampedscriptscriptstyle \
-crampedscriptstyle crampedtextstyle fontid formatname gleaders \
-ifabsdim ifabsnum ifprimitive initcatcodetable latelua \
-luaescapestring luastartup luatexdatestamp luatexrevision luatexversion \
-mathstyle nokerns noligs outputbox pageleftoffset \
-pagetopoffset postexhyphenchar posthyphenchar preexhyphenchar prehyphenchar \
-primitive savecatcodetable scantextokens suppressfontnotfounderror suppressifcsnameerror \
-suppresslongerror suppressoutererror synctex
+keywordclass.tex.aleph=\
+AlephVersion Alephminorversion Alephrevision Alephversion \
+Omegaminorversion Omegarevision Omegaversion boxdir pagebottomoffset \
+pagerightoffset
keywordclass.tex.tex=\
- / AlephVersion Alephminorversion \
@@ -183,19 +159,6 @@ vskip vsplit vss vtop wd \
widowpenalties widowpenalty write xdef xleaders \
xspaceskip year
-keywordclass.tex.omega=\
-OmegaVersion bodydir chardp charht \
-charit charwd leftghost localbrokenpenalty localinterlinepenalty \
-localleftbox localrightbox mathdir odelcode odelimiter \
-omathaccent omathchar omathchardef omathcode oradical \
-pagedir pageheight pagewidth pardir rightghost \
-textdir
-
-keywordclass.tex.aleph=\
-AlephVersion Alephminorversion Alephrevision Alephversion \
-Omegaminorversion Omegarevision Omegaversion boxdir pagebottomoffset \
-pagerightoffset
-
keywordclass.tex.pdftex=\
efcode expanded ifincsname ifpdfabsdim \
ifpdfabsnum ifpdfprimitive leftmarginkern letterspacefont lpcode \
@@ -236,3 +199,40 @@ showtokens splitbotmarks splitdiscards splitfirstmarks topmarks \
tracingassigns tracinggroups tracingifs tracingnesting tracingscantokens \
unexpanded unless widowpenalties
+keywordclass.tex.luatex=\
+Udelcode Udelcodenum Udelimiter Udelimiterover \
+Udelimiterunder Umathaccent Umathaxis Umathbinbinspacing Umathbinclosespacing \
+Umathbininnerspacing Umathbinopenspacing Umathbinopspacing Umathbinordspacing Umathbinpunctspacing \
+Umathbinrelspacing Umathchar Umathchardef Umathcharnum Umathclosebinspacing \
+Umathcloseclosespacing Umathcloseinnerspacing Umathcloseopenspacing Umathcloseopspacing Umathcloseordspacing \
+Umathclosepunctspacing Umathcloserelspacing Umathcode Umathcodenum Umathconnectoroverlapmin \
+Umathfractiondelsize Umathfractiondenomdown Umathfractiondenomvgap Umathfractionnumup Umathfractionnumvgap \
+Umathfractionrule Umathinnerbinspacing Umathinnerclosespacing Umathinnerinnerspacing Umathinneropenspacing \
+Umathinneropspacing Umathinnerordspacing Umathinnerpunctspacing Umathinnerrelspacing Umathlimitabovebgap \
+Umathlimitabovekern Umathlimitabovevgap Umathlimitbelowbgap Umathlimitbelowkern Umathlimitbelowvgap \
+Umathopbinspacing Umathopclosespacing Umathopenbinspacing Umathopenclosespacing Umathopeninnerspacing \
+Umathopenopenspacing Umathopenopspacing Umathopenordspacing Umathopenpunctspacing Umathopenrelspacing \
+Umathoperatorsize Umathopinnerspacing Umathopopenspacing Umathopopspacing Umathopordspacing \
+Umathoppunctspacing Umathoprelspacing Umathordbinspacing Umathordclosespacing Umathordinnerspacing \
+Umathordopenspacing Umathordopspacing Umathordordspacing Umathordpunctspacing Umathordrelspacing \
+Umathoverbarkern Umathoverbarrule Umathoverbarvgap Umathoverdelimiterbgap Umathoverdelimitervgap \
+Umathpunctbinspacing Umathpunctclosespacing Umathpunctinnerspacing Umathpunctopenspacing Umathpunctopspacing \
+Umathpunctordspacing Umathpunctpunctspacing Umathpunctrelspacing Umathquad Umathradicaldegreeafter \
+Umathradicaldegreebefore Umathradicaldegreeraise Umathradicalkern Umathradicalrule Umathradicalvgap \
+Umathrelbinspacing Umathrelclosespacing Umathrelinnerspacing Umathrelopenspacing Umathrelopspacing \
+Umathrelordspacing Umathrelpunctspacing Umathrelrelspacing Umathspaceafterscript Umathstackdenomdown \
+Umathstacknumup Umathstackvgap Umathsubshiftdown Umathsubshiftdrop Umathsubsupshiftdown \
+Umathsubsupvgap Umathsubtopmax Umathsupbottommin Umathsupshiftdrop Umathsupshiftup \
+Umathsupsubbottommax Umathunderbarkern Umathunderbarrule Umathunderbarvgap Umathunderdelimiterbgap \
+Umathunderdelimitervgap Uoverdelimiter Uradical Uroot Ustack \
+Ustartdisplaymath Ustartmath Ustopdisplaymath Ustopmath Usubscript \
+Usuperscript Uunderdelimiter alignmark aligntab attribute \
+attributedef catcodetable clearmarks crampeddisplaystyle crampedscriptscriptstyle \
+crampedscriptstyle crampedtextstyle fontid formatname gleaders \
+ifabsdim ifabsnum ifprimitive initcatcodetable latelua \
+luaescapestring luastartup luatexdatestamp luatexrevision luatexversion \
+mathstyle nokerns noligs outputbox pageleftoffset \
+pagetopoffset postexhyphenchar posthyphenchar preexhyphenchar prehyphenchar \
+primitive savecatcodetable scantextokens suppressfontnotfounderror suppressifcsnameerror \
+suppresslongerror suppressoutererror synctex
+
diff --git a/context/data/scite/scite-context-external.properties b/context/data/scite/scite-context-external.properties
index 2bca111b4..5c7149341 100644
--- a/context/data/scite/scite-context-external.properties
+++ b/context/data/scite/scite-context-external.properties
@@ -35,14 +35,24 @@ file.patterns.cweb=*.h;*.c;*.w;*.hh;*.cc;*.ww;*.hpp;*.cpp;*.hxx;*.cxx;
lexer.$(file.patterns.metapost)=lpeg_scite-context-lexer-mps
lexer.$(file.patterns.metafun)=lpeg_scite-context-lexer-mps
lexer.$(file.patterns.context)=lpeg_scite-context-lexer-tex
-lexer.$(file.patterns.tex)=lpeg_scite-context-lexer-tex
lexer.$(file.patterns.lua)=lpeg_scite-context-lexer-lua
lexer.$(file.patterns.example)=lpeg_scite-context-lexer-xml
lexer.$(file.patterns.text)=lpeg_scite-context-lexer-txt
lexer.$(file.patterns.pdf)=lpeg_scite-context-lexer-pdf
lexer.$(file.patterns.cweb)=lpeg_scite-context-lexer-web
+
+lexer.$(file.patterns.tex)=lpeg_scite-context-lexer-tex
+lexer.$(file.patterns.xml)=lpeg_scite-context-lexer-xml
+lexer.$(file.patterns.html)=lpeg_scite-context-lexer-xml
lexer.$(file.patterns.cpp)=lpeg_scite-context-lexer-web
+# It's a real pity that we cannot overload the errorlist lexer. That would
+# make scite even more interesting. Add to that the inclusion of lpeg and the
+# lpeg lexer, thereby providing an interface to properties.
+
+# lexer.errorlist=lpeg_scite-context-lexer-txt
+# lexer.output=lpeg_scite-context-lexer-txt
+
comment.block.lpeg_scite-context-lexer-tex=%
comment.block.at.line.start.lpeg_scite-context-lexer-tex=1
diff --git a/context/data/scite/scite-context-internal.properties b/context/data/scite/scite-context-internal.properties
index 92806b8e2..130e64f1e 100644
--- a/context/data/scite/scite-context-internal.properties
+++ b/context/data/scite/scite-context-internal.properties
@@ -80,3 +80,7 @@ lexer.context.auto.if=1
lexer.$(file.patterns.context)=tex
lexer.$(file.patterns.tex)=tex
+
+lexer.$(file.patterns.example)=xml
+lexer.$(file.patterns.lua)=lua
+lexer.$(file.patterns.metafun)=metapost
diff --git a/context/data/scite/scite-context.properties b/context/data/scite/scite-context.properties
index 773ccea8b..995c1299c 100644
--- a/context/data/scite/scite-context.properties
+++ b/context/data/scite/scite-context.properties
@@ -64,21 +64,16 @@ open.suffix.$(file.patterns.context)=.tex
file.patterns.xml=
file.patterns.example=*.xml;*.xsl;*.xsd;*.fo;*.exa;*.rlb;*.rlg;*.rlv;*.rng;*.xfdf;*.xslt;*.dtd;*.lmx;*.htm;*.html;*.ctx;*.export;
-
open.suffix.$(file.patterns.example)=.xml
-
filter.example=eXaMpLe|$(file.patterns.example)|
-
-lexer.$(file.patterns.example)=xml
+#~ lexer.$(file.patterns.example)=xml
# Lua : patterns
file.patterns.lua=*.lua;*.luc;*.cld;*.tuc;*.luj;*.lum;*.tma;*.lfg;*.luv;*.lui
-
open.suffix.$(file.patterns.lua)=.lua
-
filter.lua=Lua MkIV|$(file.patterns.lua)|
-lexer.$(file.patterns.lua)=lua
+#~ lexer.$(file.patterns.lua)=lua
command.compile.$(file.patterns.lua)=mtxrun --script "$(FileNameExt)"
command.go.$(file.patterns.lua)=mtxrun --script "$(FileNameExt)"
@@ -90,15 +85,12 @@ command.go.$(file.patterns.lua)=mtxrun --script "$(FileNameExt)"
# Test: patterns
file.patterns.text=*.txt
-
filter.text=Text |$(file.patterns.text)|
+# lexer.$(file.patterns.txt)=txt
file.patterns.pdf=*.pdf
-
filter.pdf=PDF |$(file.patterns.pdf)|
-# lexer.$(file.patterns.txt)=txt
-
# Commands: some scripts
if PLAT_WIN
@@ -557,13 +549,9 @@ style.errorlist.16=fore:$(colors.red)
# Metapost: patterns
-file.patterns.metafun=
-
file.patterns.metafun=*.mp;*.mpx;*.mpy;*.mpii;*.mpiv
-
filter.metafun=MetaFun|$(file.patterns.metafun)|
-
-lexer.$(file.patterns.metafun)=metapost
+#~ lexer.$(file.patterns.metafun)=metapost
command.compile.$(file.patterns.metafun)=
command.build.$(file.patterns.metafun)=$(name.context.mtxrun) --script context $(name.flag.pdfopen) $(FileNameExt)
diff --git a/metapost/context/base/mp-grap.mpiv b/metapost/context/base/mp-grap.mpiv
index 98f537315..bc02e8610 100644
--- a/metapost/context/base/mp-grap.mpiv
+++ b/metapost/context/base/mp-grap.mpiv
@@ -11,6 +11,8 @@
%C therefore copyrighted by \PRAGMA. See licen-en.pdf for
%C details.
+% maybe we should have another Gerr ... something grph_error_message
+
if known context_grap : endinput ; fi ;
boolean context_grap ; context_grap := true ;
@@ -130,7 +132,7 @@ for i = 3 upto 9 : % polygons
grap_sym[i] :=
for j = 0 upto i-1 :
(up scaled .5) rotated (360j/i) --
- endfor cycle ;
+ endfor cycle ;
endfor
grap_sym[12] := grap_sym[2] rotated +90 ; % horizontal line
@@ -203,3 +205,172 @@ def plotsymbol(expr n,c,f) = % (number,color,color|number)
nullpicture
fi
enddef ;
+
+% Here starts a section with some extensions that come in handy when drawing
+% polynomials. We assume that metapost is run in double number mode.
+
+% Least-squares "fit" to a polynomial
+%
+% Example of use:
+%
+% path p[] ;
+% numeric a[] ; a0 := 1 ; a1 := .1 ; a2 := .01 ; a3 := .001 ; a4 := 0.0001 ;
+% for i=0 upto 10:
+% x1 := 5i/10 ;
+% y1 := poly.a(4,x1) ;
+% augment.p0(z1) ;
+% augment.p1((x1,y1+.005normaldeviate)) ;
+% endfor
+% gdraw p0 ;
+% gdraw p1 plot plotsymbol(1,black,.5) ;
+%
+% numeric chisq, b[] ;
+% polyfit.p1(chisq, b, 4) ;
+% for i=0 upto length p1 :
+% x1 := xpart(point i of p1) ;
+% y1 := poly.b(4,x1) ;
+% augment.p2(z1) ;
+% endfor
+% gdraw p2 ;
+%
+% numeric c[] ;
+% linefit.p1(chisq, c) ;
+% for i=0 upto length p1 :
+% x1 := xpart(point i of p1) ;
+% y1 := line.c(x1) ;
+% augment.p3(z1) ;
+% endfor
+% gdraw p3 dashed evenly ;
+
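+% A sketch of the math implemented below : writing the fitted polynomial as
+% y = c0 + c1*x + ... + cn*x^n, minimizing the squared residuals leads to the
+% normal equations
+%
+%   sum_k ( sum_i x_i^(j+k) ) * c_k  =  sum_i y_i * x_i^j    for j = 0 .. n
+%
+% so the matrix entries are the power sums sumx[j+k] and the right hand sides
+% are sumy[j]. The coefficients are obtained with Cramer's rule, one quotient
+% of determinants per coefficient, and chisq ends up holding the residual sum
+% of squares divided by the number of degrees of freedom.
+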
+vardef det@# (expr n) = % find the determinant of an (n+1)*(n+1) matrix
+ % indices run from 0 to n.
+ % first, we make a copy so as not to corrupt the matrix.
+ save copy ; numeric copy[][] ;
+ for k=0 upto n :
+ for j=0 upto n :
+ copy[k][j] := @#[k][j] ;
+ endfor
+ endfor
+  numeric determinant, jj, temp ; determinant := 1 ;
+ boolean zero ; zero := false ;
+ for k=0 upto n :
+    if copy[k][k] = 0 :
+      % the pivot is zero : search the rest of row k for a nonzero entry
+      zero := true ;
+      for j = k upto n :
+        if copy[k][j] <> 0 :
+          jj := j ;
+          zero := false ;
+        fi
+        exitunless zero ;
+      endfor
+ if zero :
+ determinant := 0 ;
+ fi
+ exitif zero ;
+ for j = k upto n : % interchange the columns
+ temp := copy[j][jj] ;
+ copy[j][jj] := copy[j][k] ;
+ copy[j][k] := temp ;
+ endfor
+      determinant := -determinant ;
+ fi
+ exitif zero ;
+ determinant := determinant * copy[k][k] ;
+ if k < n : % subtract row k from lower rows to get a diagonal matrix
+ for j = k + 1 upto n :
+ for i = k + 1 upto n :
+ copy[j][i] := copy[j][i] - copy[j][k] * copy[k][i] / copy[k][k] ;
+ endfor
+ endfor
+ fi
+ endfor ;
+ determinant % no ;
+enddef ;
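+
+% A quick sanity check for det (just a sketch, the names m and d are only used
+% for this illustration) :
+%
+% numeric m[][] ; m[0][0] := 1 ; m[0][1] := 2 ; m[1][0] := 3 ; m[1][1] := 4 ;
+% numeric d ; d := det.m(1) ; % 1*4 - 2*3 = -2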
+
+% least-squares fit of a polynomial $ of order n to a path @#
+
+vardef polyfit@# (suffix chisq, $) (expr n) =
+ if not path begingroup @# endgroup :
+ Gerr(begingroup @# endgroup, "Cannot fit--not a path") ;
+ elseif length @# < n :
+ Gerr(begingroup @# endgroup, "Cannot fit--not enough points") ;
+ else:
+ chisq := 0 ;
+ % calculate sums of the data
+ save sumx, sumy ; numeric sumx[], sumy[] ;
+ save nmax ; numeric nmax ; nmax := 2*n ;
+ for i = 0 upto nmax :
+ sumx[i] := 0 ;
+ endfor
+ for i = 0 upto n :
+ sumy[i] := 0 ;
+ endfor
+ save xp, yp ; numeric xp, yp ;
+ save zi ; pair zi ;
+ for i = 0 upto length @# :
+ zi := point i of @# ;
+ x0 := xpart zi ; y0 := ypart zi ;
+ x1 := 1 ;
+ for j = 0 upto nmax :
+ sumx[j] := sumx[j] + x1 ;
+ x1 := x1 * x0 ;
+ endfor
+ y1 := y0 ;
+ for j = 0 upto n :
+ sumy[j] := sumy[j] + y1 ;
+ y1 := y1 * x0 ;
+ endfor
+ chisq := chisq + y0*y0 ;
+ endfor
+ % construct matrices and calculate the polynomial coefficients
+ save m ; numeric m[][] ;
+ for j = 0 upto n :
+ for k = 0 upto n :
+ i := j + k ;
+ m[j][k] := sumx[i] ;
+ endfor
+ endfor
+ save delta ; numeric delta ;
+ delta := det.m(n) ;
+ if delta = 0 :
+ chisq := 0 ;
+ for j=0 upto n :
+ $[j] := 0 ;
+ endfor
+ else :
+ for l = 0 upto n :
+ for j = 0 upto n :
+ for k = 0 upto n :
+ i := j + k ;
+ m[j][k] := sumx[i] ;
+ endfor
+ m[j][l] := sumy[j] ;
+ endfor
+ $[l] := det.m(n) / delta ;
+ endfor
+ for j = 0 upto n :
+ chisq := chisq - 2*$[j]*sumy[j] ;
+ for k = 0 upto n :
+ i := j + k ;
+ chisq := chisq + $[j]*$[k]*sumx[i] ;
+ endfor
+ endfor
+ % normalize by the number of degrees of freedom
+ chisq := chisq / (length(@#) - n) ;
+ fi
+ fi
+enddef ;
+
+vardef poly@#(expr n, x) =
+ for j = 0 upto n :
+ + @#[j]*(x**j)
+ endfor % no ;
+enddef ;
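+% Note that poly expands to a bare sum inside the calling expression, hence the
+% leading + and the deliberately missing final ; above. With coefficients in a
+% numeric array c, poly.c(2,x) stands for + c0*(x**0) + c1*(x**1) + c2*(x**2).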
+
+vardef line@#(expr x) =
+ poly@# (1,x)
+enddef ;
+
+vardef linefit@#(suffix chisq, $) =
+ polyfit@#(chisq, $, 1) ;
+enddef ;
diff --git a/scripts/context/lua/mtx-package.lua b/scripts/context/lua/mtx-package.lua
index 294ef4f28..879dd5e3a 100644
--- a/scripts/context/lua/mtx-package.lua
+++ b/scripts/context/lua/mtx-package.lua
@@ -24,18 +24,18 @@ scripts = scripts or { }
messages = messages or { }
scripts.package = scripts.package or { }
-function scripts.package.merge_luatex_files(name,strip)
+function scripts.package.merge_luatex_files(name)
local oldname = resolvers.findfile(name) or ""
oldname = file.replacesuffix(oldname,"lua")
if oldname == "" then
- report("missing '%s'",name)
+ report("missing %q",name)
else
local newname = file.removesuffix(oldname) .. "-merged.lua"
local data = io.loaddata(oldname) or ""
if data == "" then
- report("missing '%s'",newname)
+ report("missing %q",newname)
else
- report("loading '%s'",oldname)
+ report("loading %q",oldname)
local collected = { }
collected[#collected+1] = format("-- merged file : %s\n",newname)
collected[#collected+1] = format("-- parent file : %s\n",oldname)
@@ -45,23 +45,17 @@ function scripts.package.merge_luatex_files(name,strip)
if file.basename(lib) ~= file.basename(newname) then
local fullname = resolvers.findfile(lib) or ""
if fullname == "" then
- report("missing '%s'",lib)
+ report("missing %q",lib)
else
- report("fetching '%s'",fullname)
+ report("fetching %q",fullname)
local data = io.loaddata(fullname)
- if strip then
- data = gsub(data,"%-%-%[%[ldx%-%-.-%-%-%ldx%]%]%-%-[\n\r]*","")
- data = gsub(data,"%-%-%~[^\n\r]*[\n\r]*","\n")
- data = gsub(data,"%s+%-%-[^\n\r]*[\n\r]*","\n")
- data = gsub(data,"[\n\r]+","\n")
- end
collected[#collected+1] = "\ndo -- begin closure to overcome local limits and interference\n\n"
- collected[#collected+1] = data
+ collected[#collected+1] = utilities.merger.compact(data)
collected[#collected+1] = "\nend -- closure\n"
end
end
end
- report("saving '%s'",newname)
+ report("saving %q",newname)
io.savedata(newname,table.concat(collected))
end
end
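
In mtx-package.lua the optional strip argument and its ad hoc gsub passes are gone: each library is now run through utilities.merger.compact (added to util-mrg.lua further down) before being wrapped in its do ... end closure, and the report calls use the %q directive so quoting is left to the formatter instead of being hard-coded in every message. A minimal standalone sketch of the wrapping step, with a made-up mergefiles name and the cleanup function passed in as a parameter:

local function mergefiles(filenames, compact) -- compact: optional cleanup, e.g. utilities.merger.compact
    local collected = { }
    for i = 1, #filenames do
        local f = io.open(filenames[i], "rb")
        if f then
            local data = f:read("*a")
            f:close()
            collected[#collected+1] = "\ndo -- begin closure to overcome local limits and interference\n\n"
            collected[#collected+1] = compact and compact(data) or data
            collected[#collected+1] = "\nend -- closure\n"
        end
    end
    return table.concat(collected)
end
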
diff --git a/scripts/context/lua/mtxrun.lua b/scripts/context/lua/mtxrun.lua
index 66b6f5466..6a894df01 100644
--- a/scripts/context/lua/mtxrun.lua
+++ b/scripts/context/lua/mtxrun.lua
@@ -2197,7 +2197,7 @@ end -- of closure
do -- create closure to overcome 200 locals limit
--- original size: 13731, stripped down to: 8450
+-- original size: 13891, stripped down to: 8591
if not modules then modules={} end modules ['l-os']={
version=1.001,
@@ -2319,7 +2319,13 @@ function os.runtime()
end
os.resolvers=os.resolvers or {}
local resolvers=os.resolvers
-local osmt=getmetatable(os) or { __index=function(t,k) t[k]="unset" return "unset" end }
+local osmt=getmetatable(os) or { __index=function(t,k)
+ local v=function()
+ print(format("function os.%s in namespace is undefined"))
+ end
+ t[k]=v
+ return v
+end }
local osix=osmt.__index
osmt.__index=function(t,k)
return (resolvers[k] or osix)(t,k)
@@ -2510,6 +2516,9 @@ end
function os.now()
return date("!%Y-%m-%d %H:%M:%S")
end
+if not os.sleep and socket then
+ os.sleep=socket.sleep
+end
end -- of closure
@@ -4817,7 +4826,7 @@ end -- of closure
do -- create closure to overcome 200 locals limit
--- original size: 7158, stripped down to: 5738
+-- original size: 7245, stripped down to: 5822
if not modules then modules={} end modules ['util-mrg']={
version=1.001,
@@ -4890,6 +4899,9 @@ local compact=Cs ((
)^1 )
local strip=Cs((emptyline^2/"\n"+1)^0)
local stripreturn=Cs((1-P("return")*space^1*P(1-space-eol)^1*(space+eol)^0*P(-1))^1)
+function merger.compact(data)
+ return lpegmatch(strip,lpegmatch(compact,data))
+end
local function self_compact(data)
local delta=0
if merger.strip_comment then
@@ -7051,23 +7063,19 @@ end -- of closure
do -- create closure to overcome 200 locals limit
--- original size: 12282, stripped down to: 8098
+-- original size: 7702, stripped down to: 4701
-if not modules then modules={} end modules ['luat-env']={
+if not modules then modules={} end modules ['util-env']={
version=1.001,
comment="companion to luat-lib.mkiv",
author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
-local report_lua=logs.reporter("resolvers","lua")
local allocate,mark=utilities.storage.allocate,utilities.storage.mark
local format,sub,match,gsub,find=string.format,string.sub,string.match,string.gsub,string.find
local unquoted,quoted=string.unquoted,string.quoted
local concat,insert,remove=table.concat,table.insert,table.remove
-local luautilities=utilities.lua
-local luasuffixes=luautilities.suffixes
environment=environment or {}
local environment=environment
os.setlocale(nil,nil)
@@ -7084,18 +7092,25 @@ local basicengines=allocate {
["luajittex"]="luajittex",
["texluajit"]="luajittex",
}
+local luaengines=allocate {
+ ["lua"]=true,
+ ["luajit"]=true,
+}
environment.validengines=validengines
environment.basicengines=basicengines
-if arg and validengines[file.removesuffix(arg[0])] and arg[1]=="--luaonly" then
- arg[-1]=arg[0]
- arg[ 0]=arg[2]
- for k=3,#arg do
- arg[k-2]=arg[k]
+if not arg then
+elseif luaengines[file.removesuffix(arg[-1])] then
+elseif validengines[file.removesuffix(arg[0])] then
+ if arg[1]=="--luaonly" then
+ arg[-1]=arg[0]
+ arg[ 0]=arg[2]
+ for k=3,#arg do
+ arg[k-2]=arg[k]
+ end
+ remove(arg)
+ remove(arg)
+ else
end
- remove(arg)
- remove(arg)
-end
-do
local originalzero=file.basename(arg[0])
local specialmapping={ luatools=="base" }
if originalzero~="mtxrun" and originalzero~="mtxrun.lua" then
@@ -7107,32 +7122,6 @@ end
environment.arguments=allocate()
environment.files=allocate()
environment.sortedflags=nil
-local mt={
- __index=function(_,k)
- if k=="version" then
- local version=tex.toks and tex.toks.contextversiontoks
- if version and version~="" then
- rawset(environment,"version",version)
- return version
- else
- return "unknown"
- end
- elseif k=="jobname" or k=="formatname" then
- local name=tex and tex[k]
- if name or name=="" then
- rawset(environment,k,name)
- return name
- else
- return "unknown"
- end
- elseif k=="outputfilename" then
- local name=environment.jobname
- rawset(environment,k,name)
- return name
- end
- end
-}
-setmetatable(environment,mt)
function environment.initializearguments(arg)
local arguments,files={},{}
environment.arguments,environment.files,environment.sortedflags=arguments,files,nil
@@ -7247,6 +7236,53 @@ if arg then
environment.rawarguments=mark(arg)
arg={}
end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+-- original size: 5441, stripped down to: 3874
+
+ if not modules then modules={} end modules ['luat-env']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_lua=logs.reporter("resolvers","lua")
+local luautilities=utilities.lua
+local luasuffixes=luautilities.suffixes
+environment=environment or {}
+local environment=environment
+local mt={
+ __index=function(_,k)
+ if k=="version" then
+ local version=tex.toks and tex.toks.contextversiontoks
+ if version and version~="" then
+ rawset(environment,"version",version)
+ return version
+ else
+ return "unknown"
+ end
+ elseif k=="jobname" or k=="formatname" then
+ local name=tex and tex[k]
+ if name or name=="" then
+ rawset(environment,k,name)
+ return name
+ else
+ return "unknown"
+ end
+ elseif k=="outputfilename" then
+ local name=environment.jobname
+ rawset(environment,k,name)
+ return name
+ end
+ end
+}
+setmetatable(environment,mt)
function environment.texfile(filename)
return resolvers.findfile(filename,'tex')
end
@@ -14763,10 +14799,10 @@ end
end -- of closure
--- used libraries : l-lua.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-tab.lua util-sto.lua util-str.lua util-mrg.lua util-lua.lua util-prs.lua util-fmt.lua util-deb.lua trac-inf.lua trac-set.lua trac-log.lua trac-pro.lua util-tpl.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua luat-sta.lua luat-fmt.lua
+-- used libraries : l-lua.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-tab.lua util-sto.lua util-str.lua util-mrg.lua util-lua.lua util-prs.lua util-fmt.lua util-deb.lua trac-inf.lua trac-set.lua trac-log.lua trac-pro.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 593245
--- stripped bytes : 199776
+-- original bytes : 594353
+-- stripped bytes : 200182
-- end library merge
@@ -14822,6 +14858,7 @@ local ownlibs = { -- order can be made better
'util-tpl.lua',
+ 'util-env.lua',
'luat-env.lua', -- can come before inf (as in mkiv)
'lxml-tab.lua',
@@ -16019,7 +16056,6 @@ elseif environment.files[1] == 'texmfcnf.lua' then -- so that we don't need to l
resolvers.listers.configurations()
else
-
runners.loadbase()
runners.execute_ctx_script("mtx-base",filename)
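
Most of the mtxrun churn comes from splitting the old luat-env.lua into a generic util-env.lua (argument and environment handling) and a slimmed down luat-env.lua (TeX-specific lazy attributes); the generated stubs below repeat the same changes. One detail worth illustrating is the --luaonly handling: when the script is started by one of the engines with --luaonly, the argument vector is shifted so that arg[0] becomes the script name. A standalone sketch of that shuffle (shiftluaonly is a made-up name):

-- before: arg = { [0] = "luatex", "--luaonly", "mtxrun.lua", "--script", "base" }
-- after : arg = { [-1] = "luatex", [0] = "mtxrun.lua", "--script", "base" }
local function shiftluaonly(arg)
    if arg and arg[1] == "--luaonly" then
        arg[-1] = arg[0]
        arg[ 0] = arg[2]
        for k = 3, #arg do
            arg[k-2] = arg[k]
        end
        table.remove(arg) -- drop the now duplicated last entry
        table.remove(arg) -- and the one before it
    end
    return arg
end
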
diff --git a/scripts/context/stubs/mswin/mtxrun.lua b/scripts/context/stubs/mswin/mtxrun.lua
index 66b6f5466..6a894df01 100644
--- a/scripts/context/stubs/mswin/mtxrun.lua
+++ b/scripts/context/stubs/mswin/mtxrun.lua
@@ -2197,7 +2197,7 @@ end -- of closure
do -- create closure to overcome 200 locals limit
--- original size: 13731, stripped down to: 8450
+-- original size: 13891, stripped down to: 8591
if not modules then modules={} end modules ['l-os']={
version=1.001,
@@ -2319,7 +2319,13 @@ function os.runtime()
end
os.resolvers=os.resolvers or {}
local resolvers=os.resolvers
-local osmt=getmetatable(os) or { __index=function(t,k) t[k]="unset" return "unset" end }
+local osmt=getmetatable(os) or { __index=function(t,k)
+ local v=function()
+ print(format("function os.%s in namespace is undefined"))
+ end
+ t[k]=v
+ return v
+end }
local osix=osmt.__index
osmt.__index=function(t,k)
return (resolvers[k] or osix)(t,k)
@@ -2510,6 +2516,9 @@ end
function os.now()
return date("!%Y-%m-%d %H:%M:%S")
end
+if not os.sleep and socket then
+ os.sleep=socket.sleep
+end
end -- of closure
@@ -4817,7 +4826,7 @@ end -- of closure
do -- create closure to overcome 200 locals limit
--- original size: 7158, stripped down to: 5738
+-- original size: 7245, stripped down to: 5822
if not modules then modules={} end modules ['util-mrg']={
version=1.001,
@@ -4890,6 +4899,9 @@ local compact=Cs ((
)^1 )
local strip=Cs((emptyline^2/"\n"+1)^0)
local stripreturn=Cs((1-P("return")*space^1*P(1-space-eol)^1*(space+eol)^0*P(-1))^1)
+function merger.compact(data)
+ return lpegmatch(strip,lpegmatch(compact,data))
+end
local function self_compact(data)
local delta=0
if merger.strip_comment then
@@ -7051,23 +7063,19 @@ end -- of closure
do -- create closure to overcome 200 locals limit
--- original size: 12282, stripped down to: 8098
+-- original size: 7702, stripped down to: 4701
-if not modules then modules={} end modules ['luat-env']={
+if not modules then modules={} end modules ['util-env']={
version=1.001,
comment="companion to luat-lib.mkiv",
author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
-local report_lua=logs.reporter("resolvers","lua")
local allocate,mark=utilities.storage.allocate,utilities.storage.mark
local format,sub,match,gsub,find=string.format,string.sub,string.match,string.gsub,string.find
local unquoted,quoted=string.unquoted,string.quoted
local concat,insert,remove=table.concat,table.insert,table.remove
-local luautilities=utilities.lua
-local luasuffixes=luautilities.suffixes
environment=environment or {}
local environment=environment
os.setlocale(nil,nil)
@@ -7084,18 +7092,25 @@ local basicengines=allocate {
["luajittex"]="luajittex",
["texluajit"]="luajittex",
}
+local luaengines=allocate {
+ ["lua"]=true,
+ ["luajit"]=true,
+}
environment.validengines=validengines
environment.basicengines=basicengines
-if arg and validengines[file.removesuffix(arg[0])] and arg[1]=="--luaonly" then
- arg[-1]=arg[0]
- arg[ 0]=arg[2]
- for k=3,#arg do
- arg[k-2]=arg[k]
+if not arg then
+elseif luaengines[file.removesuffix(arg[-1])] then
+elseif validengines[file.removesuffix(arg[0])] then
+ if arg[1]=="--luaonly" then
+ arg[-1]=arg[0]
+ arg[ 0]=arg[2]
+ for k=3,#arg do
+ arg[k-2]=arg[k]
+ end
+ remove(arg)
+ remove(arg)
+ else
end
- remove(arg)
- remove(arg)
-end
-do
local originalzero=file.basename(arg[0])
local specialmapping={ luatools=="base" }
if originalzero~="mtxrun" and originalzero~="mtxrun.lua" then
@@ -7107,32 +7122,6 @@ end
environment.arguments=allocate()
environment.files=allocate()
environment.sortedflags=nil
-local mt={
- __index=function(_,k)
- if k=="version" then
- local version=tex.toks and tex.toks.contextversiontoks
- if version and version~="" then
- rawset(environment,"version",version)
- return version
- else
- return "unknown"
- end
- elseif k=="jobname" or k=="formatname" then
- local name=tex and tex[k]
- if name or name=="" then
- rawset(environment,k,name)
- return name
- else
- return "unknown"
- end
- elseif k=="outputfilename" then
- local name=environment.jobname
- rawset(environment,k,name)
- return name
- end
- end
-}
-setmetatable(environment,mt)
function environment.initializearguments(arg)
local arguments,files={},{}
environment.arguments,environment.files,environment.sortedflags=arguments,files,nil
@@ -7247,6 +7236,53 @@ if arg then
environment.rawarguments=mark(arg)
arg={}
end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+-- original size: 5441, stripped down to: 3874
+
+ if not modules then modules={} end modules ['luat-env']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_lua=logs.reporter("resolvers","lua")
+local luautilities=utilities.lua
+local luasuffixes=luautilities.suffixes
+environment=environment or {}
+local environment=environment
+local mt={
+ __index=function(_,k)
+ if k=="version" then
+ local version=tex.toks and tex.toks.contextversiontoks
+ if version and version~="" then
+ rawset(environment,"version",version)
+ return version
+ else
+ return "unknown"
+ end
+ elseif k=="jobname" or k=="formatname" then
+ local name=tex and tex[k]
+ if name or name=="" then
+ rawset(environment,k,name)
+ return name
+ else
+ return "unknown"
+ end
+ elseif k=="outputfilename" then
+ local name=environment.jobname
+ rawset(environment,k,name)
+ return name
+ end
+ end
+}
+setmetatable(environment,mt)
function environment.texfile(filename)
return resolvers.findfile(filename,'tex')
end
@@ -14763,10 +14799,10 @@ end
end -- of closure
--- used libraries : l-lua.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-tab.lua util-sto.lua util-str.lua util-mrg.lua util-lua.lua util-prs.lua util-fmt.lua util-deb.lua trac-inf.lua trac-set.lua trac-log.lua trac-pro.lua util-tpl.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua luat-sta.lua luat-fmt.lua
+-- used libraries : l-lua.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-tab.lua util-sto.lua util-str.lua util-mrg.lua util-lua.lua util-prs.lua util-fmt.lua util-deb.lua trac-inf.lua trac-set.lua trac-log.lua trac-pro.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 593245
--- stripped bytes : 199776
+-- original bytes : 594353
+-- stripped bytes : 200182
-- end library merge
@@ -14822,6 +14858,7 @@ local ownlibs = { -- order can be made better
'util-tpl.lua',
+ 'util-env.lua',
'luat-env.lua', -- can come before inf (as in mkiv)
'lxml-tab.lua',
@@ -16019,7 +16056,6 @@ elseif environment.files[1] == 'texmfcnf.lua' then -- so that we don't need to l
resolvers.listers.configurations()
else
-
runners.loadbase()
runners.execute_ctx_script("mtx-base",filename)
diff --git a/scripts/context/stubs/unix/mtxrun b/scripts/context/stubs/unix/mtxrun
index 66b6f5466..6a894df01 100755
--- a/scripts/context/stubs/unix/mtxrun
+++ b/scripts/context/stubs/unix/mtxrun
@@ -2197,7 +2197,7 @@ end -- of closure
do -- create closure to overcome 200 locals limit
--- original size: 13731, stripped down to: 8450
+-- original size: 13891, stripped down to: 8591
if not modules then modules={} end modules ['l-os']={
version=1.001,
@@ -2319,7 +2319,13 @@ function os.runtime()
end
os.resolvers=os.resolvers or {}
local resolvers=os.resolvers
-local osmt=getmetatable(os) or { __index=function(t,k) t[k]="unset" return "unset" end }
+local osmt=getmetatable(os) or { __index=function(t,k)
+ local v=function()
+ print(format("function os.%s in namespace is undefined"))
+ end
+ t[k]=v
+ return v
+end }
local osix=osmt.__index
osmt.__index=function(t,k)
return (resolvers[k] or osix)(t,k)
@@ -2510,6 +2516,9 @@ end
function os.now()
return date("!%Y-%m-%d %H:%M:%S")
end
+if not os.sleep and socket then
+ os.sleep=socket.sleep
+end
end -- of closure
@@ -4817,7 +4826,7 @@ end -- of closure
do -- create closure to overcome 200 locals limit
--- original size: 7158, stripped down to: 5738
+-- original size: 7245, stripped down to: 5822
if not modules then modules={} end modules ['util-mrg']={
version=1.001,
@@ -4890,6 +4899,9 @@ local compact=Cs ((
)^1 )
local strip=Cs((emptyline^2/"\n"+1)^0)
local stripreturn=Cs((1-P("return")*space^1*P(1-space-eol)^1*(space+eol)^0*P(-1))^1)
+function merger.compact(data)
+ return lpegmatch(strip,lpegmatch(compact,data))
+end
local function self_compact(data)
local delta=0
if merger.strip_comment then
@@ -7051,23 +7063,19 @@ end -- of closure
do -- create closure to overcome 200 locals limit
--- original size: 12282, stripped down to: 8098
+-- original size: 7702, stripped down to: 4701
-if not modules then modules={} end modules ['luat-env']={
+if not modules then modules={} end modules ['util-env']={
version=1.001,
comment="companion to luat-lib.mkiv",
author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
copyright="PRAGMA ADE / ConTeXt Development Team",
license="see context related readme files"
}
-local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
-local report_lua=logs.reporter("resolvers","lua")
local allocate,mark=utilities.storage.allocate,utilities.storage.mark
local format,sub,match,gsub,find=string.format,string.sub,string.match,string.gsub,string.find
local unquoted,quoted=string.unquoted,string.quoted
local concat,insert,remove=table.concat,table.insert,table.remove
-local luautilities=utilities.lua
-local luasuffixes=luautilities.suffixes
environment=environment or {}
local environment=environment
os.setlocale(nil,nil)
@@ -7084,18 +7092,25 @@ local basicengines=allocate {
["luajittex"]="luajittex",
["texluajit"]="luajittex",
}
+local luaengines=allocate {
+ ["lua"]=true,
+ ["luajit"]=true,
+}
environment.validengines=validengines
environment.basicengines=basicengines
-if arg and validengines[file.removesuffix(arg[0])] and arg[1]=="--luaonly" then
- arg[-1]=arg[0]
- arg[ 0]=arg[2]
- for k=3,#arg do
- arg[k-2]=arg[k]
+if not arg then
+elseif luaengines[file.removesuffix(arg[-1])] then
+elseif validengines[file.removesuffix(arg[0])] then
+ if arg[1]=="--luaonly" then
+ arg[-1]=arg[0]
+ arg[ 0]=arg[2]
+ for k=3,#arg do
+ arg[k-2]=arg[k]
+ end
+ remove(arg)
+ remove(arg)
+ else
end
- remove(arg)
- remove(arg)
-end
-do
local originalzero=file.basename(arg[0])
local specialmapping={ luatools=="base" }
if originalzero~="mtxrun" and originalzero~="mtxrun.lua" then
@@ -7107,32 +7122,6 @@ end
environment.arguments=allocate()
environment.files=allocate()
environment.sortedflags=nil
-local mt={
- __index=function(_,k)
- if k=="version" then
- local version=tex.toks and tex.toks.contextversiontoks
- if version and version~="" then
- rawset(environment,"version",version)
- return version
- else
- return "unknown"
- end
- elseif k=="jobname" or k=="formatname" then
- local name=tex and tex[k]
- if name or name=="" then
- rawset(environment,k,name)
- return name
- else
- return "unknown"
- end
- elseif k=="outputfilename" then
- local name=environment.jobname
- rawset(environment,k,name)
- return name
- end
- end
-}
-setmetatable(environment,mt)
function environment.initializearguments(arg)
local arguments,files={},{}
environment.arguments,environment.files,environment.sortedflags=arguments,files,nil
@@ -7247,6 +7236,53 @@ if arg then
environment.rawarguments=mark(arg)
arg={}
end
+
+
+end -- of closure
+
+do -- create closure to overcome 200 locals limit
+
+-- original size: 5441, stripped down to: 3874
+
+ if not modules then modules={} end modules ['luat-env']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
+}
+local trace_locating=false trackers.register("resolvers.locating",function(v) trace_locating=v end)
+local report_lua=logs.reporter("resolvers","lua")
+local luautilities=utilities.lua
+local luasuffixes=luautilities.suffixes
+environment=environment or {}
+local environment=environment
+local mt={
+ __index=function(_,k)
+ if k=="version" then
+ local version=tex.toks and tex.toks.contextversiontoks
+ if version and version~="" then
+ rawset(environment,"version",version)
+ return version
+ else
+ return "unknown"
+ end
+ elseif k=="jobname" or k=="formatname" then
+ local name=tex and tex[k]
+ if name or name=="" then
+ rawset(environment,k,name)
+ return name
+ else
+ return "unknown"
+ end
+ elseif k=="outputfilename" then
+ local name=environment.jobname
+ rawset(environment,k,name)
+ return name
+ end
+ end
+}
+setmetatable(environment,mt)
function environment.texfile(filename)
return resolvers.findfile(filename,'tex')
end
@@ -14763,10 +14799,10 @@ end
end -- of closure
--- used libraries : l-lua.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-tab.lua util-sto.lua util-str.lua util-mrg.lua util-lua.lua util-prs.lua util-fmt.lua util-deb.lua trac-inf.lua trac-set.lua trac-log.lua trac-pro.lua util-tpl.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua luat-sta.lua luat-fmt.lua
+-- used libraries : l-lua.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-tab.lua util-sto.lua util-str.lua util-mrg.lua util-lua.lua util-prs.lua util-fmt.lua util-deb.lua trac-inf.lua trac-set.lua trac-log.lua trac-pro.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 593245
--- stripped bytes : 199776
+-- original bytes : 594353
+-- stripped bytes : 200182
-- end library merge
@@ -14822,6 +14858,7 @@ local ownlibs = { -- order can be made better
'util-tpl.lua',
+ 'util-env.lua',
'luat-env.lua', -- can come before inf (as in mkiv)
'lxml-tab.lua',
@@ -16019,7 +16056,6 @@ elseif environment.files[1] == 'texmfcnf.lua' then -- so that we don't need to l
resolvers.listers.configurations()
else
-
runners.loadbase()
runners.execute_ctx_script("mtx-base",filename)
diff --git a/tex/context/base/cont-new.mkii b/tex/context/base/cont-new.mkii
index 5a9ea173a..ad44564be 100644
--- a/tex/context/base/cont-new.mkii
+++ b/tex/context/base/cont-new.mkii
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2013.01.31 22:47}
+\newcontextversion{2013.02.05 13:35}
%D This file is loaded at runtime, thereby providing an
%D excellent place for hacks, patches, extensions and new
diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv
index 8e7d04564..c91c44933 100644
--- a/tex/context/base/cont-new.mkiv
+++ b/tex/context/base/cont-new.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2013.01.31 22:47}
+\newcontextversion{2013.02.05 13:35}
%D This file is loaded at runtime, thereby providing an excellent place for
%D hacks, patches, extensions and new features.
diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf
index 1c49aea89..ee4931f5a 100644
--- a/tex/context/base/context-version.pdf
+++ b/tex/context/base/context-version.pdf
Binary files differ
diff --git a/tex/context/base/context-version.png b/tex/context/base/context-version.png
index c90a939dd..ba3ff464b 100644
--- a/tex/context/base/context-version.png
+++ b/tex/context/base/context-version.png
Binary files differ
diff --git a/tex/context/base/context.mkii b/tex/context/base/context.mkii
index 8a8aa70dc..62235cc1c 100644
--- a/tex/context/base/context.mkii
+++ b/tex/context/base/context.mkii
@@ -20,7 +20,7 @@
%D your styles an modules.
\edef\contextformat {\jobname}
-\edef\contextversion{2013.01.31 22:47}
+\edef\contextversion{2013.02.05 13:35}
%D For those who want to use this:
diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv
index 8ad9f130e..64a146e60 100644
--- a/tex/context/base/context.mkiv
+++ b/tex/context/base/context.mkiv
@@ -25,7 +25,7 @@
%D up and the dependencies are more consistent.
\edef\contextformat {\jobname}
-\edef\contextversion{2013.01.31 22:47}
+\edef\contextversion{2013.02.05 13:35}
%D For those who want to use this:
diff --git a/tex/context/base/grph-inc.lua b/tex/context/base/grph-inc.lua
index fd282dac2..4006dfdbe 100644
--- a/tex/context/base/grph-inc.lua
+++ b/tex/context/base/grph-inc.lua
@@ -72,6 +72,14 @@ local context, img = context, img
local f_hash_part = formatters["%s->%s->%s"]
local f_hash_full = formatters["%s->%s->%s->%s->%s->%s->%s"]
+local v_yes = variables.yes
+local v_low = variables.low
+local v_medium = variables.medium
+local v_high = variables.high
+local v_global = variables["global"]
+local v_local = variables["local"]
+local v_default = variables.default
+
local maxdimen = 2^30-1
function img.check(figure)
@@ -322,18 +330,18 @@ function figures.setpaths(locationset,pathlist)
-- this function can be called each graphic so we provide this optimization
return
end
- local iv, t, h = interfaces.variables, figure_paths, settings_to_hash(locationset)
+ local t, h = figure_paths, settings_to_hash(locationset)
if last_locationset ~= locationset then
-- change == reset (actually, a 'reset' would indeed reset
- if h[iv["local"]] then
+ if h[v_local] then
t = table.fastcopy(figures.localpaths or { })
else
t = { }
end
- figures.defaultsearch = h[iv["default"]]
+ figures.defaultsearch = h[v_default]
last_locationset = locationset
end
- if h[iv["global"]] then
+ if h[v_global] then
local list = settings_to_array(pathlist)
for i=1,#list do
local s = list[i]
@@ -423,15 +431,14 @@ function figures.initialize(request)
--
request.page = math.max(tonumber(request.page) or 1,1)
request.size = img.checksize(request.size)
- request.object = request.object == variables.yes
- request["repeat"] = request["repeat"] == variables.yes
- request.preview = request.preview == variables.yes
+ request.object = request.object == v_yes
+ request["repeat"] = request["repeat"] == v_yes
+ request.preview = request.preview == v_yes
request.cache = request.cache ~= "" and request.cache
request.prefix = request.prefix ~= "" and request.prefix
request.format = request.format ~= "" and request.format
table.merge(figuredata.request,request)
end
- -- inspect(figuredata)
return figuredata
end
@@ -650,7 +657,7 @@ local function register(askedname,specification)
return specification
end
-local resolve_too = true -- urls
+local resolve_too = false -- true
local function locate(request) -- name, format, cache
-- not resolvers.cleanpath(request.name) as it fails on a!b.pdf and b~c.pdf
@@ -661,28 +668,58 @@ local function locate(request) -- name, format, cache
if foundname then
return foundname
end
+ --
+ local askedcache = request.cache
+ local askedconversion = request.conversion
+ local askedresolution = request.resolution
+ --
+ if request.format == "" or request.format == "unknown" then
+ request.format = nil
+ end
-- protocol check
local hashed = url.hashed(askedname)
- if hashed then
- if hashed.scheme == "file" then
- local path = hashed.path
- if path and path ~= "" then
- askedname = path
+ if not hashed then
+ -- go on
+ elseif hashed.scheme == "file" then
+ local path = hashed.path
+ if path and path ~= "" then
+ askedname = path
+ end
+ else
+ local foundname = resolvers.findbinfile(askedname)
+ if not lfs.isfile(foundname) then -- foundname can be dummy
+ if trace_figures then
+ report_inclusion("strategy: unresolved url: %s",askedname)
end
+ -- url not found
+ return register(askedname)
+ end
+ local askedformat = request.format or file.suffix(askedname) or ""
+ local guessedformat = figures.guess(foundname)
+ if askedformat ~= guessedformat then
+ if trace_figures then
+ report_inclusion("strategy: resolved url: %s, unknown format",askedname)
+ end
+ -- url found, but wrong format
+ return register(askedname)
else
- local foundname = resolvers.findbinfile(askedname)
- if foundname then
- askedname = foundname
+ if trace_figures then
+ report_inclusion("strategy: resolved url: %s -> %s",askedname,foundname)
end
+ return register(askedname, {
+ askedname = askedname,
+ fullname = foundname,
+ format = askedformat,
+ cache = askedcache,
+ conversion = askedconversion,
+ resolution = askedresolution,
+ })
end
end
-- we could use the hashed data instead
local askedpath= file.is_rootbased_path(askedname)
local askedbase = file.basename(askedname)
- local askedformat = request.format ~= "" and request.format ~= "unknown" and request.format or file.suffix(askedname) or ""
- local askedcache = request.cache
- local askedconversion = request.conversion
- local askedresolution = request.resolution
+ local askedformat = request.format or file.suffix(askedname) or ""
if askedformat ~= "" then
askedformat = lower(askedformat)
if trace_figures then
@@ -806,7 +843,7 @@ local function locate(request) -- name, format, cache
report_inclusion("warning: skipping path %s",path)
end
else
- local foundname, quitscanning, forcedformat = figures.exists(check,format,true)
+ local foundname, quitscanning, forcedformat = figures.exists(check,format,resolve_too) -- true)
if foundname then
return register(askedname, {
askedname = askedname,
@@ -1246,9 +1283,9 @@ converters.ps = epsconverter
local epstopdf = {
resolutions = {
- [variables.low] = "screen",
- [variables.medium] = "ebook",
- [variables.high] = "prepress",
+ [v_low] = "screen",
+ [v_medium] = "ebook",
+ [v_high] = "prepress",
},
command = os.type == "windows" and "gswin32c" or "gs",
-- -dProcessDSCComments=false
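
Two things happen in grph-inc.lua: values from interfaces.variables (yes, low, medium, high, global, local, default) are cached in upvalues instead of being looked up each call, and the url branch of locate() is reworked so that an unresolved url, or one whose resolved file does not match the asked format, is registered and reported right away. A small sketch of the caching pattern, with a stand-in variables table so it runs outside ConTeXt:

-- stand-in for interfaces.variables so the sketch runs outside ConTeXt
local variables = (interfaces and interfaces.variables) or { yes = "yes" }

local v_yes = variables.yes -- looked up once, kept in an upvalue

local function initialize(request)
    -- compare against the cached upvalue instead of indexing the table each call
    request.object  = request.object  == v_yes
    request.preview = request.preview == v_yes
    return request
end
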
diff --git a/tex/context/base/l-os.lua b/tex/context/base/l-os.lua
index 08136b24d..1e7c91757 100644
--- a/tex/context/base/l-os.lua
+++ b/tex/context/base/l-os.lua
@@ -193,7 +193,14 @@ os.resolvers = os.resolvers or { } -- will become private
local resolvers = os.resolvers
-local osmt = getmetatable(os) or { __index = function(t,k) t[k] = "unset" return "unset" end } -- maybe nil
+local osmt = getmetatable(os) or { __index = function(t,k)
+ local v = function()
+ print(format("function os.%s in namespace is undefined"))
+ end
+ t[k] = v
+ return v
+end } -- maybe nil
+
local osix = osmt.__index
osmt.__index = function(t,k)
@@ -456,6 +463,9 @@ function os.now()
return date("!%Y-%m-%d %H:%M:%S") -- 2011-12-04 14:59:12
end
+if not os.sleep and socket then
+ os.sleep = socket.sleep
+end
-- print(os.which("inkscape.exe"))
-- print(os.which("inkscape"))
diff --git a/tex/context/base/luat-env.lua b/tex/context/base/luat-env.lua
index e483169fd..efaaebf3e 100644
--- a/tex/context/base/luat-env.lua
+++ b/tex/context/base/luat-env.lua
@@ -1,4 +1,4 @@
-if not modules then modules = { } end modules ['luat-env'] = {
+ if not modules then modules = { } end modules ['luat-env'] = {
version = 1.001,
comment = "companion to luat-lib.mkiv",
author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
@@ -6,94 +6,23 @@ if not modules then modules = { } end modules ['luat-env'] = {
license = "see context related readme files"
}
--- A former version provided functionality for non embeded core
--- scripts i.e. runtime library loading. Given the amount of
--- Lua code we use now, this no longer makes sense. Much of this
--- evolved before bytecode arrays were available and so a lot of
+-- A former version provided functionality for non embeded core scripts i.e. runtime
+-- library loading. Given the amount of Lua code we use now, this no longer makes
+-- sense. Much of this evolved before bytecode arrays were available and so a lot of
-- code has disappeared already.
local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end)
local report_lua = logs.reporter("resolvers","lua")
-local allocate, mark = utilities.storage.allocate, utilities.storage.mark
-
-local format, sub, match, gsub, find = string.format, string.sub, string.match, string.gsub, string.find
-local unquoted, quoted = string.unquoted, string.quoted
-local concat, insert, remove = table.concat, table.insert, table.remove
-
local luautilities = utilities.lua
local luasuffixes = luautilities.suffixes
environment = environment or { }
local environment = environment
--- precautions
-
-os.setlocale(nil,nil) -- useless feature and even dangerous in luatex
-
-function os.setlocale()
- -- no way you can mess with it
-end
-
--- dirty tricks (we will replace the texlua call by luatex --luaonly)
-
-local validengines = allocate {
- ["luatex"] = true,
- ["luajittex"] = true,
- -- ["luatex.exe"] = true,
- -- ["luajittex.exe"] = true,
-}
-
-local basicengines = allocate {
- ["luatex"] = "luatex",
- ["texlua"] = "luatex",
- ["texluac"] = "luatex",
- ["luajittex"] = "luajittex",
- ["texluajit"] = "luajittex",
- -- ["texlua.exe"] = "luatex",
- -- ["texluajit.exe"] = "luajittex",
-}
-
-environment.validengines = validengines
-environment.basicengines = basicengines
-
-if arg and validengines[file.removesuffix(arg[0])] and arg[1] == "--luaonly" then
- arg[-1] = arg[0]
- arg[ 0] = arg[2]
- for k=3,#arg do
- arg[k-2] = arg[k]
- end
- remove(arg) -- last
- remove(arg) -- pre-last
-end
-
--- This is an ugly hack but it permits symlinking a script (say 'context') to 'mtxrun' as in:
---
--- ln -s /opt/minimals/tex/texmf-linux-64/bin/mtxrun context
---
--- The special mapping hack is needed because 'luatools' boils down to 'mtxrun --script base'
--- but it's unlikely that there will be more of this
-
-do
-
- local originalzero = file.basename(arg[0])
- local specialmapping = { luatools == "base" }
-
- if originalzero ~= "mtxrun" and originalzero ~= "mtxrun.lua" then
- arg[0] = specialmapping[originalzero] or originalzero
- insert(arg,0,"--script")
- insert(arg,0,"mtxrun")
- end
-
-end
-
-- environment
-environment.arguments = allocate()
-environment.files = allocate()
-environment.sortedflags = nil
-
local mt = {
__index = function(_,k)
if k == "version" then
@@ -122,161 +51,6 @@ local mt = {
setmetatable(environment,mt)
--- context specific arguments (in order not to confuse the engine)
-
-function environment.initializearguments(arg)
- local arguments, files = { }, { }
- environment.arguments, environment.files, environment.sortedflags = arguments, files, nil
- for index=1,#arg do
- local argument = arg[index]
- if index > 0 then
- local flag, value = match(argument,"^%-+(.-)=(.-)$")
- if flag then
- flag = gsub(flag,"^c:","")
- arguments[flag] = unquoted(value or "")
- else
- flag = match(argument,"^%-+(.+)")
- if flag then
- flag = gsub(flag,"^c:","")
- arguments[flag] = true
- else
- files[#files+1] = argument
- end
- end
- end
- end
- environment.ownname = file.reslash(environment.ownname or arg[0] or 'unknown.lua')
-end
-
-function environment.setargument(name,value)
- environment.arguments[name] = value
-end
-
--- todo: defaults, better checks e.g on type (boolean versus string)
---
--- tricky: too many hits when we support partials unless we add
--- a registration of arguments so from now on we have 'partial'
-
-function environment.getargument(name,partial)
- local arguments, sortedflags = environment.arguments, environment.sortedflags
- if arguments[name] then
- return arguments[name]
- elseif partial then
- if not sortedflags then
- sortedflags = allocate(table.sortedkeys(arguments))
- for k=1,#sortedflags do
- sortedflags[k] = "^" .. sortedflags[k]
- end
- environment.sortedflags = sortedflags
- end
- -- example of potential clash: ^mode ^modefile
- for k=1,#sortedflags do
- local v = sortedflags[k]
- if find(name,v) then
- return arguments[sub(v,2,#v)]
- end
- end
- end
- return nil
-end
-
-environment.argument = environment.getargument
-
-function environment.splitarguments(separator) -- rather special, cut-off before separator
- local done, before, after = false, { }, { }
- local originalarguments = environment.originalarguments
- for k=1,#originalarguments do
- local v = originalarguments[k]
- if not done and v == separator then
- done = true
- elseif done then
- after[#after+1] = v
- else
- before[#before+1] = v
- end
- end
- return before, after
-end
-
-function environment.reconstructcommandline(arg,noquote)
- arg = arg or environment.originalarguments
- if noquote and #arg == 1 then
- -- we could just do: return unquoted(resolvers.resolve(arg[i]))
- local a = arg[1]
- a = resolvers.resolve(a)
- a = unquoted(a)
- return a
- elseif #arg > 0 then
- local result = { }
- for i=1,#arg do
- -- we could just do: result[#result+1] = format("%q",unquoted(resolvers.resolve(arg[i])))
- local a = arg[i]
- a = resolvers.resolve(a)
- a = unquoted(a)
- a = gsub(a,'"','\\"') -- tricky
- if find(a," ") then
- result[#result+1] = quoted(a)
- else
- result[#result+1] = a
- end
- end
- return concat(result," ")
- else
- return ""
- end
-end
-
--- -- to be tested:
---
--- function environment.reconstructcommandline(arg,noquote)
--- arg = arg or environment.originalarguments
--- if noquote and #arg == 1 then
--- return unquoted(resolvers.resolve(arg[1]))
--- elseif #arg > 0 then
--- local result = { }
--- for i=1,#arg do
--- result[#result+1] = format("%q",unquoted(resolvers.resolve(arg[i]))) -- always quote
--- end
--- return concat(result," ")
--- else
--- return ""
--- end
--- end
-
-if arg then
-
- -- new, reconstruct quoted snippets (maybe better just remove the " then and add them later)
- local newarg, instring = { }, false
-
- for index=1,#arg do
- local argument = arg[index]
- if find(argument,"^\"") then
- newarg[#newarg+1] = gsub(argument,"^\"","")
- if not find(argument,"\"$") then
- instring = true
- end
- elseif find(argument,"\"$") then
- newarg[#newarg] = newarg[#newarg] .. " " .. gsub(argument,"\"$","")
- instring = false
- elseif instring then
- newarg[#newarg] = newarg[#newarg] .. " " .. argument
- else
- newarg[#newarg+1] = argument
- end
- end
- for i=1,-5,-1 do
- newarg[i] = arg[i]
- end
-
- environment.initializearguments(newarg)
-
- environment.originalarguments = mark(newarg)
- environment.rawarguments = mark(arg)
-
- arg = { } -- prevent duplicate handling
-
-end
-
-- weird place ... depends on a not yet loaded module
function environment.texfile(filename)
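
What remains in luat-env.lua after the split is the TeX-specific part: a metatable that resolves environment.version, environment.jobname, environment.formatname and environment.outputfilename on first access and caches the result with rawset, plus the findfile wrappers. A generic, ConTeXt-independent sketch of that lazy-attribute pattern (lazytable and the provider below are made up for illustration):

local function lazytable(providers)
    local t = { }
    return setmetatable(t, {
        __index = function(_, k)
            local p = providers[k]
            if p then
                local v = p()
                rawset(t, k, v) -- cache, so the provider runs only once
                return v
            end
        end
    })
end

-- hypothetical provider, just for illustration
local environment = lazytable {
    version = function()
        return os.getenv("MYAPP_VERSION") or "unknown"
    end,
}

print(environment.version) -- computed on first access
print(environment.version) -- served from the cached field
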
diff --git a/tex/context/base/luat-lib.mkiv b/tex/context/base/luat-lib.mkiv
index 521ecbf5e..4ece2a29a 100644
--- a/tex/context/base/luat-lib.mkiv
+++ b/tex/context/base/luat-lib.mkiv
@@ -65,6 +65,7 @@
\registerctxluafile{luat-lua}{1.001}
\registerctxluafile{luat-sto}{1.001}
\registerctxluafile{luat-ini}{1.001}
+\registerctxluafile{util-env}{1.001}
\registerctxluafile{luat-env}{1.001}
\registerctxluafile{luat-exe}{1.001}
\registerctxluafile{luat-iop}{1.001}
diff --git a/tex/context/base/m-graph.mkiv b/tex/context/base/m-graph.mkiv
index 8acbe1f8d..c80e4ad91 100644
--- a/tex/context/base/m-graph.mkiv
+++ b/tex/context/base/m-graph.mkiv
@@ -15,6 +15,8 @@
% are limited by what mp can do. We support @ as replacement for
% the percent symbol. We also add a specifier when no one is given.
+\unprotect
+
\startluacode
local format, gsub, find, match = string.format, string.gsub, string.find, string.match
@@ -54,10 +56,24 @@
% We could also delegate parsing using lower level plugins.
-\startMPextensions
- if unknown Fe_plus : picture Fe_plus ; Fe_plus := textext("+") ; fi ;
+\defineMPinstance
+ [graph]
+ [\s!format=metafun,
+ \s!extensions=\v!yes,
+ \s!initializations=\v!yes,
+ \c!method=\s!double]
+
+\startMPdefinitions{graph}
if unknown context_grap: input "mp-grap.mpiv" ; fi ;
-\stopMPextensions
+\stopMPdefinitions
+
+% gone:
+%
+% \startMPextensions
+% if unknown Fe_plus : picture Fe_plus ; Fe_plus := textext("+") ; fi ;
+% \stopMPextensions
+
+\protect
\continueifinputfile{m-graph.mkiv}
diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf
index 1a957aa56..26f01897e 100644
--- a/tex/context/base/status-files.pdf
+++ b/tex/context/base/status-files.pdf
Binary files differ
diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf
index 0d0ded3d8..d4b7fb4c5 100644
--- a/tex/context/base/status-lua.pdf
+++ b/tex/context/base/status-lua.pdf
Binary files differ
diff --git a/tex/context/base/strc-flt.mkvi b/tex/context/base/strc-flt.mkvi
index e268afee7..6199a3e6b 100644
--- a/tex/context/base/strc-flt.mkvi
+++ b/tex/context/base/strc-flt.mkvi
@@ -756,8 +756,8 @@
\def\floatcaptionattribute
{\iflocation
- \ifnofloatnumber
- \else
+ %\ifnofloatnumber
+ %\else
\ifnofloatcaption
\else
\ifinsidesplitfloat
@@ -769,7 +769,7 @@
attr \destinationattribute \currentfloatattribute
\fi
\fi
- \fi
+ %\fi
\fi}
\newconditional\usesamefloatnumber
@@ -1029,7 +1029,7 @@
\unexpanded\def\placefloats
{\page_otr_command_flush_floats}
-
+
\newdimen\d_strc_floats_margin
\newdimen\d_strc_floats_top
\newdimen\d_strc_floats_bottom
diff --git a/tex/context/base/trac-lmx.lua b/tex/context/base/trac-lmx.lua
index 2754dd16d..d20241735 100644
--- a/tex/context/base/trac-lmx.lua
+++ b/tex/context/base/trac-lmx.lua
@@ -142,6 +142,10 @@ local function loadedfile(name)
return data
end
+local function loadedsubfile(name)
+ return io.loaddata(resolvers and resolvers.findfile and resolvers.findfile(name) or name)
+end
+
lmx.loadedfile = loadedfile
-- A few helpers (the next one could end up in l-lpeg):
@@ -208,14 +212,15 @@ local function do_type_variable(str)
end
local function do_include(filename)
- local data = loadedfile(filename)
+ local data = loadedsubfile(filename)
if (not data or data == "") and type(usedpaths) == "table" then
for i=1,#usedpaths do
- data = loadedfile(joinpath(usedpaths[i],filename))
+ data = loadedsubfile(joinpath(usedpaths[i],filename))
end
end
if not data or data == "" then
data = format("<!-- unknown lmx include file: %s -->",filename)
+ report_lmx("empty include file: %s",filename)
end
return data
end
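
In trac-lmx.lua includes are now loaded with loadedsubfile, which consults resolvers.findfile when a resolver is available, and an include that stays empty is reported. A simplified sketch of that lookup order, with the resolver passed in as an optional parameter so the function runs outside ConTeXt:

local format = string.format

local function loadinclude(filename, usedpaths, findfile)
    local function load(name)
        if findfile then
            name = findfile(name) or name -- e.g. resolvers.findfile inside ConTeXt
        end
        local f = io.open(name, "rb")
        if f then
            local data = f:read("*a")
            f:close()
            return data
        end
    end
    local data = load(filename)
    if (not data or data == "") and type(usedpaths) == "table" then
        for i = 1, #usedpaths do
            data = load(usedpaths[i] .. "/" .. filename) or data
        end
    end
    if not data or data == "" then
        data = format("<!-- unknown lmx include file: %s -->", filename)
    end
    return data
end
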
diff --git a/tex/context/base/util-mrg.lua b/tex/context/base/util-mrg.lua
index cdac10ac1..276531d79 100644
--- a/tex/context/base/util-mrg.lua
+++ b/tex/context/base/util-mrg.lua
@@ -120,6 +120,10 @@ local compact = Cs ( (
local strip = Cs((emptyline^2/"\n" + 1)^0)
local stripreturn = Cs((1-P("return") * space^1 * P(1-space-eol)^1 * (space+eol)^0 * P(-1))^1)
+function merger.compact(data)
+ return lpegmatch(strip,lpegmatch(compact,data))
+end
+
local function self_compact(data)
local delta = 0
if merger.strip_comment then
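
The new merger.compact simply chains the two existing lpeg substitutions: compact (comment and whitespace removal) followed by strip (squeezing runs of empty lines). A much cruder standalone version of that two-pass idea is sketched below; unlike the real patterns it does not protect strings or long comments, so it only demonstrates the Cs substitution technique.

local lpeg = require("lpeg")
local P, Cs = lpeg.P, lpeg.Cs
local lpegmatch = lpeg.match

local eol       = P("\r\n") + P("\n") + P("\r")
local space     = P(" ") + P("\t")
local comment   = P("--") * (1 - eol)^0
local emptyline = space^0 * eol

local compact = Cs(((space^0 * comment) / "" + 1)^0) -- drop line comments
local strip   = Cs((emptyline^2 / "\n" + 1)^0)       -- squeeze runs of empty lines

local function simplecompact(data)
    return lpegmatch(strip, lpegmatch(compact, data))
end

print(simplecompact("local a = 1 -- one\n\n\n-- gone\nlocal b = 2\n"))
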
diff --git a/tex/context/base/util-sql-tickets.lua b/tex/context/base/util-sql-tickets.lua
index 2dc334ce6..63bbf4d44 100644
--- a/tex/context/base/util-sql-tickets.lua
+++ b/tex/context/base/util-sql-tickets.lua
@@ -87,9 +87,7 @@ local template =[[
]]
function tickets.createdb(presets,datatable)
-
local db = checkeddb(presets,datatable)
-
local data, keys = db.execute {
template = template,
variables = {
diff --git a/tex/context/base/util-sql.lua b/tex/context/base/util-sql.lua
index 5310ea699..5d57c22f5 100644
--- a/tex/context/base/util-sql.lua
+++ b/tex/context/base/util-sql.lua
@@ -111,7 +111,9 @@ local defaults = { __index =
}
table.setmetatableindex(sql.methods,function(t,k)
+ report_state("start loading method %q",k)
require("util-sql-imp-"..k)
+ report_state("loading method %q done",k)
return rawget(t,k)
end)
@@ -328,6 +330,8 @@ function sql.usedatabase(presets,datatable)
deserialize = deserialize,
unpackdata = unpackdata,
}
+ else
+ report_state("missing name in usedatabase specification")
end
end
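
util-sql.lua only gains reporting: around the lazy require of a backend in the sql.methods index metamethod, and when usedatabase is called without a name. The lazy loading itself is a handy pattern; a standalone sketch, with pcall so it also runs where the util-sql-imp-* modules are not installed:

local methods = { }

setmetatable(methods, {
    __index = function(t, k)
        print("start loading method " .. tostring(k))
        local ok = pcall(require, "util-sql-imp-" .. tostring(k)) -- backend module name used by util-sql
        print("loading method " .. tostring(k) .. (ok and " done" or " failed"))
        return rawget(t, k) -- the backend is expected to register itself in the table
    end
})

local client = methods.library -- triggers the lazy require on first access
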
diff --git a/tex/generic/context/luatex/luatex-basics-nod.lua b/tex/generic/context/luatex/luatex-basics-nod.lua
index 151d98a8f..ec515001e 100644
--- a/tex/generic/context/luatex/luatex-basics-nod.lua
+++ b/tex/generic/context/luatex/luatex-basics-nod.lua
@@ -63,6 +63,9 @@ nodes.glyphcodes = glyphcodes
local free_node = node.free
local remove_node = node.remove
local new_node = node.new
+local traverse_id = node.traverse_id
+
+local math_code = nodecodes.math
nodes.handlers.protectglyphs = node.protect_glyphs
nodes.handlers.unprotectglyphs = node.unprotect_glyphs
@@ -93,3 +96,9 @@ function nodes.pool.kern(k)
n.kern = k
return n
end
+
+function nodes.endofmath(n)
+ for n in traverse_id(math_code,n.next) do
+ return n
+ end
+end
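
The generic loader gains a helper nodes.endofmath that, given the begin-math node, returns the node closing the formula by traversing forward over nodes with the math id. A sketch of the same idea; it only makes sense inside LuaTeX, hence the guard on the node library, and the _sketch name marks it as a stand-in rather than the installed helper.

if node then -- only meaningful inside LuaTeX
    local traverse_id = node.traverse_id
    local math_code   = node.id("math")

    local function endofmath(n)
        -- walk forward from n and return the first math node,
        -- i.e. the one that closes the formula n starts
        for m in traverse_id(math_code, n.next) do
            return m
        end
    end

    nodes = nodes or { }
    nodes.endofmath_sketch = endofmath -- stand-in name, not the installed function
end
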
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index 6121c7206..1b0334cc9 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,13581 +1,10360 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 01/31/13 22:47:25
+-- merge date : 02/05/13 13:35:14
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['l-lpeg'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['l-lpeg']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
--- a new lpeg fails on a #(1-P(":")) test and really needs a + P(-1)
-
--- move utf -> l-unicode
--- move string -> l-string or keep it here
-
-local lpeg = require("lpeg")
-
--- tracing (only used when we encounter a problem in integration of lpeg in luatex)
-
--- some code will move to unicode and string
-
-local report = texio and texio.write_nl or print
-
--- local lpmatch = lpeg.match
--- local lpprint = lpeg.print
--- local lpp = lpeg.P
--- local lpr = lpeg.R
--- local lps = lpeg.S
--- local lpc = lpeg.C
--- local lpb = lpeg.B
--- local lpv = lpeg.V
--- local lpcf = lpeg.Cf
--- local lpcb = lpeg.Cb
--- local lpcg = lpeg.Cg
--- local lpct = lpeg.Ct
--- local lpcs = lpeg.Cs
--- local lpcc = lpeg.Cc
--- local lpcmt = lpeg.Cmt
--- local lpcarg = lpeg.Carg
-
--- function lpeg.match(l,...) report("LPEG MATCH") lpprint(l) return lpmatch(l,...) end
-
--- function lpeg.P (l) local p = lpp (l) report("LPEG P =") lpprint(l) return p end
--- function lpeg.R (l) local p = lpr (l) report("LPEG R =") lpprint(l) return p end
--- function lpeg.S (l) local p = lps (l) report("LPEG S =") lpprint(l) return p end
--- function lpeg.C (l) local p = lpc (l) report("LPEG C =") lpprint(l) return p end
--- function lpeg.B (l) local p = lpb (l) report("LPEG B =") lpprint(l) return p end
--- function lpeg.V (l) local p = lpv (l) report("LPEG V =") lpprint(l) return p end
--- function lpeg.Cf (l) local p = lpcf (l) report("LPEG Cf =") lpprint(l) return p end
--- function lpeg.Cb (l) local p = lpcb (l) report("LPEG Cb =") lpprint(l) return p end
--- function lpeg.Cg (l) local p = lpcg (l) report("LPEG Cg =") lpprint(l) return p end
--- function lpeg.Ct (l) local p = lpct (l) report("LPEG Ct =") lpprint(l) return p end
--- function lpeg.Cs (l) local p = lpcs (l) report("LPEG Cs =") lpprint(l) return p end
--- function lpeg.Cc (l) local p = lpcc (l) report("LPEG Cc =") lpprint(l) return p end
--- function lpeg.Cmt (l) local p = lpcmt (l) report("LPEG Cmt =") lpprint(l) return p end
--- function lpeg.Carg (l) local p = lpcarg(l) report("LPEG Carg =") lpprint(l) return p end
-
-local type, next, tostring = type, next, tostring
-local byte, char, gmatch, format = string.byte, string.char, string.gmatch, string.format
------ mod, div = math.mod, math.div
-local floor = math.floor
-
-local P, R, S, V, Ct, C, Cs, Cc, Cp, Cmt = lpeg.P, lpeg.R, lpeg.S, lpeg.V, lpeg.Ct, lpeg.C, lpeg.Cs, lpeg.Cc, lpeg.Cp, lpeg.Cmt
-local lpegtype, lpegmatch, lpegprint = lpeg.type, lpeg.match, lpeg.print
-
--- let's start with an inspector:
-
+local lpeg=require("lpeg")
+local report=texio and texio.write_nl or print
+local type,next,tostring=type,next,tostring
+local byte,char,gmatch,format=string.byte,string.char,string.gmatch,string.format
+local floor=math.floor
+local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt
+local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print
setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end)
-
--- Beware, we predefine a bunch of patterns here and one reason for doing so
--- is that we get consistent behaviour in some of the visualizers.
-
-lpeg.patterns = lpeg.patterns or { } -- so that we can share
-local patterns = lpeg.patterns
-
-
-local anything = P(1)
-local endofstring = P(-1)
-local alwaysmatched = P(true)
-
-patterns.anything = anything
-patterns.endofstring = endofstring
-patterns.beginofstring = alwaysmatched
-patterns.alwaysmatched = alwaysmatched
-
-local digit, sign = R('09'), S('+-')
-local cr, lf, crlf = P("\r"), P("\n"), P("\r\n")
-local newline = crlf + S("\r\n") -- cr + lf
-local escaped = P("\\") * anything
-local squote = P("'")
-local dquote = P('"')
-local space = P(" ")
-
-local utfbom_32_be = P('\000\000\254\255')
-local utfbom_32_le = P('\255\254\000\000')
-local utfbom_16_be = P('\255\254')
-local utfbom_16_le = P('\254\255')
-local utfbom_8 = P('\239\187\191')
-local utfbom = utfbom_32_be + utfbom_32_le
- + utfbom_16_be + utfbom_16_le
- + utfbom_8
-local utftype = utfbom_32_be * Cc("utf-32-be") + utfbom_32_le * Cc("utf-32-le")
- + utfbom_16_be * Cc("utf-16-be") + utfbom_16_le * Cc("utf-16-le")
- + utfbom_8 * Cc("utf-8") + alwaysmatched * Cc("utf-8") -- assume utf8
-local utfoffset = utfbom_32_be * Cc(4) + utfbom_32_le * Cc(4)
- + utfbom_16_be * Cc(2) + utfbom_16_le * Cc(2)
- + utfbom_8 * Cc(3) + Cc(0)
-
-local utf8next = R("\128\191")
-
-patterns.utf8one = R("\000\127")
-patterns.utf8two = R("\194\223") * utf8next
-patterns.utf8three = R("\224\239") * utf8next * utf8next
-patterns.utf8four = R("\240\244") * utf8next * utf8next * utf8next
-patterns.utfbom = utfbom
-patterns.utftype = utftype
-patterns.utfoffset = utfoffset
-
-local utf8char = patterns.utf8one + patterns.utf8two + patterns.utf8three + patterns.utf8four
-local validutf8char = utf8char^0 * endofstring * Cc(true) + Cc(false)
-
-patterns.utf8 = utf8char
-patterns.utf8char = utf8char
-patterns.validutf8 = validutf8char
-patterns.validutf8char = validutf8char
-
-local eol = S("\n\r")
-local spacer = S(" \t\f\v") -- + char(0xc2, 0xa0) if we want utf (cf mail roberto)
-local whitespace = eol + spacer
-local nonspacer = 1 - spacer
-local nonwhitespace = 1 - whitespace
-
-patterns.eol = eol
-patterns.spacer = spacer
-patterns.whitespace = whitespace
-patterns.nonspacer = nonspacer
-patterns.nonwhitespace = nonwhitespace
-
-local stripper = spacer^0 * C((spacer^0 * nonspacer^1)^0) -- from example by roberto
-
------ collapser = Cs(spacer^0/"" * ((spacer^1 * P(-1) / "") + (spacer^1/" ") + P(1))^0)
-local collapser = Cs(spacer^0/"" * nonspacer^0 * ((spacer^0/" " * nonspacer^1)^0))
-
-patterns.stripper = stripper
-patterns.collapser = collapser
-
-patterns.digit = digit
-patterns.sign = sign
-patterns.cardinal = sign^0 * digit^1
-patterns.integer = sign^0 * digit^1
-patterns.unsigned = digit^0 * P('.') * digit^1
-patterns.float = sign^0 * patterns.unsigned
-patterns.cunsigned = digit^0 * P(',') * digit^1
-patterns.cfloat = sign^0 * patterns.cunsigned
-patterns.number = patterns.float + patterns.integer
-patterns.cnumber = patterns.cfloat + patterns.integer
-patterns.oct = P("0") * R("07")^1
-patterns.octal = patterns.oct
-patterns.HEX = P("0x") * R("09","AF")^1
-patterns.hex = P("0x") * R("09","af")^1
-patterns.hexadecimal = P("0x") * R("09","AF","af")^1
-patterns.lowercase = R("az")
-patterns.uppercase = R("AZ")
-patterns.letter = patterns.lowercase + patterns.uppercase
-patterns.space = space
-patterns.tab = P("\t")
-patterns.spaceortab = patterns.space + patterns.tab
-patterns.newline = newline
-patterns.emptyline = newline^1
-patterns.equal = P("=")
-patterns.comma = P(",")
-patterns.commaspacer = P(",") * spacer^0
-patterns.period = P(".")
-patterns.colon = P(":")
-patterns.semicolon = P(";")
-patterns.underscore = P("_")
-patterns.escaped = escaped
-patterns.squote = squote
-patterns.dquote = dquote
-patterns.nosquote = (escaped + (1-squote))^0
-patterns.nodquote = (escaped + (1-dquote))^0
-patterns.unsingle = (squote/"") * patterns.nosquote * (squote/"") -- will change to C in the middle
-patterns.undouble = (dquote/"") * patterns.nodquote * (dquote/"") -- will change to C in the middle
-patterns.unquoted = patterns.undouble + patterns.unsingle -- more often undouble
-patterns.unspacer = ((patterns.spacer^1)/"")^0
-
-patterns.singlequoted = squote * patterns.nosquote * squote
-patterns.doublequoted = dquote * patterns.nodquote * dquote
-patterns.quoted = patterns.doublequoted + patterns.singlequoted
-
-patterns.propername = R("AZ","az","__") * R("09","AZ","az", "__")^0 * P(-1)
-
-patterns.somecontent = (anything - newline - space)^1 -- (utf8char - newline - space)^1
-patterns.beginline = #(1-newline)
-
-patterns.longtostring = Cs(whitespace^0/"" * nonwhitespace^0 * ((whitespace^0/" " * (patterns.quoted + nonwhitespace)^1)^0))
-
-local function anywhere(pattern) --slightly adapted from website
- return P { P(pattern) + 1 * V(1) }
-end
-
-lpeg.anywhere = anywhere
-
+lpeg.patterns=lpeg.patterns or {}
+local patterns=lpeg.patterns
+local anything=P(1)
+local endofstring=P(-1)
+local alwaysmatched=P(true)
+patterns.anything=anything
+patterns.endofstring=endofstring
+patterns.beginofstring=alwaysmatched
+patterns.alwaysmatched=alwaysmatched
+local digit,sign=R('09'),S('+-')
+local cr,lf,crlf=P("\r"),P("\n"),P("\r\n")
+local newline=crlf+S("\r\n")
+local escaped=P("\\")*anything
+local squote=P("'")
+local dquote=P('"')
+local space=P(" ")
+local utfbom_32_be=P('\000\000\254\255')
+local utfbom_32_le=P('\255\254\000\000')
+local utfbom_16_be=P('\255\254')
+local utfbom_16_le=P('\254\255')
+local utfbom_8=P('\239\187\191')
+local utfbom=utfbom_32_be+utfbom_32_le+utfbom_16_be+utfbom_16_le+utfbom_8
+local utftype=utfbom_32_be*Cc("utf-32-be")+utfbom_32_le*Cc("utf-32-le")+utfbom_16_be*Cc("utf-16-be")+utfbom_16_le*Cc("utf-16-le")+utfbom_8*Cc("utf-8")+alwaysmatched*Cc("utf-8")
+local utfoffset=utfbom_32_be*Cc(4)+utfbom_32_le*Cc(4)+utfbom_16_be*Cc(2)+utfbom_16_le*Cc(2)+utfbom_8*Cc(3)+Cc(0)
+local utf8next=R("\128\191")
+patterns.utf8one=R("\000\127")
+patterns.utf8two=R("\194\223")*utf8next
+patterns.utf8three=R("\224\239")*utf8next*utf8next
+patterns.utf8four=R("\240\244")*utf8next*utf8next*utf8next
+patterns.utfbom=utfbom
+patterns.utftype=utftype
+patterns.utfoffset=utfoffset
+local utf8char=patterns.utf8one+patterns.utf8two+patterns.utf8three+patterns.utf8four
+local validutf8char=utf8char^0*endofstring*Cc(true)+Cc(false)
+patterns.utf8=utf8char
+patterns.utf8char=utf8char
+patterns.validutf8=validutf8char
+patterns.validutf8char=validutf8char
+local eol=S("\n\r")
+local spacer=S(" \t\f\v")
+local whitespace=eol+spacer
+local nonspacer=1-spacer
+local nonwhitespace=1-whitespace
+patterns.eol=eol
+patterns.spacer=spacer
+patterns.whitespace=whitespace
+patterns.nonspacer=nonspacer
+patterns.nonwhitespace=nonwhitespace
+local stripper=spacer^0*C((spacer^0*nonspacer^1)^0)
+local collapser=Cs(spacer^0/""*nonspacer^0*((spacer^0/" "*nonspacer^1)^0))
+patterns.stripper=stripper
+patterns.collapser=collapser
+patterns.digit=digit
+patterns.sign=sign
+patterns.cardinal=sign^0*digit^1
+patterns.integer=sign^0*digit^1
+patterns.unsigned=digit^0*P('.')*digit^1
+patterns.float=sign^0*patterns.unsigned
+patterns.cunsigned=digit^0*P(',')*digit^1
+patterns.cfloat=sign^0*patterns.cunsigned
+patterns.number=patterns.float+patterns.integer
+patterns.cnumber=patterns.cfloat+patterns.integer
+patterns.oct=P("0")*R("07")^1
+patterns.octal=patterns.oct
+patterns.HEX=P("0x")*R("09","AF")^1
+patterns.hex=P("0x")*R("09","af")^1
+patterns.hexadecimal=P("0x")*R("09","AF","af")^1
+patterns.lowercase=R("az")
+patterns.uppercase=R("AZ")
+patterns.letter=patterns.lowercase+patterns.uppercase
+patterns.space=space
+patterns.tab=P("\t")
+patterns.spaceortab=patterns.space+patterns.tab
+patterns.newline=newline
+patterns.emptyline=newline^1
+patterns.equal=P("=")
+patterns.comma=P(",")
+patterns.commaspacer=P(",")*spacer^0
+patterns.period=P(".")
+patterns.colon=P(":")
+patterns.semicolon=P(";")
+patterns.underscore=P("_")
+patterns.escaped=escaped
+patterns.squote=squote
+patterns.dquote=dquote
+patterns.nosquote=(escaped+(1-squote))^0
+patterns.nodquote=(escaped+(1-dquote))^0
+patterns.unsingle=(squote/"")*patterns.nosquote*(squote/"")
+patterns.undouble=(dquote/"")*patterns.nodquote*(dquote/"")
+patterns.unquoted=patterns.undouble+patterns.unsingle
+patterns.unspacer=((patterns.spacer^1)/"")^0
+patterns.singlequoted=squote*patterns.nosquote*squote
+patterns.doublequoted=dquote*patterns.nodquote*dquote
+patterns.quoted=patterns.doublequoted+patterns.singlequoted
+patterns.propername=R("AZ","az","__")*R("09","AZ","az","__")^0*P(-1)
+patterns.somecontent=(anything-newline-space)^1
+patterns.beginline=#(1-newline)
+patterns.longtostring=Cs(whitespace^0/""*nonwhitespace^0*((whitespace^0/" "*(patterns.quoted+nonwhitespace)^1)^0))
+local function anywhere(pattern)
+ return P { P(pattern)+1*V(1) }
+end
+lpeg.anywhere=anywhere
function lpeg.instringchecker(p)
- p = anywhere(p)
- return function(str)
- return lpegmatch(p,str) and true or false
- end
+ p=anywhere(p)
+ return function(str)
+ return lpegmatch(p,str) and true or false
+ end
end
-
-function lpeg.splitter(pattern, action)
- return (((1-P(pattern))^1)/action+1)^0
+function lpeg.splitter(pattern,action)
+ return (((1-P(pattern))^1)/action+1)^0
end
-
-function lpeg.tsplitter(pattern, action)
- return Ct((((1-P(pattern))^1)/action+1)^0)
+function lpeg.tsplitter(pattern,action)
+ return Ct((((1-P(pattern))^1)/action+1)^0)
end
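-- A minimal sketch of the two splitters above: the action is applied to every
-- chunk between separators and tsplitter additionally collects the results in
-- a table (inspect being the usual ConTeXt helper):
--
-- local p = lpeg.tsplitter(",",string.upper)
-- inspect(lpeg.match(p,"a,b,c")) -- { "A", "B", "C" }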
-
--- problem: the separator can be an lpeg pattern and that does not hash too well, but
--- it's quite okay as the key is then not garbage collected
-
-local splitters_s, splitters_m, splitters_t = { }, { }, { }
-
+local splitters_s,splitters_m,splitters_t={},{},{}
local function splitat(separator,single)
- local splitter = (single and splitters_s[separator]) or splitters_m[separator]
- if not splitter then
- separator = P(separator)
- local other = C((1 - separator)^0)
- if single then
- local any = anything
- splitter = other * (separator * C(any^0) + "") -- ?
- splitters_s[separator] = splitter
- else
- splitter = other * (separator * other)^0
- splitters_m[separator] = splitter
- end
+ local splitter=(single and splitters_s[separator]) or splitters_m[separator]
+ if not splitter then
+ separator=P(separator)
+ local other=C((1-separator)^0)
+ if single then
+ local any=anything
+ splitter=other*(separator*C(any^0)+"")
+ splitters_s[separator]=splitter
+ else
+ splitter=other*(separator*other)^0
+ splitters_m[separator]=splitter
end
- return splitter
+ end
+ return splitter
end
-
local function tsplitat(separator)
- local splitter = splitters_t[separator]
- if not splitter then
- splitter = Ct(splitat(separator))
- splitters_t[separator] = splitter
- end
- return splitter
-end
-
-lpeg.splitat = splitat
-lpeg.tsplitat = tsplitat
-
+ local splitter=splitters_t[separator]
+ if not splitter then
+ splitter=Ct(splitat(separator))
+ splitters_t[separator]=splitter
+ end
+ return splitter
+end
+lpeg.splitat=splitat
+lpeg.tsplitat=tsplitat
function string.splitup(str,separator)
- if not separator then
- separator = ","
- end
- return lpegmatch(splitters_m[separator] or splitat(separator),str)
+ if not separator then
+ separator=","
+ end
+ return lpegmatch(splitters_m[separator] or splitat(separator),str)
end
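-- A minimal sketch of splitat's single/multi behaviour:
--
-- local p = lpeg.splitat("->",false) print(lpeg.match(p,"oeps->what->more")) -- oeps what more
-- local p = lpeg.splitat("->",true)  print(lpeg.match(p,"oeps->what->more")) -- oeps what->more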
-
--- local p = splitat("->",false) print(lpegmatch(p,"oeps->what->more")) -- oeps what more
--- local p = splitat("->",true) print(lpegmatch(p,"oeps->what->more")) -- oeps what->more
--- local p = splitat("->",false) print(lpegmatch(p,"oeps")) -- oeps
--- local p = splitat("->",true) print(lpegmatch(p,"oeps")) -- oeps
-
-local cache = { }
-
+local cache={}
function lpeg.split(separator,str)
- local c = cache[separator]
- if not c then
- c = tsplitat(separator)
- cache[separator] = c
- end
- return lpegmatch(c,str)
+ local c=cache[separator]
+ if not c then
+ c=tsplitat(separator)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
end
-
function string.split(str,separator)
- if separator then
- local c = cache[separator]
- if not c then
- c = tsplitat(separator)
- cache[separator] = c
- end
- return lpegmatch(c,str)
- else
- return { str }
+ if separator then
+ local c=cache[separator]
+ if not c then
+ c=tsplitat(separator)
+ cache[separator]=c
end
-end
-
-local spacing = patterns.spacer^0 * newline -- sort of strip
-local empty = spacing * Cc("")
-local nonempty = Cs((1-spacing)^1) * spacing^-1
-local content = (empty + nonempty)^1
-
-patterns.textline = content
-
-local linesplitter = tsplitat(newline)
-
-patterns.linesplitter = linesplitter
-
+ return lpegmatch(c,str)
+ else
+ return { str }
+ end
+end
+local spacing=patterns.spacer^0*newline
+local empty=spacing*Cc("")
+local nonempty=Cs((1-spacing)^1)*spacing^-1
+local content=(empty+nonempty)^1
+patterns.textline=content
+local linesplitter=tsplitat(newline)
+patterns.linesplitter=linesplitter
function string.splitlines(str)
- return lpegmatch(linesplitter,str)
+ return lpegmatch(linesplitter,str)
end
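-- Sketch: splitlines splits on cr, lf and crlf and keeps empty lines as "":
--
-- inspect(string.splitlines("one\ntwo\r\n\nthree")) -- { "one", "two", "", "three" }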
-
--- lpeg.splitters = cache -- no longer public
-
-local cache = { }
-
+local cache={}
function lpeg.checkedsplit(separator,str)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return lpegmatch(c,str)
+ local c=cache[separator]
+ if not c then
+ separator=P(separator)
+ local other=C((1-separator)^1)
+ c=Ct(separator^0*other*(separator^1*other)^0)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
end
-
function string.checkedsplit(str,separator)
- local c = cache[separator]
- if not c then
- separator = P(separator)
- local other = C((1 - separator)^1)
- c = Ct(separator^0 * other * (separator^1 * other)^0)
- cache[separator] = c
- end
- return lpegmatch(c,str)
-end
-
--- from roberto's site:
-
-local function f2(s) local c1, c2 = byte(s,1,2) return c1 * 64 + c2 - 12416 end
-local function f3(s) local c1, c2, c3 = byte(s,1,3) return (c1 * 64 + c2) * 64 + c3 - 925824 end
-local function f4(s) local c1, c2, c3, c4 = byte(s,1,4) return ((c1 * 64 + c2) * 64 + c3) * 64 + c4 - 63447168 end
-
-local utf8byte = patterns.utf8one/byte + patterns.utf8two/f2 + patterns.utf8three/f3 + patterns.utf8four/f4
-
-patterns.utf8byte = utf8byte
-
---~ local str = " a b c d "
-
---~ local s = lpeg.stripper(lpeg.R("az")) print("["..lpegmatch(s,str).."]")
---~ local s = lpeg.keeper(lpeg.R("az")) print("["..lpegmatch(s,str).."]")
---~ local s = lpeg.stripper("ab") print("["..lpegmatch(s,str).."]")
---~ local s = lpeg.keeper("ab") print("["..lpegmatch(s,str).."]")
-
-local cache = { }
-
+ local c=cache[separator]
+ if not c then
+ separator=P(separator)
+ local other=C((1-separator)^1)
+ c=Ct(separator^0*other*(separator^1*other)^0)
+ cache[separator]=c
+ end
+ return lpegmatch(c,str)
+end
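-- Sketch of the difference between the two splitters: checkedsplit drops empty
-- pieces while plain split keeps them:
--
-- inspect(lpeg.split(",",",a,,b,"))        -- { "", "a", "", "b", "" }
-- inspect(lpeg.checkedsplit(",",",a,,b,")) -- { "a", "b" }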
+local function f2(s) local c1,c2=byte(s,1,2) return c1*64+c2-12416 end
+local function f3(s) local c1,c2,c3=byte(s,1,3) return (c1*64+c2)*64+c3-925824 end
+local function f4(s) local c1,c2,c3,c4=byte(s,1,4) return ((c1*64+c2)*64+c3)*64+c4-63447168 end
+local utf8byte=patterns.utf8one/byte+patterns.utf8two/f2+patterns.utf8three/f3+patterns.utf8four/f4
+patterns.utf8byte=utf8byte
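-- The magic constants above are just the utf-8 lead and continuation offsets
-- folded into one number: for a two byte sequence the code point equals
-- (c1-0xC0)*64+(c2-0x80), and 0xC0*64+0x80 = 12416, so c1*64+c2-12416 gives the
-- same value; 925824 and 63447168 play that role for three and four bytes.
--
-- print(lpeg.match(lpeg.patterns.utf8byte,"€")) -- 8364 (U+20AC)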
+local cache={}
function lpeg.stripper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs(((S(str)^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs(((str^1)/"" + 1)^0)
+ if type(str)=="string" then
+ local s=cache[str]
+ if not s then
+ s=Cs(((S(str)^1)/""+1)^0)
+ cache[str]=s
end
+ return s
+ else
+ return Cs(((str^1)/""+1)^0)
+ end
end
-
-local cache = { }
-
+local cache={}
function lpeg.keeper(str)
- if type(str) == "string" then
- local s = cache[str]
- if not s then
- s = Cs((((1-S(str))^1)/"" + 1)^0)
- cache[str] = s
- end
- return s
- else
- return Cs((((1-str)^1)/"" + 1)^0)
- end
-end
-
-function lpeg.frontstripper(str) -- or pattern (yet undocumented)
- return (P(str) + P(true)) * Cs(anything^0)
-end
-
-function lpeg.endstripper(str) -- or pattern (yet undocumented)
- return Cs((1 - P(str) * endofstring)^0)
-end
-
--- Just for fun I looked at the used bytecode and
--- p = (p and p + pp) or pp gets one more (testset).
-
--- todo: cache when string
-
-function lpeg.replacer(one,two,makefunction,isutf) -- in principle we should sort the keys
- local pattern
- local u = isutf and utf8char or 1
- if type(one) == "table" then
- local no = #one
- local p = P(false)
- if no == 0 then
- for k, v in next, one do
- p = p + P(k) / v
- end
- pattern = Cs((p + u)^0)
- elseif no == 1 then
- local o = one[1]
- one, two = P(o[1]), o[2]
- -- pattern = Cs(((1-one)^1 + one/two)^0)
- pattern = Cs((one/two + u)^0)
- else
- for i=1,no do
- local o = one[i]
- p = p + P(o[1]) / o[2]
- end
- pattern = Cs((p + u)^0)
- end
- else
- pattern = Cs((P(one)/(two or "") + u)^0)
+ if type(str)=="string" then
+ local s=cache[str]
+ if not s then
+ s=Cs((((1-S(str))^1)/""+1)^0)
+ cache[str]=s
end
- if makefunction then
- return function(str)
- return lpegmatch(pattern,str)
- end
+ return s
+ else
+ return Cs((((1-str)^1)/""+1)^0)
+ end
+end
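-- A minimal sketch: stripper drops the given characters, keeper drops
-- everything else:
--
-- local str = " a b c d "
-- print("["..lpeg.match(lpeg.stripper("ab"),str).."]") -- [   c d ]
-- print("["..lpeg.match(lpeg.keeper ("ab"),str).."]")  -- [ab]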
+function lpeg.frontstripper(str)
+ return (P(str)+P(true))*Cs(anything^0)
+end
+function lpeg.endstripper(str)
+ return Cs((1-P(str)*endofstring)^0)
+end
+function lpeg.replacer(one,two,makefunction,isutf)
+ local pattern
+ local u=isutf and utf8char or 1
+ if type(one)=="table" then
+ local no=#one
+ local p=P(false)
+ if no==0 then
+ for k,v in next,one do
+ p=p+P(k)/v
+ end
+ pattern=Cs((p+u)^0)
+ elseif no==1 then
+ local o=one[1]
+ one,two=P(o[1]),o[2]
+ pattern=Cs((one/two+u)^0)
else
- return pattern
+ for i=1,no do
+ local o=one[i]
+ p=p+P(o[1])/o[2]
+ end
+ pattern=Cs((p+u)^0)
+ end
+ else
+ pattern=Cs((P(one)/(two or "")+u)^0)
+ end
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
end
+ else
+ return pattern
+ end
end
-
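-- A minimal sketch of the replacer variants (single pair versus mapping table):
--
-- print(lpeg.match(lpeg.replacer("e","a"),"test test"))             -- tast tast
-- print(lpeg.match(lpeg.replacer { e = "a", t = "x" },"test test")) -- xasx xasx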
function lpeg.finder(lst,makefunction)
- local pattern
- if type(lst) == "table" then
- pattern = P(false)
- if #lst == 0 then
- for k, v in next, lst do
- pattern = pattern + P(k) -- ignore key, so we can use a replacer table
- end
- else
- for i=1,#lst do
- pattern = pattern + P(lst[i])
- end
- end
+ local pattern
+ if type(lst)=="table" then
+ pattern=P(false)
+ if #lst==0 then
+ for k,v in next,lst do
+ pattern=pattern+P(k)
+ end
else
- pattern = P(lst)
- end
- pattern = (1-pattern)^0 * pattern
- if makefunction then
- return function(str)
- return lpegmatch(pattern,str)
- end
- else
- return pattern
- end
-end
-
--- print(lpeg.match(lpeg.replacer("e","a"),"test test"))
--- print(lpeg.match(lpeg.replacer{{"e","a"}},"test test"))
--- print(lpeg.match(lpeg.replacer({ e = "a", t = "x" }),"test test"))
-
-local splitters_f, splitters_s = { }, { }
-
-function lpeg.firstofsplit(separator) -- always return value
- local splitter = splitters_f[separator]
- if not splitter then
- separator = P(separator)
- splitter = C((1 - separator)^0)
- splitters_f[separator] = splitter
- end
- return splitter
-end
-
-function lpeg.secondofsplit(separator) -- nil if not split
- local splitter = splitters_s[separator]
- if not splitter then
- separator = P(separator)
- splitter = (1 - separator)^0 * separator * C(anything^0)
- splitters_s[separator] = splitter
- end
- return splitter
+ for i=1,#lst do
+ pattern=pattern+P(lst[i])
+ end
+ end
+ else
+ pattern=P(lst)
+ end
+ pattern=(1-pattern)^0*pattern
+ if makefunction then
+ return function(str)
+ return lpegmatch(pattern,str)
+ end
+ else
+ return pattern
+ end
+end
+local splitters_f,splitters_s={},{}
+function lpeg.firstofsplit(separator)
+ local splitter=splitters_f[separator]
+ if not splitter then
+ separator=P(separator)
+ splitter=C((1-separator)^0)
+ splitters_f[separator]=splitter
+ end
+ return splitter
+end
+function lpeg.secondofsplit(separator)
+ local splitter=splitters_s[separator]
+ if not splitter then
+ separator=P(separator)
+ splitter=(1-separator)^0*separator*C(anything^0)
+ splitters_s[separator]=splitter
+ end
+ return splitter
end
-
function lpeg.balancer(left,right)
- left, right = P(left), P(right)
- return P { left * ((1 - left - right) + V(1))^0 * right }
+ left,right=P(left),P(right)
+ return P { left*((1-left-right)+V(1))^0*right }
end
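-- Sketch: firstofsplit always returns the part before the separator,
-- secondofsplit returns nil when there is nothing to split, and balancer
-- matches a balanced pair of delimiters:
--
-- print(lpeg.match(lpeg.firstofsplit(":"),"bc:de"))    -- bc
-- print(lpeg.match(lpeg.secondofsplit(":"),"bc:de"))   -- de
-- print(lpeg.match(lpeg.secondofsplit(":"),"bc"))      -- nil
-- print(lpeg.match(lpeg.balancer("(",")"),"(a(b)c)d")) -- 8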
-
--- print(1,lpegmatch(lpeg.firstofsplit(":"),"bc:de"))
--- print(2,lpegmatch(lpeg.firstofsplit(":"),":de")) -- empty
--- print(3,lpegmatch(lpeg.firstofsplit(":"),"bc"))
--- print(4,lpegmatch(lpeg.secondofsplit(":"),"bc:de"))
--- print(5,lpegmatch(lpeg.secondofsplit(":"),"bc:")) -- empty
--- print(6,lpegmatch(lpeg.secondofsplit(":",""),"bc"))
--- print(7,lpegmatch(lpeg.secondofsplit(":"),"bc"))
--- print(9,lpegmatch(lpeg.secondofsplit(":","123"),"bc"))
-
--- -- slower:
---
--- function lpeg.counter(pattern)
--- local n, pattern = 0, (lpeg.P(pattern)/function() n = n + 1 end + lpeg.anything)^0
--- return function(str) n = 0 ; lpegmatch(pattern,str) ; return n end
--- end
-
-local nany = utf8char/""
-
+local nany=utf8char/""
function lpeg.counter(pattern)
- pattern = Cs((P(pattern)/" " + nany)^0)
- return function(str)
- return #lpegmatch(pattern,str)
- end
-end
-
--- utf extensions
-
-local utfcharacters = utf and utf.characters or string.utfcharacters
-local utfgmatch = unicode and unicode.utf8.gmatch
-local utfchar = utf and utf.char or (unicode and unicode.utf8 and unicode.utf8.char)
-
-lpeg.UP = lpeg.P
-
+ pattern=Cs((P(pattern)/" "+nany)^0)
+ return function(str)
+ return #lpegmatch(pattern,str)
+ end
+end
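-- Sketch: counter(p) returns a function that counts how often p matches,
-- utf aware because everything else is collapsed via nany:
--
-- print(lpeg.counter("a")("banana")) -- 3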
+local utfcharacters=utf and utf.characters or string.utfcharacters
+local utfgmatch=unicode and unicode.utf8.gmatch
+local utfchar=utf and utf.char or (unicode and unicode.utf8 and unicode.utf8.char)
+lpeg.UP=lpeg.P
if utfcharacters then
-
- function lpeg.US(str)
- local p = P(false)
- for uc in utfcharacters(str) do
- p = p + P(uc)
- end
- return p
+ function lpeg.US(str)
+ local p=P(false)
+ for uc in utfcharacters(str) do
+ p=p+P(uc)
end
-
-
+ return p
+ end
elseif utfgmatch then
-
- function lpeg.US(str)
- local p = P(false)
- for uc in utfgmatch(str,".") do
- p = p + P(uc)
- end
- return p
+ function lpeg.US(str)
+ local p=P(false)
+ for uc in utfgmatch(str,".") do
+ p=p+P(uc)
end
-
+ return p
+ end
else
-
- function lpeg.US(str)
- local p = P(false)
- local f = function(uc)
- p = p + P(uc)
- end
- lpegmatch((utf8char/f)^0,str)
- return p
+ function lpeg.US(str)
+ local p=P(false)
+ local f=function(uc)
+ p=p+P(uc)
end
-
+ lpegmatch((utf8char/f)^0,str)
+ return p
+ end
end
-
-local range = utf8byte * utf8byte + Cc(false) -- utf8byte is already a capture
-
+local range=utf8byte*utf8byte+Cc(false)
function lpeg.UR(str,more)
- local first, last
- if type(str) == "number" then
- first = str
- last = more or first
- else
- first, last = lpegmatch(range,str)
- if not last then
- return P(str)
- end
- end
- if first == last then
- return P(str)
-    elseif utfchar and (last - first < 8) then -- a somewhat arbitrary criterion
- local p = P(false)
- for i=first,last do
- p = p + P(utfchar(i))
- end
- return p -- nil when invalid range
- else
- local f = function(b)
- return b >= first and b <= last
- end
- -- tricky, these nested captures
- return utf8byte / f -- nil when invalid range
- end
+ local first,last
+ if type(str)=="number" then
+ first=str
+ last=more or first
+ else
+ first,last=lpegmatch(range,str)
+ if not last then
+ return P(str)
+ end
+ end
+ if first==last then
+ return P(str)
+ elseif utfchar and (last-first<8) then
+ local p=P(false)
+ for i=first,last do
+ p=p+P(utfchar(i))
+ end
+ return p
+ else
+ local f=function(b)
+ return b>=first and b<=last
+ end
+ return utf8byte/f
+ end
end
-
--- print(lpeg.match(lpeg.Cs((C(lpeg.UR("αω"))/{ ["χ"] = "OEPS" })^0),"αωχαω"))
-
--- lpeg.print(lpeg.R("ab","cd","gh"))
--- lpeg.print(lpeg.P("a","b","c"))
--- lpeg.print(lpeg.S("a","b","c"))
-
--- print(lpeg.count("äáàa",lpeg.P("á") + lpeg.P("à")))
--- print(lpeg.count("äáàa",lpeg.UP("áà")))
--- print(lpeg.count("äáàa",lpeg.US("àá")))
--- print(lpeg.count("äáàa",lpeg.UR("aá")))
--- print(lpeg.count("äáàa",lpeg.UR("àá")))
--- print(lpeg.count("äáàa",lpeg.UR(0x0000,0xFFFF)))
-
function lpeg.is_lpeg(p)
- return p and lpegtype(p) == "pattern"
-end
-
-function lpeg.oneof(list,...) -- lpeg.oneof("elseif","else","if","then") -- assume proper order
- if type(list) ~= "table" then
- list = { list, ... }
- end
- -- table.sort(list) -- longest match first
- local p = P(list[1])
- for l=2,#list do
- p = p + P(list[l])
- end
- return p
-end
-
--- For the moment here, but it might move to utilities. Beware, we need to
--- have the longest keyword first, so 'aaa' comes before 'aa', which is why we
--- loop back from the end, i.e. prepend.
-
-local sort = table.sort
-
+ return p and lpegtype(p)=="pattern"
+end
+function lpeg.oneof(list,...)
+ if type(list)~="table" then
+ list={ list,... }
+ end
+ local p=P(list[1])
+ for l=2,#list do
+ p=p+P(list[l])
+ end
+ return p
+end
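-- Sketch: oneof builds an ordered choice, so longer alternatives have to be
-- passed first when one is a prefix of another:
--
-- print(lpeg.match(lpeg.oneof("elseif","else"),"elseif then")) -- 7
-- print(lpeg.match(lpeg.oneof("else","elseif"),"elseif then")) -- 5 (shorter wins)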
+local sort=table.sort
local function copyindexed(old)
- local new = { }
- for i=1,#old do
- new[i] = old
- end
- return new
+ local new={}
+ for i=1,#old do
+ new[i]=old
+ end
+ return new
end
-
local function sortedkeys(tab)
- local keys, s = { }, 0
- for key,_ in next, tab do
- s = s + 1
- keys[s] = key
- end
- sort(keys)
- return keys
+ local keys,s={},0
+ for key,_ in next,tab do
+ s=s+1
+ keys[s]=key
+ end
+ sort(keys)
+ return keys
end
-
function lpeg.append(list,pp,delayed,checked)
- local p = pp
- if #list > 0 then
- local keys = copyindexed(list)
- sort(keys)
- for i=#keys,1,-1 do
- local k = keys[i]
- if p then
- p = P(k) + p
- else
- p = P(k)
- end
- end
- elseif delayed then -- hm, it looks like the lpeg parser resolves anyway
- local keys = sortedkeys(list)
+ local p=pp
+ if #list>0 then
+ local keys=copyindexed(list)
+ sort(keys)
+ for i=#keys,1,-1 do
+ local k=keys[i]
+ if p then
+ p=P(k)+p
+ else
+ p=P(k)
+ end
+ end
+ elseif delayed then
+ local keys=sortedkeys(list)
+ if p then
+ for i=1,#keys,1 do
+ local k=keys[i]
+ local v=list[k]
+ p=P(k)/list+p
+ end
+ else
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
if p then
- for i=1,#keys,1 do
- local k = keys[i]
- local v = list[k]
- p = P(k)/list + p
- end
+ p=P(k)+p
else
- for i=1,#keys do
- local k = keys[i]
- local v = list[k]
- if p then
- p = P(k) + p
- else
- p = P(k)
- end
- end
- if p then
- p = p / list
- end
- end
- elseif checked then
- -- problem: substitution gives a capture
- local keys = sortedkeys(list)
- for i=1,#keys do
- local k = keys[i]
- local v = list[k]
- if p then
- if k == v then
- p = P(k) + p
- else
- p = P(k)/v + p
- end
- else
- if k == v then
- p = P(k)
- else
- p = P(k)/v
- end
- end
- end
- else
- local keys = sortedkeys(list)
- for i=1,#keys do
- local k = keys[i]
- local v = list[k]
- if p then
- p = P(k)/v + p
- else
- p = P(k)/v
- end
+ p=P(k)
end
+ end
+ if p then
+ p=p/list
+ end
end
- return p
-end
-
--- inspect(lpeg.append({ a = "1", aa = "1", aaa = "1" } ,nil,true))
--- inspect(lpeg.append({ ["degree celsius"] = "1", celsius = "1", degree = "1" } ,nil,true))
-
--- function lpeg.exact_match(words,case_insensitive)
--- local pattern = concat(words)
--- if case_insensitive then
--- local pattern = S(upper(characters)) + S(lower(characters))
--- local list = { }
--- for i=1,#words do
--- list[lower(words[i])] = true
--- end
--- return Cmt(pattern^1, function(_,i,s)
--- return list[lower(s)] and i
--- end)
--- else
--- local pattern = S(concat(words))
--- local list = { }
--- for i=1,#words do
--- list[words[i]] = true
--- end
--- return Cmt(pattern^1, function(_,i,s)
--- return list[s] and i
--- end)
--- end
--- end
-
--- experiment:
-
-local function make(t)
- local p
- local keys = sortedkeys(t)
+ elseif checked then
+ local keys=sortedkeys(list)
for i=1,#keys do
- local k = keys[i]
- local v = t[k]
- if not p then
- if next(v) then
- p = P(k) * make(v)
- else
- p = P(k)
- end
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ if k==v then
+ p=P(k)+p
else
- if next(v) then
- p = p + P(k) * make(v)
- else
- p = p + P(k)
- end
+ p=P(k)/v+p
end
- end
- return p
-end
-
-function lpeg.utfchartabletopattern(list) -- goes to util-lpg
- local tree = { }
- for i=1,#list do
- local t = tree
- for c in gmatch(list[i],".") do
- if not t[c] then
- t[c] = { }
- end
- t = t[c]
- end
- end
- return make(tree)
-end
-
--- inspect ( lpeg.utfchartabletopattern {
--- utfchar(0x00A0), -- nbsp
--- utfchar(0x2000), -- enquad
--- utfchar(0x2001), -- emquad
--- utfchar(0x2002), -- enspace
--- utfchar(0x2003), -- emspace
--- utfchar(0x2004), -- threeperemspace
--- utfchar(0x2005), -- fourperemspace
--- utfchar(0x2006), -- sixperemspace
--- utfchar(0x2007), -- figurespace
--- utfchar(0x2008), -- punctuationspace
--- utfchar(0x2009), -- breakablethinspace
--- utfchar(0x200A), -- hairspace
--- utfchar(0x200B), -- zerowidthspace
--- utfchar(0x202F), -- narrownobreakspace
--- utfchar(0x205F), -- math thinspace
--- } )
-
--- a few handy ones:
---
--- faster than find(str,"[\n\r]") when match and # > 7 and always faster when # > 3
-
-patterns.containseol = lpeg.finder(eol) -- (1-eol)^0 * eol
-
--- The next pattern^n variant is based on an approach suggested
--- by Roberto: constructing a big repetition in chunks.
---
--- Being sparse is not needed, it only complicates matters, and
--- the number of redundant entries is not that large.
-
-local function nextstep(n,step,result)
- local m = n % step -- mod(n,step)
- local d = floor(n/step) -- div(n,step)
- if d > 0 then
- local v = V(tostring(step))
- local s = result.start
- for i=1,d do
- if s then
- s = v * s
- else
- s = v
- end
+ else
+ if k==v then
+ p=P(k)
+ else
+ p=P(k)/v
end
- result.start = s
+ end
end
- if step > 1 and result.start then
- local v = V(tostring(step/2))
- result[tostring(step)] = v * v
+ else
+ local keys=sortedkeys(list)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=list[k]
+ if p then
+ p=P(k)/v+p
+ else
+ p=P(k)/v
+ end
end
- if step > 0 then
- return nextstep(m,step/2,result)
+ end
+ return p
+end
+local function make(t)
+ local p
+ local keys=sortedkeys(t)
+ for i=1,#keys do
+ local k=keys[i]
+ local v=t[k]
+ if not p then
+ if next(v) then
+ p=P(k)*make(v)
+ else
+ p=P(k)
+ end
else
- return result
- end
+ if next(v) then
+ p=p+P(k)*make(v)
+ else
+ p=p+P(k)
+ end
+ end
+ end
+ return p
+end
+function lpeg.utfchartabletopattern(list)
+ local tree={}
+ for i=1,#list do
+ local t=tree
+ for c in gmatch(list[i],".") do
+ if not t[c] then
+ t[c]={}
+ end
+ t=t[c]
+ end
+ end
+ return make(tree)
+end
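-- Sketch: the list is folded into a prefix tree and then into an ordered
-- choice, e.g. { "aa", "ab", "b" } becomes P("a")*(P("a")+P("b"))+P("b");
-- normally it is fed with a list of (multibyte) utf characters:
--
-- print(lpeg.match(lpeg.utfchartabletopattern { "aa", "ab", "b" },"abc")) -- 3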
+patterns.containseol=lpeg.finder(eol)
+local function nextstep(n,step,result)
+ local m=n%step
+ local d=floor(n/step)
+ if d>0 then
+ local v=V(tostring(step))
+ local s=result.start
+ for i=1,d do
+ if s then
+ s=v*s
+ else
+ s=v
+ end
+ end
+ result.start=s
+ end
+ if step>1 and result.start then
+ local v=V(tostring(step/2))
+ result[tostring(step)]=v*v
+ end
+ if step>0 then
+ return nextstep(m,step/2,result)
+ else
+ return result
+ end
end
-
function lpeg.times(pattern,n)
- return P(nextstep(n,2^16,{ "start", ["1"] = pattern }))
+ return P(nextstep(n,2^16,{ "start",["1"]=pattern }))
end
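-- Sketch: times(p,n) matches exactly n repetitions of p, building the
-- repetition as a grammar in powers of two:
--
-- print(lpeg.match(lpeg.times(lpeg.P("ab"),3),"ababab")) -- 7
-- print(lpeg.match(lpeg.times(lpeg.P("ab"),3),"abab"))   -- nil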
--- local p = lpeg.Cs((1 - lpeg.times(lpeg.P("AB"),25))^1)
--- local s = "12" .. string.rep("AB",20) .. "34" .. string.rep("AB",30) .. "56"
--- inspect(p)
--- print(lpeg.match(p,s))
-
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['l-functions'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['l-functions']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-functions = functions or { }
-
+functions=functions or {}
function functions.dummy() end
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['l-string'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['l-string']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-local string = string
-local sub, gmatch, format, char, byte, rep, lower = string.sub, string.gmatch, string.format, string.char, string.byte, string.rep, string.lower
-local lpegmatch, patterns = lpeg.match, lpeg.patterns
-local P, S, C, Ct, Cc, Cs = lpeg.P, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cc, lpeg.Cs
-
--- Some functions are already defined in l-lpeg and maybe some from here will
--- move there (unless we also expose caches).
-
--- if not string.split then
---
--- function string.split(str,pattern)
--- local t = { }
--- if #str > 0 then
--- local n = 1
--- for s in gmatch(str..pattern,"(.-)"..pattern) do
--- t[n] = s
--- n = n + 1
--- end
--- end
--- return t
--- end
---
--- end
-
--- function string.unquoted(str)
--- return (gsub(str,"^([\"\'])(.*)%1$","%2")) -- interesting pattern
--- end
-
-local unquoted = patterns.squote * C(patterns.nosquote) * patterns.squote
- + patterns.dquote * C(patterns.nodquote) * patterns.dquote
-
+local string=string
+local sub,gmatch,format,char,byte,rep,lower=string.sub,string.gmatch,string.format,string.char,string.byte,string.rep,string.lower
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local P,S,C,Ct,Cc,Cs=lpeg.P,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.Cs
+local unquoted=patterns.squote*C(patterns.nosquote)*patterns.squote+patterns.dquote*C(patterns.nodquote)*patterns.dquote
function string.unquoted(str)
- return lpegmatch(unquoted,str) or str
+ return lpegmatch(unquoted,str) or str
end
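-- Sketch: unquoted strips one level of matching single or double quotes and
-- returns unquoted strings as they are:
--
-- print(string.unquoted('"test"')) -- test
-- print(string.unquoted("test"))   -- test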
-
--- print(string.unquoted("test"))
--- print(string.unquoted([["t\"est"]]))
--- print(string.unquoted([["t\"est"x]]))
--- print(string.unquoted("\'test\'"))
--- print(string.unquoted('"test"'))
--- print(string.unquoted('"test"'))
-
function string.quoted(str)
- return format("%q",str) -- always double quote
-end
-
-function string.count(str,pattern) -- variant 3
- local n = 0
- for _ in gmatch(str,pattern) do -- not for utf
- n = n + 1
- end
- return n
-end
-
-function string.limit(str,n,sentinel) -- not utf proof
- if #str > n then
- sentinel = sentinel or "..."
- return sub(str,1,(n-#sentinel)) .. sentinel
- else
- return str
- end
+ return format("%q",str)
+end
+function string.count(str,pattern)
+ local n=0
+ for _ in gmatch(str,pattern) do
+ n=n+1
+ end
+ return n
+end
+function string.limit(str,n,sentinel)
+ if #str>n then
+ sentinel=sentinel or "..."
+ return sub(str,1,(n-#sentinel))..sentinel
+ else
+ return str
+ end
end
-
-local stripper = patterns.stripper
-local collapser = patterns.collapser
-local longtostring = patterns.longtostring
-
+local stripper=patterns.stripper
+local collapser=patterns.collapser
+local longtostring=patterns.longtostring
function string.strip(str)
- return lpegmatch(stripper,str) or ""
+ return lpegmatch(stripper,str) or ""
end
-
function string.collapsespaces(str)
- return lpegmatch(collapser,str) or ""
+ return lpegmatch(collapser,str) or ""
end
-
function string.longtostring(str)
- return lpegmatch(longtostring,str) or ""
+ return lpegmatch(longtostring,str) or ""
end
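-- Sketch: strip removes leading and trailing spacers, collapsespaces also
-- squeezes runs of them:
--
-- print("["..string.strip("  a  b  ").."]")          -- [a  b]
-- print("["..string.collapsespaces("  a  b  ").."]") -- [a b]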
-
--- function string.is_empty(str)
--- return not find(str,"%S")
--- end
-
-local pattern = P(" ")^0 * P(-1)
-
+local pattern=P(" ")^0*P(-1)
function string.is_empty(str)
- if str == "" then
- return true
- else
- return lpegmatch(pattern,str) and true or false
- end
-end
-
--- if not string.escapedpattern then
---
--- local patterns_escapes = {
--- ["%"] = "%%",
--- ["."] = "%.",
--- ["+"] = "%+", ["-"] = "%-", ["*"] = "%*",
--- ["["] = "%[", ["]"] = "%]",
--- ["("] = "%(", [")"] = "%)",
--- -- ["{"] = "%{", ["}"] = "%}"
--- -- ["^"] = "%^", ["$"] = "%$",
--- }
---
--- local simple_escapes = {
--- ["-"] = "%-",
--- ["."] = "%.",
--- ["?"] = ".",
--- ["*"] = ".*",
--- }
---
--- function string.escapedpattern(str,simple)
--- return (gsub(str,".",simple and simple_escapes or patterns_escapes))
--- end
---
--- function string.topattern(str,lowercase,strict)
--- if str == "" then
--- return ".*"
--- else
--- str = gsub(str,".",simple_escapes)
--- if lowercase then
--- str = lower(str)
--- end
--- if strict then
--- return "^" .. str .. "$"
--- else
--- return str
--- end
--- end
--- end
---
--- end
-
---- needs checking
-
-local anything = patterns.anything
-local allescapes = Cc("%") * S(".-+%?()[]*") -- also {} and ^$ ?
-local someescapes = Cc("%") * S(".-+%()[]") -- also {} and ^$ ?
-local matchescapes = Cc(".") * S("*?") -- wildcard and single match
-
-local pattern_a = Cs ( ( allescapes + anything )^0 )
-local pattern_b = Cs ( ( someescapes + matchescapes + anything )^0 )
-local pattern_c = Cs ( Cc("^") * ( someescapes + matchescapes + anything )^0 * Cc("$") )
-
+ if str=="" then
+ return true
+ else
+ return lpegmatch(pattern,str) and true or false
+ end
+end
+local anything=patterns.anything
+local allescapes=Cc("%")*S(".-+%?()[]*")
+local someescapes=Cc("%")*S(".-+%()[]")
+local matchescapes=Cc(".")*S("*?")
+local pattern_a=Cs ((allescapes+anything )^0 )
+local pattern_b=Cs ((someescapes+matchescapes+anything )^0 )
+local pattern_c=Cs (Cc("^")*(someescapes+matchescapes+anything )^0*Cc("$") )
function string.escapedpattern(str,simple)
- return lpegmatch(simple and pattern_b or pattern_a,str)
+ return lpegmatch(simple and pattern_b or pattern_a,str)
end
-
function string.topattern(str,lowercase,strict)
- if str == "" then
- return ".*"
- elseif strict then
- str = lpegmatch(pattern_c,str)
- else
- str = lpegmatch(pattern_b,str)
- end
- if lowercase then
- return lower(str)
- else
- return str
- end
+ if str=="" then
+ return ".*"
+ elseif strict then
+ str=lpegmatch(pattern_c,str)
+ else
+ str=lpegmatch(pattern_b,str)
+ end
+ if lowercase then
+ return lower(str)
+ else
+ return str
+ end
end
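-- Sketch: topattern escapes Lua pattern magic characters and turns the *
-- wildcard into .*; strict mode anchors the result:
--
-- print(string.topattern("12+34*.tex"))            -- 12%+34.*%.tex
-- print(string.topattern("12+34*.tex",false,true)) -- ^12%+34.*%.tex$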
-
--- print(string.escapedpattern("12+34*.tex",false))
--- print(string.escapedpattern("12+34*.tex",true))
--- print(string.topattern ("12+34*.tex",false,false))
--- print(string.topattern ("12+34*.tex",false,true))
-
function string.valid(str,default)
- return (type(str) == "string" and str ~= "" and str) or default or nil
+ return (type(str)=="string" and str~="" and str) or default or nil
end
-
--- handy fallback
-
-string.itself = function(s) return s end
-
--- also handy (see utf variant)
-
-local pattern = Ct(C(1)^0) -- string and not utf !
-
+string.itself=function(s) return s end
+local pattern=Ct(C(1)^0)
function string.totable(str)
- return lpegmatch(pattern,str)
+ return lpegmatch(pattern,str)
end
-
--- handy from within tex:
-
-local replacer = lpeg.replacer("@","%%") -- Watch the escaped % in lpeg!
-
+local replacer=lpeg.replacer("@","%%")
function string.tformat(fmt,...)
- return format(lpegmatch(replacer,fmt),...)
+ return format(lpegmatch(replacer,fmt),...)
end
-
--- obsolete names:
-
-string.quote = string.quoted
-string.unquote = string.unquoted
+string.quote=string.quoted
+string.unquote=string.unquoted
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['l-table'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['l-table']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-local type, next, tostring, tonumber, ipairs, select = type, next, tostring, tonumber, ipairs, select
-local table, string = table, string
-local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
-local format, lower, dump = string.format, string.lower, string.dump
-local getmetatable, setmetatable = getmetatable, setmetatable
-local getinfo = debug.getinfo
-local lpegmatch, patterns = lpeg.match, lpeg.patterns
-local floor = math.floor
-
--- extra functions, some might go (when not used)
-
-local stripper = patterns.stripper
-
+local type,next,tostring,tonumber,ipairs,select=type,next,tostring,tonumber,ipairs,select
+local table,string=table,string
+local concat,sort,insert,remove=table.concat,table.sort,table.insert,table.remove
+local format,lower,dump=string.format,string.lower,string.dump
+local getmetatable,setmetatable=getmetatable,setmetatable
+local getinfo=debug.getinfo
+local lpegmatch,patterns=lpeg.match,lpeg.patterns
+local floor=math.floor
+local stripper=patterns.stripper
function table.strip(tab)
- local lst, l = { }, 0
- for i=1,#tab do
- local s = lpegmatch(stripper,tab[i]) or ""
- if s == "" then
- -- skip this one
- else
- l = l + 1
- lst[l] = s
- end
+ local lst,l={},0
+ for i=1,#tab do
+ local s=lpegmatch(stripper,tab[i]) or ""
+ if s=="" then
+ else
+ l=l+1
+ lst[l]=s
end
- return lst
+ end
+ return lst
end
-
function table.keys(t)
- if t then
- local keys, k = { }, 0
- for key, _ in next, t do
- k = k + 1
- keys[k] = key
- end
- return keys
- else
- return { }
+ if t then
+ local keys,k={},0
+ for key,_ in next,t do
+ k=k+1
+ keys[k]=key
end
+ return keys
+ else
+ return {}
+ end
end
-
local function compare(a,b)
- local ta, tb = type(a), type(b) -- needed, else 11 < 2
- if ta == tb then
- return a < b
- else
- return tostring(a) < tostring(b)
- end
+ local ta,tb=type(a),type(b)
+ if ta==tb then
+ return a<b
+ else
+ return tostring(a)<tostring(b)
+ end
end
-
local function sortedkeys(tab)
- if tab then
- local srt, category, s = { }, 0, 0 -- 0=unknown 1=string, 2=number 3=mixed
- for key,_ in next, tab do
- s = s + 1
- srt[s] = key
- if category == 3 then
- -- no further check
- else
- local tkey = type(key)
- if tkey == "string" then
- category = (category == 2 and 3) or 1
- elseif tkey == "number" then
- category = (category == 1 and 3) or 2
- else
- category = 3
- end
- end
- end
- if category == 0 or category == 3 then
- sort(srt,compare)
+ if tab then
+ local srt,category,s={},0,0
+ for key,_ in next,tab do
+ s=s+1
+ srt[s]=key
+ if category==3 then
+ else
+ local tkey=type(key)
+ if tkey=="string" then
+ category=(category==2 and 3) or 1
+ elseif tkey=="number" then
+ category=(category==1 and 3) or 2
else
- sort(srt)
+ category=3
end
- return srt
- else
- return { }
+ end
end
-end
-
-local function sortedhashkeys(tab) -- fast one
- if tab then
- local srt, s = { }, 0
- for key,_ in next, tab do
- if key then
- s= s + 1
- srt[s] = key
- end
- end
- sort(srt)
- return srt
+ if category==0 or category==3 then
+ sort(srt,compare)
else
- return { }
- end
+ sort(srt)
+ end
+ return srt
+ else
+ return {}
+ end
+end
+local function sortedhashkeys(tab)
+ if tab then
+ local srt,s={},0
+ for key,_ in next,tab do
+ if key then
+ s=s+1
+ srt[s]=key
+ end
+ end
+ sort(srt)
+ return srt
+ else
+ return {}
+ end
end
-
function table.allkeys(t)
- local keys = { }
- for i=1,#t do
- for k, v in next, t[i] do
- keys[k] = true
- end
+ local keys={}
+ for i=1,#t do
+ for k,v in next,t[i] do
+ keys[k]=true
end
- return sortedkeys(keys)
+ end
+ return sortedkeys(keys)
end
-
-table.sortedkeys = sortedkeys
-table.sortedhashkeys = sortedhashkeys
-
+table.sortedkeys=sortedkeys
+table.sortedhashkeys=sortedhashkeys
local function nothing() end
-
local function sortedhash(t)
- if t then
- local n, s = 0, sortedkeys(t) -- the robust one
- local function kv(s)
- n = n + 1
- local k = s[n]
- return k, t[k]
- end
- return kv, s
- else
- return nothing
- end
-end
-
-table.sortedhash = sortedhash
-table.sortedpairs = sortedhash -- obsolete
-
+ if t then
+ local n,s=0,sortedkeys(t)
+ local function kv(s)
+ n=n+1
+ local k=s[n]
+ return k,t[k]
+ end
+ return kv,s
+ else
+ return nothing
+ end
+end
+table.sortedhash=sortedhash
+table.sortedpairs=sortedhash
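-- Sketch: sortedhash (alias sortedpairs) iterates a hash in sorted key order:
--
-- for k,v in table.sortedhash { b = 2, a = 1 } do print(k,v) end -- a 1, b 2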
function table.append(t,list)
- local n = #t
- for i=1,#list do
- n = n + 1
- t[n] = list[i]
- end
- return t
+ local n=#t
+ for i=1,#list do
+ n=n+1
+ t[n]=list[i]
+ end
+ return t
+end
+function table.prepend(t,list)
+ local nl=#list
+ local nt=nl+#t
+ for i=#t,1,-1 do
+ t[nt]=t[i]
+ nt=nt-1
+ end
+ for i=1,#list do
+ t[i]=list[i]
+ end
+ return t
+end
+function table.merge(t,...)
+ t=t or {}
+ for i=1,select("#",...) do
+ for k,v in next,(select(i,...)) do
+ t[k]=v
+ end
+ end
+ return t
end
-
-function table.prepend(t, list)
- local nl = #list
- local nt = nl + #t
- for i=#t,1,-1 do
- t[nt] = t[i]
- nt = nt - 1
- end
- for i=1,#list do
- t[i] = list[i]
- end
- return t
-end
-
--- function table.merge(t, ...) -- first one is target
--- t = t or { }
--- local lst = { ... }
--- for i=1,#lst do
--- for k, v in next, lst[i] do
--- t[k] = v
--- end
--- end
--- return t
--- end
-
-function table.merge(t, ...) -- first one is target
- t = t or { }
- for i=1,select("#",...) do
- for k, v in next, (select(i,...)) do
- t[k] = v
- end
- end
- return t
-end
-
--- function table.merged(...)
--- local tmp, lst = { }, { ... }
--- for i=1,#lst do
--- for k, v in next, lst[i] do
--- tmp[k] = v
--- end
--- end
--- return tmp
--- end
-
function table.merged(...)
- local t = { }
- for i=1,select("#",...) do
- for k, v in next, (select(i,...)) do
- t[k] = v
- end
+ local t={}
+ for i=1,select("#",...) do
+ for k,v in next,(select(i,...)) do
+ t[k]=v
end
- return t
+ end
+ return t
end
-
--- function table.imerge(t, ...)
--- local lst, nt = { ... }, #t
--- for i=1,#lst do
--- local nst = lst[i]
--- for j=1,#nst do
--- nt = nt + 1
--- t[nt] = nst[j]
--- end
--- end
--- return t
--- end
-
-function table.imerge(t, ...)
- local nt = #t
- for i=1,select("#",...) do
- local nst = select(i,...)
- for j=1,#nst do
- nt = nt + 1
- t[nt] = nst[j]
- end
+function table.imerge(t,...)
+ local nt=#t
+ for i=1,select("#",...) do
+ local nst=select(i,...)
+ for j=1,#nst do
+ nt=nt+1
+ t[nt]=nst[j]
end
- return t
+ end
+ return t
end
-
--- function table.imerged(...)
--- local tmp, ntmp, lst = { }, 0, {...}
--- for i=1,#lst do
--- local nst = lst[i]
--- for j=1,#nst do
--- ntmp = ntmp + 1
--- tmp[ntmp] = nst[j]
--- end
--- end
--- return tmp
--- end
-
function table.imerged(...)
- local tmp, ntmp = { }, 0
- for i=1,select("#",...) do
- local nst = select(i,...)
- for j=1,#nst do
- ntmp = ntmp + 1
- tmp[ntmp] = nst[j]
- end
+ local tmp,ntmp={},0
+ for i=1,select("#",...) do
+ local nst=select(i,...)
+ for j=1,#nst do
+ ntmp=ntmp+1
+ tmp[ntmp]=nst[j]
+ end
+ end
+ return tmp
+end
+local function fastcopy(old,metatabletoo)
+ if old then
+ local new={}
+ for k,v in next,old do
+ if type(v)=="table" then
+ new[k]=fastcopy(v,metatabletoo)
+ else
+ new[k]=v
+ end
+ end
+ if metatabletoo then
+ local mt=getmetatable(old)
+ if mt then
+ setmetatable(new,mt)
+ end
end
- return tmp
-end
-
-local function fastcopy(old,metatabletoo) -- fast one
- if old then
- local new = { }
- for k, v in next, old do
- if type(v) == "table" then
- new[k] = fastcopy(v,metatabletoo) -- was just table.copy
- else
- new[k] = v
- end
- end
- if metatabletoo then
- -- optional second arg
- local mt = getmetatable(old)
- if mt then
- setmetatable(new,mt)
- end
- end
- return new
+ return new
+ else
+ return {}
+ end
+end
+local function copy(t,tables)
+ tables=tables or {}
+ local tcopy={}
+ if not tables[t] then
+ tables[t]=tcopy
+ end
+ for i,v in next,t do
+ if type(i)=="table" then
+ if tables[i] then
+ i=tables[i]
+ else
+ i=copy(i,tables)
+ end
+ end
+ if type(v)~="table" then
+ tcopy[i]=v
+ elseif tables[v] then
+ tcopy[i]=tables[v]
else
- return { }
- end
-end
-
--- todo : copy without metatable
-
-local function copy(t, tables) -- taken from lua wiki, slightly adapted
- tables = tables or { }
- local tcopy = {}
- if not tables[t] then
- tables[t] = tcopy
- end
- for i,v in next, t do -- brrr, what happens with sparse indexed
- if type(i) == "table" then
- if tables[i] then
- i = tables[i]
- else
- i = copy(i, tables)
- end
- end
- if type(v) ~= "table" then
- tcopy[i] = v
- elseif tables[v] then
- tcopy[i] = tables[v]
- else
- tcopy[i] = copy(v, tables)
- end
- end
- local mt = getmetatable(t)
- if mt then
- setmetatable(tcopy,mt)
- end
- return tcopy
+ tcopy[i]=copy(v,tables)
+ end
+ end
+ local mt=getmetatable(t)
+ if mt then
+ setmetatable(tcopy,mt)
+ end
+ return tcopy
+end
+table.fastcopy=fastcopy
+table.copy=copy
+function table.derive(parent)
+ local child={}
+ if parent then
+ setmetatable(child,{ __index=parent })
+ end
+ return child
end
-
-table.fastcopy = fastcopy
-table.copy = copy
-
-function table.derive(parent) -- for the moment not public
- local child = { }
- if parent then
- setmetatable(child,{ __index = parent })
- end
- return child
-end
-
function table.tohash(t,value)
- local h = { }
- if t then
- if value == nil then value = true end
- for _, v in next, t do -- no ipairs here
- h[v] = value
- end
+ local h={}
+ if t then
+ if value==nil then value=true end
+ for _,v in next,t do
+ h[v]=value
end
- return h
+ end
+ return h
end
-
function table.fromhash(t)
- local hsh, h = { }, 0
- for k, v in next, t do -- no ipairs here
- if v then
- h = h + 1
- hsh[h] = k
- end
- end
- return hsh
-end
-
-local noquotes, hexify, handle, reduce, compact, inline, functions
-
-local reserved = table.tohash { -- intercept a language inconvenience: no reserved words as key
- 'and', 'break', 'do', 'else', 'elseif', 'end', 'false', 'for', 'function', 'if',
- 'in', 'local', 'nil', 'not', 'or', 'repeat', 'return', 'then', 'true', 'until', 'while',
+ local hsh,h={},0
+ for k,v in next,t do
+ if v then
+ h=h+1
+ hsh[h]=k
+ end
+ end
+ return hsh
+end
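-- Sketch: tohash and fromhash convert between list and set representations
-- (the order coming out of fromhash is not defined):
--
-- inspect(table.tohash { "a", "b" })             -- { a = true, b = true }
-- inspect(table.fromhash { a = true, b = true }) -- { "a", "b" }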
+local noquotes,hexify,handle,reduce,compact,inline,functions
+local reserved=table.tohash {
+ 'and','break','do','else','elseif','end','false','for','function','if',
+ 'in','local','nil','not','or','repeat','return','then','true','until','while',
}
-
local function simple_table(t)
- if #t > 0 then
- local n = 0
- for _,v in next, t do
- n = n + 1
- end
- if n == #t then
- local tt, nt = { }, 0
- for i=1,#t do
- local v = t[i]
- local tv = type(v)
- if tv == "number" then
- nt = nt + 1
- if hexify then
- tt[nt] = format("0x%04X",v)
- else
- tt[nt] = tostring(v) -- tostring not needed
- end
- elseif tv == "boolean" then
- nt = nt + 1
- tt[nt] = tostring(v)
- elseif tv == "string" then
- nt = nt + 1
- tt[nt] = format("%q",v)
- else
- tt = nil
- break
- end
- end
- return tt
+ if #t>0 then
+ local n=0
+ for _,v in next,t do
+ n=n+1
+ end
+ if n==#t then
+ local tt,nt={},0
+ for i=1,#t do
+ local v=t[i]
+ local tv=type(v)
+ if tv=="number" then
+ nt=nt+1
+ if hexify then
+ tt[nt]=format("0x%04X",v)
+ else
+ tt[nt]=tostring(v)
+ end
+ elseif tv=="boolean" then
+ nt=nt+1
+ tt[nt]=tostring(v)
+ elseif tv=="string" then
+ nt=nt+1
+ tt[nt]=format("%q",v)
+ else
+ tt=nil
+ break
end
+ end
+ return tt
end
- return nil
+ end
+ return nil
end
-
--- Because this is a core function of mkiv I moved some function calls
--- inline.
---
--- twice as fast in a test:
---
--- local propername = lpeg.P(lpeg.R("AZ","az","__") * lpeg.R("09","AZ","az", "__")^0 * lpeg.P(-1) )
-
--- problem: there is no good number_to_string converter with the best resolution
-
--- probably using .. is faster than format
--- maybe split in a few cases (yes/no hexify)
-
--- todo: %g faster on numbers than %s
-
-local propername = patterns.propername -- was find(name,"^%a[%w%_]*$")
-
+local propername=patterns.propername
local function dummy() end
-
local function do_serialize(root,name,depth,level,indexed)
- if level > 0 then
- depth = depth .. " "
- if indexed then
- handle(format("%s{",depth))
+ if level>0 then
+ depth=depth.." "
+ if indexed then
+ handle(format("%s{",depth))
+ else
+ local tn=type(name)
+ if tn=="number" then
+ if hexify then
+ handle(format("%s[0x%04X]={",depth,name))
else
- local tn = type(name)
- if tn == "number" then
- if hexify then
- handle(format("%s[0x%04X]={",depth,name))
- else
- handle(format("%s[%s]={",depth,name))
- end
- elseif tn == "string" then
- if noquotes and not reserved[name] and lpegmatch(propername,name) then
- handle(format("%s%s={",depth,name))
- else
- handle(format("%s[%q]={",depth,name))
- end
- elseif tn == "boolean" then
- handle(format("%s[%s]={",depth,tostring(name)))
- else
- handle(format("%s{",depth))
- end
+ handle(format("%s[%s]={",depth,name))
end
- end
- -- we could check for k (index) being number (cardinal)
- if root and next(root) then
- -- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
- -- if compact then
- -- -- NOT: for k=1,#root do (we need to quit at nil)
- -- for k,v in ipairs(root) do -- can we use next?
- -- if not first then first = k end
- -- last = last + 1
- -- end
- -- end
- local first, last = nil, 0
- if compact then
- last = #root
- for k=1,last do
- if root[k] == nil then
- last = k - 1
- break
- end
- end
- if last > 0 then
- first = 1
- end
- end
- local sk = sortedkeys(root)
- for i=1,#sk do
- local k = sk[i]
- local v = root[k]
- --~ if v == root then
- -- circular
- --~ else
- local t, tk = type(v), type(k)
- if compact and first and tk == "number" and k >= first and k <= last then
- if t == "number" then
- if hexify then
- handle(format("%s 0x%04X,",depth,v))
- else
- handle(format("%s %s,",depth,v)) -- %.99g
- end
- elseif t == "string" then
- if reduce and tonumber(v) then
- handle(format("%s %s,",depth,v))
- else
- handle(format("%s %q,",depth,v))
- end
- elseif t == "table" then
- if not next(v) then
- handle(format("%s {},",depth))
- elseif inline then -- and #t > 0
- local st = simple_table(v)
- if st then
- handle(format("%s { %s },",depth,concat(st,", ")))
- else
- do_serialize(v,k,depth,level+1,true)
- end
- else
- do_serialize(v,k,depth,level+1,true)
- end
- elseif t == "boolean" then
- handle(format("%s %s,",depth,tostring(v)))
- elseif t == "function" then
- if functions then
- handle(format('%s load(%q),',depth,dump(v)))
- else
- handle(format('%s "function",',depth))
- end
- else
- handle(format("%s %q,",depth,tostring(v)))
- end
- elseif k == "__p__" then -- parent
- if false then
- handle(format("%s __p__=nil,",depth))
- end
- elseif t == "number" then
- if tk == "number" then
- if hexify then
- handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
- else
- handle(format("%s [%s]=%s,",depth,k,v)) -- %.99g
- end
- elseif tk == "boolean" then
- if hexify then
- handle(format("%s [%s]=0x%04X,",depth,tostring(k),v))
- else
- handle(format("%s [%s]=%s,",depth,tostring(k),v)) -- %.99g
- end
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- if hexify then
- handle(format("%s %s=0x%04X,",depth,k,v))
- else
- handle(format("%s %s=%s,",depth,k,v)) -- %.99g
- end
- else
- if hexify then
- handle(format("%s [%q]=0x%04X,",depth,k,v))
- else
- handle(format("%s [%q]=%s,",depth,k,v)) -- %.99g
- end
- end
- elseif t == "string" then
- if reduce and tonumber(v) then
- if tk == "number" then
- if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,v))
- else
- handle(format("%s [%s]=%s,",depth,k,v))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]=%s,",depth,tostring(k),v))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%s,",depth,k,v))
- else
- handle(format("%s [%q]=%s,",depth,k,v))
- end
- else
- if tk == "number" then
- if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,v))
- else
- handle(format("%s [%s]=%q,",depth,k,v))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]=%q,",depth,tostring(k),v))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%q,",depth,k,v))
- else
- handle(format("%s [%q]=%q,",depth,k,v))
- end
- end
- elseif t == "table" then
- if not next(v) then
- if tk == "number" then
- if hexify then
- handle(format("%s [0x%04X]={},",depth,k))
- else
- handle(format("%s [%s]={},",depth,k))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]={},",depth,tostring(k)))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s={},",depth,k))
- else
- handle(format("%s [%q]={},",depth,k))
- end
- elseif inline then
- local st = simple_table(v)
- if st then
- if tk == "number" then
- if hexify then
- handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
- else
- handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", ")))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
- else
- handle(format("%s [%q]={ %s },",depth,k,concat(st,", ")))
- end
- else
- do_serialize(v,k,depth,level+1)
- end
- else
- do_serialize(v,k,depth,level+1)
- end
- elseif t == "boolean" then
- if tk == "number" then
- if hexify then
- handle(format("%s [0x%04X]=%s,",depth,k,tostring(v)))
- else
- handle(format("%s [%s]=%s,",depth,k,tostring(v)))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v)))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%s,",depth,k,tostring(v)))
- else
- handle(format("%s [%q]=%s,",depth,k,tostring(v)))
- end
- elseif t == "function" then
- if functions then
- local f = getinfo(v).what == "C" and dump(dummy) or dump(v)
- -- local f = getinfo(v).what == "C" and dump(function(...) return v(...) end) or dump(v)
- if tk == "number" then
- if hexify then
- handle(format("%s [0x%04X]=load(%q),",depth,k,f))
- else
- handle(format("%s [%s]=load(%q),",depth,k,f))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]=load(%q),",depth,tostring(k),f))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=load(%q),",depth,k,f))
- else
- handle(format("%s [%q]=load(%q),",depth,k,f))
- end
- end
+ elseif tn=="string" then
+ if noquotes and not reserved[name] and lpegmatch(propername,name) then
+ handle(format("%s%s={",depth,name))
+ else
+ handle(format("%s[%q]={",depth,name))
+ end
+ elseif tn=="boolean" then
+ handle(format("%s[%s]={",depth,tostring(name)))
+ else
+ handle(format("%s{",depth))
+ end
+ end
+ end
+ if root and next(root) then
+ local first,last=nil,0
+ if compact then
+ last=#root
+ for k=1,last do
+ if root[k]==nil then
+ last=k-1
+ break
+ end
+ end
+ if last>0 then
+ first=1
+ end
+ end
+ local sk=sortedkeys(root)
+ for i=1,#sk do
+ local k=sk[i]
+ local v=root[k]
+ local t,tk=type(v),type(k)
+ if compact and first and tk=="number" and k>=first and k<=last then
+ if t=="number" then
+ if hexify then
+ handle(format("%s 0x%04X,",depth,v))
+ else
+ handle(format("%s %s,",depth,v))
+ end
+ elseif t=="string" then
+ if reduce and tonumber(v) then
+ handle(format("%s %s,",depth,v))
+ else
+ handle(format("%s %q,",depth,v))
+ end
+ elseif t=="table" then
+ if not next(v) then
+ handle(format("%s {},",depth))
+ elseif inline then
+ local st=simple_table(v)
+ if st then
+ handle(format("%s { %s },",depth,concat(st,", ")))
else
- if tk == "number" then
- if hexify then
- handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
- else
- handle(format("%s [%s]=%q,",depth,k,tostring(v)))
- end
- elseif tk == "boolean" then
- handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v)))
- elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
- handle(format("%s %s=%q,",depth,k,tostring(v)))
- else
- handle(format("%s [%q]=%q,",depth,k,tostring(v)))
- end
+ do_serialize(v,k,depth,level+1,true)
+ end
+ else
+ do_serialize(v,k,depth,level+1,true)
+ end
+ elseif t=="boolean" then
+ handle(format("%s %s,",depth,tostring(v)))
+ elseif t=="function" then
+ if functions then
+ handle(format('%s load(%q),',depth,dump(v)))
+ else
+ handle(format('%s "function",',depth))
+ end
+ else
+ handle(format("%s %q,",depth,tostring(v)))
+ end
+ elseif k=="__p__" then
+ if false then
+ handle(format("%s __p__=nil,",depth))
+ end
+ elseif t=="number" then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=0x%04X,",depth,k,v))
+ else
+ handle(format("%s [%s]=%s,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ if hexify then
+ handle(format("%s [%s]=0x%04X,",depth,tostring(k),v))
+ else
+ handle(format("%s [%s]=%s,",depth,tostring(k),v))
+ end
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ if hexify then
+ handle(format("%s %s=0x%04X,",depth,k,v))
+ else
+ handle(format("%s %s=%s,",depth,k,v))
+ end
+ else
+ if hexify then
+ handle(format("%s [%q]=0x%04X,",depth,k,v))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v))
+ end
+ end
+ elseif t=="string" then
+ if reduce and tonumber(v) then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%s,",depth,k,v))
+ else
+ handle(format("%s [%s]=%s,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%s,",depth,tostring(k),v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%s,",depth,k,v))
+ else
+ handle(format("%s [%q]=%s,",depth,k,v))
+ end
+ else
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%q,",depth,k,v))
+ else
+ handle(format("%s [%s]=%q,",depth,k,v))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,tostring(k),v))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,v))
+ else
+ handle(format("%s [%q]=%q,",depth,k,v))
+ end
+ end
+ elseif t=="table" then
+ if not next(v) then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]={},",depth,k))
+ else
+ handle(format("%s [%s]={},",depth,k))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]={},",depth,tostring(k)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={},",depth,k))
+ else
+ handle(format("%s [%q]={},",depth,k))
+ end
+ elseif inline then
+ local st=simple_table(v)
+ if st then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%s]={ %s },",depth,k,concat(st,", ")))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]={ %s },",depth,tostring(k),concat(st,", ")))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s={ %s },",depth,k,concat(st,", ")))
+ else
+ handle(format("%s [%q]={ %s },",depth,k,concat(st,", ")))
end
- --~ end
- end
- end
- if level > 0 then
- handle(format("%s},",depth))
- end
-end
-
--- replacing handle by a direct t[#t+1] = ... (plus test) is not much
--- faster (0.03 on 1.00 for zapfino.tma)
-
-local function serialize(_handle,root,name,specification) -- handle wins
- local tname = type(name)
- if type(specification) == "table" then
- noquotes = specification.noquotes
- hexify = specification.hexify
- handle = _handle or specification.handle or print
- reduce = specification.reduce or false
- functions = specification.functions
- compact = specification.compact
- inline = specification.inline and compact
- if functions == nil then
- functions = true
- end
- if compact == nil then
- compact = true
- end
- if inline == nil then
- inline = compact
- end
- else
- noquotes = false
- hexify = false
- handle = _handle or print
- reduce = false
- compact = true
- inline = true
- functions = true
- end
- if tname == "string" then
- if name == "return" then
- handle("return {")
+ else
+ do_serialize(v,k,depth,level+1)
+ end
else
- handle(name .. "={")
- end
- elseif tname == "number" then
- if hexify then
- handle(format("[0x%04X]={",name))
+ do_serialize(v,k,depth,level+1)
+ end
+ elseif t=="boolean" then
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%s,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%s]=%s,",depth,k,tostring(v)))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%s,",depth,tostring(k),tostring(v)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%s,",depth,k,tostring(v)))
else
- handle("[" .. name .. "]={")
- end
- elseif tname == "boolean" then
- if name then
- handle("return {")
+ handle(format("%s [%q]=%s,",depth,k,tostring(v)))
+ end
+ elseif t=="function" then
+ if functions then
+ local f=getinfo(v).what=="C" and dump(dummy) or dump(v)
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%s]=load(%q),",depth,k,f))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=load(%q),",depth,tostring(k),f))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=load(%q),",depth,k,f))
+ else
+ handle(format("%s [%q]=load(%q),",depth,k,f))
+ end
+ end
+ else
+ if tk=="number" then
+ if hexify then
+ handle(format("%s [0x%04X]=%q,",depth,k,tostring(v)))
+ else
+ handle(format("%s [%s]=%q,",depth,k,tostring(v)))
+ end
+ elseif tk=="boolean" then
+ handle(format("%s [%s]=%q,",depth,tostring(k),tostring(v)))
+ elseif noquotes and not reserved[k] and lpegmatch(propername,k) then
+ handle(format("%s %s=%q,",depth,k,tostring(v)))
else
- handle("{")
- end
+ handle(format("%s [%q]=%q,",depth,k,tostring(v)))
+ end
+ end
+ end
+ end
+ if level>0 then
+ handle(format("%s},",depth))
+ end
+end
+local function serialize(_handle,root,name,specification)
+ local tname=type(name)
+ if type(specification)=="table" then
+ noquotes=specification.noquotes
+ hexify=specification.hexify
+ handle=_handle or specification.handle or print
+ reduce=specification.reduce or false
+ functions=specification.functions
+ compact=specification.compact
+ inline=specification.inline and compact
+ if functions==nil then
+ functions=true
+ end
+ if compact==nil then
+ compact=true
+ end
+ if inline==nil then
+ inline=compact
+ end
+ else
+ noquotes=false
+ hexify=false
+ handle=_handle or print
+ reduce=false
+ compact=true
+ inline=true
+ functions=true
+ end
+ if tname=="string" then
+ if name=="return" then
+ handle("return {")
else
- handle("t={")
- end
- if root then
- -- The dummy access will initialize a table that has a delayed initialization
- -- using a metatable. (maybe explicitly test for metatable)
- if getmetatable(root) then -- todo: make this an option, maybe even per subtable
- local dummy = root._w_h_a_t_e_v_e_r_
- root._w_h_a_t_e_v_e_r_ = nil
- end
- -- Let's forget about empty tables.
- if next(root) then
- do_serialize(root,name,"",0)
- end
+ handle(name.."={")
end
- handle("}")
-end
-
--- name:
---
--- true : return { }
--- false : { }
--- nil : t = { }
--- string : string = { }
--- "return" : return { }
--- number : [number] = { }
-
-function table.serialize(root,name,specification)
- local t, n = { }, 0
- local function flush(s)
- n = n + 1
- t[n] = s
+ elseif tname=="number" then
+ if hexify then
+ handle(format("[0x%04X]={",name))
+ else
+ handle("["..name.."]={")
end
- serialize(flush,root,name,specification)
- return concat(t,"\n")
-end
-
-table.tohandle = serialize
-
--- sometimes tables are real use (zapfino extra pro is some 85M) in which
--- case a stepwise serialization is nice; actually, we could consider:
---
--- for line in table.serializer(root,name,reduce,noquotes) do
--- ...(line)
--- end
---
--- so this is on the todo list
-
-local maxtab = 2*1024
-
-function table.tofile(filename,root,name,specification)
- local f = io.open(filename,'w')
- if f then
- if maxtab > 1 then
- local t, n = { }, 0
- local function flush(s)
- n = n + 1
- t[n] = s
- if n > maxtab then
- f:write(concat(t,"\n"),"\n") -- hm, write(sometable) should be nice
- t, n = { }, 0 -- we could recycle t if needed
- end
- end
- serialize(flush,root,name,specification)
- f:write(concat(t,"\n"),"\n")
- else
- local function flush(s)
- f:write(s,"\n")
- end
- serialize(flush,root,name,specification)
- end
- f:close()
- io.flush()
+ elseif tname=="boolean" then
+ if name then
+ handle("return {")
+ else
+ handle("{")
end
-end
-
-local function flattened(t,f,depth)
- if f == nil then
- f = { }
- depth = 0xFFFF
- elseif tonumber(f) then
- -- assume that only two arguments are given
- depth = f
- f = { }
- elseif not depth then
- depth = 0xFFFF
- end
- for k, v in next, t do
- if type(k) ~= "number" then
- if depth > 0 and type(v) == "table" then
- flattened(v,f,depth-1)
- else
- f[k] = v
- end
- end
+ else
+ handle("t={")
+ end
+ if root then
+ if getmetatable(root) then
+ local dummy=root._w_h_a_t_e_v_e_r_
+ root._w_h_a_t_e_v_e_r_=nil
end
- local n = #f
- for k=1,#t do
- local v = t[k]
- if depth > 0 and type(v) == "table" then
- flattened(v,f,depth-1)
- n = #f
- else
- n = n + 1
- f[n] = v
- end
+ if next(root) then
+ do_serialize(root,name,"",0)
end
- return f
+ end
+ handle("}")
end
-
-table.flattened = flattened
-
-local function unnest(t,f) -- only used in mk, for old times sake
- if not f then -- and only relevant for token lists
- f = { } -- this one can become obsolete
- end
- for i=1,#t do
- local v = t[i]
- if type(v) == "table" then
- if type(v[1]) == "table" then
- unnest(v,f)
- else
- f[#f+1] = v
- end
- else
- f[#f+1] = v
- end
+function table.serialize(root,name,specification)
+ local t,n={},0
+ local function flush(s)
+ n=n+1
+ t[n]=s
+ end
+ serialize(flush,root,name,specification)
+ return concat(t,"\n")
+end
+table.tohandle=serialize
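-- Usage sketch (illustrative, not part of the patch): the "name" argument of
-- table.serialize selects the prefix of the generated chunk, as the comment
-- block stripped above documented: true or "return" give "return { ... }",
-- false gives "{ ... }", nil gives "t={ ... }", any other string s gives
-- "s={ ... }" and a number gives "[number]={ ... }".
local chunk = table.serialize({ a = 1, b = { "x", "y" } }, "return", { compact = true })
-- the resulting string can be written out and read back with load()/loadstring()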
+local maxtab=2*1024
+function table.tofile(filename,root,name,specification)
+ local f=io.open(filename,'w')
+ if f then
+ if maxtab>1 then
+ local t,n={},0
+ local function flush(s)
+ n=n+1
+ t[n]=s
+ if n>maxtab then
+ f:write(concat(t,"\n"),"\n")
+ t,n={},0
+ end
+ end
+ serialize(flush,root,name,specification)
+ f:write(concat(t,"\n"),"\n")
+ else
+ local function flush(s)
+ f:write(s,"\n")
+ end
+ serialize(flush,root,name,specification)
end
- return f
-end
-
-function table.unnest(t) -- bad name
- return unnest(t)
+ f:close()
+ io.flush()
+ end
end
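-- Sketch only (hypothetical filename): table.tofile streams the serialization
-- to disk, flushing after every 2*1024 buffered lines so that very large
-- tables need not be concatenated in memory first.
table.tofile("cache.tmp", { version = 1.001, data = { } }, "return")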
-
-local function are_equal(a,b,n,m) -- indexed
- if a and b and #a == #b then
- n = n or 1
- m = m or #a
- for i=n,m do
- local ai, bi = a[i], b[i]
- if ai==bi then
- -- same
- elseif type(ai) == "table" and type(bi) == "table" then
- if not are_equal(ai,bi) then
- return false
- end
- else
- return false
- end
- end
- return true
+local function flattened(t,f,depth)
+ if f==nil then
+ f={}
+ depth=0xFFFF
+ elseif tonumber(f) then
+ depth=f
+ f={}
+ elseif not depth then
+ depth=0xFFFF
+ end
+ for k,v in next,t do
+ if type(k)~="number" then
+ if depth>0 and type(v)=="table" then
+ flattened(v,f,depth-1)
+ else
+ f[k]=v
+ end
+ end
+ end
+ local n=#f
+ for k=1,#t do
+ local v=t[k]
+ if depth>0 and type(v)=="table" then
+ flattened(v,f,depth-1)
+ n=#f
+ else
+ n=n+1
+ f[n]=v
+ end
+ end
+ return f
+end
+table.flattened=flattened
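-- Illustrative example: flattened() copies nested hash entries into the
-- target table and appends indexed entries in order, descending at most
-- 'depth' levels (default 0xFFFF).
local flat = table.flattened({ 1, { 2, 3 }, a = { b = 4 } })
-- flat[1] == 1, flat[2] == 2, flat[3] == 3, flat.b == 4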
+local function unnest(t,f)
+ if not f then
+ f={}
+ end
+ for i=1,#t do
+ local v=t[i]
+ if type(v)=="table" then
+ if type(v[1])=="table" then
+ unnest(v,f)
+ else
+ f[#f+1]=v
+ end
else
+ f[#f+1]=v
+ end
+ end
+ return f
+end
+function table.unnest(t)
+ return unnest(t)
+end
+local function are_equal(a,b,n,m)
+ if a and b and #a==#b then
+ n=n or 1
+ m=m or #a
+ for i=n,m do
+ local ai,bi=a[i],b[i]
+ if ai==bi then
+ elseif type(ai)=="table" and type(bi)=="table" then
+ if not are_equal(ai,bi) then
+ return false
+ end
+ else
return false
- end
-end
-
-local function identical(a,b) -- assumes same structure
- for ka, va in next, a do
- local vb = b[ka]
- if va == vb then
- -- same
- elseif type(va) == "table" and type(vb) == "table" then
- if not identical(va,vb) then
- return false
- end
- else
- return false
- end
+ end
end
return true
-end
-
-table.identical = identical
-table.are_equal = are_equal
-
--- maybe also make a combined one
-
-function table.compact(t) -- remove empty tables, assumes subtables
- if t then
- for k, v in next, t do
- if not next(v) then -- no type checking
- t[k] = nil
- end
- end
- end
-end
-
-function table.contains(t, v)
- if t then
- for i=1, #t do
- if t[i] == v then
- return i
- end
- end
- end
+ else
return false
-end
-
-function table.count(t)
- local n = 0
- for k, v in next, t do
- n = n + 1
+ end
+end
+local function identical(a,b)
+ for ka,va in next,a do
+ local vb=b[ka]
+ if va==vb then
+ elseif type(va)=="table" and type(vb)=="table" then
+ if not identical(va,vb) then
+ return false
+ end
+ else
+ return false
end
- return n
+ end
+ return true
end
-
-function table.swapped(t,s) -- hash
- local n = { }
- if s then
- for k, v in next, s do
- n[k] = v
- end
+table.identical=identical
+table.are_equal=are_equal
+function table.compact(t)
+ if t then
+ for k,v in next,t do
+ if not next(v) then
+ t[k]=nil
+ end
end
- for k, v in next, t do
- n[v] = k
- end
- return n
+ end
end
-
-function table.mirrored(t) -- hash
- local n = { }
- for k, v in next, t do
- n[v] = k
- n[k] = v
+function table.contains(t,v)
+ if t then
+ for i=1,#t do
+ if t[i]==v then
+ return i
+ end
end
- return n
+ end
+ return false
+end
+function table.count(t)
+ local n=0
+ for k,v in next,t do
+ n=n+1
+ end
+ return n
+end
+function table.swapped(t,s)
+ local n={}
+ if s then
+ for k,v in next,s do
+ n[k]=v
+ end
+ end
+ for k,v in next,t do
+ n[v]=k
+ end
+ return n
+end
+function table.mirrored(t)
+ local n={}
+ for k,v in next,t do
+ n[v]=k
+ n[k]=v
+ end
+ return n
end
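-- Small sketch: swapped() inverts a hash (value -> key), optionally seeded
-- with a second table, while mirrored() keeps both directions in one table.
local bykey  = table.swapped({ alpha = 1, beta = 2 })  -- bykey[1] == "alpha"
local twoway = table.mirrored({ alpha = 1 })           -- twoway.alpha == 1, twoway[1] == "alpha"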
-
function table.reversed(t)
- if t then
- local tt, tn = { }, #t
- if tn > 0 then
- local ttn = 0
- for i=tn,1,-1 do
- ttn = ttn + 1
- tt[ttn] = t[i]
- end
- end
- return tt
- end
+ if t then
+ local tt,tn={},#t
+ if tn>0 then
+ local ttn=0
+ for i=tn,1,-1 do
+ ttn=ttn+1
+ tt[ttn]=t[i]
+ end
+ end
+ return tt
+ end
end
-
function table.reverse(t)
- if t then
- local n = #t
- for i=1,floor(n/2) do
- local j = n - i + 1
- t[i], t[j] = t[j], t[i]
- end
- return t
- end
-end
-
-function table.sequenced(t,sep) -- hash only
- if t then
- local s, n = { }, 0
- for k, v in sortedhash(t) do
- if simple then
- if v == true then
- n = n + 1
- s[n] = k
- elseif v and v~= "" then
- n = n + 1
- s[n] = k .. "=" .. tostring(v)
- end
- else
- n = n + 1
- s[n] = k .. "=" .. tostring(v)
- end
- end
- return concat(s, sep or " | ")
- else
- return ""
+ if t then
+ local n=#t
+ for i=1,floor(n/2) do
+ local j=n-i+1
+ t[i],t[j]=t[j],t[i]
end
+ return t
+ end
+end
+function table.sequenced(t,sep)
+ if t then
+ local s,n={},0
+ for k,v in sortedhash(t) do
+ if simple then
+ if v==true then
+ n=n+1
+ s[n]=k
+ elseif v and v~="" then
+ n=n+1
+ s[n]=k.."="..tostring(v)
+ end
+ else
+ n=n+1
+ s[n]=k.."="..tostring(v)
+ end
+ end
+ return concat(s,sep or " | ")
+ else
+ return ""
+ end
end
-
function table.print(t,...)
- if type(t) ~= "table" then
- print(tostring(t))
- else
- serialize(print,t,...)
- end
+ if type(t)~="table" then
+ print(tostring(t))
+ else
+ serialize(print,t,...)
+ end
end
-
-setinspector(function(v) if type(v) == "table" then serialize(print,v,"table") return true end end)
-
--- -- -- obsolete but we keep them for a while and might comment them later -- -- --
-
--- roughly: copy-loop : unpack : sub == 0.9 : 0.4 : 0.45 (so in critical apps, use unpack)
-
+setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end)
function table.sub(t,i,j)
- return { unpack(t,i,j) }
+ return { unpack(t,i,j) }
end
-
--- slower than #t on indexed tables (#t only returns the size of the numerically indexed slice)
-
function table.is_empty(t)
- return not t or not next(t)
+ return not t or not next(t)
end
-
function table.has_one_entry(t)
- return t and not next(t,next(t))
+ return t and not next(t,next(t))
end
-
--- new
-
-function table.loweredkeys(t) -- maybe utf
- local l = { }
- for k, v in next, t do
- l[lower(k)] = v
- end
- return l
+function table.loweredkeys(t)
+ local l={}
+ for k,v in next,t do
+ l[lower(k)]=v
+ end
+ return l
end
-
--- new, might move (maybe duplicate)
-
function table.unique(old)
- local hash = { }
- local new = { }
- local n = 0
- for i=1,#old do
- local oi = old[i]
- if not hash[oi] then
- n = n + 1
- new[n] = oi
- hash[oi] = true
- end
- end
- return new
+ local hash={}
+ local new={}
+ local n=0
+ for i=1,#old do
+ local oi=old[i]
+ if not hash[oi] then
+ n=n+1
+ new[n]=oi
+ hash[oi]=true
+ end
+ end
+ return new
end
-
function table.sorted(t,...)
- sort(t,...)
- return t -- still sorts in-place
+ sort(t,...)
+ return t
end
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['l-boolean'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['l-io']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-local type, tonumber = type, tonumber
-
-boolean = boolean or { }
-local boolean = boolean
-
-function boolean.tonumber(b)
- if b then return 1 else return 0 end -- test and return or return
+local io=io
+local byte,find,gsub,format=string.byte,string.find,string.gsub,string.format
+local concat=table.concat
+local floor=math.floor
+local type=type
+if string.find(os.getenv("PATH"),";") then
+ io.fileseparator,io.pathseparator="\\",";"
+else
+ io.fileseparator,io.pathseparator="/",":"
end
-
-function toboolean(str,tolerant)
- if str == nil then
- return false
- elseif str == false then
- return false
- elseif str == true then
- return true
- elseif str == "true" then
- return true
- elseif str == "false" then
- return false
- elseif not tolerant then
- return false
- elseif str == 0 then
- return false
- elseif (tonumber(str) or 0) > 0 then
- return true
+local function readall(f)
+ return f:read("*all")
+end
+local function readall(f)
+ local size=f:seek("end")
+ if size==0 then
+ return ""
+ elseif size<1024*1024 then
+ f:seek("set",0)
+ return f:read('*all')
+ else
+ local done=f:seek("set",0)
+ if size<1024*1024 then
+ step=1024*1024
+ elseif size>16*1024*1024 then
+ step=16*1024*1024
else
- return str == "yes" or str == "on" or str == "t"
+ step=floor(size/(1024*1024))*1024*1024/8
end
+ local data={}
+ while true do
+ local r=f:read(step)
+ if not r then
+ return concat(data)
+ else
+ data[#data+1]=r
+ end
+ end
+ end
+end
+io.readall=readall
+function io.loaddata(filename,textmode)
+ local f=io.open(filename,(textmode and 'r') or 'rb')
+ if f then
+ local data=readall(f)
+ f:close()
+ if #data>0 then
+ return data
+ end
+ end
end
-
-string.toboolean = toboolean
-
-function string.booleanstring(str)
- if str == nil then
- return false
- elseif str == false then
- return false
- elseif str == true then
- return true
- elseif str == "true" then
- return true
- elseif str == "false" then
- return false
- elseif str == 0 then
- return false
- elseif (tonumber(str) or 0) > 0 then
- return true
+function io.savedata(filename,data,joiner)
+ local f=io.open(filename,"wb")
+ if f then
+ if type(data)=="table" then
+ f:write(concat(data,joiner or ""))
+ elseif type(data)=="function" then
+ data(f)
else
- return str == "yes" or str == "on" or str == "t"
+ f:write(data or "")
end
+ f:close()
+ io.flush()
+ return true
+ else
+ return false
+ end
+end
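-- Round trip sketch (hypothetical filename): savedata accepts a string, a
-- table of chunks (joined with the optional third argument) or a writer
-- function; loaddata reads in binary mode unless textmode is set and returns
-- nil for empty or missing files.
io.savedata("demo.tmp", { "one", "two" }, "\n")
local data = io.loaddata("demo.tmp")  -- "one\ntwo"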
+function io.loadlines(filename,n)
+ local f=io.open(filename,'r')
+ if not f then
+ elseif n then
+ local lines={}
+ for i=1,n do
+ local line=f:read("*lines")
+ if line then
+ lines[#lines+1]=line
+ else
+ break
+ end
+ end
+ f:close()
+ lines=concat(lines,"\n")
+ if #lines>0 then
+ return lines
+ end
+ else
+ local line=f:read("*line") or ""
+ f:close()
+ if #line>0 then
+ return line
+ end
+ end
end
-
-function string.is_boolean(str,default)
- if type(str) == "string" then
- if str == "true" or str == "yes" or str == "on" or str == "t" then
- return true
- elseif str == "false" or str == "no" or str == "off" or str == "f" then
- return false
- end
+function io.loadchunk(filename,n)
+ local f=io.open(filename,'rb')
+ if f then
+ local data=f:read(n or 1024)
+ f:close()
+ if #data>0 then
+ return data
end
- return default
+ end
end
-
-end -- closure
-
-do -- begin closure to overcome local limits and interference
-
-if not modules then modules = { } end modules ['l-math'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-local floor, sin, cos, tan = math.floor, math.sin, math.cos, math.tan
-
-if not math.round then
- function math.round(x) return floor(x + 0.5) end
+function io.exists(filename)
+ local f=io.open(filename)
+ if f==nil then
+ return false
+ else
+ f:close()
+ return true
+ end
end
-
-if not math.div then
- function math.div(n,m) return floor(n/m) end
+function io.size(filename)
+ local f=io.open(filename)
+ if f==nil then
+ return 0
+ else
+ local s=f:seek("end")
+ f:close()
+ return s
+ end
end
-
-if not math.mod then
- function math.mod(n,m) return n % m end
+function io.noflines(f)
+ if type(f)=="string" then
+ local f=io.open(f) -- was io.open(filename); 'filename' is not defined in this scope, the string parameter f names the file
+ if f then
+ local n=f and io.noflines(f) or 0
+ f:close()
+ return n
+ else
+ return 0
+ end
+ else
+ local n=0
+ for _ in f:lines() do
+ n=n+1
+ end
+ f:seek('set',0)
+ return n
+ end
+end
+local nextchar={
+ [ 4]=function(f)
+ return f:read(1,1,1,1)
+ end,
+ [ 2]=function(f)
+ return f:read(1,1)
+ end,
+ [ 1]=function(f)
+ return f:read(1)
+ end,
+ [-2]=function(f)
+ local a,b=f:read(1,1)
+ return b,a
+ end,
+ [-4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ return d,c,b,a
+ end
+}
+function io.characters(f,n)
+ if f then
+ return nextchar[n or 1],f
+ end
+end
+local nextbyte={
+ [4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ if d then
+ return byte(a),byte(b),byte(c),byte(d)
+ end
+ end,
+ [3]=function(f)
+ local a,b,c=f:read(1,1,1)
+ if b then
+ return byte(a),byte(b),byte(c)
+ end
+ end,
+ [2]=function(f)
+ local a,b=f:read(1,1)
+ if b then
+ return byte(a),byte(b)
+ end
+ end,
+ [1]=function (f)
+ local a=f:read(1)
+ if a then
+ return byte(a)
+ end
+ end,
+ [-2]=function (f)
+ local a,b=f:read(1,1)
+ if b then
+ return byte(b),byte(a)
+ end
+ end,
+ [-3]=function(f)
+ local a,b,c=f:read(1,1,1)
+ if b then
+ return byte(c),byte(b),byte(a)
+ end
+ end,
+ [-4]=function(f)
+ local a,b,c,d=f:read(1,1,1,1)
+ if d then
+ return byte(d),byte(c),byte(b),byte(a)
+ end
+ end
+}
+function io.bytes(f,n)
+ if f then
+ return nextbyte[n or 1],f
+ else
+ return nil,nil
+ end
end
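-- Iterator sketch (hypothetical file): io.bytes(f,n) yields byte values n at
-- a time and io.characters(f,n) yields the characters themselves; a negative
-- n reverses the order, which helps with little endian data.
local f = io.open("some.bin", "rb")
if f then
  for a, b in io.bytes(f, 2) do
    -- a and b are the next two byte values, in big endian order
  end
  f:close()
end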
-
-local pipi = 2*math.pi/360
-
-if not math.sind then
- function math.sind(d) return sin(d*pipi) end
- function math.cosd(d) return cos(d*pipi) end
- function math.tand(d) return tan(d*pipi) end
+function io.ask(question,default,options)
+ while true do
+ io.write(question)
+ if options then
+ io.write(format(" [%s]",concat(options,"|")))
+ end
+ if default then
+ io.write(format(" [%s]",default))
+ end
+ io.write(format(" "))
+ io.flush()
+ local answer=io.read()
+ answer=gsub(answer,"^%s*(.*)%s*$","%1")
+ if answer=="" and default then
+ return default
+ elseif not options then
+ return answer
+ else
+ for k=1,#options do
+ if options[k]==answer then
+ return answer
+ end
+ end
+ local pattern="^"..answer
+ for k=1,#options do
+ local v=options[k]
+ if find(v,pattern) then
+ return v
+ end
+ end
+ end
+ end
end
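-- Prompt sketch (hypothetical question): io.ask keeps asking until the answer
-- is empty (the default is returned), equals one of the options, or is a
-- prefix of an option (the first such option wins).
local answer = io.ask("continue?", "yes", { "yes", "no" })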
-
-if not math.odd then
- function math.odd (n) return n % 2 ~= 0 end
- function math.even(n) return n % 2 == 0 end
+local function readnumber(f,n,m)
+ if m then
+ f:seek("set",n)
+ n=m
+ end
+ if n==1 then
+ return byte(f:read(1))
+ elseif n==2 then
+ local a,b=byte(f:read(2),1,2)
+ return 256*a+b
+ elseif n==3 then
+ local a,b,c=byte(f:read(3),1,3)
+ return 256*256*a+256*b+c
+ elseif n==4 then
+ local a,b,c,d=byte(f:read(4),1,4)
+ return 256*256*256*a+256*256*b+256*c+d
+ elseif n==8 then
+ local a,b=readnumber(f,4),readnumber(f,4)
+ return 256*a+b
+ elseif n==12 then
+ local a,b,c=readnumber(f,4),readnumber(f,4),readnumber(f,4)
+ return 256*256*a+256*b+c
+ elseif n==-2 then
+ local b,a=byte(f:read(2),1,2)
+ return 256*a+b
+ elseif n==-3 then
+ local c,b,a=byte(f:read(3),1,3)
+ return 256*256*a+256*b+c
+ elseif n==-4 then
+ local d,c,b,a=byte(f:read(4),1,4)
+ return 256*256*256*a+256*256*b+256*c+d
+ elseif n==-8 then
+ local h,g,f,e,d,c,b,a=byte(f:read(8),1,8)
+ return 256*256*256*256*256*256*256*a+256*256*256*256*256*256*b+256*256*256*256*256*c+256*256*256*256*d+256*256*256*e+256*256*f+256*g+h
+ else
+ return 0
+ end
+end
+io.readnumber=readnumber
+function io.readstring(f,n,m)
+ if m then
+ f:seek("set",n)
+ n=m
+ end
+ local str=gsub(f:read(n),"\000","")
+ return str
end
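-- Sketch (hypothetical file): readnumber(f,n) reads an n byte big endian
-- integer and readnumber(f,offset,n) seeks first; a negative n reads little
-- endian. readstring strips embedded zero bytes.
local f = io.open("data.bin", "rb")
if f then
  local version = io.readnumber(f, 0, 4)  -- 4 byte integer at offset 0
  local tag     = io.readstring(f, 4, 4)  -- 4 bytes starting at offset 4
  f:close()
end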
+if not io.i_limiter then function io.i_limiter() end end
+if not io.o_limiter then function io.o_limiter() end end
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['l-file'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['l-file']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
--- needs a cleanup
-
-file = file or { }
-local file = file
-
+file=file or {}
+local file=file
if not lfs then
-
- lfs = {
- getcurrentdir = function()
- return "."
- end,
- attributes = function()
- return nil
- end,
- isfile = function(name)
- local f = io.open(name,'rb')
- if f then
- f:close()
- return true
- end
- end,
- isdir = function(name)
- print("you need to load lfs")
- return false
- end
- }
-
-elseif not lfs.isfile then
-
- local attributes = lfs.attributes
-
- function lfs.isdir(name)
- return attributes(name,"mode") == "directory"
- end
-
- function lfs.isfile(name)
- return attributes(name,"mode") == "file"
+ lfs={
+ getcurrentdir=function()
+ return "."
+ end,
+ attributes=function()
+ return nil
+ end,
+ isfile=function(name)
+ local f=io.open(name,'rb')
+ if f then
+ f:close()
+ return true
+ end
+ end,
+ isdir=function(name)
+ print("you need to load lfs")
+ return false
end
-
- -- function lfs.isdir(name)
- -- local a = attributes(name)
- -- return a and a.mode == "directory"
- -- end
-
- -- function lfs.isfile(name)
- -- local a = attributes(name)
- -- return a and a.mode == "file"
- -- end
-
-end
-
-local insert, concat = table.insert, table.concat
-local match = string.match
-local lpegmatch = lpeg.match
-local getcurrentdir, attributes = lfs.currentdir, lfs.attributes
-local checkedsplit = string.checkedsplit
-
--- local patterns = file.patterns or { }
--- file.patterns = patterns
-
-local P, R, S, C, Cs, Cp, Cc, Ct = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Cs, lpeg.Cp, lpeg.Cc, lpeg.Ct
-
-local colon = P(":")
-local period = P(".")
-local periods = P("..")
-local fwslash = P("/")
-local bwslash = P("\\")
-local slashes = S("\\/")
-local noperiod = 1-period
-local noslashes = 1-slashes
-local name = noperiod^1
-local suffix = period/"" * (1-period-slashes)^1 * -1
-
------ pattern = C((noslashes^0 * slashes^1)^1)
-local pattern = C((1 - (slashes^1 * noslashes^1 * -1))^1) * P(1) -- there must be a more efficient way
-
+ }
+elseif not lfs.isfile then
+ local attributes=lfs.attributes
+ function lfs.isdir(name)
+ return attributes(name,"mode")=="directory"
+ end
+ function lfs.isfile(name)
+ return attributes(name,"mode")=="file"
+ end
+end
+local insert,concat=table.insert,table.concat
+local match=string.match
+local lpegmatch=lpeg.match
+local getcurrentdir,attributes=lfs.currentdir,lfs.attributes
+local checkedsplit=string.checkedsplit
+local P,R,S,C,Cs,Cp,Cc,Ct=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Cs,lpeg.Cp,lpeg.Cc,lpeg.Ct
+local colon=P(":")
+local period=P(".")
+local periods=P("..")
+local fwslash=P("/")
+local bwslash=P("\\")
+local slashes=S("\\/")
+local noperiod=1-period
+local noslashes=1-slashes
+local name=noperiod^1
+local suffix=period/""*(1-period-slashes)^1*-1
+local pattern=C((1-(slashes^1*noslashes^1*-1))^1)*P(1)
local function pathpart(name,default)
- return name and lpegmatch(pattern,name) or default or ""
+ return name and lpegmatch(pattern,name) or default or ""
end
-
-local pattern = (noslashes^0 * slashes)^1 * C(noslashes^1) * -1
-
+local pattern=(noslashes^0*slashes)^1*C(noslashes^1)*-1
local function basename(name)
- return name and lpegmatch(pattern,name) or name
+ return name and lpegmatch(pattern,name) or name
end
-
--- print(pathpart("file"))
--- print(pathpart("dir/file"))
--- print(pathpart("/dir/file"))
--- print(basename("file"))
--- print(basename("dir/file"))
--- print(basename("/dir/file"))
-
-local pattern = (noslashes^0 * slashes^1)^0 * Cs((1-suffix)^1) * suffix^0
-
+local pattern=(noslashes^0*slashes^1)^0*Cs((1-suffix)^1)*suffix^0
local function nameonly(name)
- return name and lpegmatch(pattern,name) or name
+ return name and lpegmatch(pattern,name) or name
end
-
-local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * C(noperiod^1) * -1
-
+local pattern=(noslashes^0*slashes)^0*(noperiod^1*period)^1*C(noperiod^1)*-1
local function suffixonly(name)
- return name and lpegmatch(pattern,name) or ""
-end
-
-file.pathpart = pathpart
-file.basename = basename
-file.nameonly = nameonly
-file.suffixonly = suffixonly
-file.suffix = suffixonly
-
-file.dirname = pathpart -- obsolete
-file.extname = suffixonly -- obsolete
-
--- actually these are schemes
-
-local drive = C(R("az","AZ")) * colon
-local path = C((noslashes^0 * slashes)^0)
-local suffix = period * C(P(1-period)^0 * P(-1))
-local base = C((1-suffix)^0)
-local rest = C(P(1)^0)
-
-drive = drive + Cc("")
-path = path + Cc("")
-base = base + Cc("")
-suffix = suffix + Cc("")
-
-local pattern_a = drive * path * base * suffix
-local pattern_b = path * base * suffix
-local pattern_c = C(drive * path) * C(base * suffix) -- trick: two extra captures
-local pattern_d = path * rest
-
+ return name and lpegmatch(pattern,name) or ""
+end
+file.pathpart=pathpart
+file.basename=basename
+file.nameonly=nameonly
+file.suffixonly=suffixonly
+file.suffix=suffixonly
+file.dirname=pathpart
+file.extname=suffixonly
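-- Quick checks (results added here for illustration; the formatted source
-- carried similar print based tests that this patch drops):
file.pathpart("/dir/file")      -- "/dir"
file.basename("/dir/file")      -- "file"
file.nameonly("dir/file.ext")   -- "file"
file.suffixonly("file.ext")     -- "ext"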
+local drive=C(R("az","AZ"))*colon
+local path=C((noslashes^0*slashes)^0)
+local suffix=period*C(P(1-period)^0*P(-1))
+local base=C((1-suffix)^0)
+local rest=C(P(1)^0)
+drive=drive+Cc("")
+path=path+Cc("")
+base=base+Cc("")
+suffix=suffix+Cc("")
+local pattern_a=drive*path*base*suffix
+local pattern_b=path*base*suffix
+local pattern_c=C(drive*path)*C(base*suffix)
+local pattern_d=path*rest
function file.splitname(str,splitdrive)
- if not str then
- -- error
- elseif splitdrive then
- return lpegmatch(pattern_a,str) -- returns drive, path, base, suffix
- else
- return lpegmatch(pattern_b,str) -- returns path, base, suffix
- end
+ if not str then
+ elseif splitdrive then
+ return lpegmatch(pattern_a,str)
+ else
+ return lpegmatch(pattern_b,str)
+ end
end
-
function file.splitbase(str)
- return str and lpegmatch(pattern_d,str) -- returns path, base+suffix
+ return str and lpegmatch(pattern_d,str)
end
-
----- stripslash = C((1 - P("/")^1*P(-1))^0)
-
function file.nametotable(str,splitdrive)
- if str then
- local path, drive, subpath, name, base, suffix = lpegmatch(pattern_c,str)
- -- if path ~= "" then
- -- path = lpegmatch(stripslash,path) -- unfortunate hack, maybe this becomes default
- -- end
- if splitdrive then
- return {
- path = path,
- drive = drive,
- subpath = subpath,
- name = name,
- base = base,
- suffix = suffix,
- }
- else
- return {
- path = path,
- name = name,
- base = base,
- suffix = suffix,
- }
- end
+ if str then
+ local path,drive,subpath,name,base,suffix=lpegmatch(pattern_c,str)
+ if splitdrive then
+ return {
+ path=path,
+ drive=drive,
+ subpath=subpath,
+ name=name,
+ base=base,
+ suffix=suffix,
+ }
+ else
+ return {
+ path=path,
+ name=name,
+ base=base,
+ suffix=suffix,
+ }
end
+ end
end
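-- Sketch: splitname returns path, base and suffix (plus the drive when the
-- second argument is true); nametotable returns the same pieces as a table.
local path, base, suffix = file.splitname("dir/file.ext")  -- "dir/", "file", "ext"
local parts = file.nametotable("dir/file.ext")
-- parts.path == "dir/", parts.name == "file.ext", parts.base == "file", parts.suffix == "ext"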
-
--- print(file.splitname("file"))
--- print(file.splitname("dir/file"))
--- print(file.splitname("/dir/file"))
--- print(file.splitname("file"))
--- print(file.splitname("dir/file"))
--- print(file.splitname("/dir/file"))
-
--- inspect(file.nametotable("file.ext"))
--- inspect(file.nametotable("dir/file.ext"))
--- inspect(file.nametotable("/dir/file.ext"))
--- inspect(file.nametotable("file.ext"))
--- inspect(file.nametotable("dir/file.ext"))
--- inspect(file.nametotable("/dir/file.ext"))
-
------ pattern = Cs(((period * noperiod^1 * -1) / "" + 1)^1)
-local pattern = Cs(((period * (1-period-slashes)^1 * -1) / "" + 1)^1)
-
+local pattern=Cs(((period*(1-period-slashes)^1*-1)/""+1)^1)
function file.removesuffix(name)
- return name and lpegmatch(pattern,name)
+ return name and lpegmatch(pattern,name)
end
-
--- local pattern = (noslashes^0 * slashes)^0 * (noperiod^1 * period)^1 * Cp() * noperiod^1 * -1
---
--- function file.addsuffix(name, suffix)
--- local p = lpegmatch(pattern,name)
--- if p then
--- return name
--- else
--- return name .. "." .. suffix
--- end
--- end
-
-local suffix = period/"" * (1-period-slashes)^1 * -1
-local pattern = Cs((noslashes^0 * slashes^1)^0 * ((1-suffix)^1)) * Cs(suffix)
-
+local suffix=period/""*(1-period-slashes)^1*-1
+local pattern=Cs((noslashes^0*slashes^1)^0*((1-suffix)^1))*Cs(suffix)
function file.addsuffix(filename,suffix,criterium)
- if not filename or not suffix or suffix == "" then
- return filename
- elseif criterium == true then
- return filename .. "." .. suffix
- elseif not criterium then
- local n, s = lpegmatch(pattern,filename)
- if not s or s == "" then
- return filename .. "." .. suffix
- else
+ if not filename or not suffix or suffix=="" then
+ return filename
+ elseif criterium==true then
+ return filename.."."..suffix
+ elseif not criterium then
+ local n,s=lpegmatch(pattern,filename)
+ if not s or s=="" then
+ return filename.."."..suffix
+ else
+ return filename
+ end
+ else
+ local n,s=lpegmatch(pattern,filename)
+ if s and s~="" then
+ local t=type(criterium)
+ if t=="table" then
+ for i=1,#criterium do
+ if s==criterium[i] then
return filename
+ end
end
- else
- local n, s = lpegmatch(pattern,filename)
- if s and s ~= "" then
- local t = type(criterium)
- if t == "table" then
- -- keep if in criterium
- for i=1,#criterium do
- if s == criterium[i] then
- return filename
- end
- end
- elseif t == "string" then
- -- keep if criterium
- if s == criterium then
- return filename
- end
- end
+ elseif t=="string" then
+ if s==criterium then
+ return filename
end
- return (n or filename) .. "." .. suffix
+ end
end
+ return (n or filename).."."..suffix
+ end
end
-
--- print("1 " .. file.addsuffix("name","new") .. " -> name.new")
--- print("2 " .. file.addsuffix("name.old","new") .. " -> name.old")
--- print("3 " .. file.addsuffix("name.old","new",true) .. " -> name.old.new")
--- print("4 " .. file.addsuffix("name.old","new","new") .. " -> name.new")
--- print("5 " .. file.addsuffix("name.old","new","old") .. " -> name.old")
--- print("6 " .. file.addsuffix("name.old","new","foo") .. " -> name.new")
--- print("7 " .. file.addsuffix("name.old","new",{"foo","bar"}) .. " -> name.new")
--- print("8 " .. file.addsuffix("name.old","new",{"old","bar"}) .. " -> name.old")
-
-local suffix = period * (1-period-slashes)^1 * -1
-local pattern = Cs((1-suffix)^0)
-
+local suffix=period*(1-period-slashes)^1*-1
+local pattern=Cs((1-suffix)^0)
function file.replacesuffix(name,suffix)
- if name and suffix and suffix ~= "" then
- return lpegmatch(pattern,name) .. "." .. suffix
- else
- return name
- end
+ if name and suffix and suffix~="" then
+ return lpegmatch(pattern,name).."."..suffix
+ else
+ return name
+ end
end
-
---
-
-local reslasher = lpeg.replacer(P("\\"),"/")
-
+local reslasher=lpeg.replacer(P("\\"),"/")
function file.reslash(str)
- return str and lpegmatch(reslasher,str)
+ return str and lpegmatch(reslasher,str)
end
-
--- We should be able to use:
---
--- local writable = P(1) * P("w") * Cc(true)
---
--- function file.is_writable(name)
--- local a = attributes(name) or attributes(pathpart(name,"."))
--- return a and lpegmatch(writable,a.permissions) or false
--- end
---
--- But after some testing Taco and I came up with the more robust
--- variant:
-
function file.is_writable(name)
- if not name then
- -- error
- elseif lfs.isdir(name) then
- name = name .. "/m_t_x_t_e_s_t.tmp"
- local f = io.open(name,"wb")
- if f then
- f:close()
- os.remove(name)
- return true
- end
- elseif lfs.isfile(name) then
- local f = io.open(name,"ab")
- if f then
- f:close()
- return true
- end
- else
- local f = io.open(name,"ab")
- if f then
- f:close()
- os.remove(name)
- return true
- end
+ if not name then
+ elseif lfs.isdir(name) then
+ name=name.."/m_t_x_t_e_s_t.tmp"
+ local f=io.open(name,"wb")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
end
- return false
-end
-
-local readable = P("r") * Cc(true)
-
-function file.is_readable(name)
- if name then
- local a = attributes(name)
- return a and lpegmatch(readable,a.permissions) or false
- else
- return false
+ elseif lfs.isfile(name) then
+ local f=io.open(name,"ab")
+ if f then
+ f:close()
+ return true
end
-end
-
-file.isreadable = file.is_readable -- depricated
-file.iswritable = file.is_writable -- depricated
-
-function file.size(name)
- if name then
- local a = attributes(name)
- return a and a.size or 0
- else
- return 0
+ else
+ local f=io.open(name,"ab")
+ if f then
+ f:close()
+ os.remove(name)
+ return true
end
+ end
+ return false
end
-
-function file.splitpath(str,separator) -- string .. reslash is a bonus (we could do a direct split)
- return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator)
-end
-
-function file.joinpath(tab,separator) -- table
- return tab and concat(tab,separator or io.pathseparator) -- can have trailing //
+local readable=P("r")*Cc(true)
+function file.is_readable(name)
+ if name then
+ local a=attributes(name)
+ return a and lpegmatch(readable,a.permissions) or false
+ else
+ return false
+ end
end
-
-local stripper = Cs(P(fwslash)^0/"" * reslasher)
-local isnetwork = fwslash * fwslash * (1-fwslash) + (1-fwslash-colon)^1 * colon
-local isroot = fwslash^1 * -1
-local hasroot = fwslash^1
-
-local deslasher = lpeg.replacer(S("\\/")^1,"/")
-
--- If we have a network or prefix then there is a change that we end up with two
--- // in the middle ... we could prevent this if we (1) expand prefixes: and (2)
--- split and rebuild as url. Of course we could assume no network paths (which
--- makes sense) adn assume either mapped drives (windows) or mounts (unix) but
--- then we still have to deal with urls ... anyhow, multiple // are never a real
--- problem but just ugly.
-
+file.isreadable=file.is_readable
+file.iswritable=file.is_writable
+function file.size(name)
+ if name then
+ local a=attributes(name)
+ return a and a.size or 0
+ else
+ return 0
+ end
+end
+function file.splitpath(str,separator)
+ return str and checkedsplit(lpegmatch(reslasher,str),separator or io.pathseparator)
+end
+function file.joinpath(tab,separator)
+ return tab and concat(tab,separator or io.pathseparator)
+end
+local stripper=Cs(P(fwslash)^0/""*reslasher)
+local isnetwork=fwslash*fwslash*(1-fwslash)+(1-fwslash-colon)^1*colon
+local isroot=fwslash^1*-1
+local hasroot=fwslash^1
+local deslasher=lpeg.replacer(S("\\/")^1,"/")
function file.join(...)
- local lst = { ... }
- local one = lst[1]
- if lpegmatch(isnetwork,one) then
- local two = lpegmatch(deslasher,concat(lst,"/",2))
- return one .. "/" .. two
- elseif lpegmatch(isroot,one) then
- local two = lpegmatch(deslasher,concat(lst,"/",2))
- if lpegmatch(hasroot,two) then
- return two
- else
- return "/" .. two
- end
- elseif one == "" then
- return lpegmatch(stripper,concat(lst,"/",2))
+ local lst={... }
+ local one=lst[1]
+ if lpegmatch(isnetwork,one) then
+ local two=lpegmatch(deslasher,concat(lst,"/",2))
+ return one.."/"..two
+ elseif lpegmatch(isroot,one) then
+ local two=lpegmatch(deslasher,concat(lst,"/",2))
+ if lpegmatch(hasroot,two) then
+ return two
else
- return lpegmatch(deslasher,concat(lst,"/"))
- end
-end
-
--- print(file.join("c:/whatever","name"))
--- print(file.join("//","/y"))
--- print(file.join("/","/y"))
--- print(file.join("","/y"))
--- print(file.join("/x/","/y"))
--- print(file.join("x/","/y"))
--- print(file.join("http://","/y"))
--- print(file.join("http://a","/y"))
--- print(file.join("http:///a","/y"))
--- print(file.join("//nas-1","/y"))
-
--- The previous one fails on "a.b/c" so Taco came up with a split based
--- variant. After some skyping we got it sort of compatible with the old
--- one. After that the anchoring to currentdir was added in a better way.
--- Of course there are some optimizations too. Finally we had to deal with
--- windows drive prefixes and things like sys://. Eventually gsubs and
--- finds were replaced by lpegs.
-
-local drivespec = R("az","AZ")^1 * colon
-local anchors = fwslash + drivespec
-local untouched = periods + (1-period)^1 * P(-1)
-local splitstarter = (Cs(drivespec * (bwslash/"/" + fwslash)^0) + Cc(false)) * Ct(lpeg.splitat(S("/\\")^1))
-local absolute = fwslash
-
+ return "/"..two
+ end
+ elseif one=="" then
+ return lpegmatch(stripper,concat(lst,"/",2))
+ else
+ return lpegmatch(deslasher,concat(lst,"/"))
+ end
+end
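-- Sketch: join() collapses duplicate slashes while keeping a leading root,
-- drive or network prefix intact (compare the dropped test lines above).
file.join("a","b","c")           -- "a/b/c"
file.join("c:/whatever","name")  -- "c:/whatever/name"
file.join("/x/","/y")            -- "/x/y"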
+local drivespec=R("az","AZ")^1*colon
+local anchors=fwslash+drivespec
+local untouched=periods+(1-period)^1*P(-1)
+local splitstarter=(Cs(drivespec*(bwslash/"/"+fwslash)^0)+Cc(false))*Ct(lpeg.splitat(S("/\\")^1))
+local absolute=fwslash
function file.collapsepath(str,anchor)
- if not str then
- return
- end
- if anchor and not lpegmatch(anchors,str) then
- str = getcurrentdir() .. "/" .. str
- end
- if str == "" or str =="." then
- return "."
- elseif lpegmatch(untouched,str) then
- return lpegmatch(reslasher,str)
- end
- local starter, oldelements = lpegmatch(splitstarter,str)
- local newelements = { }
- local i = #oldelements
- while i > 0 do
- local element = oldelements[i]
- if element == '.' then
- -- do nothing
- elseif element == '..' then
- local n = i - 1
- while n > 0 do
- local element = oldelements[n]
- if element ~= '..' and element ~= '.' then
- oldelements[n] = '.'
- break
- else
- n = n - 1
- end
- end
- if n < 1 then
- insert(newelements,1,'..')
- end
- elseif element ~= "" then
- insert(newelements,1,element)
- end
- i = i - 1
- end
- if #newelements == 0 then
- return starter or "."
- elseif starter then
- return starter .. concat(newelements, '/')
- elseif lpegmatch(absolute,str) then
- return "/" .. concat(newelements,'/')
- else
- return concat(newelements, '/')
- end
-end
-
--- local function test(str)
--- print(string.format("%-20s %-15s %-15s",str,file.collapsepath(str),file.collapsepath(str,true)))
--- end
--- test("a/b.c/d") test("b.c/d") test("b.c/..")
--- test("/") test("c:/..") test("sys://..")
--- test("") test("./") test(".") test("..") test("./..") test("../..")
--- test("a") test("./a") test("/a") test("a/../..")
--- test("a/./b/..") test("a/aa/../b/bb") test("a/.././././b/..") test("a/./././b/..")
--- test("a/b/c/../..") test("./a/b/c/../..") test("a/b/c/../..")
-
-local validchars = R("az","09","AZ","--","..")
-local pattern_a = lpeg.replacer(1-validchars)
-local pattern_a = Cs((validchars + P(1)/"-")^1)
-local whatever = P("-")^0 / ""
-local pattern_b = Cs(whatever * (1 - whatever * -1)^1)
-
-function file.robustname(str,strict)
- if str then
- str = lpegmatch(pattern_a,str) or str
- if strict then
- return lpegmatch(pattern_b,str) or str -- two step is cleaner (less backtracking)
+ if not str then
+ return
+ end
+ if anchor and not lpegmatch(anchors,str) then
+ str=getcurrentdir().."/"..str
+ end
+ if str=="" or str=="." then
+ return "."
+ elseif lpegmatch(untouched,str) then
+ return lpegmatch(reslasher,str)
+ end
+ local starter,oldelements=lpegmatch(splitstarter,str)
+ local newelements={}
+ local i=#oldelements
+ while i>0 do
+ local element=oldelements[i]
+ if element=='.' then
+ elseif element=='..' then
+ local n=i-1
+ while n>0 do
+ local element=oldelements[n]
+ if element~='..' and element~='.' then
+ oldelements[n]='.'
+ break
else
- return str
- end
+ n=n-1
+ end
+ end
+ if n<1 then
+ insert(newelements,1,'..')
+ end
+ elseif element~="" then
+ insert(newelements,1,element)
+ end
+ i=i-1
+ end
+ if #newelements==0 then
+ return starter or "."
+ elseif starter then
+ return starter..concat(newelements,'/')
+ elseif lpegmatch(absolute,str) then
+ return "/"..concat(newelements,'/')
+ else
+ return concat(newelements,'/')
+ end
+end
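-- Sketch: collapsepath resolves "." and ".." segments; with a true second
-- argument a relative path is first anchored to the current directory.
file.collapsepath("a/b/../c")  -- "a/c"
file.collapsepath("a/./b/..")  -- "a"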
+local validchars=R("az","09","AZ","--","..")
+local pattern_a=lpeg.replacer(1-validchars)
+local pattern_a=Cs((validchars+P(1)/"-")^1)
+local whatever=P("-")^0/""
+local pattern_b=Cs(whatever*(1-whatever*-1)^1)
+function file.robustname(str,strict)
+ if str then
+ str=lpegmatch(pattern_a,str) or str
+ if strict then
+ return lpegmatch(pattern_b,str) or str
+ else
+ return str
end
+ end
end
-
-file.readdata = io.loaddata
-file.savedata = io.savedata
-
+file.readdata=io.loaddata
+file.savedata=io.savedata
function file.copy(oldname,newname)
- if oldname and newname then
- file.savedata(newname,io.loaddata(oldname))
- end
-end
-
--- also rewrite previous
-
-local letter = R("az","AZ") + S("_-+")
-local separator = P("://")
-
-local qualified = period^0 * fwslash
- + letter * colon
- + letter^1 * separator
- + letter^1 * fwslash
-local rootbased = fwslash
- + letter * colon
-
-lpeg.patterns.qualified = qualified
-lpeg.patterns.rootbased = rootbased
-
--- ./name ../name /name c: :// name/name
-
+ if oldname and newname then
+ file.savedata(newname,io.loaddata(oldname))
+ end
+end
+local letter=R("az","AZ")+S("_-+")
+local separator=P("://")
+local qualified=period^0*fwslash+letter*colon+letter^1*separator+letter^1*fwslash
+local rootbased=fwslash+letter*colon
+lpeg.patterns.qualified=qualified
+lpeg.patterns.rootbased=rootbased
function file.is_qualified_path(filename)
- return filename and lpegmatch(qualified,filename) ~= nil
+ return filename and lpegmatch(qualified,filename)~=nil
end
-
function file.is_rootbased_path(filename)
- return filename and lpegmatch(rootbased,filename) ~= nil
+ return filename and lpegmatch(rootbased,filename)~=nil
end
-
--- function test(t) for k, v in next, t do print(v, "=>", file.splitname(v)) end end
---
--- test { "c:", "c:/aa", "c:/aa/bb", "c:/aa/bb/cc", "c:/aa/bb/cc.dd", "c:/aa/bb/cc.dd.ee" }
--- test { "c:", "c:aa", "c:aa/bb", "c:aa/bb/cc", "c:aa/bb/cc.dd", "c:aa/bb/cc.dd.ee" }
--- test { "/aa", "/aa/bb", "/aa/bb/cc", "/aa/bb/cc.dd", "/aa/bb/cc.dd.ee" }
--- test { "aa", "aa/bb", "aa/bb/cc", "aa/bb/cc.dd", "aa/bb/cc.dd.ee" }
-
--- -- maybe:
---
--- if os.type == "windows" then
--- local currentdir = getcurrentdir
--- function getcurrentdir()
--- return lpegmatch(reslasher,currentdir())
--- end
--- end
-
--- for myself:
-
function file.strip(name,dir)
- if name then
- local b, a = match(name,"^(.-)" .. dir .. "(.*)$")
- return a ~= "" and a or name
- end
+ if name then
+ local b,a=match(name,"^(.-)"..dir.."(.*)$")
+ return a~="" and a or name
+ end
end
--- local debuglist = {
--- "pathpart", "basename", "nameonly", "suffixonly", "suffix", "dirname", "extname",
--- "addsuffix", "removesuffix", "replacesuffix", "join",
--- "strip","collapsepath", "joinpath", "splitpath",
--- }
-
--- for i=1,#debuglist do
--- local name = debuglist[i]
--- local f = file[name]
--- file[name] = function(...)
--- print(name,f(...))
--- return f(...)
--- end
--- end
-
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['l-io'] = {
- version = 1.001,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['l-boolean']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-local io = io
-local byte, find, gsub, format = string.byte, string.find, string.gsub, string.format
-local concat = table.concat
-local floor = math.floor
-local type = type
-
-if string.find(os.getenv("PATH"),";") then
- io.fileseparator, io.pathseparator = "\\", ";"
-else
- io.fileseparator, io.pathseparator = "/" , ":"
-end
-
-local function readall(f)
- return f:read("*all")
-end
-
--- The next one is upto 50% faster on large files and less memory consumption due
--- to less intermediate large allocations. This phenomena was discussed on the
--- luatex dev list.
-
-local function readall(f)
- local size = f:seek("end")
- if size == 0 then
- return ""
- elseif size < 1024*1024 then
- f:seek("set",0)
- return f:read('*all')
- else
- local done = f:seek("set",0)
- if size < 1024*1024 then
- step = 1024 * 1024
- elseif size > 16*1024*1024 then
- step = 16*1024*1024
- else
- step = floor(size/(1024*1024)) * 1024 * 1024 / 8
- end
- local data = { }
- while true do
- local r = f:read(step)
- if not r then
- return concat(data)
- else
- data[#data+1] = r
- end
- end
- end
-end
-
-io.readall = readall
-
-function io.loaddata(filename,textmode) -- return nil if empty
- local f = io.open(filename,(textmode and 'r') or 'rb')
- if f then
--- local data = f:read('*all')
- local data = readall(f)
- f:close()
- if #data > 0 then
- return data
- end
- end
-end
-
-function io.savedata(filename,data,joiner)
- local f = io.open(filename,"wb")
- if f then
- if type(data) == "table" then
- f:write(concat(data,joiner or ""))
- elseif type(data) == "function" then
- data(f)
- else
- f:write(data or "")
- end
- f:close()
- io.flush()
- return true
- else
- return false
- end
+local type,tonumber=type,tonumber
+boolean=boolean or {}
+local boolean=boolean
+function boolean.tonumber(b)
+ if b then return 1 else return 0 end
end
-
--- we can also chunk this one if needed: io.lines(filename,chunksize,"*l")
-
-function io.loadlines(filename,n) -- return nil if empty
- local f = io.open(filename,'r')
- if not f then
- -- no file
- elseif n then
- local lines = { }
- for i=1,n do
- local line = f:read("*lines")
- if line then
- lines[#lines+1] = line
- else
- break
- end
- end
- f:close()
- lines = concat(lines,"\n")
- if #lines > 0 then
- return lines
- end
- else
- local line = f:read("*line") or ""
- f:close()
- if #line > 0 then
- return line
- end
- end
+function toboolean(str,tolerant)
+ if str==nil then
+ return false
+ elseif str==false then
+ return false
+ elseif str==true then
+ return true
+ elseif str=="true" then
+ return true
+ elseif str=="false" then
+ return false
+ elseif not tolerant then
+ return false
+ elseif str==0 then
+ return false
+ elseif (tonumber(str) or 0)>0 then
+ return true
+ else
+ return str=="yes" or str=="on" or str=="t"
+ end
end
-
-function io.loadchunk(filename,n)
- local f = io.open(filename,'rb')
- if f then
- local data = f:read(n or 1024)
- f:close()
- if #data > 0 then
- return data
- end
- end
+string.toboolean=toboolean
+function string.booleanstring(str)
+ if str==nil then
+ return false
+ elseif str==false then
+ return false
+ elseif str==true then
+ return true
+ elseif str=="true" then
+ return true
+ elseif str=="false" then
+ return false
+ elseif str==0 then
+ return false
+ elseif (tonumber(str) or 0)>0 then
+ return true
+ else
+ return str=="yes" or str=="on" or str=="t"
+ end
end
-
-function io.exists(filename)
- local f = io.open(filename)
- if f == nil then
- return false
- else
- f:close()
- return true
+function string.is_boolean(str,default)
+ if type(str)=="string" then
+ if str=="true" or str=="yes" or str=="on" or str=="t" then
+ return true
+ elseif str=="false" or str=="no" or str=="off" or str=="f" then
+ return false
end
+ end
+ return default
end
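-- Conversion sketch: toboolean is strict unless the tolerant flag is set,
-- and string.is_boolean falls back to the given default for unknown words.
toboolean("true")                  -- true
toboolean("1")                     -- false (not tolerant)
toboolean("1",true)                -- true
string.is_boolean("maybe",false)   -- false (the default)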
-function io.size(filename)
- local f = io.open(filename)
- if f == nil then
- return 0
- else
- local s = f:seek("end")
- f:close()
- return s
- end
-end
+end -- closure
-function io.noflines(f)
- if type(f) == "string" then
- local f = io.open(filename)
- if f then
- local n = f and io.noflines(f) or 0
- f:close()
- return n
- else
- return 0
- end
- else
- local n = 0
- for _ in f:lines() do
- n = n + 1
- end
- f:seek('set',0)
- return n
- end
-end
+do -- begin closure to overcome local limits and interference
-local nextchar = {
- [ 4] = function(f)
- return f:read(1,1,1,1)
- end,
- [ 2] = function(f)
- return f:read(1,1)
- end,
- [ 1] = function(f)
- return f:read(1)
- end,
- [-2] = function(f)
- local a, b = f:read(1,1)
- return b, a
- end,
- [-4] = function(f)
- local a, b, c, d = f:read(1,1,1,1)
- return d, c, b, a
- end
+if not modules then modules={} end modules ['l-math']={
+ version=1.001,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-function io.characters(f,n)
- if f then
- return nextchar[n or 1], f
- end
+local floor,sin,cos,tan=math.floor,math.sin,math.cos,math.tan
+if not math.round then
+ function math.round(x) return floor(x+0.5) end
end
-
-local nextbyte = {
- [4] = function(f)
- local a, b, c, d = f:read(1,1,1,1)
- if d then
- return byte(a), byte(b), byte(c), byte(d)
- end
- end,
- [3] = function(f)
- local a, b, c = f:read(1,1,1)
- if b then
- return byte(a), byte(b), byte(c)
- end
- end,
- [2] = function(f)
- local a, b = f:read(1,1)
- if b then
- return byte(a), byte(b)
- end
- end,
- [1] = function (f)
- local a = f:read(1)
- if a then
- return byte(a)
- end
- end,
- [-2] = function (f)
- local a, b = f:read(1,1)
- if b then
- return byte(b), byte(a)
- end
- end,
- [-3] = function(f)
- local a, b, c = f:read(1,1,1)
- if b then
- return byte(c), byte(b), byte(a)
- end
- end,
- [-4] = function(f)
- local a, b, c, d = f:read(1,1,1,1)
- if d then
- return byte(d), byte(c), byte(b), byte(a)
- end
- end
-}
-
-function io.bytes(f,n)
- if f then
- return nextbyte[n or 1], f
- else
- return nil, nil
- end
+if not math.div then
+ function math.div(n,m) return floor(n/m) end
end
-
-function io.ask(question,default,options)
- while true do
- io.write(question)
- if options then
- io.write(format(" [%s]",concat(options,"|")))
- end
- if default then
- io.write(format(" [%s]",default))
- end
- io.write(format(" "))
- io.flush()
- local answer = io.read()
- answer = gsub(answer,"^%s*(.*)%s*$","%1")
- if answer == "" and default then
- return default
- elseif not options then
- return answer
- else
- for k=1,#options do
- if options[k] == answer then
- return answer
- end
- end
- local pattern = "^" .. answer
- for k=1,#options do
- local v = options[k]
- if find(v,pattern) then
- return v
- end
- end
- end
- end
+if not math.mod then
+ function math.mod(n,m) return n%m end
end
-
-local function readnumber(f,n,m)
- if m then
- f:seek("set",n)
- n = m
- end
- if n == 1 then
- return byte(f:read(1))
- elseif n == 2 then
- local a, b = byte(f:read(2),1,2)
- return 256 * a + b
- elseif n == 3 then
- local a, b, c = byte(f:read(3),1,3)
- return 256*256 * a + 256 * b + c
- elseif n == 4 then
- local a, b, c, d = byte(f:read(4),1,4)
- return 256*256*256 * a + 256*256 * b + 256 * c + d
- elseif n == 8 then
- local a, b = readnumber(f,4), readnumber(f,4)
- return 256 * a + b
- elseif n == 12 then
- local a, b, c = readnumber(f,4), readnumber(f,4), readnumber(f,4)
- return 256*256 * a + 256 * b + c
- elseif n == -2 then
- local b, a = byte(f:read(2),1,2)
- return 256*a + b
- elseif n == -3 then
- local c, b, a = byte(f:read(3),1,3)
- return 256*256 * a + 256 * b + c
- elseif n == -4 then
- local d, c, b, a = byte(f:read(4),1,4)
- return 256*256*256 * a + 256*256 * b + 256*c + d
- elseif n == -8 then
- local h, g, f, e, d, c, b, a = byte(f:read(8),1,8)
- return 256*256*256*256*256*256*256 * a +
- 256*256*256*256*256*256 * b +
- 256*256*256*256*256 * c +
- 256*256*256*256 * d +
- 256*256*256 * e +
- 256*256 * f +
- 256 * g +
- h
- else
- return 0
- end
+local pipi=2*math.pi/360
+if not math.sind then
+ function math.sind(d) return sin(d*pipi) end
+ function math.cosd(d) return cos(d*pipi) end
+ function math.tand(d) return tan(d*pipi) end
end
-
-io.readnumber = readnumber
-
-function io.readstring(f,n,m)
- if m then
- f:seek("set",n)
- n = m
- end
- local str = gsub(f:read(n),"\000","")
- return str
+if not math.odd then
+ function math.odd (n) return n%2~=0 end
+ function math.even(n) return n%2==0 end
end
---
-
-if not io.i_limiter then function io.i_limiter() end end -- dummy so we can test safely
-if not io.o_limiter then function io.o_limiter() end end -- dummy so we can test safely
-
--- This works quite ok:
---
--- function io.piped(command,writer)
--- local pipe = io.popen(command)
--- -- for line in pipe:lines() do
--- -- print(line)
--- -- end
--- while true do
--- local line = pipe:read(1)
--- if not line then
--- break
--- elseif line ~= "\n" then
--- writer(line)
--- end
--- end
--- return pipe:close() -- ok, status, (error)code
--- end
-
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['luat-basics-gen'] = {
- version = 1.100,
- comment = "companion to luatex-*.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['luat-basics-gen']={
+ version=1.100,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
-end
-
-local dummyfunction = function() end
-local dummyreporter = function(c) return function(...) texio.write(c .. " : " .. string.format(...)) end end
-
-statistics = {
- register = dummyfunction,
- starttiming = dummyfunction,
- stoptiming = dummyfunction,
- elapsedtime = nil,
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local dummyfunction=function() end
+local dummyreporter=function(c) return function(...) texio.write(c.." : "..string.format(...)) end end
+statistics={
+ register=dummyfunction,
+ starttiming=dummyfunction,
+ stoptiming=dummyfunction,
+ elapsedtime=nil,
}
-
-directives = {
- register = dummyfunction,
- enable = dummyfunction,
- disable = dummyfunction,
+directives={
+ register=dummyfunction,
+ enable=dummyfunction,
+ disable=dummyfunction,
}
-
-trackers = {
- register = dummyfunction,
- enable = dummyfunction,
- disable = dummyfunction,
+trackers={
+ register=dummyfunction,
+ enable=dummyfunction,
+ disable=dummyfunction,
}
-
-experiments = {
- register = dummyfunction,
- enable = dummyfunction,
- disable = dummyfunction,
+experiments={
+ register=dummyfunction,
+ enable=dummyfunction,
+ disable=dummyfunction,
}
-
-storage = { -- probably no longer needed
- register = dummyfunction,
- shared = { },
+storage={
+ register=dummyfunction,
+ shared={},
}
-
-logs = {
- new = dummyreporter,
- reporter = dummyreporter,
- messenger = dummyreporter,
- report = dummyfunction,
+logs={
+ new=dummyreporter,
+ reporter=dummyreporter,
+ messenger=dummyreporter,
+ report=dummyfunction,
}
-
-callbacks = {
- register = function(n,f) return callback.register(n,f) end,
-
+callbacks={
+ register=function(n,f) return callback.register(n,f) end,
}
-
-utilities = {
- storage = {
- allocate = function(t) return t or { } end,
- mark = function(t) return t or { } end,
- },
+utilities={
+ storage={
+ allocate=function(t) return t or {} end,
+ mark=function(t) return t or {} end,
+ },
}
-
-characters = characters or {
- data = { }
+characters=characters or {
+ data={}
}
-
--- we need to cheat a bit here
-
-texconfig.kpse_init = true
-
-resolvers = resolvers or { } -- no fancy file helpers used
-
-local remapper = {
- otf = "opentype fonts",
- ttf = "truetype fonts",
- ttc = "truetype fonts",
- dfont = "truetype fonts", -- "truetype dictionary",
- cid = "cid maps",
- cidmap = "cid maps",
- fea = "font feature files",
- pfa = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this!
- pfb = "type1 fonts", -- this is for Khaled, in ConTeXt we don't use this!
+texconfig.kpse_init=true
+resolvers=resolvers or {}
+local remapper={
+ otf="opentype fonts",
+ ttf="truetype fonts",
+ ttc="truetype fonts",
+ dfont="truetype fonts",
+ cid="cid maps",
+ cidmap="cid maps",
+ fea="font feature files",
+ pfa="type1 fonts",
+ pfb="type1 fonts",
}
-
function resolvers.findfile(name,fileformat)
- name = string.gsub(name,"\\","/")
- if not fileformat or fileformat == "" then
- fileformat = file.suffix(name)
- if fileformat == "" then
- fileformat = "tex"
- end
- end
- fileformat = string.lower(fileformat)
- fileformat = remapper[fileformat] or fileformat
- local found = kpse.find_file(name,fileformat)
- if not found or found == "" then
- found = kpse.find_file(name,"other text files")
- end
- return found
-end
-
--- function resolvers.findbinfile(name,fileformat)
--- if not fileformat or fileformat == "" then
--- fileformat = file.suffix(name)
--- end
--- return resolvers.findfile(name,(fileformat and remapper[fileformat]) or fileformat)
--- end
-
-resolvers.findbinfile = resolvers.findfile
-
+ name=string.gsub(name,"\\","/")
+ if not fileformat or fileformat=="" then
+ fileformat=file.suffix(name)
+ if fileformat=="" then
+ fileformat="tex"
+ end
+ end
+ fileformat=string.lower(fileformat)
+ fileformat=remapper[fileformat] or fileformat
+ local found=kpse.find_file(name,fileformat)
+ if not found or found=="" then
+ found=kpse.find_file(name,"other text files")
+ end
+ return found
+end
+resolvers.findbinfile=resolvers.findfile
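-- Usage sketch, not taken from the merged file itself (the file names are only
-- examples and kpse is assumed to be initialized, as it is when luatex loads
-- this file): the remapper above turns a suffix into a kpse format name before
-- kpse.find_file is asked for the file; unknown suffixes pass through as-is.
local otfpath = resolvers.findfile("lmroman10-regular.otf")   -- looked up as "opentype fonts"
local tfmpath = resolvers.findfile("cmr10.tfm","tfm")         -- "tfm" is not remapped, handed to kpse unchanged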
function resolvers.resolve(s)
- return s
+ return s
end
-
function resolvers.unresolve(s)
- return s
+ return s
end
-
--- Caches ... I will make a real stupid version some day when I'm in the
--- mood. After all, the generic code does not need the more advanced
--- ConTeXt features. Cached data is not shared between ConTeXt and other
--- usage as I don't want any dependency at all. Also, ConTeXt might have
--- different needs and tricks added.
-
---~ containers.usecache = true
-
-caches = { }
-
-local writable, readables = nil, { }
-
-if not caches.namespace or caches.namespace == "" or caches.namespace == "context" then
- caches.namespace = 'generic'
+caches={}
+local writable,readables=nil,{}
+if not caches.namespace or caches.namespace=="" or caches.namespace=="context" then
+ caches.namespace='generic'
end
-
do
-
- local cachepaths = kpse.expand_path('$TEXMFCACHE') or ""
-
- if cachepaths == "" then
- cachepaths = kpse.expand_path('$TEXMFVAR')
- end
-
- if cachepaths == "" then
- cachepaths = kpse.expand_path('$VARTEXMF')
- end
-
- if cachepaths == "" then
- cachepaths = "."
- end
-
- cachepaths = string.split(cachepaths,os.type == "windows" and ";" or ":")
-
- for i=1,#cachepaths do
- if file.is_writable(cachepaths[i]) then
- writable = file.join(cachepaths[i],"luatex-cache")
- lfs.mkdir(writable)
- writable = file.join(writable,caches.namespace)
- lfs.mkdir(writable)
- break
- end
- end
-
- for i=1,#cachepaths do
- if file.is_readable(cachepaths[i]) then
- readables[#readables+1] = file.join(cachepaths[i],"luatex-cache",caches.namespace)
- end
- end
-
- if not writable then
- texio.write_nl("quiting: fix your writable cache path")
- os.exit()
- elseif #readables == 0 then
- texio.write_nl("quiting: fix your readable cache path")
- os.exit()
- elseif #readables == 1 and readables[1] == writable then
- texio.write(string.format("(using cache: %s)",writable))
- else
- texio.write(string.format("(using write cache: %s)",writable))
- texio.write(string.format("(using read cache: %s)",table.concat(readables, " ")))
- end
-
+ local cachepaths=kpse.expand_path('$TEXMFCACHE') or ""
+ if cachepaths=="" then
+ cachepaths=kpse.expand_path('$TEXMFVAR')
+ end
+ if cachepaths=="" then
+ cachepaths=kpse.expand_path('$VARTEXMF')
+ end
+ if cachepaths=="" then
+ cachepaths="."
+ end
+ cachepaths=string.split(cachepaths,os.type=="windows" and ";" or ":")
+ for i=1,#cachepaths do
+ if file.is_writable(cachepaths[i]) then
+ writable=file.join(cachepaths[i],"luatex-cache")
+ lfs.mkdir(writable)
+ writable=file.join(writable,caches.namespace)
+ lfs.mkdir(writable)
+ break
+ end
+ end
+ for i=1,#cachepaths do
+ if file.is_readable(cachepaths[i]) then
+ readables[#readables+1]=file.join(cachepaths[i],"luatex-cache",caches.namespace)
+ end
+ end
+ if not writable then
+ texio.write_nl("quiting: fix your writable cache path")
+ os.exit()
+ elseif #readables==0 then
+ texio.write_nl("quiting: fix your readable cache path")
+ os.exit()
+ elseif #readables==1 and readables[1]==writable then
+ texio.write(string.format("(using cache: %s)",writable))
+ else
+ texio.write(string.format("(using write cache: %s)",writable))
+ texio.write(string.format("(using read cache: %s)",table.concat(readables," ")))
+ end
end
-
function caches.getwritablepath(category,subcategory)
- local path = file.join(writable,category)
- lfs.mkdir(path)
- path = file.join(path,subcategory)
- lfs.mkdir(path)
- return path
+ local path=file.join(writable,category)
+ lfs.mkdir(path)
+ path=file.join(path,subcategory)
+ lfs.mkdir(path)
+ return path
end
-
function caches.getreadablepaths(category,subcategory)
- local t = { }
- for i=1,#readables do
- t[i] = file.join(readables[i],category,subcategory)
- end
- return t
+ local t={}
+ for i=1,#readables do
+ t[i]=file.join(readables[i],category,subcategory)
+ end
+ return t
end
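-- Small sketch, not taken from the merged file itself (category and
-- subcategory are made-up names): cached files live below
-- <root>/luatex-cache/<namespace>/<category>/<subcategory> for every cache
-- root found above. getreadablepaths only joins paths and has no side
-- effects; getwritablepath would also create the directories.
local readpaths = caches.getreadablepaths("demo","test")
-- local writepath = caches.getwritablepath("demo","test")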
-
local function makefullname(path,name)
- if path and path ~= "" then
- name = "temp-" .. name -- clash prevention
- return file.addsuffix(file.join(path,name),"lua"), file.addsuffix(file.join(path,name),"luc")
- end
+ if path and path~="" then
+ name="temp-"..name
+ return file.addsuffix(file.join(path,name),"lua"),file.addsuffix(file.join(path,name),"luc")
+ end
end
-
function caches.is_writable(path,name)
- local fullname = makefullname(path,name)
- return fullname and file.is_writable(fullname)
+ local fullname=makefullname(path,name)
+ return fullname and file.is_writable(fullname)
end
-
function caches.loaddata(paths,name)
- for i=1,#paths do
- local data = false
- local luaname, lucname = makefullname(paths[i],name)
- if lucname and lfs.isfile(lucname) then
- texio.write(string.format("(load: %s)",lucname))
- data = loadfile(lucname)
- end
- if not data and luaname and lfs.isfile(luaname) then
- texio.write(string.format("(load: %s)",luaname))
- data = loadfile(luaname)
- end
- return data and data()
+ for i=1,#paths do
+ local data=false
+ local luaname,lucname=makefullname(paths[i],name)
+ if lucname and lfs.isfile(lucname) then
+ texio.write(string.format("(load: %s)",lucname))
+ data=loadfile(lucname)
end
-end
-
-function caches.savedata(path,name,data)
- local luaname, lucname = makefullname(path,name)
- if luaname then
- texio.write(string.format("(save: %s)",luaname))
- table.tofile(luaname,data,true,{ reduce = true })
- if lucname and type(caches.compile) == "function" then
- os.remove(lucname) -- better be safe
- texio.write(string.format("(save: %s)",lucname))
- caches.compile(data,luaname,lucname)
- end
+ if not data and luaname and lfs.isfile(luaname) then
+ texio.write(string.format("(load: %s)",luaname))
+ data=loadfile(luaname)
end
+ return data and data()
+ end
end
-
--- According to KH os.execute is not permitted in plain/latex so there is
--- no reason to use the normal context way. So the method here is slightly
--- different from the one we have in context. We also use different suffixes
--- as we don't want any clashes (sharing cache files is not that handy as
--- context moves on faster.)
---
--- Beware: serialization might fail on large files (so maybe we should pcall
--- this) in which case one should limit the method to luac and enable support
--- for execution.
-
-caches.compilemethod = "luac" -- luac dump both
-
+function caches.savedata(path,name,data)
+ local luaname,lucname=makefullname(path,name)
+ if luaname then
+ texio.write(string.format("(save: %s)",luaname))
+ table.tofile(luaname,data,true,{ reduce=true })
+ if lucname and type(caches.compile)=="function" then
+ os.remove(lucname)
+ texio.write(string.format("(save: %s)",lucname))
+ caches.compile(data,luaname,lucname)
+ end
+ end
+end
+caches.compilemethod="luac"
function caches.compile(data,luaname,lucname)
- local done = false
- if caches.compilemethod == "luac" or caches.compilemethod == "both" then
- local command = "-o " .. string.quoted(lucname) .. " -s " .. string.quoted(luaname)
- done = os.spawn("texluac " .. command) == 0
- end
- if not done and (caches.compilemethod == "dump" or caches.compilemethod == "both") then
- local d = table.serialize(data,true)
- if d and d ~= "" then
- local f = io.open(lucname,'w')
- if f then
- local s = loadstring(d)
- f:write(string.dump(s))
- f:close()
- end
- end
+ local done=false
+ if caches.compilemethod=="luac" or caches.compilemethod=="both" then
+ local command="-o "..string.quoted(lucname).." -s "..string.quoted(luaname)
+ done=os.spawn("texluac "..command)==0
+ end
+ if not done and (caches.compilemethod=="dump" or caches.compilemethod=="both") then
+ local d=table.serialize(data,true)
+ if d and d~="" then
+ local f=io.open(lucname,'w')
+ if f then
+ local s=loadstring(d)
+ f:write(string.dump(s))
+ f:close()
+ end
end
+ end
end
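-- Minimal sketch, not taken from the merged file itself, of the "dump" branch
-- above with the pcall protection suggested in the removed comment (dumptofile
-- and the target name are made-up; table.serialize comes from the merged
-- utilities, loadstring is still available in this luatex setup):
local function dumptofile(data,lucname)
  local ok, d = pcall(table.serialize,data,true)        -- serializing huge tables may fail
  local chunk = ok and d and d ~= "" and loadstring(d)  -- compile the serialized table
  if chunk then
    local f = io.open(lucname,"wb")
    if f then
      f:write(string.dump(chunk))                       -- write the bytecode companion
      f:close()
      return true
    end
  end
  return false
end
-- dumptofile({ version = 1.1 },"demo.luc")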
-
---
-
function table.setmetatableindex(t,f)
- setmetatable(t,{ __index = f })
+ setmetatable(t,{ __index=f })
end
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['data-con'] = {
- version = 1.100,
- comment = "companion to luat-lib.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['data-con']={
+ version=1.100,
+ comment="companion to luat-lib.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-local format, lower, gsub = string.format, string.lower, string.gsub
-
-local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end)
-local trace_containers = false trackers.register("resolvers.containers", function(v) trace_containers = v end)
-local trace_storage = false trackers.register("resolvers.storage", function(v) trace_storage = v end)
-
---[[ldx--
-<p>Once we found ourselves defining similar cache constructs
-several times, containers were introduced. Containers are used
-to collect tables in memory and reuse them when possible based
-on (unique) hashes (to be provided by the calling function).</p>
-
-<p>Caching to disk is disabled by default. Version numbers are
-stored in the saved table which makes it possible to change the
-table structures without bothering about the disk cache.</p>
-
-<p>Examples of usage can be found in the font related code.</p>
---ldx]]--
-
-containers = containers or { }
-local containers = containers
-containers.usecache = true
-
-local report_containers = logs.reporter("resolvers","containers")
-
+local format,lower,gsub=string.format,string.lower,string.gsub
+local trace_cache=false trackers.register("resolvers.cache",function(v) trace_cache=v end)
+local trace_containers=false trackers.register("resolvers.containers",function(v) trace_containers=v end)
+local trace_storage=false trackers.register("resolvers.storage",function(v) trace_storage=v end)
+containers=containers or {}
+local containers=containers
+containers.usecache=true
+local report_containers=logs.reporter("resolvers","containers")
local function report(container,tag,name)
- if trace_cache or trace_containers then
- report_containers("container: %s, tag: %s, name: %s",container.subcategory,tag,name or 'invalid')
- end
-end
-
-local allocated = { }
-
-local mt = {
- __index = function(t,k)
- if k == "writable" then
- local writable = caches.getwritablepath(t.category,t.subcategory) or { "." }
- t.writable = writable
- return writable
- elseif k == "readables" then
- local readables = caches.getreadablepaths(t.category,t.subcategory) or { "." }
- t.readables = readables
- return readables
- end
- end,
- __storage__ = true
+ if trace_cache or trace_containers then
+ report_containers("container: %s, tag: %s, name: %s",container.subcategory,tag,name or 'invalid')
+ end
+end
+local allocated={}
+local mt={
+ __index=function(t,k)
+ if k=="writable" then
+ local writable=caches.getwritablepath(t.category,t.subcategory) or { "." }
+ t.writable=writable
+ return writable
+ elseif k=="readables" then
+ local readables=caches.getreadablepaths(t.category,t.subcategory) or { "." }
+ t.readables=readables
+ return readables
+ end
+ end,
+ __storage__=true
}
-
-function containers.define(category, subcategory, version, enabled)
- if category and subcategory then
- local c = allocated[category]
- if not c then
- c = { }
- allocated[category] = c
- end
- local s = c[subcategory]
- if not s then
- s = {
- category = category,
- subcategory = subcategory,
- storage = { },
- enabled = enabled,
- version = version or math.pi, -- after all, this is TeX
- trace = false,
- -- writable = caches.getwritablepath and caches.getwritablepath (category,subcategory) or { "." },
- -- readables = caches.getreadablepaths and caches.getreadablepaths(category,subcategory) or { "." },
- }
- setmetatable(s,mt)
- c[subcategory] = s
- end
- return s
+function containers.define(category,subcategory,version,enabled)
+ if category and subcategory then
+ local c=allocated[category]
+ if not c then
+ c={}
+ allocated[category]=c
+ end
+ local s=c[subcategory]
+ if not s then
+ s={
+ category=category,
+ subcategory=subcategory,
+ storage={},
+ enabled=enabled,
+ version=version or math.pi,
+ trace=false,
+ }
+ setmetatable(s,mt)
+ c[subcategory]=s
end
+ return s
+ end
end
-
-function containers.is_usable(container, name)
- return container.enabled and caches and caches.is_writable(container.writable, name)
+function containers.is_usable(container,name)
+ return container.enabled and caches and caches.is_writable(container.writable,name)
end
-
-function containers.is_valid(container, name)
- if name and name ~= "" then
- local storage = container.storage[name]
- return storage and storage.cache_version == container.version
- else
- return false
- end
+function containers.is_valid(container,name)
+ if name and name~="" then
+ local storage=container.storage[name]
+ return storage and storage.cache_version==container.version
+ else
+ return false
+ end
end
-
function containers.read(container,name)
- local storage = container.storage
- local stored = storage[name]
- if not stored and container.enabled and caches and containers.usecache then
- stored = caches.loaddata(container.readables,name)
- if stored and stored.cache_version == container.version then
- report(container,"loaded",name)
- else
- stored = nil
- end
- storage[name] = stored
- elseif stored then
- report(container,"reusing",name)
- end
- return stored
-end
-
-function containers.write(container, name, data)
- if data then
- data.cache_version = container.version
- if container.enabled and caches then
- local unique, shared = data.unique, data.shared
- data.unique, data.shared = nil, nil
- caches.savedata(container.writable, name, data)
- report(container,"saved",name)
- data.unique, data.shared = unique, shared
- end
- report(container,"stored",name)
- container.storage[name] = data
- end
- return data
+ local storage=container.storage
+ local stored=storage[name]
+ if not stored and container.enabled and caches and containers.usecache then
+ stored=caches.loaddata(container.readables,name)
+ if stored and stored.cache_version==container.version then
+ report(container,"loaded",name)
+ else
+ stored=nil
+ end
+ storage[name]=stored
+ elseif stored then
+ report(container,"reusing",name)
+ end
+ return stored
+end
+function containers.write(container,name,data)
+ if data then
+ data.cache_version=container.version
+ if container.enabled and caches then
+ local unique,shared=data.unique,data.shared
+ data.unique,data.shared=nil,nil
+ caches.savedata(container.writable,name,data)
+ report(container,"saved",name)
+ data.unique,data.shared=unique,shared
+ end
+ report(container,"stored",name)
+ container.storage[name]=data
+ end
+ return data
end
-
function containers.content(container,name)
- return container.storage[name]
+ return container.storage[name]
end
-
function containers.cleanname(name)
- return (gsub(lower(name),"[^%w%d]+","-"))
+ return (gsub(lower(name),"[^%w%d]+","-"))
end
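-- Usage sketch, not taken from the merged file itself (category, subcategory
-- and the hash are made-up names): a container couples an in-memory store
-- with the optional disk cache and the version number invalidates stale
-- entries on read; with enabled=false everything stays in memory.
local democache = containers.define("demo","examples",1.000,false)
local found     = containers.read(democache,"somehash")
if not found then
  found = containers.write(democache,"somehash",{ value = 123 })
end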
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['luatex-fonts-nod'] = {
- version = 1.001,
- comment = "companion to luatex-fonts.lua",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['luatex-fonts-nod']={
+ version=1.001,
+ comment="companion to luatex-fonts.lua",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
-end
-
--- Don't depend on code here as it is only needed to complement the
--- font handler code.
-
--- Attributes:
-
-if tex.attribute[0] ~= 0 then
-
- texio.write_nl("log","!")
- texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be")
- texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special")
- texio.write_nl("log","! purposes so setting them at the TeX end might break the font handler.")
- texio.write_nl("log","!")
-
- tex.attribute[0] = 0 -- else no features
-
-end
-
-attributes = { }
-attributes.unsetvalue = -0x7FFFFFFF
-
-local numbers, last = { }, 127
-
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+if tex.attribute[0]~=0 then
+ texio.write_nl("log","!")
+ texio.write_nl("log","! Attribute 0 is reserved for ConTeXt's font feature management and has to be")
+ texio.write_nl("log","! set to zero. Also, some attributes in the range 1-255 are used for special")
+ texio.write_nl("log","! purposes so setting them at the TeX end might break the font handler.")
+ texio.write_nl("log","!")
+ tex.attribute[0]=0
+end
+attributes={}
+attributes.unsetvalue=-0x7FFFFFFF
+local numbers,last={},127
function attributes.private(name)
- local number = numbers[name]
- if not number then
- if last < 255 then
- last = last + 1
- end
- number = last
- numbers[name] = number
+ local number=numbers[name]
+ if not number then
+ if last<255 then
+ last=last+1
+ end
+ number=last
+ numbers[name]=number
+ end
+ return number
+end
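-- Small sketch, not taken from the merged file itself (the attribute name is
-- arbitrary): private attribute numbers are handed out from 128 upwards and
-- saturate at 255; asking twice for the same name yields the same number.
local a1 = attributes.private("example:feature")
local a2 = attributes.private("example:feature")
assert(a1 == a2 and a1 >= 128 and a1 <= 255)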
+nodes={}
+nodes.pool={}
+nodes.handlers={}
+local nodecodes={} for k,v in next,node.types () do nodecodes[string.gsub(v,"_","")]=k end
+local whatcodes={} for k,v in next,node.whatsits() do whatcodes[string.gsub(v,"_","")]=k end
+local glyphcodes={ [0]="character","glyph","ligature","ghost","left","right" }
+nodes.nodecodes=nodecodes
+nodes.whatcodes=whatcodes
+nodes.whatsitcodes=whatcodes
+nodes.glyphcodes=glyphcodes
+local free_node=node.free
+local remove_node=node.remove
+local new_node=node.new
+local traverse_id=node.traverse_id
+local math_code=nodecodes.math
+nodes.handlers.protectglyphs=node.protect_glyphs
+nodes.handlers.unprotectglyphs=node.unprotect_glyphs
+function nodes.remove(head,current,free_too)
+ local t=current
+ head,current=remove_node(head,current)
+ if t then
+ if free_too then
+ free_node(t)
+ t=nil
+ else
+ t.next,t.prev=nil,nil
end
- return number
-end
-
--- Nodes:
-
-nodes = { }
-nodes.pool = { }
-nodes.handlers = { }
-
-local nodecodes = { } for k,v in next, node.types () do nodecodes[string.gsub(v,"_","")] = k end
-local whatcodes = { } for k,v in next, node.whatsits() do whatcodes[string.gsub(v,"_","")] = k end
-local glyphcodes = { [0] = "character", "glyph", "ligature", "ghost", "left", "right" }
-
-nodes.nodecodes = nodecodes
-nodes.whatcodes = whatcodes
-nodes.whatsitcodes = whatcodes
-nodes.glyphcodes = glyphcodes
-
-local free_node = node.free
-local remove_node = node.remove
-local new_node = node.new
-
-nodes.handlers.protectglyphs = node.protect_glyphs
-nodes.handlers.unprotectglyphs = node.unprotect_glyphs
-
-function nodes.remove(head, current, free_too)
- local t = current
- head, current = remove_node(head,current)
- if t then
- if free_too then
- free_node(t)
- t = nil
- else
- t.next, t.prev = nil, nil
- end
- end
- return head, current, t
+ end
+ return head,current,t
end
-
function nodes.delete(head,current)
- return nodes.remove(head,current,true)
+ return nodes.remove(head,current,true)
end
-
-nodes.before = node.insert_before
-nodes.after = node.insert_after
-
+nodes.before=node.insert_before
+nodes.after=node.insert_after
function nodes.pool.kern(k)
- local n = new_node("kern",1)
- n.kern = k
+ local n=new_node("kern",1)
+ n.kern=k
+ return n
+end
+function nodes.endofmath(n)
+ for n in traverse_id(math_code,n.next) do
return n
+ end
end
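-- Small sketch, not taken from the merged file itself (the amount is arbitrary
-- and this only runs inside luatex, where the node library exists): make a
-- 1pt kern node with the pool helper above and free it again.
local k = nodes.pool.kern(65536)
node.free(k)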
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['font-ini'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['font-ini']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
---[[ldx--
-<p>Not much is happening here.</p>
---ldx]]--
-
-local allocate = utilities.storage.allocate
-
-local report_defining = logs.reporter("fonts","defining")
-
-fonts = fonts or { }
-local fonts = fonts
-
-fonts.hashes = { identifiers = allocate() }
-
-fonts.tables = fonts.tables or { }
-fonts.helpers = fonts.helpers or { }
-fonts.tracers = fonts.tracers or { } -- for the moment till we have moved to moduledata
-fonts.specifiers = fonts.specifiers or { } -- in format !
-
-fonts.analyzers = { } -- not needed here
-fonts.readers = { }
-fonts.definers = { methods = { } }
-fonts.loggers = { register = function() end }
-
-fontloader.totable = fontloader.to_table
+local allocate=utilities.storage.allocate
+local report_defining=logs.reporter("fonts","defining")
+fonts=fonts or {}
+local fonts=fonts
+fonts.hashes={ identifiers=allocate() }
+fonts.tables=fonts.tables or {}
+fonts.helpers=fonts.helpers or {}
+fonts.tracers=fonts.tracers or {}
+fonts.specifiers=fonts.specifiers or {}
+fonts.analyzers={}
+fonts.readers={}
+fonts.definers={ methods={} }
+fonts.loggers={ register=function() end }
+fontloader.totable=fontloader.to_table
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['font-con'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['font-con']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
--- some names of table entries will be changed (no _)
-
-local next, tostring, rawget = next, tostring, rawget
-local format, match, lower, gsub = string.format, string.match, string.lower, string.gsub
-local utfbyte = utf.byte
-local sort, insert, concat, sortedkeys, serialize, fastcopy = table.sort, table.insert, table.concat, table.sortedkeys, table.serialize, table.fastcopy
-local derivetable = table.derive
-
-local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end)
-local trace_scaling = false trackers.register("fonts.scaling" , function(v) trace_scaling = v end)
-
-local report_defining = logs.reporter("fonts","defining")
-
--- watch out: no negative depths and negative heights permitted in regular fonts
-
---[[ldx--
-<p>Here we only implement a few helper functions.</p>
---ldx]]--
-
-local fonts = fonts
-local constructors = fonts.constructors or { }
-fonts.constructors = constructors
-local handlers = fonts.handlers or { } -- can have preloaded tables
-fonts.handlers = handlers
-
-local specifiers = fonts.specifiers
-local contextsetups = specifiers.contextsetups
-local contextnumbers = specifiers.contextnumbers
-
-local allocate = utilities.storage.allocate
-local setmetatableindex = table.setmetatableindex
-
--- will be directives
-
-constructors.dontembed = allocate()
-constructors.autocleanup = true
-constructors.namemode = "fullpath" -- will be a function
-
-constructors.version = 1.01
-constructors.cache = containers.define("fonts", "constructors", constructors.version, false)
-
-constructors.privateoffset = 0xF0000 -- 0x10FFFF
-
--- Some experimental helpers (handy for tracing):
---
--- todo: extra:
---
--- extra_space => space.extra
--- space => space.width
--- space_stretch => space.stretch
--- space_shrink => space.shrink
-
--- We do keep the x-height, extra_space, space_shrink and space_stretch
--- around as these are low level official names.
-
-constructors.keys = {
- properties = {
- encodingbytes = "number",
- embedding = "number",
- cidinfo = {
- },
- format = "string",
- fontname = "string",
- fullname = "string",
- filename = "filename",
- psname = "string",
- name = "string",
- virtualized = "boolean",
- hasitalics = "boolean",
- autoitalicamount = "basepoints",
- nostackmath = "boolean",
- noglyphnames = "boolean",
- mode = "string",
- hasmath = "boolean",
- mathitalics = "boolean",
- textitalics = "boolean",
- finalized = "boolean",
- },
- parameters = {
- mathsize = "number",
- scriptpercentage = "float",
- scriptscriptpercentage = "float",
- units = "cardinal",
- designsize = "scaledpoints",
- expansion = {
- stretch = "integerscale", -- might become float
- shrink = "integerscale", -- might become float
- step = "integerscale", -- might become float
- auto = "boolean",
- },
- protrusion = {
- auto = "boolean",
- },
- slantfactor = "float",
- extendfactor = "float",
- factor = "float",
- hfactor = "float",
- vfactor = "float",
- size = "scaledpoints",
- units = "scaledpoints",
- scaledpoints = "scaledpoints",
- slantperpoint = "scaledpoints",
- spacing = {
- width = "scaledpoints",
- stretch = "scaledpoints",
- shrink = "scaledpoints",
- extra = "scaledpoints",
- },
- xheight = "scaledpoints",
- quad = "scaledpoints",
- ascender = "scaledpoints",
- descender = "scaledpoints",
- synonyms = {
- space = "spacing.width",
- spacestretch = "spacing.stretch",
- spaceshrink = "spacing.shrink",
- extraspace = "spacing.extra",
- x_height = "xheight",
- space_stretch = "spacing.stretch",
- space_shrink = "spacing.shrink",
- extra_space = "spacing.extra",
- em = "quad",
- ex = "xheight",
- slant = "slantperpoint",
- },
- },
- description = {
- width = "basepoints",
- height = "basepoints",
- depth = "basepoints",
- boundingbox = { },
- },
- character = {
- width = "scaledpoints",
- height = "scaledpoints",
- depth = "scaledpoints",
- italic = "scaledpoints",
- },
+local next,tostring,rawget=next,tostring,rawget
+local format,match,lower,gsub=string.format,string.match,string.lower,string.gsub
+local utfbyte=utf.byte
+local sort,insert,concat,sortedkeys,serialize,fastcopy=table.sort,table.insert,table.concat,table.sortedkeys,table.serialize,table.fastcopy
+local derivetable=table.derive
+local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end)
+local trace_scaling=false trackers.register("fonts.scaling",function(v) trace_scaling=v end)
+local report_defining=logs.reporter("fonts","defining")
+local fonts=fonts
+local constructors=fonts.constructors or {}
+fonts.constructors=constructors
+local handlers=fonts.handlers or {}
+fonts.handlers=handlers
+local specifiers=fonts.specifiers
+local contextsetups=specifiers.contextsetups
+local contextnumbers=specifiers.contextnumbers
+local allocate=utilities.storage.allocate
+local setmetatableindex=table.setmetatableindex
+constructors.dontembed=allocate()
+constructors.autocleanup=true
+constructors.namemode="fullpath"
+constructors.version=1.01
+constructors.cache=containers.define("fonts","constructors",constructors.version,false)
+constructors.privateoffset=0xF0000
+constructors.keys={
+ properties={
+ encodingbytes="number",
+ embedding="number",
+ cidinfo={},
+ format="string",
+ fontname="string",
+ fullname="string",
+ filename="filename",
+ psname="string",
+ name="string",
+ virtualized="boolean",
+ hasitalics="boolean",
+ autoitalicamount="basepoints",
+ nostackmath="boolean",
+ noglyphnames="boolean",
+ mode="string",
+ hasmath="boolean",
+ mathitalics="boolean",
+ textitalics="boolean",
+ finalized="boolean",
+ },
+ parameters={
+ mathsize="number",
+ scriptpercentage="float",
+ scriptscriptpercentage="float",
+ units="cardinal",
+ designsize="scaledpoints",
+ expansion={
+ stretch="integerscale",
+ shrink="integerscale",
+ step="integerscale",
+ auto="boolean",
+ },
+ protrusion={
+ auto="boolean",
+ },
+ slantfactor="float",
+ extendfactor="float",
+ factor="float",
+ hfactor="float",
+ vfactor="float",
+ size="scaledpoints",
+ units="scaledpoints",
+ scaledpoints="scaledpoints",
+ slantperpoint="scaledpoints",
+ spacing={
+ width="scaledpoints",
+ stretch="scaledpoints",
+ shrink="scaledpoints",
+ extra="scaledpoints",
+ },
+ xheight="scaledpoints",
+ quad="scaledpoints",
+ ascender="scaledpoints",
+ descender="scaledpoints",
+ synonyms={
+ space="spacing.width",
+ spacestretch="spacing.stretch",
+ spaceshrink="spacing.shrink",
+ extraspace="spacing.extra",
+ x_height="xheight",
+ space_stretch="spacing.stretch",
+ space_shrink="spacing.shrink",
+ extra_space="spacing.extra",
+ em="quad",
+ ex="xheight",
+ slant="slantperpoint",
+ },
+ },
+ description={
+ width="basepoints",
+ height="basepoints",
+ depth="basepoints",
+ boundingbox={},
+ },
+ character={
+ width="scaledpoints",
+ height="scaledpoints",
+ depth="scaledpoints",
+ italic="scaledpoints",
+ },
}
-
--- This might become an interface:
-
-local designsizes = allocate()
-constructors.designsizes = designsizes
-local loadedfonts = allocate()
-constructors.loadedfonts = loadedfonts
-
---[[ldx--
-<p>We need to normalize the scale factor (in scaled points). This has to
-do with the fact that <l n='tex'/> uses a negative multiple of 1000 as
-a signal for a font scaled based on the design size.</p>
---ldx]]--
-
-local factors = {
- pt = 65536.0,
- bp = 65781.8,
+local designsizes=allocate()
+constructors.designsizes=designsizes
+local loadedfonts=allocate()
+constructors.loadedfonts=loadedfonts
+local factors={
+ pt=65536.0,
+ bp=65781.8,
}
-
function constructors.setfactor(f)
- constructors.factor = factors[f or 'pt'] or factors.pt
+ constructors.factor=factors[f or 'pt'] or factors.pt
end
-
constructors.setfactor()
-
-function constructors.scaled(scaledpoints, designsize) -- handles designsize in sp as well
- if scaledpoints < 0 then
- if designsize then
- local factor = constructors.factor
- if designsize > factor then -- or just 1000 / when? mp?
- return (- scaledpoints/1000) * designsize -- sp's
- else
- return (- scaledpoints/1000) * designsize * factor
- end
- else
- return (- scaledpoints/1000) * 10 * factor
- end
+function constructors.scaled(scaledpoints,designsize)
+ if scaledpoints<0 then
+ if designsize then
+ local factor=constructors.factor
+ if designsize>factor then
+ return (- scaledpoints/1000)*designsize
+ else
+ return (- scaledpoints/1000)*designsize*factor
+ end
else
- return scaledpoints
+ return (- scaledpoints/1000)*10*factor
end
+ else
+ return scaledpoints
+ end
end
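-- Worked example, not taken from the merged file itself (numbers chosen for
-- illustration): a negative size acts as a design-size multiplier in
-- thousandths, so -1200 against a 10pt (655360sp) design size gives about
-- 1.2 * 655360 = 786432sp, i.e. 12pt.
local sp = constructors.scaled(-1200,10*65536)
assert(math.abs(sp - 786432) < 1)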
-
---[[ldx--
-<p>Beware, the boundingbox is passed as reference so we may not overwrite it
-in the process; numbers are of course copies. Here 65536 equals 1pt. (Due to
-excessive memory usage in CJK fonts, we no longer pass the boundingbox.)</p>
---ldx]]--
-
--- The scaler is only used for otf and afm and virtual fonts. If
--- a virtual font has italic correction make sure to set the
--- hasitalics flag. Some more flags will be added in
--- the future.
-
---[[ldx--
-<p>The reason why the scaler was originally split is that for a while we experimented
-with a helper function. However, in practice the <l n='api'/> calls are too slow to
-make this profitable and the <l n='lua'/> based variant was just faster. A day
-wasted but an experience richer.</p>
---ldx]]--
-
--- we can get rid of the tfm instance when we have fast access to the
--- scaled character dimensions at the tex end, e.g. a fontobject.width
--- actually we already have some of that now as virtual keys in glyphs
---
--- flushing the kern and ligature tables from memory saves a lot (only
--- base mode) but it complicates vf building where the new characters
--- demand this data .. solution: functions that access them
-
function constructors.cleanuptable(tfmdata)
- if constructors.autocleanup and tfmdata.properties.virtualized then
- for k, v in next, tfmdata.characters do
- if v.commands then v.commands = nil end
- -- if v.kerns then v.kerns = nil end
- end
+ if constructors.autocleanup and tfmdata.properties.virtualized then
+ for k,v in next,tfmdata.characters do
+ if v.commands then v.commands=nil end
end
+ end
end
-
--- experimental, sharing kerns (unscaled and scaled) saves memory
--- local sharedkerns, basekerns = constructors.check_base_kerns(tfmdata)
--- loop over descriptions (afm and otf have descriptions, tfm not)
--- there is no need (yet) to assign a value to chr.tounicode
-
--- constructors.prepare_base_kerns(tfmdata) -- optimization
-
--- we have target.name=metricfile and target.fullname=RealName and target.filename=diskfilename
--- when collapsing fonts, luatex looks at both target.name and target.fullname as ttc files
--- can have multiple subfonts
-
function constructors.calculatescale(tfmdata,scaledpoints)
- local parameters = tfmdata.parameters
- if scaledpoints < 0 then
- scaledpoints = (- scaledpoints/1000) * (tfmdata.designsize or parameters.designsize) -- already in sp
- end
- return scaledpoints, scaledpoints / (parameters.units or 1000) -- delta
-end
-
-local unscaled = {
- ScriptPercentScaleDown = true,
- ScriptScriptPercentScaleDown = true,
- RadicalDegreeBottomRaisePercent = true
+ local parameters=tfmdata.parameters
+ if scaledpoints<0 then
+ scaledpoints=(- scaledpoints/1000)*(tfmdata.designsize or parameters.designsize)
+ end
+ return scaledpoints,scaledpoints/(parameters.units or 1000)
+end
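-- Small sketch, not taken from the merged file itself (the minimal fake
-- tfmdata below is made up): calculatescale returns the absolute size in
-- scaled points together with the per-design-unit delta used later for
-- glyph dimensions.
local size, delta = constructors.calculatescale({ parameters = { units = 1000 } },655360)
assert(size == 655360)   -- delta is 655.36, i.e. 10pt spread over 1000 design units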
+local unscaled={
+ ScriptPercentScaleDown=true,
+ ScriptScriptPercentScaleDown=true,
+ RadicalDegreeBottomRaisePercent=true
}
-
-function constructors.assignmathparameters(target,original) -- simple variant, not used in context
- -- when a tfm file is loaded, it has already been scaled
-    -- and it never enters the scaler so this is otf only and
- -- even then we do some extra in the context math plugins
- local mathparameters = original.mathparameters
- if mathparameters and next(mathparameters) then
- local targetparameters = target.parameters
- local targetproperties = target.properties
- local targetmathparameters = { }
- local factor = targetproperties.math_is_scaled and 1 or targetparameters.factor
- for name, value in next, mathparameters do
- if unscaled[name] then
- targetmathparameters[name] = value
- else
- targetmathparameters[name] = value * factor
- end
- end
- if not targetmathparameters.FractionDelimiterSize then
- targetmathparameters.FractionDelimiterSize = 1.01 * targetparameters.size
- end
- if not mathparameters.FractionDelimiterDisplayStyleSize then
- targetmathparameters.FractionDelimiterDisplayStyleSize = 2.40 * targetparameters.size
- end
- target.mathparameters = targetmathparameters
- end
+function constructors.assignmathparameters(target,original)
+ local mathparameters=original.mathparameters
+ if mathparameters and next(mathparameters) then
+ local targetparameters=target.parameters
+ local targetproperties=target.properties
+ local targetmathparameters={}
+ local factor=targetproperties.math_is_scaled and 1 or targetparameters.factor
+ for name,value in next,mathparameters do
+ if unscaled[name] then
+ targetmathparameters[name]=value
+ else
+ targetmathparameters[name]=value*factor
+ end
+ end
+ if not targetmathparameters.FractionDelimiterSize then
+ targetmathparameters.FractionDelimiterSize=1.01*targetparameters.size
+ end
+ if not mathparameters.FractionDelimiterDisplayStyleSize then
+ targetmathparameters.FractionDelimiterDisplayStyleSize=2.40*targetparameters.size
+ end
+ target.mathparameters=targetmathparameters
+ end
end
-
function constructors.beforecopyingcharacters(target,original)
- -- can be used for additional tweaking
end
-
function constructors.aftercopyingcharacters(target,original)
- -- can be used for additional tweaking
end
-
function constructors.enhanceparameters(parameters)
- local xheight = parameters.x_height
- local quad = parameters.quad
- local space = parameters.space
- local stretch = parameters.space_stretch
- local shrink = parameters.space_shrink
- local extra = parameters.extra_space
- local slant = parameters.slant
- parameters.xheight = xheight
- parameters.spacestretch = stretch
- parameters.spaceshrink = shrink
- parameters.extraspace = extra
- parameters.em = quad
- parameters.ex = xheight
- parameters.slantperpoint = slant
- parameters.spacing = {
- width = space,
- stretch = stretch,
- shrink = shrink,
- extra = extra,
- }
+ local xheight=parameters.x_height
+ local quad=parameters.quad
+ local space=parameters.space
+ local stretch=parameters.space_stretch
+ local shrink=parameters.space_shrink
+ local extra=parameters.extra_space
+ local slant=parameters.slant
+ parameters.xheight=xheight
+ parameters.spacestretch=stretch
+ parameters.spaceshrink=shrink
+ parameters.extraspace=extra
+ parameters.em=quad
+ parameters.ex=xheight
+ parameters.slantperpoint=slant
+ parameters.spacing={
+ width=space,
+ stretch=stretch,
+ shrink=shrink,
+ extra=extra,
+ }
end
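-- Small sketch, not taken from the merged file itself (the values are
-- arbitrary scaled points): enhanceparameters only adds the official aliases
-- and the spacing subtable next to the low-level tfm style names.
local p = {
  x_height = 280000, quad = 640000, space = 200000,
  space_stretch = 100000, space_shrink = 60000, extra_space = 70000, slant = 0,
}
constructors.enhanceparameters(p)
assert(p.em == p.quad and p.ex == p.x_height and p.spacing.width == p.space)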
-
function constructors.scale(tfmdata,specification)
- local target = { } -- the new table
- --
- if tonumber(specification) then
- specification = { size = specification }
- end
- --
- local scaledpoints = specification.size
- local relativeid = specification.relativeid
- --
- local properties = tfmdata.properties or { }
- local goodies = tfmdata.goodies or { }
- local resources = tfmdata.resources or { }
- local descriptions = tfmdata.descriptions or { } -- bad news if empty
- local characters = tfmdata.characters or { } -- bad news if empty
- local changed = tfmdata.changed or { } -- for base mode
- local shared = tfmdata.shared or { }
- local parameters = tfmdata.parameters or { }
- local mathparameters = tfmdata.mathparameters or { }
- --
- local targetcharacters = { }
- local targetdescriptions = derivetable(descriptions)
- local targetparameters = derivetable(parameters)
- local targetproperties = derivetable(properties)
- local targetgoodies = goodies -- we need to loop so no metatable
- target.characters = targetcharacters
- target.descriptions = targetdescriptions
- target.parameters = targetparameters
- -- target.mathparameters = targetmathparameters -- happens elsewhere
- target.properties = targetproperties
- target.goodies = targetgoodies
- target.shared = shared
- target.resources = resources
- target.unscaled = tfmdata -- the original unscaled one
- --
- -- specification.mathsize : 1=text 2=script 3=scriptscript
- -- specification.textsize : natural (text)size
- -- parameters.mathsize : 1=text 2=script 3=scriptscript >1000 enforced size (feature value other than yes)
- --
- local mathsize = tonumber(specification.mathsize) or 0
- local textsize = tonumber(specification.textsize) or scaledpoints
- local forcedsize = tonumber(parameters.mathsize ) or 0
- local extrafactor = tonumber(specification.factor ) or 1
- if (mathsize == 2 or forcedsize == 2) and parameters.scriptpercentage then
- scaledpoints = parameters.scriptpercentage * textsize / 100
- elseif (mathsize == 3 or forcedsize == 3) and parameters.scriptscriptpercentage then
- scaledpoints = parameters.scriptscriptpercentage * textsize / 100
- elseif forcedsize > 1000 then -- safeguard
- scaledpoints = forcedsize
- end
- targetparameters.mathsize = mathsize -- context specific
- targetparameters.textsize = textsize -- context specific
- targetparameters.forcedsize = forcedsize -- context specific
- targetparameters.extrafactor = extrafactor -- context specific
- --
- local tounicode = resources.tounicode
- local defaultwidth = resources.defaultwidth or 0
- local defaultheight = resources.defaultheight or 0
- local defaultdepth = resources.defaultdepth or 0
- local units = parameters.units or 1000
- --
- if target.fonts then
- target.fonts = fastcopy(target.fonts) -- maybe we virtualize more afterwards
- end
- --
- -- boundary keys are no longer needed as we now have a string 'right_boundary'
- -- that can be used in relevant tables (kerns and ligatures) ... not that I ever
- -- used them
- --
- -- boundarychar_label = 0, -- not needed
- -- boundarychar = 65536, -- there is now a string 'right_boundary'
- -- false_boundarychar = 65536, -- produces invalid tfm in luatex
- --
- targetproperties.language = properties.language or "dflt" -- inherited
- targetproperties.script = properties.script or "dflt" -- inherited
- targetproperties.mode = properties.mode or "base" -- inherited
- --
- local askedscaledpoints = scaledpoints
-    local scaledpoints, delta = constructors.calculatescale(tfmdata,scaledpoints) -- no shortcut, can be redefined
- --
- local hdelta = delta
- local vdelta = delta
- --
-    target.designsize = parameters.designsize -- not really needed so it might become obsolete
- target.units_per_em = units -- just a trigger for the backend (does luatex use this? if not it will go)
- --
- local direction = properties.direction or tfmdata.direction or 0 -- pointless, as we don't use omf fonts at all
- target.direction = direction
- properties.direction = direction
- --
- target.size = scaledpoints
- --
- target.encodingbytes = properties.encodingbytes or 1
- target.embedding = properties.embedding or "subset"
- target.tounicode = 1
- target.cidinfo = properties.cidinfo
- target.format = properties.format
- --
- local fontname = properties.fontname or tfmdata.fontname -- for the moment we fall back on
- local fullname = properties.fullname or tfmdata.fullname -- names in the tfmdata although
- local filename = properties.filename or tfmdata.filename -- that is not the right place to
- local psname = properties.psname or tfmdata.psname -- pass them
- local name = properties.name or tfmdata.name
- --
- if not psname or psname == "" then
- -- name used in pdf file as well as for selecting subfont in ttc/dfont
- psname = fontname or (fullname and fonts.names.cleanname(fullname))
- end
- target.fontname = fontname
- target.fullname = fullname
- target.filename = filename
- target.psname = psname
- target.name = name
- --
- -- inspect(properties)
- --
- properties.fontname = fontname
- properties.fullname = fullname
- properties.filename = filename
- properties.psname = psname
- properties.name = name
- -- expansion (hz)
- local expansion = parameters.expansion
- if expansion then
- target.stretch = expansion.stretch
- target.shrink = expansion.shrink
- target.step = expansion.step
- target.auto_expand = expansion.auto
- end
- -- protrusion
- local protrusion = parameters.protrusion
- if protrusion then
- target.auto_protrude = protrusion.auto
- end
- -- widening
- local extendfactor = parameters.extendfactor or 0
- if extendfactor ~= 0 and extendfactor ~= 1 then
- hdelta = hdelta * extendfactor
- target.extend = extendfactor * 1000 -- extent ?
- else
- target.extend = 1000 -- extent ?
- end
- -- slanting
- local slantfactor = parameters.slantfactor or 0
- if slantfactor ~= 0 then
- target.slant = slantfactor * 1000
- else
- target.slant = 0
- end
- --
- targetparameters.factor = delta
- targetparameters.hfactor = hdelta
- targetparameters.vfactor = vdelta
- targetparameters.size = scaledpoints
- targetparameters.units = units
- targetparameters.scaledpoints = askedscaledpoints
- --
- local isvirtual = properties.virtualized or tfmdata.type == "virtual"
- local hasquality = target.auto_expand or target.auto_protrude
- local hasitalics = properties.hasitalics
- local autoitalicamount = properties.autoitalicamount
- local stackmath = not properties.nostackmath
- local nonames = properties.noglyphnames
- local nodemode = properties.mode == "node"
- --
- if changed and not next(changed) then
- changed = false
- end
- --
- target.type = isvirtual and "virtual" or "real"
- --
- target.postprocessors = tfmdata.postprocessors
- --
- local targetslant = (parameters.slant or parameters[1] or 0)
- local targetspace = (parameters.space or parameters[2] or 0)*hdelta
- local targetspace_stretch = (parameters.space_stretch or parameters[3] or 0)*hdelta
- local targetspace_shrink = (parameters.space_shrink or parameters[4] or 0)*hdelta
- local targetx_height = (parameters.x_height or parameters[5] or 0)*vdelta
- local targetquad = (parameters.quad or parameters[6] or 0)*hdelta
- local targetextra_space = (parameters.extra_space or parameters[7] or 0)*hdelta
- --
- targetparameters.slant = targetslant -- slantperpoint
- targetparameters.space = targetspace
- targetparameters.space_stretch = targetspace_stretch
- targetparameters.space_shrink = targetspace_shrink
- targetparameters.x_height = targetx_height
- targetparameters.quad = targetquad
- targetparameters.extra_space = targetextra_space
- --
- local ascender = parameters.ascender
- if ascender then
- targetparameters.ascender = delta * ascender
- end
- local descender = parameters.descender
- if descender then
- targetparameters.descender = delta * descender
- end
- --
- constructors.enhanceparameters(targetparameters) -- official copies for us
- --
- local protrusionfactor = (targetquad ~= 0 and 1000/targetquad) or 0
- local scaledwidth = defaultwidth * hdelta
- local scaledheight = defaultheight * vdelta
- local scaleddepth = defaultdepth * vdelta
- --
+ local target={}
+ if tonumber(specification) then
+ specification={ size=specification }
+ end
+ local scaledpoints=specification.size
+ local relativeid=specification.relativeid
+ local properties=tfmdata.properties or {}
+ local goodies=tfmdata.goodies or {}
+ local resources=tfmdata.resources or {}
+ local descriptions=tfmdata.descriptions or {}
+ local characters=tfmdata.characters or {}
+ local changed=tfmdata.changed or {}
+ local shared=tfmdata.shared or {}
+ local parameters=tfmdata.parameters or {}
+ local mathparameters=tfmdata.mathparameters or {}
+ local targetcharacters={}
+ local targetdescriptions=derivetable(descriptions)
+ local targetparameters=derivetable(parameters)
+ local targetproperties=derivetable(properties)
+ local targetgoodies=goodies
+ target.characters=targetcharacters
+ target.descriptions=targetdescriptions
+ target.parameters=targetparameters
+ target.properties=targetproperties
+ target.goodies=targetgoodies
+ target.shared=shared
+ target.resources=resources
+ target.unscaled=tfmdata
+ local mathsize=tonumber(specification.mathsize) or 0
+ local textsize=tonumber(specification.textsize) or scaledpoints
+ local forcedsize=tonumber(parameters.mathsize ) or 0
+ local extrafactor=tonumber(specification.factor ) or 1
+ if (mathsize==2 or forcedsize==2) and parameters.scriptpercentage then
+ scaledpoints=parameters.scriptpercentage*textsize/100
+ elseif (mathsize==3 or forcedsize==3) and parameters.scriptscriptpercentage then
+ scaledpoints=parameters.scriptscriptpercentage*textsize/100
+ elseif forcedsize>1000 then
+ scaledpoints=forcedsize
+ end
+ targetparameters.mathsize=mathsize
+ targetparameters.textsize=textsize
+ targetparameters.forcedsize=forcedsize
+ targetparameters.extrafactor=extrafactor
+ local tounicode=resources.tounicode
+ local defaultwidth=resources.defaultwidth or 0
+ local defaultheight=resources.defaultheight or 0
+ local defaultdepth=resources.defaultdepth or 0
+ local units=parameters.units or 1000
+ if target.fonts then
+ target.fonts=fastcopy(target.fonts)
+ end
+ targetproperties.language=properties.language or "dflt"
+ targetproperties.script=properties.script or "dflt"
+ targetproperties.mode=properties.mode or "base"
+ local askedscaledpoints=scaledpoints
+ local scaledpoints,delta=constructors.calculatescale(tfmdata,scaledpoints)
+ local hdelta=delta
+ local vdelta=delta
+ target.designsize=parameters.designsize
+ target.units_per_em=units
+ local direction=properties.direction or tfmdata.direction or 0
+ target.direction=direction
+ properties.direction=direction
+ target.size=scaledpoints
+ target.encodingbytes=properties.encodingbytes or 1
+ target.embedding=properties.embedding or "subset"
+ target.tounicode=1
+ target.cidinfo=properties.cidinfo
+ target.format=properties.format
+ local fontname=properties.fontname or tfmdata.fontname
+ local fullname=properties.fullname or tfmdata.fullname
+ local filename=properties.filename or tfmdata.filename
+ local psname=properties.psname or tfmdata.psname
+ local name=properties.name or tfmdata.name
+ if not psname or psname=="" then
+ psname=fontname or (fullname and fonts.names.cleanname(fullname))
+ end
+ target.fontname=fontname
+ target.fullname=fullname
+ target.filename=filename
+ target.psname=psname
+ target.name=name
+ properties.fontname=fontname
+ properties.fullname=fullname
+ properties.filename=filename
+ properties.psname=psname
+ properties.name=name
+ local expansion=parameters.expansion
+ if expansion then
+ target.stretch=expansion.stretch
+ target.shrink=expansion.shrink
+ target.step=expansion.step
+ target.auto_expand=expansion.auto
+ end
+ local protrusion=parameters.protrusion
+ if protrusion then
+ target.auto_protrude=protrusion.auto
+ end
+ local extendfactor=parameters.extendfactor or 0
+ if extendfactor~=0 and extendfactor~=1 then
+ hdelta=hdelta*extendfactor
+ target.extend=extendfactor*1000
+ else
+ target.extend=1000
+ end
+ local slantfactor=parameters.slantfactor or 0
+ if slantfactor~=0 then
+ target.slant=slantfactor*1000
+ else
+ target.slant=0
+ end
+ targetparameters.factor=delta
+ targetparameters.hfactor=hdelta
+ targetparameters.vfactor=vdelta
+ targetparameters.size=scaledpoints
+ targetparameters.units=units
+ targetparameters.scaledpoints=askedscaledpoints
+ local isvirtual=properties.virtualized or tfmdata.type=="virtual"
+ local hasquality=target.auto_expand or target.auto_protrude
+ local hasitalics=properties.hasitalics
+ local autoitalicamount=properties.autoitalicamount
+ local stackmath=not properties.nostackmath
+ local nonames=properties.noglyphnames
+ local nodemode=properties.mode=="node"
+ if changed and not next(changed) then
+ changed=false
+ end
+ target.type=isvirtual and "virtual" or "real"
+ target.postprocessors=tfmdata.postprocessors
+ local targetslant=(parameters.slant or parameters[1] or 0)
+ local targetspace=(parameters.space or parameters[2] or 0)*hdelta
+ local targetspace_stretch=(parameters.space_stretch or parameters[3] or 0)*hdelta
+ local targetspace_shrink=(parameters.space_shrink or parameters[4] or 0)*hdelta
+ local targetx_height=(parameters.x_height or parameters[5] or 0)*vdelta
+ local targetquad=(parameters.quad or parameters[6] or 0)*hdelta
+ local targetextra_space=(parameters.extra_space or parameters[7] or 0)*hdelta
+ targetparameters.slant=targetslant
+ targetparameters.space=targetspace
+ targetparameters.space_stretch=targetspace_stretch
+ targetparameters.space_shrink=targetspace_shrink
+ targetparameters.x_height=targetx_height
+ targetparameters.quad=targetquad
+ targetparameters.extra_space=targetextra_space
+ local ascender=parameters.ascender
+ if ascender then
+ targetparameters.ascender=delta*ascender
+ end
+ local descender=parameters.descender
+ if descender then
+ targetparameters.descender=delta*descender
+ end
+ constructors.enhanceparameters(targetparameters)
+ local protrusionfactor=(targetquad~=0 and 1000/targetquad) or 0
+ local scaledwidth=defaultwidth*hdelta
+ local scaledheight=defaultheight*vdelta
+ local scaleddepth=defaultdepth*vdelta
+ if trace_defining then
+ report_defining("scaling by (%s,%s): name '%s', fullname: '%s', filename: '%s'",
+ hdelta,vdelta,name or "noname",fullname or "nofullname",filename or "nofilename")
+ end
+ local hasmath=(properties.hasmath or next(mathparameters)) and true
+ if hasmath then
if trace_defining then
- report_defining("scaling by (%s,%s): name '%s', fullname: '%s', filename: '%s'",
- hdelta,vdelta,name or "noname",fullname or "nofullname",filename or "nofilename")
- end
- --
- local hasmath = (properties.hasmath or next(mathparameters)) and true
- if hasmath then
- if trace_defining then
- report_defining("math enabled for: name '%s', fullname: '%s', filename: '%s'",
- name or "noname",fullname or "nofullname",filename or "nofilename")
- end
- constructors.assignmathparameters(target,tfmdata) -- does scaling and whatever is needed
- properties.hasmath = true
- target.nomath = false
- target.MathConstants = target.mathparameters
+ report_defining("math enabled for: name '%s', fullname: '%s', filename: '%s'",
+ name or "noname",fullname or "nofullname",filename or "nofilename")
+ end
+ constructors.assignmathparameters(target,tfmdata)
+ properties.hasmath=true
+ target.nomath=false
+ target.MathConstants=target.mathparameters
+ else
+ if trace_defining then
+ report_defining("math disabled for: name '%s', fullname: '%s', filename: '%s'",
+ name or "noname",fullname or "nofullname",filename or "nofilename")
+ end
+ properties.hasmath=false
+ target.nomath=true
+ target.mathparameters=nil
+ end
+ local italickey="italic"
+ if hasmath then
+ autoitalicamount=false
+ else
+ if properties.textitalics then
+ italickey="italic_correction"
+ if trace_defining then
+ report_defining("text italics disabled for: name '%s', fullname: '%s', filename: '%s'",
+ name or "noname",fullname or "nofullname",filename or "nofilename")
+ end
+ if properties.delaytextitalics then
+ autoitalicamount=false
+ end
+ end
+ end
+ constructors.beforecopyingcharacters(target,tfmdata)
+ local sharedkerns={}
+ for unicode,character in next,characters do
+ local chr,description,index,touni
+ if changed then
+ local c=changed[unicode]
+ if c then
+ description=descriptions[c] or descriptions[unicode] or character
+ character=characters[c] or character
+ index=description.index or c
+ if tounicode then
+ touni=tounicode[index]
+ if not touni then
+ local d=descriptions[unicode] or characters[unicode]
+ local i=d.index or unicode
+ touni=tounicode[i]
+ end
+ end
+ else
+ description=descriptions[unicode] or character
+ index=description.index or unicode
+ if tounicode then
+ touni=tounicode[index]
+ end
+ end
else
- if trace_defining then
- report_defining("math disabled for: name '%s', fullname: '%s', filename: '%s'",
- name or "noname",fullname or "nofullname",filename or "nofilename")
- end
- properties.hasmath = false
- target.nomath = true
- target.mathparameters = nil -- nop
- end
- --
- local italickey = "italic"
- --
- -- some context specific trickery (this will move to a plugin)
- --
- if hasmath then
- -- the latest luatex can deal with it itself so we now disable this
- -- mechanism here
- --
- -- if properties.mathitalics then
- -- italickey = "italic_correction"
- -- if trace_defining then
- -- report_defining("math italics disabled for: name '%s', fullname: '%s', filename: '%s'",
- -- name or "noname",fullname or "nofullname",filename or "nofilename")
- -- end
- -- end
- autoitalicamount = false -- new
+ description=descriptions[unicode] or character
+ index=description.index or unicode
+ if tounicode then
+ touni=tounicode[index]
+ end
+ end
+ local width=description.width
+ local height=description.height
+ local depth=description.depth
+ if width then width=hdelta*width else width=scaledwidth end
+ if height then height=vdelta*height else height=scaledheight end
+ if depth and depth~=0 then
+ depth=delta*depth
+ if nonames then
+ chr={
+ index=index,
+ height=height,
+ depth=depth,
+ width=width,
+ }
+ else
+ chr={
+ name=description.name,
+ index=index,
+ height=height,
+ depth=depth,
+ width=width,
+ }
+ end
else
- if properties.textitalics then
- italickey = "italic_correction"
- if trace_defining then
- report_defining("text italics disabled for: name '%s', fullname: '%s', filename: '%s'",
- name or "noname",fullname or "nofullname",filename or "nofilename")
- end
- if properties.delaytextitalics then
- autoitalicamount = false
- end
- end
+ if nonames then
+ chr={
+ index=index,
+ height=height,
+ width=width,
+ }
+ else
+ chr={
+ name=description.name,
+ index=index,
+ height=height,
+ width=width,
+ }
+ end
+ end
+ if touni then
+ chr.tounicode=touni
+ end
+ if hasquality then
+ local ve=character.expansion_factor
+ if ve then
+ chr.expansion_factor=ve*1000
+ end
+ local vl=character.left_protruding
+ if vl then
+ chr.left_protruding=protrusionfactor*width*vl
+ end
+ local vr=character.right_protruding
+ if vr then
+ chr.right_protruding=protrusionfactor*width*vr
+ end
+ end
+ if autoitalicamount then
+ local vi=description.italic
+ if not vi then
+ local vi=description.boundingbox[3]-description.width+autoitalicamount
+ if vi>0 then
+ chr[italickey]=vi*hdelta
+ end
+ elseif vi~=0 then
+ chr[italickey]=vi*hdelta
+ end
+ elseif hasitalics then
+ local vi=description.italic
+ if vi and vi~=0 then
+ chr[italickey]=vi*hdelta
+ end
end
- --
- -- end of context specific trickery
- --
- constructors.beforecopyingcharacters(target,tfmdata)
- --
- local sharedkerns = { }
- --
- -- we can have a dumb mode (basemode without math etc) that skips most
- --
- for unicode, character in next, characters do
- local chr, description, index, touni
- if changed then
- -- basemode hack (we try to catch missing tounicodes, e.g. needed for ssty in math cambria)
- local c = changed[unicode]
- if c then
- description = descriptions[c] or descriptions[unicode] or character
- character = characters[c] or character
- index = description.index or c
- if tounicode then
- touni = tounicode[index] -- nb: index!
- if not touni then -- goodie
- local d = descriptions[unicode] or characters[unicode]
- local i = d.index or unicode
- touni = tounicode[i] -- nb: index!
- end
- end
- else
- description = descriptions[unicode] or character
- index = description.index or unicode
- if tounicode then
- touni = tounicode[index] -- nb: index!
- end
- end
+ if hasmath then
+ local vn=character.next
+ if vn then
+ chr.next=vn
+ else
+ local vv=character.vert_variants
+ if vv then
+ local t={}
+ for i=1,#vv do
+ local vvi=vv[i]
+ t[i]={
+ ["start"]=(vvi["start"] or 0)*vdelta,
+ ["end"]=(vvi["end"] or 0)*vdelta,
+ ["advance"]=(vvi["advance"] or 0)*vdelta,
+ ["extender"]=vvi["extender"],
+ ["glyph"]=vvi["glyph"],
+ }
+ end
+ chr.vert_variants=t
else
- description = descriptions[unicode] or character
- index = description.index or unicode
- if tounicode then
- touni = tounicode[index] -- nb: index!
- end
- end
- local width = description.width
- local height = description.height
- local depth = description.depth
- if width then width = hdelta*width else width = scaledwidth end
- if height then height = vdelta*height else height = scaledheight end
- -- if depth then depth = vdelta*depth else depth = scaleddepth end
- if depth and depth ~= 0 then
- depth = delta*depth
- if nonames then
- chr = {
- index = index,
- height = height,
- depth = depth,
- width = width,
- }
- else
- chr = {
- name = description.name,
- index = index,
- height = height,
- depth = depth,
- width = width,
- }
- end
+ local hv=character.horiz_variants
+ if hv then
+ local t={}
+ for i=1,#hv do
+ local hvi=hv[i]
+ t[i]={
+ ["start"]=(hvi["start"] or 0)*hdelta,
+ ["end"]=(hvi["end"] or 0)*hdelta,
+ ["advance"]=(hvi["advance"] or 0)*hdelta,
+ ["extender"]=hvi["extender"],
+ ["glyph"]=hvi["glyph"],
+ }
+ end
+ chr.horiz_variants=t
+ end
+ end
+ end
+ local va=character.top_accent
+ if va then
+ chr.top_accent=vdelta*va
+ end
+ if stackmath then
+ local mk=character.mathkerns
+ if mk then
+ local kerns={}
+ local v=mk.top_right if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.top_right=k end
+ local v=mk.top_left if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.top_left=k end
+ local v=mk.bottom_left if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.bottom_left=k end
+ local v=mk.bottom_right if v then local k={} for i=1,#v do local vi=v[i]
+ k[i]={ height=vdelta*vi.height,kern=vdelta*vi.kern }
+ end kerns.bottom_right=k end
+ chr.mathkern=kerns
+ end
+ end
+ end
+ if not nodemode then
+ local vk=character.kerns
+ if vk then
+ local s=sharedkerns[vk]
+ if not s then
+ s={}
+ for k,v in next,vk do s[k]=v*hdelta end
+ sharedkerns[vk]=s
+ end
+ chr.kerns=s
+ end
+ local vl=character.ligatures
+ if vl then
+ if true then
+ chr.ligatures=vl
else
- -- this saves a little bit of time and memory, esp for big cjk fonts
- if nonames then
- chr = {
- index = index,
- height = height,
- width = width,
- }
- else
- chr = {
- name = description.name,
- index = index,
- height = height,
- width = width,
- }
- end
- end
- if touni then
- chr.tounicode = touni
- end
- -- if trace_scaling then
- -- report_defining("t=%s, u=%s, i=%s, n=%s c=%s",k,chr.tounicode or "",index or 0,description.name or '-',description.class or '-')
- -- end
- if hasquality then
- -- we could move these calculations elsewhere (saves calculations)
- local ve = character.expansion_factor
- if ve then
- chr.expansion_factor = ve*1000 -- expansionfactor, hm, can happen elsewhere
- end
- local vl = character.left_protruding
- if vl then
- chr.left_protruding = protrusionfactor*width*vl
- end
- local vr = character.right_protruding
- if vr then
- chr.right_protruding = protrusionfactor*width*vr
- end
- end
- --
- if autoitalicamount then
- local vi = description.italic
- if not vi then
- local vi = description.boundingbox[3] - description.width + autoitalicamount
- if vi > 0 then -- < 0 indicates no overshoot or a very small auto italic
- chr[italickey] = vi*hdelta
- end
- elseif vi ~= 0 then
- chr[italickey] = vi*hdelta
- end
- elseif hasitalics then
- local vi = description.italic
- if vi and vi ~= 0 then
- chr[italickey] = vi*hdelta
- end
- end
- -- to be tested
- if hasmath then
- -- todo, just operate on descriptions.math
- local vn = character.next
- if vn then
- chr.next = vn
- -- if character.vert_variants or character.horiz_variants then
- -- report_defining("glyph U+%05X has combination of next, vert_variants and horiz_variants",index)
- -- end
- else
- local vv = character.vert_variants
- if vv then
- local t = { }
- for i=1,#vv do
- local vvi = vv[i]
- t[i] = {
- ["start"] = (vvi["start"] or 0)*vdelta,
- ["end"] = (vvi["end"] or 0)*vdelta,
- ["advance"] = (vvi["advance"] or 0)*vdelta,
- ["extender"] = vvi["extender"],
- ["glyph"] = vvi["glyph"],
- }
- end
- chr.vert_variants = t
- else
- local hv = character.horiz_variants
- if hv then
- local t = { }
- for i=1,#hv do
- local hvi = hv[i]
- t[i] = {
- ["start"] = (hvi["start"] or 0)*hdelta,
- ["end"] = (hvi["end"] or 0)*hdelta,
- ["advance"] = (hvi["advance"] or 0)*hdelta,
- ["extender"] = hvi["extender"],
- ["glyph"] = hvi["glyph"],
- }
- end
- chr.horiz_variants = t
- end
- end
- end
- local va = character.top_accent
- if va then
- chr.top_accent = vdelta*va
- end
- if stackmath then
- local mk = character.mathkerns -- not in math ?
- if mk then
- local kerns = { }
- local v = mk.top_right if v then local k = { } for i=1,#v do local vi = v[i]
- k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern }
- end kerns.top_right = k end
- local v = mk.top_left if v then local k = { } for i=1,#v do local vi = v[i]
- k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern }
- end kerns.top_left = k end
- local v = mk.bottom_left if v then local k = { } for i=1,#v do local vi = v[i]
- k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern }
- end kerns.bottom_left = k end
- local v = mk.bottom_right if v then local k = { } for i=1,#v do local vi = v[i]
- k[i] = { height = vdelta*vi.height, kern = vdelta*vi.kern }
- end kerns.bottom_right = k end
- chr.mathkern = kerns -- singular -> should be patched in luatex !
- end
- end
- end
- if not nodemode then
- local vk = character.kerns
- if vk then
- local s = sharedkerns[vk]
- if not s then
- s = { }
- for k,v in next, vk do s[k] = v*hdelta end
- sharedkerns[vk] = s
- end
- chr.kerns = s
- end
- local vl = character.ligatures
- if vl then
- if true then
- chr.ligatures = vl -- shared
- else
- local tt = { }
- for i,l in next, vl do
- tt[i] = l
- end
- chr.ligatures = tt
- end
- end
- end
- if isvirtual then
- local vc = character.commands
- if vc then
- -- we assume non scaled commands here
- -- tricky .. we need to scale pseudo math glyphs too
- -- which is why we deal with rules too
- local ok = false
- for i=1,#vc do
- local key = vc[i][1]
- if key == "right" or key == "down" then
- ok = true
- break
- end
- end
- if ok then
- local tt = { }
- for i=1,#vc do
- local ivc = vc[i]
- local key = ivc[1]
- if key == "right" then
- tt[i] = { key, ivc[2]*hdelta }
- elseif key == "down" then
- tt[i] = { key, ivc[2]*vdelta }
- elseif key == "rule" then
- tt[i] = { key, ivc[2]*vdelta, ivc[3]*hdelta }
- else -- not comment
- tt[i] = ivc -- shared since in cache and untouched
- end
- end
- chr.commands = tt
- else
- chr.commands = vc
- end
- chr.index = nil
- end
+ local tt={}
+ for i,l in next,vl do
+ tt[i]=l
+ end
+ chr.ligatures=tt
+ end
+ end
+ end
+ if isvirtual then
+ local vc=character.commands
+ if vc then
+ local ok=false
+ for i=1,#vc do
+ local key=vc[i][1]
+ if key=="right" or key=="down" then
+ ok=true
+ break
+ end
+ end
+ if ok then
+ local tt={}
+ for i=1,#vc do
+ local ivc=vc[i]
+ local key=ivc[1]
+ if key=="right" then
+ tt[i]={ key,ivc[2]*hdelta }
+ elseif key=="down" then
+ tt[i]={ key,ivc[2]*vdelta }
+ elseif key=="rule" then
+ tt[i]={ key,ivc[2]*vdelta,ivc[3]*hdelta }
+ else
+ tt[i]=ivc
+ end
+ end
+ chr.commands=tt
+ else
+ chr.commands=vc
end
- targetcharacters[unicode] = chr
+ chr.index=nil
+ end
end
- --
- constructors.aftercopyingcharacters(target,tfmdata)
- --
- return target
+ targetcharacters[unicode]=chr
+ end
+ constructors.aftercopyingcharacters(target,tfmdata)
+ return target
end
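
The copying loop above caches scaled kern tables in sharedkerns: when several glyphs point at the same unscaled kern table, it is multiplied by hdelta only once and the scaled copy is reused. A minimal standalone sketch of that memoization (the function names are illustrative, not part of the loader):

-- memoize scaled kern tables keyed by the original (unscaled) table,
-- mirroring the sharedkerns logic in the copying loop above
local function makekernscaler(hdelta)
  local sharedkerns = { }
  return function(vk) -- vk: raw kern table { [unicode] = kern, ... }
    local s = sharedkerns[vk]
    if not s then
      s = { }
      for k, v in next, vk do
        s[k] = v*hdelta
      end
      sharedkerns[vk] = s
    end
    return s
  end
end

local scalekerns = makekernscaler(1.2)
local rawkerns   = { [0x41] = 100, [0x56] = -80 }
assert(scalekerns(rawkerns) == scalekerns(rawkerns)) -- scaled once, then shared
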
-
function constructors.finalize(tfmdata)
- if tfmdata.properties and tfmdata.properties.finalized then
- return
- end
- --
- if not tfmdata.characters then
- return nil
- end
- --
- if not tfmdata.goodies then
- tfmdata.goodies = { } -- context specific
- end
- --
- local parameters = tfmdata.parameters
- if not parameters then
- return nil
- end
- --
- if not parameters.expansion then
- parameters.expansion = {
- stretch = tfmdata.stretch or 0,
- shrink = tfmdata.shrink or 0,
- step = tfmdata.step or 0,
- auto = tfmdata.auto_expand or false,
- }
- end
- --
- if not parameters.protrusion then
- parameters.protrusion = {
- auto = auto_protrude
- }
- end
- --
- if not parameters.size then
- parameters.size = tfmdata.size
- end
- --
- if not parameters.extendfactor then
- parameters.extendfactor = tfmdata.extend or 0
- end
- --
- if not parameters.slantfactor then
- parameters.slantfactor = tfmdata.slant or 0
- end
- --
- if not parameters.designsize then
- parameters.designsize = tfmdata.designsize or 655360
- end
- --
- if not parameters.units then
- parameters.units = tfmdata.units_per_em or 1000
- end
- --
- if not tfmdata.descriptions then
- local descriptions = { } -- yes or no
- setmetatableindex(descriptions, function(t,k) local v = { } t[k] = v return v end)
- tfmdata.descriptions = descriptions
- end
- --
- local properties = tfmdata.properties
- if not properties then
- properties = { }
- tfmdata.properties = properties
- end
- --
- if not properties.virtualized then
- properties.virtualized = tfmdata.type == "virtual"
- end
- --
- if not tfmdata.properties then
- tfmdata.properties = {
- fontname = tfmdata.fontname,
- filename = tfmdata.filename,
- fullname = tfmdata.fullname,
- name = tfmdata.name,
- psname = tfmdata.psname,
- --
- encodingbytes = tfmdata.encodingbytes or 1,
- embedding = tfmdata.embedding or "subset",
- tounicode = tfmdata.tounicode or 1,
- cidinfo = tfmdata.cidinfo or nil,
- format = tfmdata.format or "type1",
- direction = tfmdata.direction or 0,
- }
- end
- if not tfmdata.resources then
- tfmdata.resources = { }
- end
- if not tfmdata.shared then
- tfmdata.shared = { }
- end
- --
- -- tfmdata.fonts
- -- tfmdata.unscaled
- --
- if not properties.hasmath then
- properties.hasmath = not tfmdata.nomath
- end
- --
- tfmdata.MathConstants = nil
- tfmdata.postprocessors = nil
- --
- tfmdata.fontname = nil
- tfmdata.filename = nil
- tfmdata.fullname = nil
- tfmdata.name = nil -- most tricky part
- tfmdata.psname = nil
- --
- tfmdata.encodingbytes = nil
- tfmdata.embedding = nil
- tfmdata.tounicode = nil
- tfmdata.cidinfo = nil
- tfmdata.format = nil
- tfmdata.direction = nil
- tfmdata.type = nil
- tfmdata.nomath = nil
- tfmdata.designsize = nil
- --
- tfmdata.size = nil
- tfmdata.stretch = nil
- tfmdata.shrink = nil
- tfmdata.step = nil
- tfmdata.auto_expand = nil
- tfmdata.auto_protrude = nil
- tfmdata.extend = nil
- tfmdata.slant = nil
- tfmdata.units_per_em = nil
- --
- properties.finalized = true
- --
- return tfmdata
-end
-
---[[ldx--
-<p>A unique hash value is generated by:</p>
---ldx]]--
-
-local hashmethods = { }
-constructors.hashmethods = hashmethods
-
-function constructors.hashfeatures(specification) -- will be overloaded
- local features = specification.features
- if features then
- local t, tn = { }, 0
- for category, list in next, features do
- if next(list) then
- local hasher = hashmethods[category]
- if hasher then
- local hash = hasher(list)
- if hash then
- tn = tn + 1
- t[tn] = category .. ":" .. hash
- end
- end
- end
- end
- if tn > 0 then
- return concat(t," & ")
- end
- end
- return "unknown"
-end
-
-hashmethods.normal = function(list)
- local s = { }
- local n = 0
- for k, v in next, list do
- if k ~= "number" and k ~= "features" then -- I need to figure this out, features
- n = n + 1
- s[n] = k
- end
- end
- if n > 0 then
- sort(s)
- for i=1,n do
- local k = s[i]
- s[i] = k .. '=' .. tostring(list[k])
- end
- return concat(s,"+")
- end
+ if tfmdata.properties and tfmdata.properties.finalized then
+ return
+ end
+ if not tfmdata.characters then
+ return nil
+ end
+ if not tfmdata.goodies then
+ tfmdata.goodies={}
+ end
+ local parameters=tfmdata.parameters
+ if not parameters then
+ return nil
+ end
+ if not parameters.expansion then
+ parameters.expansion={
+ stretch=tfmdata.stretch or 0,
+ shrink=tfmdata.shrink or 0,
+ step=tfmdata.step or 0,
+ auto=tfmdata.auto_expand or false,
+ }
+ end
+ if not parameters.protrusion then
+ parameters.protrusion={
+ auto=auto_protrude
+ }
+ end
+ if not parameters.size then
+ parameters.size=tfmdata.size
+ end
+ if not parameters.extendfactor then
+ parameters.extendfactor=tfmdata.extend or 0
+ end
+ if not parameters.slantfactor then
+ parameters.slantfactor=tfmdata.slant or 0
+ end
+ if not parameters.designsize then
+ parameters.designsize=tfmdata.designsize or 655360
+ end
+ if not parameters.units then
+ parameters.units=tfmdata.units_per_em or 1000
+ end
+ if not tfmdata.descriptions then
+ local descriptions={}
+ setmetatableindex(descriptions,function(t,k) local v={} t[k]=v return v end)
+ tfmdata.descriptions=descriptions
+ end
+ local properties=tfmdata.properties
+ if not properties then
+ properties={}
+ tfmdata.properties=properties
+ end
+ if not properties.virtualized then
+ properties.virtualized=tfmdata.type=="virtual"
+ end
+ if not tfmdata.properties then
+ tfmdata.properties={
+ fontname=tfmdata.fontname,
+ filename=tfmdata.filename,
+ fullname=tfmdata.fullname,
+ name=tfmdata.name,
+ psname=tfmdata.psname,
+ encodingbytes=tfmdata.encodingbytes or 1,
+ embedding=tfmdata.embedding or "subset",
+ tounicode=tfmdata.tounicode or 1,
+ cidinfo=tfmdata.cidinfo or nil,
+ format=tfmdata.format or "type1",
+ direction=tfmdata.direction or 0,
+ }
+ end
+ if not tfmdata.resources then
+ tfmdata.resources={}
+ end
+ if not tfmdata.shared then
+ tfmdata.shared={}
+ end
+ if not properties.hasmath then
+ properties.hasmath=not tfmdata.nomath
+ end
+ tfmdata.MathConstants=nil
+ tfmdata.postprocessors=nil
+ tfmdata.fontname=nil
+ tfmdata.filename=nil
+ tfmdata.fullname=nil
+ tfmdata.name=nil
+ tfmdata.psname=nil
+ tfmdata.encodingbytes=nil
+ tfmdata.embedding=nil
+ tfmdata.tounicode=nil
+ tfmdata.cidinfo=nil
+ tfmdata.format=nil
+ tfmdata.direction=nil
+ tfmdata.type=nil
+ tfmdata.nomath=nil
+ tfmdata.designsize=nil
+ tfmdata.size=nil
+ tfmdata.stretch=nil
+ tfmdata.shrink=nil
+ tfmdata.step=nil
+ tfmdata.auto_expand=nil
+ tfmdata.auto_protrude=nil
+ tfmdata.extend=nil
+ tfmdata.slant=nil
+ tfmdata.units_per_em=nil
+ properties.finalized=true
+ return tfmdata
+end
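
finalize creates tfmdata.descriptions lazily via setmetatableindex, a ConTeXt helper that installs an __index metamethod; indexing a missing glyph then autovivifies an empty description table instead of yielding nil. In plain Lua the same call amounts to roughly this sketch:

-- plain-Lua equivalent of the setmetatableindex(descriptions,...) call above
local descriptions = setmetatable({ }, {
  __index = function(t,k)
    local v = { } -- autovivify an empty description
    t[k] = v      -- cache it so the metamethod fires only once per glyph
    return v
  end
})

descriptions[0x41].width = 500 -- no nil check needed, the subtable appears on demand
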
+local hashmethods={}
+constructors.hashmethods=hashmethods
+function constructors.hashfeatures(specification)
+ local features=specification.features
+ if features then
+ local t,tn={},0
+ for category,list in next,features do
+ if next(list) then
+ local hasher=hashmethods[category]
+ if hasher then
+ local hash=hasher(list)
+ if hash then
+ tn=tn+1
+ t[tn]=category..":"..hash
+ end
+ end
+ end
+ end
+ if tn>0 then
+ return concat(t," & ")
+ end
+ end
+ return "unknown"
+end
+hashmethods.normal=function(list)
+ local s={}
+ local n=0
+ for k,v in next,list do
+ if k~="number" and k~="features" then
+ n=n+1
+ s[n]=k
+ end
+ end
+ if n>0 then
+ sort(s)
+ for i=1,n do
+ local k=s[i]
+ s[i]=k..'='..tostring(list[k])
+ end
+ return concat(s,"+")
+ end
end
-
---[[ldx--
-<p>In principle we can share tfm tables when we are in node mode for a font, but then
-we need to define a font switch as an id/attr switch which is no fun, so in that
-case users can best use dynamic features ... so, we will not use that speedup. Okay,
-when we get rid of base mode we can optimize even further by sharing, but then we
-lose our testcases for <l n='luatex'/>.</p>
---ldx]]--
-
function constructors.hashinstance(specification,force)
- local hash, size, fallbacks = specification.hash, specification.size, specification.fallbacks
- if force or not hash then
- hash = constructors.hashfeatures(specification)
- specification.hash = hash
- end
- if size < 1000 and designsizes[hash] then
- size = math.round(constructors.scaled(size,designsizes[hash]))
- specification.size = size
- end
- -- local mathsize = specification.mathsize or 0
- -- if mathsize > 0 then
- -- local textsize = specification.textsize
- -- if fallbacks then
- -- return hash .. ' @ ' .. tostring(size) .. ' [ ' .. tostring(mathsize) .. ' : ' .. tostring(textsize) .. ' ] @ ' .. fallbacks
- -- else
- -- return hash .. ' @ ' .. tostring(size) .. ' [ ' .. tostring(mathsize) .. ' : ' .. tostring(textsize) .. ' ]'
- -- end
- -- else
- if fallbacks then
- return hash .. ' @ ' .. tostring(size) .. ' @ ' .. fallbacks
- else
- return hash .. ' @ ' .. tostring(size)
- end
- -- end
-end
-
-function constructors.setname(tfmdata,specification) -- todo: get specification from tfmdata
- if constructors.namemode == "specification" then
- -- not to be used in context !
- local specname = specification.specification
- if specname then
- tfmdata.properties.name = specname
- if trace_defining then
- report_otf("overloaded fontname: '%s'",specname)
- end
- end
+ local hash,size,fallbacks=specification.hash,specification.size,specification.fallbacks
+ if force or not hash then
+ hash=constructors.hashfeatures(specification)
+ specification.hash=hash
+ end
+ if size<1000 and designsizes[hash] then
+ size=math.round(constructors.scaled(size,designsizes[hash]))
+ specification.size=size
+ end
+ if fallbacks then
+ return hash..' @ '..tostring(size)..' @ '..fallbacks
+ else
+ return hash..' @ '..tostring(size)
end
end
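
hashfeatures and hashinstance build the font cache key: hashmethods.normal turns a feature list into a sorted k=v string, hashfeatures prefixes the category, and hashinstance appends the scaled size (and fallbacks when present). A standalone sketch of the normal hasher, with made-up feature values:

local sort, concat = table.sort, table.concat

-- standalone sketch of hashmethods.normal (not the loader's own function)
local function normalhash(list)
  local s, n = { }, 0
  for k in next, list do
    if k ~= "number" and k ~= "features" then
      n = n + 1
      s[n] = k
    end
  end
  if n > 0 then
    sort(s)
    for i=1,n do
      s[i] = s[i] .. "=" .. tostring(list[s[i]])
    end
    return concat(s,"+")
  end
end

print(normalhash { mode = "node", liga = true, kern = true })
-- kern=true+liga=true+mode=node
-- hashfeatures prefixes the category ("otf:...") and hashinstance
-- appends " @ <size>" plus " @ <fallbacks>" when fallbacks are set
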
-
-function constructors.checkedfilename(data)
- local foundfilename = data.foundfilename
- if not foundfilename then
- local askedfilename = data.filename or ""
- if askedfilename ~= "" then
- askedfilename = resolvers.resolve(askedfilename) -- no shortcut
- foundfilename = resolvers.findbinfile(askedfilename,"") or ""
- if foundfilename == "" then
- report_defining("source file '%s' is not found",askedfilename)
- foundfilename = resolvers.findbinfile(file.basename(askedfilename),"") or ""
- if foundfilename ~= "" then
- report_defining("using source file '%s' (cache mismatch)",foundfilename)
- end
- end
- end
- data.foundfilename = foundfilename
+function constructors.setname(tfmdata,specification)
+ if constructors.namemode=="specification" then
+ local specname=specification.specification
+ if specname then
+ tfmdata.properties.name=specname
+ if trace_defining then
+ report_otf("overloaded fontname: '%s'",specname)
+ end
end
- return foundfilename
+ end
end
-
-local formats = allocate()
-fonts.formats = formats
-
-setmetatableindex(formats, function(t,k)
- local l = lower(k)
- if rawget(t,k) then
- t[k] = l
- return l
- end
- return rawget(t,file.suffix(l))
+function constructors.checkedfilename(data)
+ local foundfilename=data.foundfilename
+ if not foundfilename then
+ local askedfilename=data.filename or ""
+ if askedfilename~="" then
+ askedfilename=resolvers.resolve(askedfilename)
+ foundfilename=resolvers.findbinfile(askedfilename,"") or ""
+ if foundfilename=="" then
+ report_defining("source file '%s' is not found",askedfilename)
+ foundfilename=resolvers.findbinfile(file.basename(askedfilename),"") or ""
+ if foundfilename~="" then
+ report_defining("using source file '%s' (cache mismatch)",foundfilename)
+ end
+ end
+ end
+ data.foundfilename=foundfilename
+ end
+ return foundfilename
+end
+local formats=allocate()
+fonts.formats=formats
+setmetatableindex(formats,function(t,k)
+ local l=lower(k)
+ if rawget(t,k) then
+ t[k]=l
+ return l
+ end
+ return rawget(t,file.suffix(l))
end)
-
-local locations = { }
-
+local locations={}
local function setindeed(mode,target,group,name,action,position)
- local t = target[mode]
- if not t then
- report_defining("fatal error in setting feature '%s', group '%s', mode '%s'",name or "?",group or "?",mode)
- os.exit()
- elseif position then
- -- todo: remove existing
- insert(t, position, { name = name, action = action })
- else
- for i=1,#t do
- local ti = t[i]
- if ti.name == name then
- ti.action = action
- return
- end
- end
- insert(t, { name = name, action = action })
+ local t=target[mode]
+ if not t then
+ report_defining("fatal error in setting feature '%s', group '%s', mode '%s'",name or "?",group or "?",mode)
+ os.exit()
+ elseif position then
+ insert(t,position,{ name=name,action=action })
+ else
+ for i=1,#t do
+ local ti=t[i]
+ if ti.name==name then
+ ti.action=action
+ return
+ end
end
+ insert(t,{ name=name,action=action })
+ end
end
-
local function set(group,name,target,source)
- target = target[group]
- if not target then
- report_defining("fatal target error in setting feature '%s', group '%s'",name or "?",group or "?")
- os.exit()
+ target=target[group]
+ if not target then
+ report_defining("fatal target error in setting feature '%s', group '%s'",name or "?",group or "?")
+ os.exit()
+ end
+ local source=source[group]
+ if not source then
+ report_defining("fatal source error in setting feature '%s', group '%s'",name or "?",group or "?")
+ os.exit()
+ end
+ local node=source.node
+ local base=source.base
+ local position=source.position
+ if node then
+ setindeed("node",target,group,name,node,position)
+ end
+ if base then
+ setindeed("base",target,group,name,base,position)
+ end
+end
+local function register(where,specification)
+ local name=specification.name
+ if name and name~="" then
+ local default=specification.default
+ local description=specification.description
+ local initializers=specification.initializers
+ local processors=specification.processors
+ local manipulators=specification.manipulators
+ local modechecker=specification.modechecker
+ if default then
+ where.defaults[name]=default
end
- local source = source[group]
- if not source then
- report_defining("fatal source error in setting feature '%s', group '%s'",name or "?",group or "?")
- os.exit()
+ if description and description~="" then
+ where.descriptions[name]=description
end
- local node = source.node
- local base = source.base
- local position = source.position
- if node then
- setindeed("node",target,group,name,node,position)
+ if initializers then
+ set('initializers',name,where,specification)
end
- if base then
- setindeed("base",target,group,name,base,position)
+ if processors then
+ set('processors',name,where,specification)
end
-end
-
-local function register(where,specification)
- local name = specification.name
- if name and name ~= "" then
- local default = specification.default
- local description = specification.description
- local initializers = specification.initializers
- local processors = specification.processors
- local manipulators = specification.manipulators
- local modechecker = specification.modechecker
- if default then
- where.defaults[name] = default
- end
- if description and description ~= "" then
- where.descriptions[name] = description
- end
- if initializers then
- set('initializers',name,where,specification)
- end
- if processors then
- set('processors', name,where,specification)
- end
- if manipulators then
- set('manipulators',name,where,specification)
- end
- if modechecker then
- where.modechecker = modechecker
- end
+ if manipulators then
+ set('manipulators',name,where,specification)
end
-end
-
-constructors.registerfeature = register
-
-function constructors.getfeatureaction(what,where,mode,name)
- what = handlers[what].features
- if what then
- where = what[where]
- if where then
- mode = where[mode]
- if mode then
- for i=1,#mode do
- local m = mode[i]
- if m.name == name then
- return m.action
- end
- end
- end
- end
- end
-end
-
-function constructors.newhandler(what) -- could be a metatable newindex
- local handler = handlers[what]
- if not handler then
- handler = { }
- handlers[what] = handler
+ if modechecker then
+ where.modechecker=modechecker
end
- return handler
+ end
end
-
-function constructors.newfeatures(what) -- could be a metatable newindex
- local handler = handlers[what]
- local features = handler.features
- if not features then
- local tables = handler.tables -- can be preloaded
- local statistics = handler.statistics -- can be preloaded
- features = allocate {
- defaults = { },
- descriptions = tables and tables.features or { },
- used = statistics and statistics.usedfeatures or { },
- initializers = { base = { }, node = { } },
- processors = { base = { }, node = { } },
- manipulators = { base = { }, node = { } },
- }
- features.register = function(specification) return register(features,specification) end
- handler.features = features -- will also become hidden
- end
- return features
+constructors.registerfeature=register
+function constructors.getfeatureaction(what,where,mode,name)
+ what=handlers[what].features
+ if what then
+ where=what[where]
+ if where then
+ mode=where[mode]
+ if mode then
+ for i=1,#mode do
+ local m=mode[i]
+ if m.name==name then
+ return m.action
+ end
+ end
+ end
+ end
+ end
+end
+function constructors.newhandler(what)
+ local handler=handlers[what]
+ if not handler then
+ handler={}
+ handlers[what]=handler
+ end
+ return handler
+end
+function constructors.newfeatures(what)
+ local handler=handlers[what]
+ local features=handler.features
+ if not features then
+ local tables=handler.tables
+ local statistics=handler.statistics
+ features=allocate {
+ defaults={},
+ descriptions=tables and tables.features or {},
+ used=statistics and statistics.usedfeatures or {},
+ initializers={ base={},node={} },
+ processors={ base={},node={} },
+ manipulators={ base={},node={} },
+ }
+ features.register=function(specification) return register(features,specification) end
+ handler.features=features
+ end
+ return features
end
-
---[[ldx--
-<p>We need to check for default features. For this we provide
-a helper function.</p>
---ldx]]--
-
function constructors.checkedfeatures(what,features)
- local defaults = handlers[what].features.defaults
- if features and next(features) then
- features = fastcopy(features) -- can be inherited (mt) but then no loops possible
- for key, value in next, defaults do
- if features[key] == nil then
- features[key] = value
- end
- end
- return features
- else
- return fastcopy(defaults) -- we can change features in place
+ local defaults=handlers[what].features.defaults
+ if features and next(features) then
+ features=fastcopy(features)
+ for key,value in next,defaults do
+ if features[key]==nil then
+ features[key]=value
+ end
end
+ return features
+ else
+ return fastcopy(defaults)
+ end
end
-
--- before scaling
-
function constructors.initializefeatures(what,tfmdata,features,trace,report)
- if features and next(features) then
- local properties = tfmdata.properties or { } -- brrr
- local whathandler = handlers[what]
- local whatfeatures = whathandler.features
- local whatinitializers = whatfeatures.initializers
- local whatmodechecker = whatfeatures.modechecker
- -- properties.mode can be enforces (for instance in font-otd)
- local mode = properties.mode or (whatmodechecker and whatmodechecker(tfmdata,features,features.mode)) or features.mode or "base"
- properties.mode = mode -- also status
- features.mode = mode -- both properties.mode or features.mode can be changed
- --
- local done = { }
- while true do
- local redo = false
- local initializers = whatfeatures.initializers[mode]
- if initializers then
- for i=1,#initializers do
- local step = initializers[i]
- local feature = step.name
--- we could intercept mode here .. needs a rewrite of this whole loop then but it's cleaner that way
- local value = features[feature]
- if not value then
- -- disabled
- elseif done[feature] then
- -- already done
- else
- local action = step.action
- if trace then
- report("initializing feature %s to %s for mode %s for font %s",feature,
- tostring(value),mode or 'unknown', tfmdata.properties.fullname or 'unknown')
- end
- action(tfmdata,value,features) -- can set mode (e.g. goodies) so it can trigger a restart
- if mode ~= properties.mode or mode ~= features.mode then
- if whatmodechecker then
- properties.mode = whatmodechecker(tfmdata,features,properties.mode) -- force checking
- features.mode = properties.mode
- end
- if mode ~= properties.mode then
- mode = properties.mode
- redo = true
- end
- end
- done[feature] = true
- end
- if redo then
- break
- end
- end
- if not redo then
- break
- end
- else
- break
- end
+ if features and next(features) then
+ local properties=tfmdata.properties or {}
+ local whathandler=handlers[what]
+ local whatfeatures=whathandler.features
+ local whatinitializers=whatfeatures.initializers
+ local whatmodechecker=whatfeatures.modechecker
+ local mode=properties.mode or (whatmodechecker and whatmodechecker(tfmdata,features,features.mode)) or features.mode or "base"
+ properties.mode=mode
+ features.mode=mode
+ local done={}
+ while true do
+ local redo=false
+ local initializers=whatfeatures.initializers[mode]
+ if initializers then
+ for i=1,#initializers do
+ local step=initializers[i]
+ local feature=step.name
+ local value=features[feature]
+ if not value then
+ elseif done[feature] then
+ else
+ local action=step.action
+ if trace then
+ report("initializing feature %s to %s for mode %s for font %s",feature,
+ tostring(value),mode or 'unknown',tfmdata.properties.fullname or 'unknown')
+ end
+ action(tfmdata,value,features)
+ if mode~=properties.mode or mode~=features.mode then
+ if whatmodechecker then
+ properties.mode=whatmodechecker(tfmdata,features,properties.mode)
+ features.mode=properties.mode
+ end
+ if mode~=properties.mode then
+ mode=properties.mode
+ redo=true
+ end
+ end
+ done[feature]=true
+ end
+ if redo then
+ break
+ end
end
- properties.mode = mode -- to be sure
- return true
- else
- return false
+ if not redo then
+ break
+ end
+ else
+ break
+ end
end
+ properties.mode=mode
+ return true
+ else
+ return false
+ end
end
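
initializefeatures walks the initializers registered for the resolved mode and restarts when an initializer changes the mode. Features reach that list through constructors.newfeatures(what).register; a hedged sketch of the registration side, with a purely illustrative feature name and no-op actions:

-- sketch only: assumes the merged loader is initialized and an "otf" handler exists;
-- the feature name and actions are illustrative, not a real feature
local otffeatures = fonts.constructors.newfeatures("otf")

otffeatures.register {
  name         = "demofeature",
  description  = "illustrative initializer",
  default      = false,
  initializers = {
    base = function(tfmdata,value,features)
      -- runs once per font when the feature is set and base mode is resolved
    end,
    node = function(tfmdata,value,features)
      -- node mode variant; changing properties.mode here restarts the loop above
    end,
  },
}
-- constructors.initializefeatures("otf",tfmdata,features,trace,report) later
-- picks these up for the mode that the modechecker resolves
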
-
--- while typesetting
-
function constructors.collectprocessors(what,tfmdata,features,trace,report)
- local processes, nofprocesses = { }, 0
- if features and next(features) then
- local properties = tfmdata.properties
- local whathandler = handlers[what]
- local whatfeatures = whathandler.features
- local whatprocessors = whatfeatures.processors
- local processors = whatprocessors[properties.mode]
- if processors then
- for i=1,#processors do
- local step = processors[i]
- local feature = step.name
- if features[feature] then
- local action = step.action
- if trace then
- report("installing feature processor %s for mode %s for font %s",feature,
- mode or 'unknown', tfmdata.properties.fullname or 'unknown')
- end
- if action then
- nofprocesses = nofprocesses + 1
- processes[nofprocesses] = action
- end
- end
- end
- elseif trace then
- report("no feature processors for mode %s for font %s",
- mode or 'unknown', tfmdata.properties.fullname or 'unknown')
- end
- end
- return processes
+ local processes,nofprocesses={},0
+ if features and next(features) then
+ local properties=tfmdata.properties
+ local whathandler=handlers[what]
+ local whatfeatures=whathandler.features
+ local whatprocessors=whatfeatures.processors
+ local processors=whatprocessors[properties.mode]
+ if processors then
+ for i=1,#processors do
+ local step=processors[i]
+ local feature=step.name
+ if features[feature] then
+ local action=step.action
+ if trace then
+ report("installing feature processor %s for mode %s for font %s",feature,
+ mode or 'unknown',tfmdata.properties.fullname or 'unknown')
+ end
+ if action then
+ nofprocesses=nofprocesses+1
+ processes[nofprocesses]=action
+ end
+ end
+ end
+ elseif trace then
+ report("no feature processors for mode %s for font %s",
+ mode or 'unknown',tfmdata.properties.fullname or 'unknown')
+ end
+ end
+ return processes
end
-
--- after scaling
-
function constructors.applymanipulators(what,tfmdata,features,trace,report)
- if features and next(features) then
- local properties = tfmdata.properties
- local whathandler = handlers[what]
- local whatfeatures = whathandler.features
- local whatmanipulators = whatfeatures.manipulators
- local manipulators = whatmanipulators[properties.mode]
- if manipulators then
- for i=1,#manipulators do
- local step = manipulators[i]
- local feature = step.name
- local value = features[feature]
- if value then
- local action = step.action
- if trace then
- report("applying feature manipulator %s for mode %s for font %s",feature,
- mode or 'unknown', tfmdata.properties.fullname or 'unknown')
- end
- if action then
- action(tfmdata,feature,value)
- end
- end
- end
- end
- end
+ if features and next(features) then
+ local properties=tfmdata.properties
+ local whathandler=handlers[what]
+ local whatfeatures=whathandler.features
+ local whatmanipulators=whatfeatures.manipulators
+ local manipulators=whatmanipulators[properties.mode]
+ if manipulators then
+ for i=1,#manipulators do
+ local step=manipulators[i]
+ local feature=step.name
+ local value=features[feature]
+ if value then
+ local action=step.action
+ if trace then
+ report("applying feature manipulator %s for mode %s for font %s",feature,
+ mode or 'unknown',tfmdata.properties.fullname or 'unknown')
+ end
+ if action then
+ action(tfmdata,feature,value)
+ end
+ end
+ end
+ end
+ end
end
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['luatex-font-enc'] = {
- version = 1.001,
- comment = "companion to luatex-*.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['luatex-font-enc']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
-end
-
-local fonts = fonts
-fonts.encodings = { }
-fonts.encodings.agl = { }
-
-setmetatable(fonts.encodings.agl, { __index = function(t,k)
- if k == "unicodes" then
- texio.write(" <loading (extended) adobe glyph list>")
- local unicodes = dofile(resolvers.findfile("font-age.lua"))
- fonts.encodings.agl = { unicodes = unicodes }
- return unicodes
- else
- return nil
- end
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+fonts.encodings={}
+fonts.encodings.agl={}
+setmetatable(fonts.encodings.agl,{ __index=function(t,k)
+ if k=="unicodes" then
+ texio.write(" <loading (extended) adobe glyph list>")
+ local unicodes=dofile(resolvers.findfile("font-age.lua"))
+ fonts.encodings.agl={ unicodes=unicodes }
+ return unicodes
+ else
+ return nil
+ end
end })
-
end -- closure
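
fonts.encodings.agl above is a lazy loader: the (extended) Adobe glyph list is only read from font-age.lua on the first access of .unicodes, after which the table is replaced by a plain one. The same defer-until-first-index idea in isolation, caching in place with rawset rather than replacing the table (the loader stand-in is an assumption):

-- defer-until-first-index sketch; the dofile of font-age.lua is replaced by a stub
local agl = setmetatable({ }, {
  __index = function(t,k)
    if k == "unicodes" then
      local unicodes = { } -- stand-in for dofile(resolvers.findfile("font-age.lua"))
      rawset(t,k,unicodes) -- cache, so the metamethod only fires once
      return unicodes
    end
  end
})

local u = agl.unicodes -- first access triggers the (stubbed) load
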
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['font-cid'] = {
- version = 1.001,
- comment = "companion to font-otf.lua (cidmaps)",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['font-cid']={
+ version=1.001,
+ comment="companion to font-otf.lua (cidmaps)",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-local format, match, lower = string.format, string.match, string.lower
-local tonumber = tonumber
-local P, S, R, C, V, lpegmatch = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.match
-
-local fonts, logs, trackers = fonts, logs, trackers
-
-local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end)
-
-local report_otf = logs.reporter("fonts","otf loading")
-
-local cid = { }
-fonts.cid = cid
-
-local cidmap = { }
-local cidmax = 10
-
--- original string parser: 0.109, lpeg parser: 0.036 seconds for Adobe-CNS1-4.cidmap
---
--- 18964 18964 (leader)
--- 0 /.notdef
--- 1..95 0020
--- 99 3000
-
-local number = C(R("09","af","AF")^1)
-local space = S(" \n\r\t")
-local spaces = space^0
-local period = P(".")
-local periods = period * period
-local name = P("/") * C((1-space)^1)
-
-local unicodes, names = { }, { } -- we could use Carg now
-
+local format,match,lower=string.format,string.match,string.lower
+local tonumber=tonumber
+local P,S,R,C,V,lpegmatch=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.V,lpeg.match
+local fonts,logs,trackers=fonts,logs,trackers
+local trace_loading=false trackers.register("otf.loading",function(v) trace_loading=v end)
+local report_otf=logs.reporter("fonts","otf loading")
+local cid={}
+fonts.cid=cid
+local cidmap={}
+local cidmax=10
+local number=C(R("09","af","AF")^1)
+local space=S(" \n\r\t")
+local spaces=space^0
+local period=P(".")
+local periods=period*period
+local name=P("/")*C((1-space)^1)
+local unicodes,names={},{}
local function do_one(a,b)
- unicodes[tonumber(a)] = tonumber(b,16)
+ unicodes[tonumber(a)]=tonumber(b,16)
end
-
local function do_range(a,b,c)
- c = tonumber(c,16)
- for i=tonumber(a),tonumber(b) do
- unicodes[i] = c
- c = c + 1
- end
+ c=tonumber(c,16)
+ for i=tonumber(a),tonumber(b) do
+ unicodes[i]=c
+ c=c+1
+ end
end
-
local function do_name(a,b)
- names[tonumber(a)] = b
-end
-
-local grammar = P { "start",
- start = number * spaces * number * V("series"),
- series = (spaces * (V("one") + V("range") + V("named")))^1,
- one = (number * spaces * number) / do_one,
- range = (number * periods * number * spaces * number) / do_range,
- named = (number * spaces * name) / do_name
+ names[tonumber(a)]=b
+end
+local grammar=P { "start",
+ start=number*spaces*number*V("series"),
+ series=(spaces*(V("one")+V("range")+V("named")))^1,
+ one=(number*spaces*number)/do_one,
+ range=(number*periods*number*spaces*number)/do_range,
+ named=(number*spaces*name)/do_name
}
-
local function loadcidfile(filename)
- local data = io.loaddata(filename)
- if data then
- unicodes, names = { }, { }
- lpegmatch(grammar,data)
- local supplement, registry, ordering = match(filename,"^(.-)%-(.-)%-()%.(.-)$")
- return {
- supplement = supplement,
- registry = registry,
- ordering = ordering,
- filename = filename,
- unicodes = unicodes,
- names = names
- }
- end
+ local data=io.loaddata(filename)
+ if data then
+ unicodes,names={},{}
+ lpegmatch(grammar,data)
+ local supplement,registry,ordering=match(filename,"^(.-)%-(.-)%-()%.(.-)$")
+ return {
+ supplement=supplement,
+ registry=registry,
+ ordering=ordering,
+ filename=filename,
+ unicodes=unicodes,
+ names=names
+ }
+ end
end
-
-cid.loadfile = loadcidfile -- we use the frozen variant
-local template = "%s-%s-%s.cidmap"
-
+cid.loadfile=loadcidfile
+local template="%s-%s-%s.cidmap"
local function locate(registry,ordering,supplement)
- local filename = format(template,registry,ordering,supplement)
- local hashname = lower(filename)
- local found = cidmap[hashname]
- if not found then
+ local filename=format(template,registry,ordering,supplement)
+ local hashname=lower(filename)
+ local found=cidmap[hashname]
+ if not found then
+ if trace_loading then
+ report_otf("checking cidmap, registry: %s, ordering: %s, supplement: %s, filename: %s",registry,ordering,supplement,filename)
+ end
+ local fullname=resolvers.findfile(filename,'cid') or ""
+ if fullname~="" then
+ found=loadcidfile(fullname)
+ if found then
if trace_loading then
- report_otf("checking cidmap, registry: %s, ordering: %s, supplement: %s, filename: %s",registry,ordering,supplement,filename)
- end
- local fullname = resolvers.findfile(filename,'cid') or ""
- if fullname ~= "" then
- found = loadcidfile(fullname)
- if found then
- if trace_loading then
- report_otf("using cidmap file %s",filename)
- end
- cidmap[hashname] = found
- found.usedname = file.basename(filename)
- end
+ report_otf("using cidmap file %s",filename)
end
+ cidmap[hashname]=found
+ found.usedname=file.basename(filename)
+ end
end
- return found
+ end
+ return found
end
-
--- cf Arthur R. we can safely scan upwards since cids are downward compatible
-
function cid.getmap(specification)
- if not specification then
- report_otf("invalid cidinfo specification (table expected)")
- return
- end
- local registry = specification.registry
- local ordering = specification.ordering
- local supplement = specification.supplement
- -- check for already loaded file
- local filename = format(registry,ordering,supplement)
- local found = cidmap[lower(filename)]
- if found then
- return found
- end
- if trace_loading then
- report_otf("needed cidmap, registry: %s, ordering: %s, supplement: %s",registry,ordering,supplement)
- end
- found = locate(registry,ordering,supplement)
- if not found then
- local supnum = tonumber(supplement)
- local cidnum = nil
- -- next highest (alternatively we could start high)
- if supnum < cidmax then
- for s=supnum+1,cidmax do
- local c = locate(registry,ordering,s)
- if c then
- found, cidnum = c, s
- break
- end
- end
- end
- -- next lowest (least worse fit)
- if not found and supnum > 0 then
- for s=supnum-1,0,-1 do
- local c = locate(registry,ordering,s)
- if c then
- found, cidnum = c, s
- break
- end
- end
- end
- -- prevent further lookups -- somewhat tricky
- registry = lower(registry)
- ordering = lower(ordering)
- if found and cidnum > 0 then
- for s=0,cidnum-1 do
- local filename = format(template,registry,ordering,s)
- if not cidmap[filename] then
- cidmap[filename] = found
- end
- end
- end
- end
+ if not specification then
+ report_otf("invalid cidinfo specification (table expected)")
+ return
+ end
+ local registry=specification.registry
+ local ordering=specification.ordering
+ local supplement=specification.supplement
+ local filename=format(registry,ordering,supplement)
+ local found=cidmap[lower(filename)]
+ if found then
return found
+ end
+ if trace_loading then
+ report_otf("needed cidmap, registry: %s, ordering: %s, supplement: %s",registry,ordering,supplement)
+ end
+ found=locate(registry,ordering,supplement)
+ if not found then
+ local supnum=tonumber(supplement)
+ local cidnum=nil
+ if supnum<cidmax then
+ for s=supnum+1,cidmax do
+ local c=locate(registry,ordering,s)
+ if c then
+ found,cidnum=c,s
+ break
+ end
+ end
+ end
+ if not found and supnum>0 then
+ for s=supnum-1,0,-1 do
+ local c=locate(registry,ordering,s)
+ if c then
+ found,cidnum=c,s
+ break
+ end
+ end
+ end
+ registry=lower(registry)
+ ordering=lower(ordering)
+ if found and cidnum>0 then
+ for s=0,cidnum-1 do
+ local filename=format(template,registry,ordering,s)
+ if not cidmap[filename] then
+ cidmap[filename]=found
+ end
+ end
+ end
+ end
+ return found
end
end -- closure
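
The cidmap grammar parses files shaped like the sample in the removed comment: a two-number leader, then lines that are either "index code", "first..last code" ranges, or "index /name" entries. A self-contained copy of that grammar applied to such a four-line sample (assuming the lpeg module is available):

local lpeg = require("lpeg")
local P, S, R, C, V = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V

local unicodes, names = { }, { }
local number  = C(R("09","af","AF")^1)
local space   = S(" \n\r\t")
local spaces  = space^0
local period  = P(".")
local periods = period*period
local name    = P("/")*C((1-space)^1)

local grammar = P { "start",
  start  = number*spaces*number*V("series"),
  series = (spaces*(V("one")+V("range")+V("named")))^1,
  one    = (number*spaces*number)/function(a,b) unicodes[tonumber(a)] = tonumber(b,16) end,
  range  = (number*periods*number*spaces*number)/function(a,b,c)
             c = tonumber(c,16)
             for i=tonumber(a),tonumber(b) do unicodes[i] = c ; c = c + 1 end
           end,
  named  = (number*spaces*name)/function(a,b) names[tonumber(a)] = b end,
}

lpeg.match(grammar,"18964 18964\n0 /.notdef\n1..95 0020\n99 3000")
-- names[0] == ".notdef", unicodes[1] == 0x20 .. unicodes[95] == 0x7E, unicodes[99] == 0x3000
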
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['font-map'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['font-map']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-local tonumber = tonumber
-
-local match, format, find, concat, gsub, lower = string.match, string.format, string.find, table.concat, string.gsub, string.lower
-local P, R, S, C, Ct, Cc, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Ct, lpeg.Cc, lpeg.match
-local utfbyte = utf.byte
-local floor = math.floor
-
-local trace_loading = false trackers.register("fonts.loading", function(v) trace_loading = v end)
-local trace_mapping = false trackers.register("fonts.mapping", function(v) trace_unimapping = v end)
-
-local report_fonts = logs.reporter("fonts","loading") -- not otf only
-
-local fonts = fonts
-local mappings = fonts.mappings or { }
-fonts.mappings = mappings
-
---[[ldx--
-<p>Eventually this code will disappear because map files are kind
-of obsolete. Some code may move to runtime or auxiliary modules.</p>
-<p>The name to unicode related code will stay of course.</p>
---ldx]]--
-
-local function loadlumtable(filename) -- will move to font goodies
- local lumname = file.replacesuffix(file.basename(filename),"lum")
- local lumfile = resolvers.findfile(lumname,"map") or ""
- if lumfile ~= "" and lfs.isfile(lumfile) then
- if trace_loading or trace_mapping then
- report_fonts("enhance: loading %s ",lumfile)
- end
- lumunic = dofile(lumfile)
- return lumunic, lumfile
- end
-end
-
-local hex = R("AF","09")
-local hexfour = (hex*hex*hex*hex) / function(s) return tonumber(s,16) end
-local hexsix = (hex^1) / function(s) return tonumber(s,16) end
-local dec = (R("09")^1) / tonumber
-local period = P(".")
-local unicode = P("uni") * (hexfour * (period + P(-1)) * Cc(false) + Ct(hexfour^1) * Cc(true))
-local ucode = P("u") * (hexsix * (period + P(-1)) * Cc(false) + Ct(hexsix ^1) * Cc(true))
-local index = P("index") * dec * Cc(false)
-
-local parser = unicode + ucode + index
-
-local parsers = { }
-
+local tonumber=tonumber
+local match,format,find,concat,gsub,lower=string.match,string.format,string.find,table.concat,string.gsub,string.lower
+local P,R,S,C,Ct,Cc,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.Cc,lpeg.match
+local utfbyte=utf.byte
+local floor=math.floor
+local trace_loading=false trackers.register("fonts.loading",function(v) trace_loading=v end)
+local trace_mapping=false trackers.register("fonts.mapping",function(v) trace_unimapping=v end)
+local report_fonts=logs.reporter("fonts","loading")
+local fonts=fonts
+local mappings=fonts.mappings or {}
+fonts.mappings=mappings
+local function loadlumtable(filename)
+ local lumname=file.replacesuffix(file.basename(filename),"lum")
+ local lumfile=resolvers.findfile(lumname,"map") or ""
+ if lumfile~="" and lfs.isfile(lumfile) then
+ if trace_loading or trace_mapping then
+ report_fonts("enhance: loading %s ",lumfile)
+ end
+ lumunic=dofile(lumfile)
+ return lumunic,lumfile
+ end
+end
+local hex=R("AF","09")
+local hexfour=(hex*hex*hex*hex)/function(s) return tonumber(s,16) end
+local hexsix=(hex^1)/function(s) return tonumber(s,16) end
+local dec=(R("09")^1)/tonumber
+local period=P(".")
+local unicode=P("uni")*(hexfour*(period+P(-1))*Cc(false)+Ct(hexfour^1)*Cc(true))
+local ucode=P("u")*(hexsix*(period+P(-1))*Cc(false)+Ct(hexsix^1)*Cc(true))
+local index=P("index")*dec*Cc(false)
+local parser=unicode+ucode+index
+local parsers={}
local function makenameparser(str)
- if not str or str == "" then
- return parser
- else
- local p = parsers[str]
- if not p then
- p = P(str) * period * dec * Cc(false)
- parsers[str] = p
- end
- return p
+ if not str or str=="" then
+ return parser
+ else
+ local p=parsers[str]
+ if not p then
+ p=P(str)*period*dec*Cc(false)
+ parsers[str]=p
end
+ return p
+ end
end
-
--- local parser = mappings.makenameparser("Japan1")
--- local parser = mappings.makenameparser()
--- local function test(str)
--- local b, a = lpegmatch(parser,str)
--- print((a and table.serialize(b)) or b)
--- end
--- test("a.sc")
--- test("a")
--- test("uni1234")
--- test("uni1234.xx")
--- test("uni12349876")
--- test("index1234")
--- test("Japan1.123")
-
local function tounicode16(unicode)
- if unicode < 0x10000 then
- return format("%04X",unicode)
- elseif unicode < 0x1FFFFFFFFF then
- return format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00)
- else
- report_fonts("can't convert %s into tounicode",unicode)
- end
+ if unicode<0x10000 then
+ return format("%04X",unicode)
+ elseif unicode<0x1FFFFFFFFF then
+ return format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00)
+ else
+ report_fonts("can't convert %s into tounicode",unicode)
+ end
end
-
local function tounicode16sequence(unicodes)
- local t = { }
- for l=1,#unicodes do
- local unicode = unicodes[l]
- if unicode < 0x10000 then
- t[l] = format("%04X",unicode)
- elseif unicode < 0x1FFFFFFFFF then
- t[l] = format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00)
- else
- report_fonts ("can't convert %s into tounicode",unicode)
- end
- end
- return concat(t)
-end
-
-local function fromunicode16(str)
- if #str == 4 then
- return tonumber(str,16)
+ local t={}
+ for l=1,#unicodes do
+ local unicode=unicodes[l]
+ if unicode<0x10000 then
+ t[l]=format("%04X",unicode)
+ elseif unicode<0x1FFFFFFFFF then
+ t[l]=format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00)
else
- local l, r = match(str,"(....)(....)")
- return (tonumber(l,16)- 0xD800)*0x400 + tonumber(r,16) - 0xDC00
+ report_fonts ("can't convert %s into tounicode",unicode)
end
+ end
+ return concat(t)
end
-
---~ This is quite a bit faster but at the cost of some memory but if we
---~ do this we will also use it elsewhere so let's not follow this route
---~ now. I might use this method in the plain variant (no caching there)
---~ but then I need a flag that distinguishes between code branches.
---~
---~ local cache = { }
---~
---~ function mappings.tounicode16(unicode)
---~ local s = cache[unicode]
---~ if not s then
---~ if unicode < 0x10000 then
---~ s = format("%04X",unicode)
---~ else
---~ s = format("%04X%04X",unicode/1024+0xD800,unicode%1024+0xDC00)
---~ end
---~ cache[unicode] = s
---~ end
---~ return s
---~ end
-
-mappings.loadlumtable = loadlumtable
-mappings.makenameparser = makenameparser
-mappings.tounicode16 = tounicode16
-mappings.tounicode16sequence = tounicode16sequence
-mappings.fromunicode16 = fromunicode16
-
-local separator = S("_.")
-local other = C((1 - separator)^1)
-local ligsplitter = Ct(other * (separator * other)^0)
-
---~ print(table.serialize(lpegmatch(ligsplitter,"this")))
---~ print(table.serialize(lpegmatch(ligsplitter,"this.that")))
---~ print(table.serialize(lpegmatch(ligsplitter,"japan1.123")))
---~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more")))
---~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more.that")))
-
+local function fromunicode16(str)
+ if #str==4 then
+ return tonumber(str,16)
+ else
+ local l,r=match(str,"(....)(....)")
+ return (tonumber(l,16)- 0xD800)*0x400+tonumber(r,16)-0xDC00
+ end
+end
+mappings.loadlumtable=loadlumtable
+mappings.makenameparser=makenameparser
+mappings.tounicode16=tounicode16
+mappings.tounicode16sequence=tounicode16sequence
+mappings.fromunicode16=fromunicode16
+local separator=S("_.")
+local other=C((1-separator)^1)
+local ligsplitter=Ct(other*(separator*other)^0)
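
addtounicode below falls back to splitting composite glyph names on "_" and "." with ligsplitter and resolving each part, then encodes the result as a UTF-16BE hex string via tounicode16/tounicode16sequence. A small illustration of the splitter on a made-up glyph name:

local lpeg = require("lpeg")
local S, C, Ct = lpeg.S, lpeg.C, lpeg.Ct

-- same splitter as above: breaks "f_f_i.sc" style names into their parts
local separator   = S("_.")
local other       = C((1-separator)^1)
local ligsplitter = Ct(other*(separator*other)^0)

local parts = lpeg.match(ligsplitter,"f_f_i.sc")
-- parts == { "f", "f", "i", "sc" }; each resolvable part is mapped to a code
-- point and the result is encoded with tounicode16sequence, e.g. the codes
-- of f, f, i give "006600660069"; code points above 0xFFFF get a surrogate pair
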
function mappings.addtounicode(data,filename)
- local resources = data.resources
- local properties = data.properties
- local descriptions = data.descriptions
- local unicodes = resources.unicodes
- if not unicodes then
- return
- end
- -- we need to move this code
- unicodes['space'] = unicodes['space'] or 32
- unicodes['hyphen'] = unicodes['hyphen'] or 45
- unicodes['zwj'] = unicodes['zwj'] or 0x200D
- unicodes['zwnj'] = unicodes['zwnj'] or 0x200C
- -- the tounicode mapping is sparse and only needed for alternatives
- local private = fonts.constructors.privateoffset
- local unknown = format("%04X",utfbyte("?"))
- local unicodevector = fonts.encodings.agl.unicodes -- loaded runtime in context
- local tounicode = { }
- local originals = { }
- resources.tounicode = tounicode
- resources.originals = originals
- local lumunic, uparser, oparser
- local cidinfo, cidnames, cidcodes, usedmap
- if false then -- will become an option
- lumunic = loadlumtable(filename)
- lumunic = lumunic and lumunic.tounicode
- end
- --
- cidinfo = properties.cidinfo
- usedmap = cidinfo and fonts.cid.getmap(cidinfo)
- --
- if usedmap then
- oparser = usedmap and makenameparser(cidinfo.ordering)
- cidnames = usedmap.names
- cidcodes = usedmap.unicodes
- end
- uparser = makenameparser()
- local ns, nl = 0, 0
- for unic, glyph in next, descriptions do
- local index = glyph.index
- local name = glyph.name
- if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then
- local unicode = lumunic and lumunic[name] or unicodevector[name]
- if unicode then
- originals[index] = unicode
- tounicode[index] = tounicode16(unicode)
- ns = ns + 1
- end
- -- cidmap heuristics, beware, there is no guarantee for a match unless
- -- the chain resolves
- if (not unicode) and usedmap then
- local foundindex = lpegmatch(oparser,name)
- if foundindex then
- unicode = cidcodes[foundindex] -- name to number
- if unicode then
- originals[index] = unicode
- tounicode[index] = tounicode16(unicode)
- ns = ns + 1
- else
- local reference = cidnames[foundindex] -- number to name
- if reference then
- local foundindex = lpegmatch(oparser,reference)
- if foundindex then
- unicode = cidcodes[foundindex]
- if unicode then
- originals[index] = unicode
- tounicode[index] = tounicode16(unicode)
- ns = ns + 1
- end
- end
- if not unicode then
- local foundcodes, multiple = lpegmatch(uparser,reference)
- if foundcodes then
- originals[index] = foundcodes
- if multiple then
- tounicode[index] = tounicode16sequence(foundcodes)
- nl = nl + 1
- unicode = true
- else
- tounicode[index] = tounicode16(foundcodes)
- ns = ns + 1
- unicode = foundcodes
- end
- end
- end
- end
- end
- end
- end
- -- a.whatever or a_b_c.whatever or a_b_c (no numbers)
- if not unicode then
- local split = lpegmatch(ligsplitter,name)
- local nplit = split and #split or 0
- if nplit >= 2 then
- local t, n = { }, 0
- for l=1,nplit do
- local base = split[l]
- local u = unicodes[base] or unicodevector[base]
- if not u then
- break
- elseif type(u) == "table" then
- n = n + 1
- t[n] = u[1]
- else
- n = n + 1
- t[n] = u
- end
- end
- if n == 0 then -- done then
- -- nothing
- elseif n == 1 then
- originals[index] = t[1]
- tounicode[index] = tounicode16(t[1])
- else
- originals[index] = t
- tounicode[index] = tounicode16sequence(t)
- end
- nl = nl + 1
- unicode = true
- else
- -- skip: already checked and we don't want privates here
- end
- end
- -- last resort (we might need to catch private here as well)
- if not unicode then
- local foundcodes, multiple = lpegmatch(uparser,name)
+ local resources=data.resources
+ local properties=data.properties
+ local descriptions=data.descriptions
+ local unicodes=resources.unicodes
+ if not unicodes then
+ return
+ end
+ unicodes['space']=unicodes['space'] or 32
+ unicodes['hyphen']=unicodes['hyphen'] or 45
+ unicodes['zwj']=unicodes['zwj'] or 0x200D
+ unicodes['zwnj']=unicodes['zwnj'] or 0x200C
+ local private=fonts.constructors.privateoffset
+ local unknown=format("%04X",utfbyte("?"))
+ local unicodevector=fonts.encodings.agl.unicodes
+ local tounicode={}
+ local originals={}
+ resources.tounicode=tounicode
+ resources.originals=originals
+ local lumunic,uparser,oparser
+ local cidinfo,cidnames,cidcodes,usedmap
+ if false then
+ lumunic=loadlumtable(filename)
+ lumunic=lumunic and lumunic.tounicode
+ end
+ cidinfo=properties.cidinfo
+ usedmap=cidinfo and fonts.cid.getmap(cidinfo)
+ if usedmap then
+ oparser=usedmap and makenameparser(cidinfo.ordering)
+ cidnames=usedmap.names
+ cidcodes=usedmap.unicodes
+ end
+ uparser=makenameparser()
+ local ns,nl=0,0
+ for unic,glyph in next,descriptions do
+ local index=glyph.index
+ local name=glyph.name
+ if unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then
+ local unicode=lumunic and lumunic[name] or unicodevector[name]
+ if unicode then
+ originals[index]=unicode
+ tounicode[index]=tounicode16(unicode)
+ ns=ns+1
+ end
+ if (not unicode) and usedmap then
+ local foundindex=lpegmatch(oparser,name)
+ if foundindex then
+ unicode=cidcodes[foundindex]
+ if unicode then
+ originals[index]=unicode
+ tounicode[index]=tounicode16(unicode)
+ ns=ns+1
+ else
+ local reference=cidnames[foundindex]
+ if reference then
+ local foundindex=lpegmatch(oparser,reference)
+ if foundindex then
+ unicode=cidcodes[foundindex]
+ if unicode then
+ originals[index]=unicode
+ tounicode[index]=tounicode16(unicode)
+ ns=ns+1
+ end
+ end
+ if not unicode then
+ local foundcodes,multiple=lpegmatch(uparser,reference)
if foundcodes then
- if multiple then
- originals[index] = foundcodes
- tounicode[index] = tounicode16sequence(foundcodes)
- nl = nl + 1
- unicode = true
- else
- originals[index] = foundcodes
- tounicode[index] = tounicode16(foundcodes)
- ns = ns + 1
- unicode = foundcodes
- end
- end
- end
- -- if not unicode then
- -- originals[index] = 0xFFFD
- -- tounicode[index] = "FFFD"
- -- end
- end
- end
- if trace_mapping then
- for unic, glyph in table.sortedhash(descriptions) do
- local name = glyph.name
- local index = glyph.index
- local toun = tounicode[index]
- if toun then
- report_fonts("internal: 0x%05X, name: %s, unicode: U+%05X, tounicode: %s",index,name,unic,toun)
+ originals[index]=foundcodes
+ if multiple then
+ tounicode[index]=tounicode16sequence(foundcodes)
+ nl=nl+1
+ unicode=true
+ else
+ tounicode[index]=tounicode16(foundcodes)
+ ns=ns+1
+ unicode=foundcodes
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ if not unicode then
+ local split=lpegmatch(ligsplitter,name)
+ local nplit=split and #split or 0
+ if nplit>=2 then
+ local t,n={},0
+ for l=1,nplit do
+ local base=split[l]
+ local u=unicodes[base] or unicodevector[base]
+ if not u then
+ break
+ elseif type(u)=="table" then
+ n=n+1
+ t[n]=u[1]
else
- report_fonts("internal: 0x%05X, name: %s, unicode: U+%05X",index,name,unic)
- end
+ n=n+1
+ t[n]=u
+ end
+ end
+ if n==0 then
+ elseif n==1 then
+ originals[index]=t[1]
+ tounicode[index]=tounicode16(t[1])
+ else
+ originals[index]=t
+ tounicode[index]=tounicode16sequence(t)
+ end
+ nl=nl+1
+ unicode=true
+ else
end
- end
- if trace_loading and (ns > 0 or nl > 0) then
- report_fonts("enhance: %s tounicode entries added (%s ligatures)",nl+ns, ns)
- end
+ end
+ if not unicode then
+ local foundcodes,multiple=lpegmatch(uparser,name)
+ if foundcodes then
+ if multiple then
+ originals[index]=foundcodes
+ tounicode[index]=tounicode16sequence(foundcodes)
+ nl=nl+1
+ unicode=true
+ else
+ originals[index]=foundcodes
+ tounicode[index]=tounicode16(foundcodes)
+ ns=ns+1
+ unicode=foundcodes
+ end
+ end
+ end
+ end
+ end
+ if trace_mapping then
+ for unic,glyph in table.sortedhash(descriptions) do
+ local name=glyph.name
+ local index=glyph.index
+ local toun=tounicode[index]
+ if toun then
+ report_fonts("internal: 0x%05X, name: %s, unicode: U+%05X, tounicode: %s",index,name,unic,toun)
+ else
+ report_fonts("internal: 0x%05X, name: %s, unicode: U+%05X",index,name,unic)
+ end
+ end
+ end
+ if trace_loading and (ns>0 or nl>0) then
+ report_fonts("enhance: %s tounicode entries added (%s ligatures)",nl+ns,ns)
+ end
end
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['luatex-fonts-syn'] = {
- version = 1.001,
- comment = "companion to luatex-*.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['luatex-fonts-syn']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
-end
-
--- Generic font names support.
---
--- Watch out, the version number is the same as the one used in
--- the mtx-fonts.lua function scripts.fonts.names as we use a
--- simplified font database in the plain solution and by using
--- a different number we're less dependent on context.
---
--- mtxrun --script font --reload --simple
---
--- The format of the file is as follows:
---
--- return {
--- ["version"] = 1.001,
--- ["mappings"] = {
--- ["somettcfontone"] = { "Some TTC Font One", "SomeFontA.ttc", 1 },
--- ["somettcfonttwo"] = { "Some TTC Font Two", "SomeFontA.ttc", 2 },
--- ["somettffont"] = { "Some TTF Font", "SomeFontB.ttf" },
--- ["someotffont"] = { "Some OTF Font", "SomeFontC.otf" },
--- },
--- }
-
-local fonts = fonts
-fonts.names = fonts.names or { }
-
-fonts.names.version = 1.001 -- not the same as in context
-fonts.names.basename = "luatex-fonts-names.lua"
-fonts.names.new_to_old = { }
-fonts.names.old_to_new = { }
-
-local data, loaded = nil, false
-
-local fileformats = { "lua", "tex", "other text files" }
-
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
+end
+local fonts=fonts
+fonts.names=fonts.names or {}
+fonts.names.version=1.001
+fonts.names.basename="luatex-fonts-names.lua"
+fonts.names.new_to_old={}
+fonts.names.old_to_new={}
+local data,loaded=nil,false
+local fileformats={ "lua","tex","other text files" }
function fonts.names.resolve(name,sub)
- if not loaded then
- local basename = fonts.names.basename
- if basename and basename ~= "" then
- for i=1,#fileformats do
- local format = fileformats[i]
- local foundname = resolvers.findfile(basename,format) or ""
- if foundname ~= "" then
- data = dofile(foundname)
- texio.write("<font database loaded: ",foundname,">")
- break
- end
- end
- end
- loaded = true
- end
- if type(data) == "table" and data.version == fonts.names.version then
- local condensed = string.gsub(string.lower(name),"[^%a%d]","")
- local found = data.mappings and data.mappings[condensed]
- if found then
- local fontname, filename, subfont = found[1], found[2], found[3]
- if subfont then
- return filename, fontname
- else
- return filename, false
- end
- else
- return name, false -- fallback to filename
- end
+ if not loaded then
+ local basename=fonts.names.basename
+ if basename and basename~="" then
+ for i=1,#fileformats do
+ local format=fileformats[i]
+ local foundname=resolvers.findfile(basename,format) or ""
+ if foundname~="" then
+ data=dofile(foundname)
+ texio.write("<font database loaded: ",foundname,">")
+ break
+ end
+ end
+ end
+ loaded=true
+ end
+ if type(data)=="table" and data.version==fonts.names.version then
+ local condensed=string.gsub(string.lower(name),"[^%a%d]","")
+ local found=data.mappings and data.mappings[condensed]
+ if found then
+ local fontname,filename,subfont=found[1],found[2],found[3]
+ if subfont then
+ return filename,fontname
+ else
+ return filename,false
+ end
+ else
+ return name,false
end
+ end
end
-
-fonts.names.resolvespec = fonts.names.resolve -- only supported in mkiv
-
-function fonts.names.getfilename(askedname,suffix) -- only supported in mkiv
- return ""
+fonts.names.resolvespec=fonts.names.resolve
+function fonts.names.getfilename(askedname,suffix)
+ return ""
end
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['luatex-fonts-tfm'] = {
- version = 1.001,
- comment = "companion to luatex-*.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['luatex-fonts-tfm']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
end
-
-local fonts = fonts
-local tfm = { }
-fonts.handlers.tfm = tfm
-fonts.formats.tfm = "type1" -- we need to have at least a value here
-
+local fonts=fonts
+local tfm={}
+fonts.handlers.tfm=tfm
+fonts.formats.tfm="type1"
function fonts.readers.tfm(specification)
- local fullname = specification.filename or ""
- if fullname == "" then
- local forced = specification.forced or ""
- if forced ~= "" then
- fullname = specification.name .. "." .. forced
- else
- fullname = specification.name
- end
- end
- local foundname = resolvers.findbinfile(fullname, 'tfm') or ""
- if foundname == "" then
- foundname = resolvers.findbinfile(fullname, 'ofm') or ""
- end
- if foundname ~= "" then
- specification.filename = foundname
- specification.format = "ofm"
- return font.read_tfm(specification.filename,specification.size)
+ local fullname=specification.filename or ""
+ if fullname=="" then
+ local forced=specification.forced or ""
+ if forced~="" then
+ fullname=specification.name.."."..forced
+ else
+ fullname=specification.name
end
+ end
+ local foundname=resolvers.findbinfile(fullname,'tfm') or ""
+ if foundname=="" then
+ foundname=resolvers.findbinfile(fullname,'ofm') or ""
+ end
+ if foundname~="" then
+ specification.filename=foundname
+ specification.format="ofm"
+ return font.read_tfm(specification.filename,specification.size)
+ end
end
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['font-oti'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['font-oti']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
-local lower = string.lower
-
-local fonts = fonts
-local constructors = fonts.constructors
-
-local otf = constructors.newhandler("otf")
-local otffeatures = constructors.newfeatures("otf")
-local otftables = otf.tables
-local registerotffeature = otffeatures.register
-
-local allocate = utilities.storage.allocate
-
+local lower=string.lower
+local fonts=fonts
+local constructors=fonts.constructors
+local otf=constructors.newhandler("otf")
+local otffeatures=constructors.newfeatures("otf")
+local otftables=otf.tables
+local registerotffeature=otffeatures.register
+local allocate=utilities.storage.allocate
registerotffeature {
- name = "features",
- description = "initialization of feature handler",
- default = true,
+ name="features",
+ description="initialization of feature handler",
+ default=true,
}
-
--- these are later hooked into node and base initializers
-
local function setmode(tfmdata,value)
- if value then
- tfmdata.properties.mode = lower(value)
- end
+ if value then
+ tfmdata.properties.mode=lower(value)
+ end
end
-
local function setlanguage(tfmdata,value)
- if value then
- local cleanvalue = lower(value)
- local languages = otftables and otftables.languages
- local properties = tfmdata.properties
- if not languages then
- properties.language = cleanvalue
- elseif languages[value] then
- properties.language = cleanvalue
- else
- properties.language = "dflt"
- end
+ if value then
+ local cleanvalue=lower(value)
+ local languages=otftables and otftables.languages
+ local properties=tfmdata.properties
+ if not languages then
+ properties.language=cleanvalue
+ elseif languages[value] then
+ properties.language=cleanvalue
+ else
+ properties.language="dflt"
end
+ end
end
-
local function setscript(tfmdata,value)
- if value then
- local cleanvalue = lower(value)
- local scripts = otftables and otftables.scripts
- local properties = tfmdata.properties
- if not scripts then
- properties.script = cleanvalue
- elseif scripts[value] then
- properties.script = cleanvalue
- else
- properties.script = "dflt"
- end
+ if value then
+ local cleanvalue=lower(value)
+ local scripts=otftables and otftables.scripts
+ local properties=tfmdata.properties
+ if not scripts then
+ properties.script=cleanvalue
+ elseif scripts[value] then
+ properties.script=cleanvalue
+ else
+ properties.script="dflt"
end
+ end
end
-
registerotffeature {
- name = "mode",
- description = "mode",
- initializers = {
- base = setmode,
- node = setmode,
- }
+ name="mode",
+ description="mode",
+ initializers={
+ base=setmode,
+ node=setmode,
+ }
}
-
registerotffeature {
- name = "language",
- description = "language",
- initializers = {
- base = setlanguage,
- node = setlanguage,
- }
+ name="language",
+ description="language",
+ initializers={
+ base=setlanguage,
+ node=setlanguage,
+ }
}
-
registerotffeature {
- name = "script",
- description = "script",
- initializers = {
- base = setscript,
- node = setscript,
- }
+ name="script",
+ description="script",
+ initializers={
+ base=setscript,
+ node=setscript,
+ }
}
-
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['font-otf'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['font-otf']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
--- langs -> languages etc.
--- anchor_classes vs kernclasses
--- modification/creationtime in subfont is runtime, thus pointless
--- to_table -> totable
--- ascent descent
-
--- more checking against low level calls of functions
-
-local utfbyte = utf.byte
-local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
-local type, next, tonumber, tostring = type, next, tonumber, tostring
-local abs = math.abs
-local getn = table.getn
-local lpegmatch = lpeg.match
-local reversed, concat, remove = table.reversed, table.concat, table.remove
-local ioflush = io.flush
-local fastcopy, tohash, derivetable = table.fastcopy, table.tohash, table.derive
-
-local allocate = utilities.storage.allocate
-local registertracker = trackers.register
-local registerdirective = directives.register
-local starttiming = statistics.starttiming
-local stoptiming = statistics.stoptiming
-local elapsedtime = statistics.elapsedtime
-local findbinfile = resolvers.findbinfile
-
-local trace_private = false registertracker("otf.private", function(v) trace_private = v end)
-local trace_loading = false registertracker("otf.loading", function(v) trace_loading = v end)
-local trace_features = false registertracker("otf.features", function(v) trace_features = v end)
-local trace_dynamics = false registertracker("otf.dynamics", function(v) trace_dynamics = v end)
-local trace_sequences = false registertracker("otf.sequences", function(v) trace_sequences = v end)
-local trace_markwidth = false registertracker("otf.markwidth", function(v) trace_markwidth = v end)
-local trace_defining = false registertracker("fonts.defining", function(v) trace_defining = v end)
-
-local report_otf = logs.reporter("fonts","otf loading")
-
-local fonts = fonts
-local otf = fonts.handlers.otf
-
-otf.glists = { "gsub", "gpos" }
-
-otf.version = 2.741 -- beware: also sync font-mis.lua
-otf.cache = containers.define("fonts", "otf", otf.version, true)
-
-local fontdata = fonts.hashes.identifiers
-local chardata = characters and characters.data -- not used
-
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
-
-local enhancers = allocate()
-otf.enhancers = enhancers
-local patches = { }
-enhancers.patches = patches
-
-local definers = fonts.definers
-local readers = fonts.readers
-local constructors = fonts.constructors
-
-local forceload = false
-local cleanup = 0 -- mk: 0=885M 1=765M 2=735M (regular run 730M)
-local usemetatables = false -- .4 slower on mk but 30 M less mem so we might change the default -- will be directive
-local packdata = true
-local syncspace = true
-local forcenotdef = false
-
-local wildcard = "*"
-local default = "dflt"
-
-local fontloaderfields = fontloader.fields
-local mainfields = nil
-local glyphfields = nil -- not used yet
-
-registerdirective("fonts.otf.loader.cleanup", function(v) cleanup = tonumber(v) or (v and 1) or 0 end)
-registerdirective("fonts.otf.loader.force", function(v) forceload = v end)
-registerdirective("fonts.otf.loader.usemetatables", function(v) usemetatables = v end)
-registerdirective("fonts.otf.loader.pack", function(v) packdata = v end)
-registerdirective("fonts.otf.loader.syncspace", function(v) syncspace = v end)
-registerdirective("fonts.otf.loader.forcenotdef", function(v) forcenotdef = v end)
-
+local utfbyte=utf.byte
+local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
+local type,next,tonumber,tostring=type,next,tonumber,tostring
+local abs=math.abs
+local getn=table.getn
+local lpegmatch=lpeg.match
+local reversed,concat,remove=table.reversed,table.concat,table.remove
+local ioflush=io.flush
+local fastcopy,tohash,derivetable=table.fastcopy,table.tohash,table.derive
+local allocate=utilities.storage.allocate
+local registertracker=trackers.register
+local registerdirective=directives.register
+local starttiming=statistics.starttiming
+local stoptiming=statistics.stoptiming
+local elapsedtime=statistics.elapsedtime
+local findbinfile=resolvers.findbinfile
+local trace_private=false registertracker("otf.private",function(v) trace_private=v end)
+local trace_loading=false registertracker("otf.loading",function(v) trace_loading=v end)
+local trace_features=false registertracker("otf.features",function(v) trace_features=v end)
+local trace_dynamics=false registertracker("otf.dynamics",function(v) trace_dynamics=v end)
+local trace_sequences=false registertracker("otf.sequences",function(v) trace_sequences=v end)
+local trace_markwidth=false registertracker("otf.markwidth",function(v) trace_markwidth=v end)
+local trace_defining=false registertracker("fonts.defining",function(v) trace_defining=v end)
+local report_otf=logs.reporter("fonts","otf loading")
+local fonts=fonts
+local otf=fonts.handlers.otf
+otf.glists={ "gsub","gpos" }
+otf.version=2.741
+otf.cache=containers.define("fonts","otf",otf.version,true)
+local fontdata=fonts.hashes.identifiers
+local chardata=characters and characters.data
+local otffeatures=fonts.constructors.newfeatures("otf")
+local registerotffeature=otffeatures.register
+local enhancers=allocate()
+otf.enhancers=enhancers
+local patches={}
+enhancers.patches=patches
+local definers=fonts.definers
+local readers=fonts.readers
+local constructors=fonts.constructors
+local forceload=false
+local cleanup=0
+local usemetatables=false
+local packdata=true
+local syncspace=true
+local forcenotdef=false
+local wildcard="*"
+local default="dflt"
+local fontloaderfields=fontloader.fields
+local mainfields=nil
+local glyphfields=nil
+registerdirective("fonts.otf.loader.cleanup",function(v) cleanup=tonumber(v) or (v and 1) or 0 end)
+registerdirective("fonts.otf.loader.force",function(v) forceload=v end)
+registerdirective("fonts.otf.loader.usemetatables",function(v) usemetatables=v end)
+registerdirective("fonts.otf.loader.pack",function(v) packdata=v end)
+registerdirective("fonts.otf.loader.syncspace",function(v) syncspace=v end)
+registerdirective("fonts.otf.loader.forcenotdef",function(v) forcenotdef=v end)
local function load_featurefile(raw,featurefile)
- if featurefile and featurefile ~= "" then
- if trace_loading then
- report_otf("featurefile: %s", featurefile)
- end
- fontloader.apply_featurefile(raw, featurefile)
+ if featurefile and featurefile~="" then
+ if trace_loading then
+ report_otf("featurefile: %s",featurefile)
end
+ fontloader.apply_featurefile(raw,featurefile)
+ end
end
-
local function showfeatureorder(rawdata,filename)
- local sequences = rawdata.resources.sequences
- if sequences and #sequences > 0 then
- if trace_loading then
- report_otf("font %s has %s sequences",filename,#sequences)
- report_otf(" ")
- end
- for nos=1,#sequences do
- local sequence = sequences[nos]
- local typ = sequence.type or "no-type"
- local name = sequence.name or "no-name"
- local subtables = sequence.subtables or { "no-subtables" }
- local features = sequence.features
+ local sequences=rawdata.resources.sequences
+ if sequences and #sequences>0 then
+ if trace_loading then
+ report_otf("font %s has %s sequences",filename,#sequences)
+ report_otf(" ")
+ end
+ for nos=1,#sequences do
+ local sequence=sequences[nos]
+ local typ=sequence.type or "no-type"
+ local name=sequence.name or "no-name"
+ local subtables=sequence.subtables or { "no-subtables" }
+ local features=sequence.features
+ if trace_loading then
+ report_otf("%3i %-15s %-20s [%s]",nos,name,typ,concat(subtables,","))
+ end
+ if features then
+ for feature,scripts in next,features do
+ local tt={}
+ if type(scripts)=="table" then
+ for script,languages in next,scripts do
+ local ttt={}
+ for language,_ in next,languages do
+ ttt[#ttt+1]=language
+ end
+ tt[#tt+1]=format("[%s: %s]",script,concat(ttt," "))
+ end
if trace_loading then
- report_otf("%3i %-15s %-20s [%s]",nos,name,typ,concat(subtables,","))
+ report_otf(" %s: %s",feature,concat(tt," "))
end
- if features then
- for feature, scripts in next, features do
- local tt = { }
- if type(scripts) == "table" then
- for script, languages in next, scripts do
- local ttt = { }
- for language, _ in next, languages do
- ttt[#ttt+1] = language
- end
- tt[#tt+1] = format("[%s: %s]",script,concat(ttt," "))
- end
- if trace_loading then
- report_otf(" %s: %s",feature,concat(tt," "))
- end
- else
- if trace_loading then
- report_otf(" %s: %s",feature,tostring(scripts))
- end
- end
- end
+ else
+ if trace_loading then
+ report_otf(" %s: %s",feature,tostring(scripts))
end
+ end
end
- if trace_loading then
- report_otf("\n")
- end
- elseif trace_loading then
- report_otf("font %s has no sequences",filename)
+ end
end
-end
-
---[[ldx--
-<p>We start with a lot of tables and related functions.</p>
---ldx]]--
-
-local valid_fields = table.tohash {
- -- "anchor_classes",
- "ascent",
- -- "cache_version",
- "cidinfo",
- "copyright",
- -- "creationtime",
- "descent",
- "design_range_bottom",
- "design_range_top",
- "design_size",
- "encodingchanged",
- "extrema_bound",
- "familyname",
- "fontname",
- "fontname",
- "fontstyle_id",
- "fontstyle_name",
- "fullname",
- -- "glyphs",
- "hasvmetrics",
- -- "head_optimized_for_cleartype",
- "horiz_base",
- "issans",
- "isserif",
- "italicangle",
- -- "kerns",
- -- "lookups",
- "macstyle",
- -- "modificationtime",
- "onlybitmaps",
- "origname",
- "os2_version",
- "pfminfo",
- -- "private",
- "serifcheck",
- "sfd_version",
- -- "size",
- "strokedfont",
- "strokewidth",
- -- "subfonts",
- "table_version",
- -- "tables",
- -- "ttf_tab_saved",
- "ttf_tables",
- "uni_interp",
- "uniqueid",
- "units_per_em",
- "upos",
- "use_typo_metrics",
- "uwidth",
- -- "validation_state",
- "version",
- "vert_base",
- "weight",
- "weight_width_slope_only",
- -- "xuid",
+ if trace_loading then
+ report_otf("\n")
+ end
+ elseif trace_loading then
+ report_otf("font %s has no sequences",filename)
+ end
+end
+local valid_fields=table.tohash {
+ "ascent",
+ "cidinfo",
+ "copyright",
+ "descent",
+ "design_range_bottom",
+ "design_range_top",
+ "design_size",
+ "encodingchanged",
+ "extrema_bound",
+ "familyname",
+ "fontname",
+ "fontname",
+ "fontstyle_id",
+ "fontstyle_name",
+ "fullname",
+ "hasvmetrics",
+ "horiz_base",
+ "issans",
+ "isserif",
+ "italicangle",
+ "macstyle",
+ "onlybitmaps",
+ "origname",
+ "os2_version",
+ "pfminfo",
+ "serifcheck",
+ "sfd_version",
+ "strokedfont",
+ "strokewidth",
+ "table_version",
+ "ttf_tables",
+ "uni_interp",
+ "uniqueid",
+ "units_per_em",
+ "upos",
+ "use_typo_metrics",
+ "uwidth",
+ "version",
+ "vert_base",
+ "weight",
+ "weight_width_slope_only",
}
-
-local ordered_enhancers = {
- "prepare tables",
- "prepare glyphs",
- "prepare lookups",
-
- "analyze glyphs",
- "analyze math",
-
- "prepare tounicode", -- maybe merge with prepare
-
- "reorganize lookups",
- "reorganize mark classes",
- "reorganize anchor classes",
-
- "reorganize glyph kerns",
- "reorganize glyph lookups",
- "reorganize glyph anchors",
-
- "merge kern classes",
-
- "reorganize features",
- "reorganize subtables",
-
- "check glyphs",
- "check metadata",
- "check extra features", -- after metadata
-
- "add duplicates",
- "check encoding",
-
- "cleanup tables",
+local ordered_enhancers={
+ "prepare tables",
+ "prepare glyphs",
+ "prepare lookups",
+ "analyze glyphs",
+ "analyze math",
+ "prepare tounicode",
+ "reorganize lookups",
+ "reorganize mark classes",
+ "reorganize anchor classes",
+ "reorganize glyph kerns",
+ "reorganize glyph lookups",
+ "reorganize glyph anchors",
+ "merge kern classes",
+ "reorganize features",
+ "reorganize subtables",
+ "check glyphs",
+ "check metadata",
+ "check extra features",
+ "add duplicates",
+ "check encoding",
+ "cleanup tables",
}
-
---[[ldx--
-<p>Here we go.</p>
---ldx]]--
-
-local actions = allocate()
-local before = allocate()
-local after = allocate()
-
-patches.before = before
-patches.after = after
-
+local actions=allocate()
+local before=allocate()
+local after=allocate()
+patches.before=before
+patches.after=after
local function enhance(name,data,filename,raw)
- local enhancer = actions[name]
- if enhancer then
- if trace_loading then
- report_otf("enhance: %s (%s)",name,filename)
- ioflush()
- end
- enhancer(data,filename,raw)
- elseif trace_loading then
- -- report_otf("enhance: %s is undefined",name)
+ local enhancer=actions[name]
+ if enhancer then
+ if trace_loading then
+ report_otf("enhance: %s (%s)",name,filename)
+ ioflush()
end
+ enhancer(data,filename,raw)
+ elseif trace_loading then
+ end
end
-
function enhancers.apply(data,filename,raw)
- local basename = file.basename(lower(filename))
- if trace_loading then
- report_otf("start enhancing: %s",filename)
- end
- ioflush() -- we want instant messages
- for e=1,#ordered_enhancers do
- local enhancer = ordered_enhancers[e]
- local b = before[enhancer]
- if b then
- for pattern, action in next, b do
- if find(basename,pattern) then
- action(data,filename,raw)
- end
- end
- end
- enhance(enhancer,data,filename,raw)
- local a = after[enhancer]
- if a then
- for pattern, action in next, a do
- if find(basename,pattern) then
- action(data,filename,raw)
- end
- end
- end
- ioflush() -- we want instant messages
- end
- if trace_loading then
- report_otf("stop enhancing")
- end
- ioflush() -- we want instant messages
+ local basename=file.basename(lower(filename))
+ if trace_loading then
+ report_otf("start enhancing: %s",filename)
+ end
+ ioflush()
+ for e=1,#ordered_enhancers do
+ local enhancer=ordered_enhancers[e]
+ local b=before[enhancer]
+ if b then
+ for pattern,action in next,b do
+ if find(basename,pattern) then
+ action(data,filename,raw)
+ end
+ end
+ end
+ enhance(enhancer,data,filename,raw)
+ local a=after[enhancer]
+ if a then
+ for pattern,action in next,a do
+ if find(basename,pattern) then
+ action(data,filename,raw)
+ end
+ end
+ end
+ ioflush()
+ end
+ if trace_loading then
+ report_otf("stop enhancing")
+ end
+ ioflush()
end
-
--- patches.register("before","migrate metadata","cambria",function() end)
-
function patches.register(what,where,pattern,action)
- local pw = patches[what]
- if pw then
- local ww = pw[where]
- if ww then
- ww[pattern] = action
- else
- pw[where] = { [pattern] = action}
- end
+ local pw=patches[what]
+ if pw then
+ local ww=pw[where]
+ if ww then
+ ww[pattern]=action
+ else
+ pw[where]={ [pattern]=action}
end
+ end
end
-
function patches.report(fmt,...)
- if trace_loading then
- report_otf("patching: " ..fmt,...)
- end
+ if trace_loading then
+ report_otf("patching: "..fmt,...)
+ end
end
-
-function enhancers.register(what,action) -- only already registered can be overloaded
- actions[what] = action
+function enhancers.register(what,action)
+ actions[what]=action
end
-
function otf.load(filename,format,sub,featurefile)
- local name = file.basename(file.removesuffix(filename))
- local attr = lfs.attributes(filename)
- local size = attr and attr.size or 0
- local time = attr and attr.modification or 0
- if featurefile then
- name = name .. "@" .. file.removesuffix(file.basename(featurefile))
+ local name=file.basename(file.removesuffix(filename))
+ local attr=lfs.attributes(filename)
+ local size=attr and attr.size or 0
+ local time=attr and attr.modification or 0
+ if featurefile then
+ name=name.."@"..file.removesuffix(file.basename(featurefile))
+ end
+ if sub=="" then
+ sub=false
+ end
+ local hash=name
+ if sub then
+ hash=hash.."-"..sub
+ end
+ hash=containers.cleanname(hash)
+ local featurefiles
+ if featurefile then
+ featurefiles={}
+ for s in gmatch(featurefile,"[^,]+") do
+ local name=resolvers.findfile(file.addsuffix(s,'fea'),'fea') or ""
+ if name=="" then
+ report_otf("loading: no featurefile '%s'",s)
+ else
+ local attr=lfs.attributes(name)
+ featurefiles[#featurefiles+1]={
+ name=name,
+ size=attr and attr.size or 0,
+ time=attr and attr.modification or 0,
+ }
+ end
+ end
+ if #featurefiles==0 then
+ featurefiles=nil
+ end
+ end
+ local data=containers.read(otf.cache,hash)
+ local reload=not data or data.size~=size or data.time~=time
+ if forceload then
+ report_otf("loading: forced reload due to hard coded flag")
+ reload=true
+ end
+ if not reload then
+ local featuredata=data.featuredata
+ if featurefiles then
+ if not featuredata or #featuredata~=#featurefiles then
+ reload=true
+ else
+ for i=1,#featurefiles do
+ local fi,fd=featurefiles[i],featuredata[i]
+ if fi.name~=fd.name or fi.size~=fd.size or fi.time~=fd.time then
+ reload=true
+ break
+ end
+ end
+ end
+ elseif featuredata then
+ reload=true
end
- if sub == "" then
- sub = false
+ if reload then
+ report_otf("loading: forced reload due to changed featurefile specification: %s",featurefile or "--")
end
- local hash = name
+ end
+ if reload then
+ report_otf("loading: %s (hash: %s)",filename,hash)
+ local fontdata,messages
if sub then
- hash = hash .. "-" .. sub
- end
- hash = containers.cleanname(hash)
- local featurefiles
- if featurefile then
- featurefiles = { }
- for s in gmatch(featurefile,"[^,]+") do
- local name = resolvers.findfile(file.addsuffix(s,'fea'),'fea') or ""
- if name == "" then
- report_otf("loading: no featurefile '%s'",s)
- else
- local attr = lfs.attributes(name)
- featurefiles[#featurefiles+1] = {
- name = name,
- size = attr and attr.size or 0,
- time = attr and attr.modification or 0,
- }
- end
- end
- if #featurefiles == 0 then
- featurefiles = nil
- end
- end
- local data = containers.read(otf.cache,hash)
- local reload = not data or data.size ~= size or data.time ~= time
- if forceload then
- report_otf("loading: forced reload due to hard coded flag")
- reload = true
+ fontdata,messages=fontloader.open(filename,sub)
+ else
+ fontdata,messages=fontloader.open(filename)
end
- if not reload then
- local featuredata = data.featuredata
- if featurefiles then
- if not featuredata or #featuredata ~= #featurefiles then
- reload = true
- else
- for i=1,#featurefiles do
- local fi, fd = featurefiles[i], featuredata[i]
- if fi.name ~= fd.name or fi.size ~= fd.size or fi.time ~= fd.time then
- reload = true
- break
- end
- end
- end
- elseif featuredata then
- reload = true
- end
- if reload then
- report_otf("loading: forced reload due to changed featurefile specification: %s",featurefile or "--")
- end
- end
- if reload then
- report_otf("loading: %s (hash: %s)",filename,hash)
- local fontdata, messages
- if sub then
- fontdata, messages = fontloader.open(filename,sub)
- else
- fontdata, messages = fontloader.open(filename)
- end
- if fontdata then
- mainfields = mainfields or (fontloaderfields and fontloaderfields(fontdata))
- end
- if trace_loading and messages and #messages > 0 then
- if type(messages) == "string" then
- report_otf("warning: %s",messages)
- else
- for m=1,#messages do
- report_otf("warning: %s",tostring(messages[m]))
- end
- end
- else
- report_otf("font loaded okay")
- end
- if fontdata then
- if featurefiles then
- for i=1,#featurefiles do
- load_featurefile(fontdata,featurefiles[i].name)
- end
- end
- local unicodes = {
- -- names to unicodes
- }
- local splitter = lpeg.splitter(" ",unicodes)
- data = {
- size = size,
- time = time,
- format = format,
- featuredata = featurefiles,
- resources = {
- filename = resolvers.unresolve(filename), -- no shortcut
- version = otf.version,
- creator = "context mkiv",
- unicodes = unicodes,
- indices = {
- -- index to unicodes
- },
- duplicates = {
- -- alternative unicodes
- },
- variants = {
- -- alternative unicodes (variants)
- },
- lookuptypes = {
- },
- },
- metadata = {
- -- raw metadata, not to be used
- },
- properties = {
- -- normalized metadata
- },
- descriptions = {
- },
- goodies = {
- },
- helpers = {
- tounicodelist = splitter,
- tounicodetable = lpeg.Ct(splitter),
- },
- }
- starttiming(data)
- report_otf("file size: %s", size)
- enhancers.apply(data,filename,fontdata)
- local packtime = { }
- if packdata then
- if cleanup > 0 then
- collectgarbage("collect")
- end
- starttiming(packtime)
- enhance("pack",data,filename,nil)
- stoptiming(packtime)
- end
- report_otf("saving in cache: %s",filename)
- data = containers.write(otf.cache, hash, data)
- if cleanup > 1 then
- collectgarbage("collect")
- end
- stoptiming(data)
- if elapsedtime then -- not in generic
- report_otf("preprocessing and caching took %s seconds (packtime: %s)",
- elapsedtime(data),packdata and elapsedtime(packtime) or 0)
- end
- fontloader.close(fontdata) -- free memory
- if cleanup > 3 then
- collectgarbage("collect")
- end
- data = containers.read(otf.cache, hash) -- this frees the old table and load the sparse one
- if cleanup > 2 then
- collectgarbage("collect")
- end
- else
- data = nil
- report_otf("loading failed (file read error)")
- end
+ if fontdata then
+ mainfields=mainfields or (fontloaderfields and fontloaderfields(fontdata))
end
- if data then
- if trace_defining then
- report_otf("loading from cache: %s",hash)
- end
- enhance("unpack",data,filename,nil,false)
- enhance("add dimensions",data,filename,nil,false)
- if trace_sequences then
- showfeatureorder(data,filename)
+ if trace_loading and messages and #messages>0 then
+ if type(messages)=="string" then
+ report_otf("warning: %s",messages)
+ else
+ for m=1,#messages do
+ report_otf("warning: %s",tostring(messages[m]))
end
+ end
+ else
+ report_otf("font loaded okay")
end
- return data
-end
-
-local mt = {
- __index = function(t,k) -- maybe set it
- if k == "height" then
- local ht = t.boundingbox[4]
- return ht < 0 and 0 or ht
- elseif k == "depth" then
- local dp = -t.boundingbox[2]
- return dp < 0 and 0 or dp
- elseif k == "width" then
- return 0
- elseif k == "name" then -- or maybe uni*
- return forcenotdef and ".notdef"
- end
+ if fontdata then
+ if featurefiles then
+ for i=1,#featurefiles do
+ load_featurefile(fontdata,featurefiles[i].name)
+ end
+ end
+ local unicodes={
+ }
+ local splitter=lpeg.splitter(" ",unicodes)
+ data={
+ size=size,
+ time=time,
+ format=format,
+ featuredata=featurefiles,
+ resources={
+ filename=resolvers.unresolve(filename),
+ version=otf.version,
+ creator="context mkiv",
+ unicodes=unicodes,
+ indices={
+ },
+ duplicates={
+ },
+ variants={
+ },
+ lookuptypes={},
+ },
+ metadata={
+ },
+ properties={
+ },
+ descriptions={},
+ goodies={},
+ helpers={
+ tounicodelist=splitter,
+ tounicodetable=lpeg.Ct(splitter),
+ },
+ }
+ starttiming(data)
+ report_otf("file size: %s",size)
+ enhancers.apply(data,filename,fontdata)
+ local packtime={}
+ if packdata then
+ if cleanup>0 then
+ collectgarbage("collect")
+ end
+ starttiming(packtime)
+ enhance("pack",data,filename,nil)
+ stoptiming(packtime)
+ end
+ report_otf("saving in cache: %s",filename)
+ data=containers.write(otf.cache,hash,data)
+ if cleanup>1 then
+ collectgarbage("collect")
+ end
+ stoptiming(data)
+ if elapsedtime then
+ report_otf("preprocessing and caching took %s seconds (packtime: %s)",
+ elapsedtime(data),packdata and elapsedtime(packtime) or 0)
+ end
+ fontloader.close(fontdata)
+ if cleanup>3 then
+ collectgarbage("collect")
+ end
+ data=containers.read(otf.cache,hash)
+ if cleanup>2 then
+ collectgarbage("collect")
+ end
+ else
+ data=nil
+ report_otf("loading failed (file read error)")
end
+ end
+ if data then
+ if trace_defining then
+ report_otf("loading from cache: %s",hash)
+ end
+ enhance("unpack",data,filename,nil,false)
+ enhance("add dimensions",data,filename,nil,false)
+ if trace_sequences then
+ showfeatureorder(data,filename)
+ end
+ end
+ return data
+end
+local mt={
+ __index=function(t,k)
+ if k=="height" then
+ local ht=t.boundingbox[4]
+ return ht<0 and 0 or ht
+ elseif k=="depth" then
+ local dp=-t.boundingbox[2]
+ return dp<0 and 0 or dp
+ elseif k=="width" then
+ return 0
+ elseif k=="name" then
+ return forcenotdef and ".notdef"
+ end
+ end
}
-
-actions["prepare tables"] = function(data,filename,raw)
- data.properties.hasitalics = false
-end
-
-actions["add dimensions"] = function(data,filename)
- -- todo: forget about the width if it's the defaultwidth (saves mem)
- -- we could also build the marks hash here (instead of storing it)
- if data then
- local descriptions = data.descriptions
- local resources = data.resources
- local defaultwidth = resources.defaultwidth or 0
- local defaultheight = resources.defaultheight or 0
- local defaultdepth = resources.defaultdepth or 0
- if usemetatables then
- for _, d in next, descriptions do
- local wd = d.width
- if not wd then
- d.width = defaultwidth
- elseif trace_markwidth and wd ~= 0 and d.class == "mark" then
- report_otf("mark with width %s (%s) in %s",wd,d.name or "<noname>",file.basename(filename))
- -- d.width = -wd
- end
- setmetatable(d,mt)
- end
+actions["prepare tables"]=function(data,filename,raw)
+ data.properties.hasitalics=false
+end
+actions["add dimensions"]=function(data,filename)
+ if data then
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local defaultwidth=resources.defaultwidth or 0
+ local defaultheight=resources.defaultheight or 0
+ local defaultdepth=resources.defaultdepth or 0
+ if usemetatables then
+ for _,d in next,descriptions do
+ local wd=d.width
+ if not wd then
+ d.width=defaultwidth
+ elseif trace_markwidth and wd~=0 and d.class=="mark" then
+ report_otf("mark with width %s (%s) in %s",wd,d.name or "<noname>",file.basename(filename))
+ end
+ setmetatable(d,mt)
+ end
+ else
+ for _,d in next,descriptions do
+ local bb,wd=d.boundingbox,d.width
+ if not wd then
+ d.width=defaultwidth
+ elseif trace_markwidth and wd~=0 and d.class=="mark" then
+ report_otf("mark with width %s (%s) in %s",wd,d.name or "<noname>",file.basename(filename))
+ end
+ if bb then
+ local ht,dp=bb[4],-bb[2]
+ if ht==0 or ht<0 then
+ else
+ d.height=ht
+ end
+ if dp==0 or dp<0 then
+ else
+ d.depth=dp
+ end
+ end
+ end
+ end
+ end
+end
+local function somecopy(old)
+ if old then
+ local new={}
+ if type(old)=="table" then
+ for k,v in next,old do
+ if k=="glyphs" then
+ elseif type(v)=="table" then
+ new[k]=somecopy(v)
else
- for _, d in next, descriptions do
- local bb, wd = d.boundingbox, d.width
- if not wd then
- d.width = defaultwidth
- elseif trace_markwidth and wd ~= 0 and d.class == "mark" then
- report_otf("mark with width %s (%s) in %s",wd,d.name or "<noname>",file.basename(filename))
- -- d.width = -wd
- end
- -- if forcenotdef and not d.name then
- -- d.name = ".notdef"
- -- end
- if bb then
- local ht, dp = bb[4], -bb[2]
- if ht == 0 or ht < 0 then
- -- not set
- else
- d.height = ht
- end
- if dp == 0 or dp < 0 then
- -- not set
- else
- d.depth = dp
- end
- end
- end
+ new[k]=v
end
- end
-end
-
-local function somecopy(old) -- fast one
- if old then
- local new = { }
- if type(old) == "table" then
- for k, v in next, old do
- if k == "glyphs" then
- -- skip
- elseif type(v) == "table" then
- new[k] = somecopy(v)
- else
- new[k] = v
- end
- end
+ end
+ else
+ for i=1,#mainfields do
+ local k=mainfields[i]
+ local v=old[k]
+ if k=="glyphs" then
+ elseif type(v)=="table" then
+ new[k]=somecopy(v)
else
- for i=1,#mainfields do
- local k = mainfields[i]
- local v = old[k]
- if k == "glyphs" then
- -- skip
- elseif type(v) == "table" then
- new[k] = somecopy(v)
- else
- new[k] = v
- end
- end
+ new[k]=v
end
- return new
- else
- return { }
+ end
end
-end
-
--- not setting hasitalics and class (when nil) during
--- table construction can save some mem
-
-actions["prepare glyphs"] = function(data,filename,raw)
- local rawglyphs = raw.glyphs
- local rawsubfonts = raw.subfonts
- local rawcidinfo = raw.cidinfo
- local criterium = constructors.privateoffset
- local private = criterium
- local resources = data.resources
- local metadata = data.metadata
- local properties = data.properties
- local descriptions = data.descriptions
- local unicodes = resources.unicodes -- name to unicode
- local indices = resources.indices -- index to unicode
- local duplicates = resources.duplicates
- local variants = resources.variants
-
- if rawsubfonts then
-
- metadata.subfonts = { }
- properties.cidinfo = rawcidinfo
-
- if rawcidinfo.registry then
- local cidmap = fonts.cid.getmap(rawcidinfo)
- if cidmap then
- rawcidinfo.usedname = cidmap.usedname
- local nofnames, nofunicodes = 0, 0
- local cidunicodes, cidnames = cidmap.unicodes, cidmap.names
- for cidindex=1,#rawsubfonts do
- local subfont = rawsubfonts[cidindex]
- local cidglyphs = subfont.glyphs
- metadata.subfonts[cidindex] = somecopy(subfont)
- for index=0,subfont.glyphcnt-1 do -- we could take the previous glyphcnt instead of 0
- local glyph = cidglyphs[index]
- if glyph then
- local unicode = glyph.unicode
- local name = glyph.name or cidnames[index]
- if not unicode or unicode == -1 or unicode >= criterium then
- unicode = cidunicodes[index]
- end
- if not unicode or unicode == -1 or unicode >= criterium then
- if not name then
- name = format("u%06X",private)
- end
- unicode = private
- unicodes[name] = private
- if trace_private then
- report_otf("enhance: glyph %s at index 0x%04X is moved to private unicode slot U+%05X",name,index,private)
- end
- private = private + 1
- nofnames = nofnames + 1
- else
- if not name then
- name = format("u%06X",unicode)
- end
- unicodes[name] = unicode
- nofunicodes = nofunicodes + 1
- end
- indices[index] = unicode -- each index is unique (at least now)
-
- local description = {
- -- width = glyph.width,
- boundingbox = glyph.boundingbox,
- name = glyph.name or name or "unknown", -- uniXXXX
- cidindex = cidindex,
- index = index,
- glyph = glyph,
- }
-
- descriptions[unicode] = description
- else
- -- report_otf("potential problem: glyph 0x%04X is used but empty",index)
- end
- end
- end
- if trace_loading then
- report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes, nofnames, nofunicodes+nofnames)
- end
- elseif trace_loading then
- report_otf("unable to remap cid font, missing cid file for %s",filename)
- end
- elseif trace_loading then
- report_otf("font %s has no glyphs",filename)
- end
-
- else
-
- for index=0,raw.glyphcnt-1 do -- not raw.glyphmax-1 (as that will crash)
- local glyph = rawglyphs[index]
+ return new
+ else
+ return {}
+ end
+end
+actions["prepare glyphs"]=function(data,filename,raw)
+ local rawglyphs=raw.glyphs
+ local rawsubfonts=raw.subfonts
+ local rawcidinfo=raw.cidinfo
+ local criterium=constructors.privateoffset
+ local private=criterium
+ local resources=data.resources
+ local metadata=data.metadata
+ local properties=data.properties
+ local descriptions=data.descriptions
+ local unicodes=resources.unicodes
+ local indices=resources.indices
+ local duplicates=resources.duplicates
+ local variants=resources.variants
+ if rawsubfonts then
+ metadata.subfonts={}
+ properties.cidinfo=rawcidinfo
+ if rawcidinfo.registry then
+ local cidmap=fonts.cid.getmap(rawcidinfo)
+ if cidmap then
+ rawcidinfo.usedname=cidmap.usedname
+ local nofnames,nofunicodes=0,0
+ local cidunicodes,cidnames=cidmap.unicodes,cidmap.names
+ for cidindex=1,#rawsubfonts do
+ local subfont=rawsubfonts[cidindex]
+ local cidglyphs=subfont.glyphs
+ metadata.subfonts[cidindex]=somecopy(subfont)
+ for index=0,subfont.glyphcnt-1 do
+ local glyph=cidglyphs[index]
if glyph then
- local unicode = glyph.unicode
- local name = glyph.name
- if not unicode or unicode == -1 or unicode >= criterium then
- unicode = private
- unicodes[name] = private
- if trace_private then
- report_otf("enhance: glyph %s at index 0x%04X is moved to private unicode slot U+%05X",name,index,private)
- end
- private = private + 1
- else
- unicodes[name] = unicode
- end
- indices[index] = unicode
+ local unicode=glyph.unicode
+ local name=glyph.name or cidnames[index]
+ if not unicode or unicode==-1 or unicode>=criterium then
+ unicode=cidunicodes[index]
+ end
+ if not unicode or unicode==-1 or unicode>=criterium then
if not name then
- name = format("u%06X",unicode)
+ name=format("u%06X",private)
end
- descriptions[unicode] = {
- -- width = glyph.width,
- boundingbox = glyph.boundingbox,
- name = name,
- index = index,
- glyph = glyph,
- }
- local altuni = glyph.altuni
- if altuni then
- local d
- for i=1,#altuni do
- local a = altuni[i]
- local u = a.unicode
- local v = a.variant
- if v then
- local vv = variants[v]
- if vv then
- vv[u] = unicode
- else -- xits-math has some:
- vv = { [u] = unicode }
- variants[v] = vv
- end
- elseif d then
- d[#d+1] = u
- else
- d = { u }
- end
- end
- if d then
- duplicates[unicode] = d
- end
+ unicode=private
+ unicodes[name]=private
+ if trace_private then
+ report_otf("enhance: glyph %s at index 0x%04X is moved to private unicode slot U+%05X",name,index,private)
end
+ private=private+1
+ nofnames=nofnames+1
+ else
+ if not name then
+ name=format("u%06X",unicode)
+ end
+ unicodes[name]=unicode
+ nofunicodes=nofunicodes+1
+ end
+ indices[index]=unicode
+ local description={
+ boundingbox=glyph.boundingbox,
+ name=glyph.name or name or "unknown",
+ cidindex=cidindex,
+ index=index,
+ glyph=glyph,
+ }
+ descriptions[unicode]=description
else
- report_otf("potential problem: glyph 0x%04X is used but empty",index)
end
+ end
end
-
- end
-
- resources.private = private
-
-end
-
--- the next one is still messy but will get better when we have
--- flattened map/enc tables in the font loader
-
-actions["check encoding"] = function(data,filename,raw)
- local descriptions = data.descriptions
- local resources = data.resources
- local properties = data.properties
- local unicodes = resources.unicodes -- name to unicode
- local indices = resources.indices -- index to unicodes
-
- -- begin of messy (not needed when cidmap)
-
- local mapdata = raw.map or { }
- local unicodetoindex = mapdata and mapdata.map or { }
- -- local encname = lower(data.enc_name or raw.enc_name or mapdata.enc_name or "")
- local encname = lower(data.enc_name or mapdata.enc_name or "")
- local criterium = 0xFFFF -- for instance cambria has a lot of mess up there
-
- -- end of messy
-
- if find(encname,"unicode") then -- unicodebmp, unicodefull, ...
if trace_loading then
- report_otf("checking embedded unicode map '%s'",encname)
+ report_otf("cid font remapped, %s unicode points, %s symbolic names, %s glyphs",nofunicodes,nofnames,nofunicodes+nofnames)
end
- for unicode, index in next, unicodetoindex do -- altuni already covers this
- if unicode <= criterium and not descriptions[unicode] then
- local parent = indices[index] -- why nil?
- if parent then
- report_otf("weird, unicode U+%05X points to U+%05X with index 0x%04X",unicode,parent,index)
- else
- report_otf("weird, unicode U+%05X points to nowhere with index 0x%04X",unicode,index)
- end
- end
+ elseif trace_loading then
+ report_otf("unable to remap cid font, missing cid file for %s",filename)
+ end
+ elseif trace_loading then
+ report_otf("font %s has no glyphs",filename)
+ end
+ else
+ for index=0,raw.glyphcnt-1 do
+ local glyph=rawglyphs[index]
+ if glyph then
+ local unicode=glyph.unicode
+ local name=glyph.name
+ if not unicode or unicode==-1 or unicode>=criterium then
+ unicode=private
+ unicodes[name]=private
+ if trace_private then
+ report_otf("enhance: glyph %s at index 0x%04X is moved to private unicode slot U+%05X",name,index,private)
+ end
+ private=private+1
+ else
+ unicodes[name]=unicode
end
- elseif properties.cidinfo then
- report_otf("warning: no unicode map, used cidmap '%s'",properties.cidinfo.usedname or "?")
- else
- report_otf("warning: non unicode map '%s', only using glyph unicode data",encname or "whatever")
- end
-
- if mapdata then
- mapdata.map = { } -- clear some memory
- end
-end
-
--- for the moment we assume that a font with lookups will not use
--- altuni so we stick to kerns only
-
-actions["add duplicates"] = function(data,filename,raw)
- local descriptions = data.descriptions
- local resources = data.resources
- local properties = data.properties
- local unicodes = resources.unicodes -- name to unicode
- local indices = resources.indices -- index to unicodes
- local duplicates = resources.duplicates
-
- for unicode, d in next, duplicates do
- for i=1,#d do
- local u = d[i]
- if not descriptions[u] then
- local description = descriptions[unicode]
- local duplicate = table.copy(description) -- else packing problem
- duplicate.comment = format("copy of U+%05X", unicode)
- descriptions[u] = duplicate
- local n = 0
- for _, description in next, descriptions do
- if kerns then
- local kerns = description.kerns
- for _, k in next, kerns do
- local ku = k[unicode]
- if ku then
- k[u] = ku
- n = n + 1
- end
- end
- end
- -- todo: lookups etc
- end
- if trace_loading then
- report_otf("duplicating U+%05X to U+%05X with index 0x%04X (%s kerns)",unicode,u,description.index,n)
- end
- end
+ indices[index]=unicode
+ if not name then
+ name=format("u%06X",unicode)
end
+ descriptions[unicode]={
+ boundingbox=glyph.boundingbox,
+ name=name,
+ index=index,
+ glyph=glyph,
+ }
+ local altuni=glyph.altuni
+ if altuni then
+ local d
+ for i=1,#altuni do
+ local a=altuni[i]
+ local u=a.unicode
+ local v=a.variant
+ if v then
+ local vv=variants[v]
+ if vv then
+ vv[u]=unicode
+ else
+ vv={ [u]=unicode }
+ variants[v]=vv
+ end
+ elseif d then
+ d[#d+1]=u
+ else
+ d={ u }
+ end
+ end
+ if d then
+ duplicates[unicode]=d
+ end
+ end
+ else
+ report_otf("potential problem: glyph 0x%04X is used but empty",index)
+ end
+ end
+ end
+ resources.private=private
+end
+actions["check encoding"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local properties=data.properties
+ local unicodes=resources.unicodes
+ local indices=resources.indices
+ local mapdata=raw.map or {}
+ local unicodetoindex=mapdata and mapdata.map or {}
+ local encname=lower(data.enc_name or mapdata.enc_name or "")
+ local criterium=0xFFFF
+ if find(encname,"unicode") then
+ if trace_loading then
+ report_otf("checking embedded unicode map '%s'",encname)
end
-end
-
--- class : nil base mark ligature component (maybe we don't need it in description)
--- boundingbox: split into ht/dp takes more memory (larger tables and less sharing)
-
-actions["analyze glyphs"] = function(data,filename,raw) -- maybe integrate this in the previous
- local descriptions = data.descriptions
- local resources = data.resources
- local metadata = data.metadata
- local properties = data.properties
- local hasitalics = false
- local widths = { }
- local marks = { } -- always present (saves checking)
- for unicode, description in next, descriptions do
- local glyph = description.glyph
- local italic = glyph.italic_correction
- if not italic then
- -- skip
- elseif italic == 0 then
- -- skip
+ for unicode,index in next,unicodetoindex do
+ if unicode<=criterium and not descriptions[unicode] then
+ local parent=indices[index]
+ if parent then
+ report_otf("weird, unicode U+%05X points to U+%05X with index 0x%04X",unicode,parent,index)
else
- description.italic = italic
- hasitalics = true
- end
- local width = glyph.width
- widths[width] = (widths[width] or 0) + 1
- local class = glyph.class
- if class then
- if class == "mark" then
- marks[unicode] = true
- end
- description.class = class
+ report_otf("weird, unicode U+%05X points to nowhere with index 0x%04X",unicode,index)
+ end
+ end
+ end
+ elseif properties.cidinfo then
+ report_otf("warning: no unicode map, used cidmap '%s'",properties.cidinfo.usedname or "?")
+ else
+ report_otf("warning: non unicode map '%s', only using glyph unicode data",encname or "whatever")
+ end
+ if mapdata then
+ mapdata.map={}
+ end
+end
+actions["add duplicates"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local properties=data.properties
+ local unicodes=resources.unicodes
+ local indices=resources.indices
+ local duplicates=resources.duplicates
+ for unicode,d in next,duplicates do
+ for i=1,#d do
+ local u=d[i]
+ if not descriptions[u] then
+ local description=descriptions[unicode]
+ local duplicate=table.copy(description)
+ duplicate.comment=format("copy of U+%05X",unicode)
+ descriptions[u]=duplicate
+ local n=0
+ for _,description in next,descriptions do
+ if kerns then
+ local kerns=description.kerns
+ for _,k in next,kerns do
+ local ku=k[unicode]
+ if ku then
+ k[u]=ku
+ n=n+1
+ end
+ end
+ end
end
- end
- -- flag italic
- properties.hasitalics = hasitalics
- -- flag marks
- resources.marks = marks
- -- share most common width for cjk fonts
- local wd, most = 0, 1
- for k,v in next, widths do
- if v > most then
- wd, most = k, v
- end
- end
- if most > 1000 then -- maybe 500
if trace_loading then
- report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most)
- end
- for unicode, description in next, descriptions do
- if description.width == wd then
- -- description.width = nil
- else
- description.width = description.glyph.width
- end
- end
- resources.defaultwidth = wd
+ report_otf("duplicating U+%05X to U+%05X with index 0x%04X (%s kerns)",unicode,u,description.index,n)
+ end
+ end
+ end
+ end
+end
+actions["analyze glyphs"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local metadata=data.metadata
+ local properties=data.properties
+ local hasitalics=false
+ local widths={}
+ local marks={}
+ for unicode,description in next,descriptions do
+ local glyph=description.glyph
+ local italic=glyph.italic_correction
+ if not italic then
+ elseif italic==0 then
else
- for unicode, description in next, descriptions do
- description.width = description.glyph.width
- end
- end
-end
-
-actions["reorganize mark classes"] = function(data,filename,raw)
- local mark_classes = raw.mark_classes
- if mark_classes then
- local resources = data.resources
- local unicodes = resources.unicodes
- local markclasses = { }
- resources.markclasses = markclasses -- reversed
- for name, class in next, mark_classes do
- local t = { }
- for s in gmatch(class,"[^ ]+") do
- t[unicodes[s]] = true
- end
- markclasses[name] = t
- end
- end
-end
-
-actions["reorganize features"] = function(data,filename,raw) -- combine with other
- local features = { }
- data.resources.features = features
- for k, what in next, otf.glists do
- local dw = raw[what]
- if dw then
- local f = { }
- features[what] = f
- for i=1,#dw do
- local d= dw[i]
- local dfeatures = d.features
- if dfeatures then
- for i=1,#dfeatures do
- local df = dfeatures[i]
- local tag = strip(lower(df.tag))
- local ft = f[tag]
- if not ft then
- ft = { }
- f[tag] = ft
- end
- local dscripts = df.scripts
- for i=1,#dscripts do
- local d = dscripts[i]
- local languages = d.langs
- local script = strip(lower(d.script))
- local fts = ft[script] if not fts then fts = {} ft[script] = fts end
- for i=1,#languages do
- fts[strip(lower(languages[i]))] = true
- end
- end
- end
- end
- end
- end
- end
-end
-
-actions["reorganize anchor classes"] = function(data,filename,raw)
- local resources = data.resources
- local anchor_to_lookup = { }
- local lookup_to_anchor = { }
- resources.anchor_to_lookup = anchor_to_lookup
- resources.lookup_to_anchor = lookup_to_anchor
- local classes = raw.anchor_classes -- anchor classes not in final table
- if classes then
- for c=1,#classes do
- local class = classes[c]
- local anchor = class.name
- local lookups = class.lookup
- if type(lookups) ~= "table" then
- lookups = { lookups }
- end
- local a = anchor_to_lookup[anchor]
- if not a then
- a = { }
- anchor_to_lookup[anchor] = a
- end
- for l=1,#lookups do
- local lookup = lookups[l]
- local l = lookup_to_anchor[lookup]
- if l then
- l[anchor] = true
- else
- l = { [anchor] = true }
- lookup_to_anchor[lookup] = l
- end
- a[lookup] = true
- end
+ description.italic=italic
+ hasitalics=true
+ end
+ local width=glyph.width
+ widths[width]=(widths[width] or 0)+1
+ local class=glyph.class
+ if class then
+ if class=="mark" then
+ marks[unicode]=true
+ end
+ description.class=class
+ end
+ end
+ properties.hasitalics=hasitalics
+ resources.marks=marks
+ local wd,most=0,1
+ for k,v in next,widths do
+ if v>most then
+ wd,most=k,v
+ end
+ end
+ if most>1000 then
+ if trace_loading then
+ report_otf("most common width: %s (%s times), sharing (cjk font)",wd,most)
+ end
+ for unicode,description in next,descriptions do
+ if description.width==wd then
+ else
+ description.width=description.glyph.width
+ end
+ end
+ resources.defaultwidth=wd
+ else
+ for unicode,description in next,descriptions do
+ description.width=description.glyph.width
+ end
+ end
+end
+actions["reorganize mark classes"]=function(data,filename,raw)
+ local mark_classes=raw.mark_classes
+ if mark_classes then
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local markclasses={}
+ resources.markclasses=markclasses
+ for name,class in next,mark_classes do
+ local t={}
+ for s in gmatch(class,"[^ ]+") do
+ t[unicodes[s]]=true
+ end
+ markclasses[name]=t
+ end
+ end
+end
+actions["reorganize features"]=function(data,filename,raw)
+ local features={}
+ data.resources.features=features
+ for k,what in next,otf.glists do
+ local dw=raw[what]
+ if dw then
+ local f={}
+ features[what]=f
+ for i=1,#dw do
+ local d=dw[i]
+ local dfeatures=d.features
+ if dfeatures then
+ for i=1,#dfeatures do
+ local df=dfeatures[i]
+ local tag=strip(lower(df.tag))
+ local ft=f[tag]
+ if not ft then
+ ft={}
+ f[tag]=ft
+ end
+ local dscripts=df.scripts
+ for i=1,#dscripts do
+ local d=dscripts[i]
+ local languages=d.langs
+ local script=strip(lower(d.script))
+ local fts=ft[script] if not fts then fts={} ft[script]=fts end
+ for i=1,#languages do
+ fts[strip(lower(languages[i]))]=true
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
+actions["reorganize anchor classes"]=function(data,filename,raw)
+ local resources=data.resources
+ local anchor_to_lookup={}
+ local lookup_to_anchor={}
+ resources.anchor_to_lookup=anchor_to_lookup
+ resources.lookup_to_anchor=lookup_to_anchor
+ local classes=raw.anchor_classes
+ if classes then
+ for c=1,#classes do
+ local class=classes[c]
+ local anchor=class.name
+ local lookups=class.lookup
+ if type(lookups)~="table" then
+ lookups={ lookups }
+ end
+ local a=anchor_to_lookup[anchor]
+ if not a then
+ a={}
+ anchor_to_lookup[anchor]=a
+ end
+ for l=1,#lookups do
+ local lookup=lookups[l]
+ local l=lookup_to_anchor[lookup]
+ if l then
+ l[anchor]=true
+ else
+ l={ [anchor]=true }
+ lookup_to_anchor[lookup]=l
end
+ a[lookup]=true
+ end
end
+ end
end
-
-actions["prepare tounicode"] = function(data,filename,raw)
- fonts.mappings.addtounicode(data,filename)
+actions["prepare tounicode"]=function(data,filename,raw)
+ fonts.mappings.addtounicode(data,filename)
end
-
-local g_directions = {
- gsub_contextchain = 1,
- gpos_contextchain = 1,
- -- gsub_context = 1,
- -- gpos_context = 1,
- gsub_reversecontextchain = -1,
- gpos_reversecontextchain = -1,
+local g_directions={
+ gsub_contextchain=1,
+ gpos_contextchain=1,
+ gsub_reversecontextchain=-1,
+ gpos_reversecontextchain=-1,
}
-
--- Research by Khaled Hosny has demonstrated that the font loader merges
--- regular and AAT features and that these can interfere (especially because
--- we dropped checking for valid features elsewhere). So, we just check for
--- the special flag and drop the feature if such a tag is found.
-
local function supported(features)
- for i=1,#features do
- if features[i].ismac then
- return false
- end
- end
- return true
-end
-
-actions["reorganize subtables"] = function(data,filename,raw)
- local resources = data.resources
- local sequences = { }
- local lookups = { }
- local chainedfeatures = { }
- resources.sequences = sequences
- resources.lookups = lookups
- for _, what in next, otf.glists do
- local dw = raw[what]
- if dw then
- for k=1,#dw do
- local gk = dw[k]
- local features = gk.features
--- if features and supported(features) then
- if not features or supported(features) then -- not always features !
- local typ = gk.type
- local chain = g_directions[typ] or 0
- local subtables = gk.subtables
- if subtables then
- local t = { }
- for s=1,#subtables do
- t[s] = subtables[s].name
- end
- subtables = t
- end
- local flags, markclass = gk.flags, nil
- if flags then
- local t = { -- forcing false packs nicer
- (flags.ignorecombiningmarks and "mark") or false,
- (flags.ignoreligatures and "ligature") or false,
- (flags.ignorebaseglyphs and "base") or false,
- flags.r2l or false,
- }
- markclass = flags.mark_class
- if markclass then
- markclass = resources.markclasses[markclass]
- end
- flags = t
- end
- --
- local name = gk.name
- --
- if not name then
- -- in fact an error
- report_otf("skipping weird lookup number %s",k)
- elseif features then
- -- scripts, tag, ismac
- local f = { }
- for i=1,#features do
- local df = features[i]
- local tag = strip(lower(df.tag))
- local ft = f[tag] if not ft then ft = {} f[tag] = ft end
- local dscripts = df.scripts
- for i=1,#dscripts do
- local d = dscripts[i]
- local languages = d.langs
- local script = strip(lower(d.script))
- local fts = ft[script] if not fts then fts = {} ft[script] = fts end
- for i=1,#languages do
- fts[strip(lower(languages[i]))] = true
- end
- end
- end
- sequences[#sequences+1] = {
- type = typ,
- chain = chain,
- flags = flags,
- name = name,
- subtables = subtables,
- markclass = markclass,
- features = f,
- }
- else
- lookups[name] = {
- type = typ,
- chain = chain,
- flags = flags,
- subtables = subtables,
- markclass = markclass,
- }
- end
- end
- end
+ for i=1,#features do
+ if features[i].ismac then
+ return false
+ end
+ end
+ return true
+end
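-- A minimal standalone sketch of the "supported" check above: a lookup is dropped
-- as soon as one of its features carries the AAT/mac flag mentioned in the removed
-- comment. The sample feature tables below are invented for illustration and are
-- not part of this patch.
local function is_supported(features)
  for i=1,#features do
    if features[i].ismac then
      return false -- a mac/AAT-only feature taints the whole lookup
    end
  end
  return true
end
print(is_supported { { tag="liga" },{ tag="kern" } })            -- true
print(is_supported { { tag="liga" },{ tag="kern",ismac=true } }) -- false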
+actions["reorganize subtables"]=function(data,filename,raw)
+ local resources=data.resources
+ local sequences={}
+ local lookups={}
+ local chainedfeatures={}
+ resources.sequences=sequences
+ resources.lookups=lookups
+ for _,what in next,otf.glists do
+ local dw=raw[what]
+ if dw then
+ for k=1,#dw do
+ local gk=dw[k]
+ local features=gk.features
+ if not features or supported(features) then
+ local typ=gk.type
+ local chain=g_directions[typ] or 0
+ local subtables=gk.subtables
+ if subtables then
+ local t={}
+ for s=1,#subtables do
+ t[s]=subtables[s].name
+ end
+ subtables=t
+ end
+ local flags,markclass=gk.flags,nil
+ if flags then
+ local t={
+ (flags.ignorecombiningmarks and "mark") or false,
+ (flags.ignoreligatures and "ligature") or false,
+ (flags.ignorebaseglyphs and "base") or false,
+ flags.r2l or false,
+ }
+ markclass=flags.mark_class
+ if markclass then
+ markclass=resources.markclasses[markclass]
+ end
+ flags=t
+ end
+ local name=gk.name
+ if not name then
+ report_otf("skipping weird lookup number %s",k)
+ elseif features then
+ local f={}
+ for i=1,#features do
+ local df=features[i]
+ local tag=strip(lower(df.tag))
+ local ft=f[tag] if not ft then ft={} f[tag]=ft end
+ local dscripts=df.scripts
+ for i=1,#dscripts do
+ local d=dscripts[i]
+ local languages=d.langs
+ local script=strip(lower(d.script))
+ local fts=ft[script] if not fts then fts={} ft[script]=fts end
+ for i=1,#languages do
+ fts[strip(lower(languages[i]))]=true
+ end
+ end
+ end
+ sequences[#sequences+1]={
+ type=typ,
+ chain=chain,
+ flags=flags,
+ name=name,
+ subtables=subtables,
+ markclass=markclass,
+ features=f,
+ }
+ else
+ lookups[name]={
+ type=typ,
+ chain=chain,
+ flags=flags,
+ subtables=subtables,
+ markclass=markclass,
+ }
+ end
end
+ end
end
+ end
end
-
--- test this:
---
--- for _, what in next, otf.glists do
--- raw[what] = nil
--- end
-
-actions["prepare lookups"] = function(data,filename,raw)
- local lookups = raw.lookups
- if lookups then
- data.lookups = lookups
- end
+actions["prepare lookups"]=function(data,filename,raw)
+ local lookups=raw.lookups
+ if lookups then
+ data.lookups=lookups
+ end
end
-
--- The reverse handler does a bit of redundant splitting, but it's seldom
--- seen so we don't bother too much. We could store the replacement
--- in the current list (value instead of true) but it makes other code
--- uglier. Maybe some day.
-
local function t_uncover(splitter,cache,covers)
- local result = { }
- for n=1,#covers do
- local cover = covers[n]
- local uncovered = cache[cover]
- if not uncovered then
- uncovered = lpegmatch(splitter,cover)
- cache[cover] = uncovered
- end
- result[n] = uncovered
- end
- return result
+ local result={}
+ for n=1,#covers do
+ local cover=covers[n]
+ local uncovered=cache[cover]
+ if not uncovered then
+ uncovered=lpegmatch(splitter,cover)
+ cache[cover]=uncovered
+ end
+ result[n]=uncovered
+ end
+ return result
end
-
local function s_uncover(splitter,cache,cover)
- if cover == "" then
- return nil
- else
- local uncovered = cache[cover]
- if not uncovered then
- uncovered = lpegmatch(splitter,cover)
--- for i=1,#uncovered do
--- uncovered[i] = { [uncovered[i]] = true }
--- end
- cache[cover] = uncovered
- end
- return { uncovered }
+ if cover=="" then
+ return nil
+ else
+ local uncovered=cache[cover]
+ if not uncovered then
+ uncovered=lpegmatch(splitter,cover)
+ cache[cover]=uncovered
end
+ return { uncovered }
+ end
end
-
local function t_hashed(t,cache)
- if t then
- local ht = { }
- for i=1,#t do
- local ti = t[i]
- local tih = cache[ti]
- if not tih then
- tih = { }
- for i=1,#ti do
- tih[ti[i]] = true
- end
- cache[ti] = tih
- end
- ht[i] = tih
- end
- return ht
- else
- return nil
- end
+ if t then
+ local ht={}
+ for i=1,#t do
+ local ti=t[i]
+ local tih=cache[ti]
+ if not tih then
+ tih={}
+ for i=1,#ti do
+ tih[ti[i]]=true
+ end
+ cache[ti]=tih
+ end
+ ht[i]=tih
+ end
+ return ht
+ else
+ return nil
+ end
end
-
-local s_hashed = t_hashed
-
+local s_hashed=t_hashed
local function r_uncover(splitter,cache,cover,replacements)
- if cover == "" then
- return nil
- else
- -- we always have current as { } even in the case of one
- local uncovered = cover[1]
- local replaced = cache[replacements]
- if not replaced then
- replaced = lpegmatch(splitter,replacements)
- cache[replacements] = replaced
- end
- local nu, nr = #uncovered, #replaced
- local r = { }
- if nu == nr then
- for i=1,nu do
- r[uncovered[i]] = replaced[i]
- end
- end
- return r
- end
-end
-
-actions["reorganize lookups"] = function(data,filename,raw) -- we could check for "" and n == 0
- -- we prefer the before lookups in a normal order
- if data.lookups then
- local splitter = data.helpers.tounicodetable
- local t_u_cache = { }
- local s_u_cache = t_u_cache -- string keys
- local t_h_cache = { }
- local s_h_cache = t_h_cache -- table keys (so we could use one cache)
- local r_u_cache = { } -- maybe shared
- for _, lookup in next, data.lookups do
- local rules = lookup.rules
- if rules then
- local format = lookup.format
- if format == "class" then
- local before_class = lookup.before_class
- if before_class then
- before_class = t_uncover(splitter,t_u_cache,reversed(before_class))
- end
- local current_class = lookup.current_class
- if current_class then
- current_class = t_uncover(splitter,t_u_cache,current_class)
- end
- local after_class = lookup.after_class
- if after_class then
- after_class = t_uncover(splitter,t_u_cache,after_class)
- end
- for i=1,#rules do
- local rule = rules[i]
- local class = rule.class
- local before = class.before
- if before then
- for i=1,#before do
- before[i] = before_class[before[i]] or { }
- end
- rule.before = t_hashed(before,t_h_cache)
- end
- local current = class.current
- local lookups = rule.lookups
- if current then
- for i=1,#current do
- current[i] = current_class[current[i]] or { }
- if lookups and not lookups[i] then
- lookups[i] = "" -- (was: false) e.g. we can have two lookups and one replacement
- end
- end
- rule.current = t_hashed(current,t_h_cache)
- end
- local after = class.after
- if after then
- for i=1,#after do
- after[i] = after_class[after[i]] or { }
- end
- rule.after = t_hashed(after,t_h_cache)
- end
- rule.class = nil
- end
- lookup.before_class = nil
- lookup.current_class = nil
- lookup.after_class = nil
- lookup.format = "coverage"
- elseif format == "coverage" then
- for i=1,#rules do
- local rule = rules[i]
- local coverage = rule.coverage
- if coverage then
- local before = coverage.before
- if before then
- before = t_uncover(splitter,t_u_cache,reversed(before))
- rule.before = t_hashed(before,t_h_cache)
- end
- local current = coverage.current
- if current then
- current = t_uncover(splitter,t_u_cache,current)
- rule.current = t_hashed(current,t_h_cache)
- end
- local after = coverage.after
- if after then
- after = t_uncover(splitter,t_u_cache,after)
- rule.after = t_hashed(after,t_h_cache)
- end
- rule.coverage = nil
- end
- end
- elseif format == "reversecoverage" then -- special case, single substitution only
- for i=1,#rules do
- local rule = rules[i]
- local reversecoverage = rule.reversecoverage
- if reversecoverage then
- local before = reversecoverage.before
- if before then
- before = t_uncover(splitter,t_u_cache,reversed(before))
- rule.before = t_hashed(before,t_h_cache)
- end
- local current = reversecoverage.current
- if current then
- current = t_uncover(splitter,t_u_cache,current)
- rule.current = t_hashed(current,t_h_cache)
- end
- local after = reversecoverage.after
- if after then
- after = t_uncover(splitter,t_u_cache,after)
- rule.after = t_hashed(after,t_h_cache)
- end
- local replacements = reversecoverage.replacements
- if replacements then
- rule.replacements = r_uncover(splitter,r_u_cache,current,replacements)
- end
- rule.reversecoverage = nil
- end
- end
- elseif format == "glyphs" then
- for i=1,#rules do
- local rule = rules[i]
- local glyphs = rule.glyphs
- if glyphs then
- local fore = glyphs.fore
- if fore and fore ~= "" then
- fore = s_uncover(splitter,s_u_cache,fore)
- rule.before = s_hashed(fore,s_h_cache)
- end
- local back = glyphs.back
- if back then
- back = s_uncover(splitter,s_u_cache,back)
- rule.after = s_hashed(back,s_h_cache)
- end
- local names = glyphs.names
- if names then
- names = s_uncover(splitter,s_u_cache,names)
- rule.current = s_hashed(names,s_h_cache)
- end
- rule.glyphs = nil
- end
- end
- end
- end
- end
- end
+ if cover=="" then
+ return nil
+ else
+ local uncovered=cover[1]
+ local replaced=cache[replacements]
+ if not replaced then
+ replaced=lpegmatch(splitter,replacements)
+ cache[replacements]=replaced
+ end
+ local nu,nr=#uncovered,#replaced
+ local r={}
+ if nu==nr then
+ for i=1,nu do
+ r[uncovered[i]]=replaced[i]
+ end
+ end
+ return r
+ end
+end
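-- A minimal sketch of the uncover/cache idea behind t_uncover, s_uncover and
-- r_uncover above: every coverage string is split once and memoized, so a cover
-- that occurs in many rules costs a single table lookup afterwards. The real code
-- splits with an lpeg pattern (data.helpers.tounicodetable); the space-separated
-- toy splitter below is only an illustration and not part of this patch.
local cache = { }
local function uncover(cover)
  local uncovered = cache[cover]
  if not uncovered then
    uncovered = { }
    for s in string.gmatch(cover,"%S+") do
      uncovered[#uncovered+1] = s
    end
    cache[cover] = uncovered
  end
  return uncovered
end
assert(uncover("f f i") == uncover("f f i")) -- same table, split only once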
+actions["reorganize lookups"]=function(data,filename,raw)
+ if data.lookups then
+ local splitter=data.helpers.tounicodetable
+ local t_u_cache={}
+ local s_u_cache=t_u_cache
+ local t_h_cache={}
+ local s_h_cache=t_h_cache
+ local r_u_cache={}
+ for _,lookup in next,data.lookups do
+ local rules=lookup.rules
+ if rules then
+ local format=lookup.format
+ if format=="class" then
+ local before_class=lookup.before_class
+ if before_class then
+ before_class=t_uncover(splitter,t_u_cache,reversed(before_class))
+ end
+ local current_class=lookup.current_class
+ if current_class then
+ current_class=t_uncover(splitter,t_u_cache,current_class)
+ end
+ local after_class=lookup.after_class
+ if after_class then
+ after_class=t_uncover(splitter,t_u_cache,after_class)
+ end
+ for i=1,#rules do
+ local rule=rules[i]
+ local class=rule.class
+ local before=class.before
+ if before then
+ for i=1,#before do
+ before[i]=before_class[before[i]] or {}
+ end
+ rule.before=t_hashed(before,t_h_cache)
+ end
+ local current=class.current
+ local lookups=rule.lookups
+ if current then
+ for i=1,#current do
+ current[i]=current_class[current[i]] or {}
+ if lookups and not lookups[i] then
+ lookups[i]=""
+ end
+ end
+ rule.current=t_hashed(current,t_h_cache)
+ end
+ local after=class.after
+ if after then
+ for i=1,#after do
+ after[i]=after_class[after[i]] or {}
+ end
+ rule.after=t_hashed(after,t_h_cache)
+ end
+ rule.class=nil
+ end
+ lookup.before_class=nil
+ lookup.current_class=nil
+ lookup.after_class=nil
+ lookup.format="coverage"
+ elseif format=="coverage" then
+ for i=1,#rules do
+ local rule=rules[i]
+ local coverage=rule.coverage
+ if coverage then
+ local before=coverage.before
+ if before then
+ before=t_uncover(splitter,t_u_cache,reversed(before))
+ rule.before=t_hashed(before,t_h_cache)
+ end
+ local current=coverage.current
+ if current then
+ current=t_uncover(splitter,t_u_cache,current)
+ rule.current=t_hashed(current,t_h_cache)
+ end
+ local after=coverage.after
+ if after then
+ after=t_uncover(splitter,t_u_cache,after)
+ rule.after=t_hashed(after,t_h_cache)
+ end
+ rule.coverage=nil
+ end
+ end
+ elseif format=="reversecoverage" then
+ for i=1,#rules do
+ local rule=rules[i]
+ local reversecoverage=rule.reversecoverage
+ if reversecoverage then
+ local before=reversecoverage.before
+ if before then
+ before=t_uncover(splitter,t_u_cache,reversed(before))
+ rule.before=t_hashed(before,t_h_cache)
+ end
+ local current=reversecoverage.current
+ if current then
+ current=t_uncover(splitter,t_u_cache,current)
+ rule.current=t_hashed(current,t_h_cache)
+ end
+ local after=reversecoverage.after
+ if after then
+ after=t_uncover(splitter,t_u_cache,after)
+ rule.after=t_hashed(after,t_h_cache)
+ end
+ local replacements=reversecoverage.replacements
+ if replacements then
+ rule.replacements=r_uncover(splitter,r_u_cache,current,replacements)
+ end
+ rule.reversecoverage=nil
+ end
+ end
+ elseif format=="glyphs" then
+ for i=1,#rules do
+ local rule=rules[i]
+ local glyphs=rule.glyphs
+ if glyphs then
+ local fore=glyphs.fore
+ if fore and fore~="" then
+ fore=s_uncover(splitter,s_u_cache,fore)
+ rule.before=s_hashed(fore,s_h_cache)
+ end
+ local back=glyphs.back
+ if back then
+ back=s_uncover(splitter,s_u_cache,back)
+ rule.after=s_hashed(back,s_h_cache)
+ end
+ local names=glyphs.names
+ if names then
+ names=s_uncover(splitter,s_u_cache,names)
+ rule.current=s_hashed(names,s_h_cache)
+ end
+ rule.glyphs=nil
+ end
+ end
+ end
+ end
+ end
+ end
end
-
local function check_variants(unicode,the_variants,splitter,unicodes)
- local variants = the_variants.variants
- if variants then -- use splitter
- local glyphs = lpegmatch(splitter,variants)
- local done = { [unicode] = true }
- local n = 0
- for i=1,#glyphs do
- local g = glyphs[i]
- if done[g] then
- report_otf("skipping cyclic reference U+%05X in math variant U+%05X",g,unicode)
+ local variants=the_variants.variants
+ if variants then
+ local glyphs=lpegmatch(splitter,variants)
+ local done={ [unicode]=true }
+ local n=0
+ for i=1,#glyphs do
+ local g=glyphs[i]
+ if done[g] then
+ report_otf("skipping cyclic reference U+%05X in math variant U+%05X",g,unicode)
+ else
+ if n==0 then
+ n=1
+ variants={ g }
+ else
+ n=n+1
+ variants[n]=g
+ end
+ done[g]=true
+ end
+ end
+ if n==0 then
+ variants=nil
+ end
+ end
+ local parts=the_variants.parts
+ if parts then
+ local p=#parts
+ if p>0 then
+ for i=1,p do
+ local pi=parts[i]
+ pi.glyph=unicodes[pi.component] or 0
+ pi.component=nil
+ end
+ else
+ parts=nil
+ end
+ end
+ local italic_correction=the_variants.italic_correction
+ if italic_correction and italic_correction==0 then
+ italic_correction=nil
+ end
+ return variants,parts,italic_correction
+end
+actions["analyze math"]=function(data,filename,raw)
+ if raw.math then
+ data.metadata.math=raw.math
+ local unicodes=data.resources.unicodes
+ local splitter=data.helpers.tounicodetable
+ for unicode,description in next,data.descriptions do
+ local glyph=description.glyph
+ local mathkerns=glyph.mathkern
+ local horiz_variants=glyph.horiz_variants
+ local vert_variants=glyph.vert_variants
+ local top_accent=glyph.top_accent
+ if mathkerns or horiz_variants or vert_variants or top_accent then
+ local math={}
+ if top_accent then
+ math.top_accent=top_accent
+ end
+ if mathkerns then
+ for k,v in next,mathkerns do
+ if not next(v) then
+ mathkerns[k]=nil
else
- if n == 0 then
- n = 1
- variants = { g }
- else
- n = n + 1
- variants[n] = g
+ for k,v in next,v do
+ if v==0 then
+ k[v]=nil
end
- done[g] = true
+ end
end
+ end
+ math.kerns=mathkerns
end
- if n == 0 then
- variants = nil
+ if horiz_variants then
+ math.horiz_variants,math.horiz_parts,math.horiz_italic_correction=check_variants(unicode,horiz_variants,splitter,unicodes)
end
- end
- local parts = the_variants.parts
- if parts then
- local p = #parts
- if p > 0 then
- for i=1,p do
- local pi = parts[i]
- pi.glyph = unicodes[pi.component] or 0
- pi.component = nil
- end
- else
- parts = nil
+ if vert_variants then
+ math.vert_variants,math.vert_parts,math.vert_italic_correction=check_variants(unicode,vert_variants,splitter,unicodes)
end
- end
- local italic_correction = the_variants.italic_correction
- if italic_correction and italic_correction == 0 then
- italic_correction = nil
- end
- return variants, parts, italic_correction
-end
-
-actions["analyze math"] = function(data,filename,raw)
- if raw.math then
- data.metadata.math = raw.math
- local unicodes = data.resources.unicodes
- local splitter = data.helpers.tounicodetable
- for unicode, description in next, data.descriptions do
- local glyph = description.glyph
- local mathkerns = glyph.mathkern -- singular
- local horiz_variants = glyph.horiz_variants
- local vert_variants = glyph.vert_variants
- local top_accent = glyph.top_accent
- if mathkerns or horiz_variants or vert_variants or top_accent then
- local math = { }
- if top_accent then
- math.top_accent = top_accent
- end
- if mathkerns then
- for k, v in next, mathkerns do
- if not next(v) then
- mathkerns[k] = nil
- else
- for k, v in next, v do
- if v == 0 then
- k[v] = nil -- height / kern can be zero
- end
- end
- end
- end
- math.kerns = mathkerns
- end
- if horiz_variants then
- math.horiz_variants, math.horiz_parts, math.horiz_italic_correction = check_variants(unicode,horiz_variants,splitter,unicodes)
- end
- if vert_variants then
- math.vert_variants, math.vert_parts, math.vert_italic_correction = check_variants(unicode,vert_variants,splitter,unicodes)
- end
- local italic_correction = description.italic
- if italic_correction and italic_correction ~= 0 then
- math.italic_correction = italic_correction
- end
- description.math = math
- end
+ local italic_correction=description.italic
+ if italic_correction and italic_correction~=0 then
+ math.italic_correction=italic_correction
end
+ description.math=math
+ end
end
+ end
end
-
-actions["reorganize glyph kerns"] = function(data,filename,raw)
- local descriptions = data.descriptions
- local resources = data.resources
- local unicodes = resources.unicodes
- for unicode, description in next, descriptions do
- local kerns = description.glyph.kerns
- if kerns then
- local newkerns = { }
- for k, kern in next, kerns do
- local name = kern.char
- local offset = kern.off
- local lookup = kern.lookup
- if name and offset and lookup then
- local unicode = unicodes[name]
- if unicode then
- if type(lookup) == "table" then
- for l=1,#lookup do
- local lookup = lookup[l]
- local lookupkerns = newkerns[lookup]
- if lookupkerns then
- lookupkerns[unicode] = offset
- else
- newkerns[lookup] = { [unicode] = offset }
- end
- end
- else
- local lookupkerns = newkerns[lookup]
- if lookupkerns then
- lookupkerns[unicode] = offset
- else
- newkerns[lookup] = { [unicode] = offset }
- end
- end
- elseif trace_loading then
- report_otf("problems with unicode %s of kern %s of glyph U+%05X",name,k,unicode)
- end
+actions["reorganize glyph kerns"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ for unicode,description in next,descriptions do
+ local kerns=description.glyph.kerns
+ if kerns then
+ local newkerns={}
+ for k,kern in next,kerns do
+ local name=kern.char
+ local offset=kern.off
+ local lookup=kern.lookup
+ if name and offset and lookup then
+ local unicode=unicodes[name]
+ if unicode then
+ if type(lookup)=="table" then
+ for l=1,#lookup do
+ local lookup=lookup[l]
+ local lookupkerns=newkerns[lookup]
+ if lookupkerns then
+ lookupkerns[unicode]=offset
+ else
+ newkerns[lookup]={ [unicode]=offset }
end
- end
- description.kerns = newkerns
- end
- end
-end
-
-actions["merge kern classes"] = function(data,filename,raw)
- local gposlist = raw.gpos
- if gposlist then
- local descriptions = data.descriptions
- local resources = data.resources
- local unicodes = resources.unicodes
- local splitter = data.helpers.tounicodetable
- for gp=1,#gposlist do
- local gpos = gposlist[gp]
- local subtables = gpos.subtables
- if subtables then
- for s=1,#subtables do
- local subtable = subtables[s]
- local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes
- if kernclass then -- the next one is quite slow
- local split = { } -- saves time
- for k=1,#kernclass do
- local kcl = kernclass[k]
- local firsts = kcl.firsts
- local seconds = kcl.seconds
- local offsets = kcl.offsets
- local lookups = kcl.lookup -- singular
- if type(lookups) ~= "table" then
- lookups = { lookups }
- end
- -- if offsets[1] == nil then
- -- offsets[1] = ""
- -- end
- -- we can check the max in the loop
- -- local maxseconds = getn(seconds)
- for n, s in next, firsts do
- split[s] = split[s] or lpegmatch(splitter,s)
- end
- local maxseconds = 0
- for n, s in next, seconds do
- if n > maxseconds then
- maxseconds = n
- end
- split[s] = split[s] or lpegmatch(splitter,s)
- end
- for l=1,#lookups do
- local lookup = lookups[l]
- for fk=1,#firsts do -- maxfirsts ?
- local fv = firsts[fk]
- local splt = split[fv]
- if splt then
- local extrakerns = { }
- local baseoffset = (fk-1) * maxseconds
- for sk=2,maxseconds do -- will become 1 based in future luatex
- local sv = seconds[sk]
- -- for sk, sv in next, seconds do
- local splt = split[sv]
- if splt then -- redundant test
- local offset = offsets[baseoffset + sk]
- if offset then
- for i=1,#splt do
- extrakerns[splt[i]] = offset
- end
- end
- end
- end
- for i=1,#splt do
- local first_unicode = splt[i]
- local description = descriptions[first_unicode]
- if description then
- local kerns = description.kerns
- if not kerns then
- kerns = { } -- unicode indexed !
- description.kerns = kerns
- end
- local lookupkerns = kerns[lookup]
- if not lookupkerns then
- lookupkerns = { }
- kerns[lookup] = lookupkerns
- end
- for second_unicode, kern in next, extrakerns do
- lookupkerns[second_unicode] = kern
- end
- elseif trace_loading then
- report_otf("no glyph data for U+%05X", first_unicode)
- end
- end
- end
- end
- end
+ end
+ else
+ local lookupkerns=newkerns[lookup]
+ if lookupkerns then
+ lookupkerns[unicode]=offset
+ else
+ newkerns[lookup]={ [unicode]=offset }
+ end
+ end
+ elseif trace_loading then
+ report_otf("problems with unicode %s of kern %s of glyph U+%05X",name,k,unicode)
+ end
+ end
+ end
+ description.kerns=newkerns
+ end
+ end
+end
+actions["merge kern classes"]=function(data,filename,raw)
+ local gposlist=raw.gpos
+ if gposlist then
+ local descriptions=data.descriptions
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local splitter=data.helpers.tounicodetable
+ for gp=1,#gposlist do
+ local gpos=gposlist[gp]
+ local subtables=gpos.subtables
+ if subtables then
+ for s=1,#subtables do
+ local subtable=subtables[s]
+ local kernclass=subtable.kernclass
+ if kernclass then
+ local split={}
+ for k=1,#kernclass do
+ local kcl=kernclass[k]
+ local firsts=kcl.firsts
+ local seconds=kcl.seconds
+ local offsets=kcl.offsets
+ local lookups=kcl.lookup
+ if type(lookups)~="table" then
+ lookups={ lookups }
+ end
+ for n,s in next,firsts do
+ split[s]=split[s] or lpegmatch(splitter,s)
+ end
+ local maxseconds=0
+ for n,s in next,seconds do
+ if n>maxseconds then
+ maxseconds=n
+ end
+ split[s]=split[s] or lpegmatch(splitter,s)
+ end
+ for l=1,#lookups do
+ local lookup=lookups[l]
+ for fk=1,#firsts do
+ local fv=firsts[fk]
+ local splt=split[fv]
+ if splt then
+ local extrakerns={}
+ local baseoffset=(fk-1)*maxseconds
+ for sk=2,maxseconds do
+ local sv=seconds[sk]
+ local splt=split[sv]
+ if splt then
+ local offset=offsets[baseoffset+sk]
+ if offset then
+ for i=1,#splt do
+ extrakerns[splt[i]]=offset
+ end
end
- subtable.kernclass = { }
- end
- end
- end
- end
- end
-end
-
-actions["check glyphs"] = function(data,filename,raw)
- for unicode, description in next, data.descriptions do
- description.glyph = nil
- end
-end
-
--- future versions will remove _
-
-actions["check metadata"] = function(data,filename,raw)
- local metadata = data.metadata
- for _, k in next, mainfields do
- if valid_fields[k] then
- local v = raw[k]
- if not metadata[k] then
- metadata[k] = v
- end
- end
- end
- -- metadata.pfminfo = raw.pfminfo -- not already done?
- local ttftables = metadata.ttf_tables
- if ttftables then
- for i=1,#ttftables do
- ttftables[i].data = "deleted"
- end
- end
-end
-
-actions["cleanup tables"] = function(data,filename,raw)
- data.resources.indices = nil -- not needed
- data.helpers = nil
-end
-
--- kern: ttf has a table with kerns
---
--- Weird, as maxfirst and maxseconds can have holes: first seems to be indexed, but
--- seconds can start at 2 .. this needs to be fixed, as getn as well as # are sort of
--- unpredictable; alternatively we could force a [1] if not set (maybe I will do that
--- anyway).
-
--- we can share { } as it is never set
-
---- ligatures have an extra specification.char entry that we don't use
-
-actions["reorganize glyph lookups"] = function(data,filename,raw)
- local resources = data.resources
- local unicodes = resources.unicodes
- local descriptions = data.descriptions
- local splitter = data.helpers.tounicodelist
-
- local lookuptypes = resources.lookuptypes
-
- for unicode, description in next, descriptions do
- local lookups = description.glyph.lookups
- if lookups then
- for tag, lookuplist in next, lookups do
- for l=1,#lookuplist do
- local lookup = lookuplist[l]
- local specification = lookup.specification
- local lookuptype = lookup.type
- local lt = lookuptypes[tag]
- if not lt then
- lookuptypes[tag] = lookuptype
- elseif lt ~= lookuptype then
- report_otf("conflicting lookuptypes: %s => %s and %s",tag,lt,lookuptype)
- end
- if lookuptype == "ligature" then
- lookuplist[l] = { lpegmatch(splitter,specification.components) }
- elseif lookuptype == "alternate" then
- lookuplist[l] = { lpegmatch(splitter,specification.components) }
- elseif lookuptype == "substitution" then
- lookuplist[l] = unicodes[specification.variant]
- elseif lookuptype == "multiple" then
- lookuplist[l] = { lpegmatch(splitter,specification.components) }
- elseif lookuptype == "position" then
- lookuplist[l] = {
- specification.x or 0,
- specification.y or 0,
- specification.h or 0,
- specification.v or 0
- }
- elseif lookuptype == "pair" then
- local one = specification.offsets[1]
- local two = specification.offsets[2]
- local paired = unicodes[specification.paired]
- if one then
- if two then
- lookuplist[l] = { paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0 } }
- else
- lookuplist[l] = { paired, { one.x or 0, one.y or 0, one.h or 0, one.v or 0 } }
- end
- else
- if two then
- lookuplist[l] = { paired, { }, { two.x or 0, two.y or 0, two.h or 0, two.v or 0} } -- maybe nil instead of { }
- else
- lookuplist[l] = { paired }
- end
+ end
+ end
+ for i=1,#splt do
+ local first_unicode=splt[i]
+ local description=descriptions[first_unicode]
+ if description then
+ local kerns=description.kerns
+ if not kerns then
+ kerns={}
+ description.kerns=kerns
end
- end
- end
- end
- local slookups, mlookups
- for tag, lookuplist in next, lookups do
- if #lookuplist == 1 then
- if slookups then
- slookups[tag] = lookuplist[1]
- else
- slookups = { [tag] = lookuplist[1] }
- end
- else
- if mlookups then
- mlookups[tag] = lookuplist
- else
- mlookups = { [tag] = lookuplist }
- end
- end
- end
- if slookups then
- description.slookups = slookups
- end
- if mlookups then
- description.mlookups = mlookups
- end
- end
- end
-
-end
-
-actions["reorganize glyph anchors"] = function(data,filename,raw) -- when we replace inplace we safe entries
- local descriptions = data.descriptions
- for unicode, description in next, descriptions do
- local anchors = description.glyph.anchors
- if anchors then
- for class, data in next, anchors do
- if class == "baselig" then
- for tag, specification in next, data do
- for i=1,#specification do
- local si = specification[i]
- specification[i] = { si.x or 0, si.y or 0 }
+ local lookupkerns=kerns[lookup]
+ if not lookupkerns then
+ lookupkerns={}
+ kerns[lookup]=lookupkerns
end
- end
- else
- for tag, specification in next, data do
- data[tag] = { specification.x or 0, specification.y or 0 }
- end
- end
- end
- description.anchors = anchors
+ for second_unicode,kern in next,extrakerns do
+ lookupkerns[second_unicode]=kern
+ end
+ elseif trace_loading then
+ report_otf("no glyph data for U+%05X",first_unicode)
+ end
+ end
+ end
+ end
+ end
+ end
+ subtable.kernclass={}
+ end
+ end
+ end
+ end
+ end
+end
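-- A minimal sketch of the offset addressing used in "merge kern classes" above:
-- the kern for the pair (first class fk, second class sk) sits in the flat offsets
-- array at (fk-1)*maxseconds+sk, and the loop above skips sk=1, as the first slot
-- is not used in these tables. The numbers below are invented for illustration and
-- are not part of this patch.
local maxseconds = 3
local offsets    = { 0,-10,-20, 0,-30,-40 } -- 2 first classes x 3 second classes
local function classkern(fk,sk)
  return offsets[(fk-1)*maxseconds+sk]
end
print(classkern(1,2)) -- -10
print(classkern(2,3)) -- -40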
+actions["check glyphs"]=function(data,filename,raw)
+ for unicode,description in next,data.descriptions do
+ description.glyph=nil
+ end
+end
+actions["check metadata"]=function(data,filename,raw)
+ local metadata=data.metadata
+ for _,k in next,mainfields do
+ if valid_fields[k] then
+ local v=raw[k]
+ if not metadata[k] then
+ metadata[k]=v
+ end
+ end
+ end
+ local ttftables=metadata.ttf_tables
+ if ttftables then
+ for i=1,#ttftables do
+ ttftables[i].data="deleted"
+ end
+ end
+end
+actions["cleanup tables"]=function(data,filename,raw)
+ data.resources.indices=nil
+ data.helpers=nil
+end
+actions["reorganize glyph lookups"]=function(data,filename,raw)
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local descriptions=data.descriptions
+ local splitter=data.helpers.tounicodelist
+ local lookuptypes=resources.lookuptypes
+ for unicode,description in next,descriptions do
+ local lookups=description.glyph.lookups
+ if lookups then
+ for tag,lookuplist in next,lookups do
+ for l=1,#lookuplist do
+ local lookup=lookuplist[l]
+ local specification=lookup.specification
+ local lookuptype=lookup.type
+ local lt=lookuptypes[tag]
+ if not lt then
+ lookuptypes[tag]=lookuptype
+ elseif lt~=lookuptype then
+ report_otf("conflicting lookuptypes: %s => %s and %s",tag,lt,lookuptype)
+ end
+ if lookuptype=="ligature" then
+ lookuplist[l]={ lpegmatch(splitter,specification.components) }
+ elseif lookuptype=="alternate" then
+ lookuplist[l]={ lpegmatch(splitter,specification.components) }
+ elseif lookuptype=="substitution" then
+ lookuplist[l]=unicodes[specification.variant]
+ elseif lookuptype=="multiple" then
+ lookuplist[l]={ lpegmatch(splitter,specification.components) }
+ elseif lookuptype=="position" then
+ lookuplist[l]={
+ specification.x or 0,
+ specification.y or 0,
+ specification.h or 0,
+ specification.v or 0
+ }
+ elseif lookuptype=="pair" then
+ local one=specification.offsets[1]
+ local two=specification.offsets[2]
+ local paired=unicodes[specification.paired]
+ if one then
+ if two then
+ lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 },{ two.x or 0,two.y or 0,two.h or 0,two.v or 0 } }
+ else
+ lookuplist[l]={ paired,{ one.x or 0,one.y or 0,one.h or 0,one.v or 0 } }
+ end
+ else
+ if two then
+ lookuplist[l]={ paired,{},{ two.x or 0,two.y or 0,two.h or 0,two.v or 0} }
+ else
+ lookuplist[l]={ paired }
+ end
+ end
+ end
+ end
+ end
+ local slookups,mlookups
+ for tag,lookuplist in next,lookups do
+ if #lookuplist==1 then
+ if slookups then
+ slookups[tag]=lookuplist[1]
+ else
+ slookups={ [tag]=lookuplist[1] }
+ end
+ else
+ if mlookups then
+ mlookups[tag]=lookuplist
+ else
+ mlookups={ [tag]=lookuplist }
+ end
+ end
+ end
+ if slookups then
+ description.slookups=slookups
+ end
+ if mlookups then
+ description.mlookups=mlookups
+ end
+ end
+ end
+end
+actions["reorganize glyph anchors"]=function(data,filename,raw)
+ local descriptions=data.descriptions
+ for unicode,description in next,descriptions do
+ local anchors=description.glyph.anchors
+ if anchors then
+ for class,data in next,anchors do
+ if class=="baselig" then
+ for tag,specification in next,data do
+ for i=1,#specification do
+ local si=specification[i]
+ specification[i]={ si.x or 0,si.y or 0 }
+ end
+ end
+ else
+ for tag,specification in next,data do
+ data[tag]={ specification.x or 0,specification.y or 0 }
+ end
end
+ end
+ description.anchors=anchors
end
+ end
end
-
--- modes: node, base, none
-
function otf.setfeatures(tfmdata,features)
- local okay = constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf)
- if okay then
- return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf)
- else
- return { } -- will become false
- end
+ local okay=constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf)
+ if okay then
+ return constructors.collectprocessors("otf",tfmdata,features,trace_features,report_otf)
+ else
+ return {}
+ end
end
-
--- the first version made a top/mid/not extensible table, now we just
--- pass on the variants data and deal with it in the tfm scaler (there
--- is no longer an extensible table anyway)
---
--- we cannot share descriptions as virtual fonts might extend them (ok,
--- we could use a cache with a hash)
---
--- we already assign an empty table to characters as we can add for
--- instance protruding info and loop over characters; one is not supposed
--- to change descriptions and if one does so one should make a copy!
-
local function copytotfm(data,cache_id)
- if data then
- local metadata = data.metadata
- local resources = data.resources
- local properties = derivetable(data.properties)
- local descriptions = derivetable(data.descriptions)
- local goodies = derivetable(data.goodies)
- local characters = { }
- local parameters = { }
- local mathparameters = { }
- --
- local pfminfo = metadata.pfminfo or { }
- local resources = data.resources
- local unicodes = resources.unicodes
- -- local mode = data.mode or "base"
- local spaceunits = 500
- local spacer = "space"
- local designsize = metadata.designsize or metadata.design_size or 100
- local mathspecs = metadata.math
- --
- if designsize == 0 then
- designsize = 100
- end
- if mathspecs then
- for name, value in next, mathspecs do
- mathparameters[name] = value
- end
- end
- for unicode, _ in next, data.descriptions do -- use parent table
- characters[unicode] = { }
- end
- if mathspecs then
- -- we could move this to the scaler but not that much is saved
- -- and this is cleaner
- for unicode, character in next, characters do
- local d = descriptions[unicode]
- local m = d.math
- if m then
- -- watch out: luatex uses horiz_variants for the parts
- local variants = m.horiz_variants
- local parts = m.horiz_parts
- -- local done = { [unicode] = true }
- if variants then
- local c = character
- for i=1,#variants do
- local un = variants[i]
- -- if done[un] then
- -- -- report_otf("skipping cyclic reference U+%05X in math variant U+%05X",un,unicode)
- -- else
- c.next = un
- c = characters[un]
- -- done[un] = true
- -- end
- end -- c is now last in chain
- c.horiz_variants = parts
- elseif parts then
- character.horiz_variants = parts
- end
- local variants = m.vert_variants
- local parts = m.vert_parts
- -- local done = { [unicode] = true }
- if variants then
- local c = character
- for i=1,#variants do
- local un = variants[i]
- -- if done[un] then
- -- -- report_otf("skipping cyclic reference U+%05X in math variant U+%05X",un,unicode)
- -- else
- c.next = un
- c = characters[un]
- -- done[un] = true
- -- end
- end -- c is now last in chain
- c.vert_variants = parts
- elseif parts then
- character.vert_variants = parts
- end
- local italic_correction = m.vert_italic_correction
- if italic_correction then
- character.vert_italic_correction = italic_correction -- was c.
- end
- local top_accent = m.top_accent
- if top_accent then
- character.top_accent = top_accent
- end
- local kerns = m.kerns
- if kerns then
- character.mathkerns = kerns
- end
- end
- end
- end
- -- end math
- local monospaced = metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion == "Monospaced")
- local charwidth = pfminfo.avgwidth -- or unset
- local italicangle = metadata.italicangle
- local charxheight = pfminfo.os2_xheight and pfminfo.os2_xheight > 0 and pfminfo.os2_xheight
- properties.monospaced = monospaced
- parameters.italicangle = italicangle
- parameters.charwidth = charwidth
- parameters.charxheight = charxheight
- --
- local space = 0x0020 -- unicodes['space'], unicodes['emdash']
- local emdash = 0x2014 -- unicodes['space'], unicodes['emdash']
- if monospaced then
- if descriptions[space] then
- spaceunits, spacer = descriptions[space].width, "space"
- end
- if not spaceunits and descriptions[emdash] then
- spaceunits, spacer = descriptions[emdash].width, "emdash"
- end
- if not spaceunits and charwidth then
- spaceunits, spacer = charwidth, "charwidth"
- end
- else
- if descriptions[space] then
- spaceunits, spacer = descriptions[space].width, "space"
- end
- if not spaceunits and descriptions[emdash] then
- spaceunits, spacer = descriptions[emdash].width/2, "emdash/2"
- end
- if not spaceunits and charwidth then
- spaceunits, spacer = charwidth, "charwidth"
- end
- end
- spaceunits = tonumber(spaceunits) or 500 -- brrr
- -- we need a runtime lookup because of running from cdrom or zip, brrr (shouldn't we use the basename then?)
- local filename = constructors.checkedfilename(resources)
- local fontname = metadata.fontname
- local fullname = metadata.fullname or fontname
- local units = metadata.units_per_em or 1000
- --
- if units == 0 then -- catch bugs in fonts
- units = 1000
- metadata.units_per_em = 1000
- end
- --
- parameters.slant = 0
- parameters.space = spaceunits -- 3.333 (cmr10)
- parameters.space_stretch = units/2 -- 500 -- 1.666 (cmr10)
- parameters.space_shrink = 1*units/3 -- 333 -- 1.111 (cmr10)
- parameters.x_height = 2*units/5 -- 400
- parameters.quad = units -- 1000
- if spaceunits < 2*units/5 then
- -- todo: warning
- end
- if italicangle then
- parameters.italicangle = italicangle
- parameters.italicfactor = math.cos(math.rad(90+italicangle))
- parameters.slant = - math.round(math.tan(italicangle*math.pi/180))
- end
- if monospaced then
- parameters.space_stretch = 0
- parameters.space_shrink = 0
- elseif syncspace then --
- parameters.space_stretch = spaceunits/2
- parameters.space_shrink = spaceunits/3
- end
- parameters.extra_space = parameters.space_shrink -- 1.111 (cmr10)
- if charxheight then
- parameters.x_height = charxheight
- else
- local x = 0x78 -- unicodes['x']
- if x then
- local x = descriptions[x]
- if x then
- parameters.x_height = x.height
- end
- end
- end
- --
- parameters.designsize = (designsize/10)*65536
- parameters.ascender = abs(metadata.ascent or 0)
- parameters.descender = abs(metadata.descent or 0)
- parameters.units = units
- --
- properties.space = spacer
- properties.encodingbytes = 2
- properties.format = data.format or fonts.formats[filename] or "opentype"
- properties.noglyphnames = true
- properties.filename = filename
- properties.fontname = fontname
- properties.fullname = fullname
- properties.psname = fontname or fullname
- properties.name = filename or fullname
- --
- -- properties.name = specification.name
- -- properties.sub = specification.sub
- return {
- characters = characters,
- descriptions = descriptions,
- parameters = parameters,
- mathparameters = mathparameters,
- resources = resources,
- properties = properties,
- goodies = goodies,
- }
- end
+ if data then
+ local metadata=data.metadata
+ local resources=data.resources
+ local properties=derivetable(data.properties)
+ local descriptions=derivetable(data.descriptions)
+ local goodies=derivetable(data.goodies)
+ local characters={}
+ local parameters={}
+ local mathparameters={}
+ local pfminfo=metadata.pfminfo or {}
+ local resources=data.resources
+ local unicodes=resources.unicodes
+ local spaceunits=500
+ local spacer="space"
+ local designsize=metadata.designsize or metadata.design_size or 100
+ local mathspecs=metadata.math
+ if designsize==0 then
+ designsize=100
+ end
+ if mathspecs then
+ for name,value in next,mathspecs do
+ mathparameters[name]=value
+ end
+ end
+ for unicode,_ in next,data.descriptions do
+ characters[unicode]={}
+ end
+ if mathspecs then
+ for unicode,character in next,characters do
+ local d=descriptions[unicode]
+ local m=d.math
+ if m then
+ local variants=m.horiz_variants
+ local parts=m.horiz_parts
+ if variants then
+ local c=character
+ for i=1,#variants do
+ local un=variants[i]
+ c.next=un
+ c=characters[un]
+ end
+ c.horiz_variants=parts
+ elseif parts then
+ character.horiz_variants=parts
+ end
+ local variants=m.vert_variants
+ local parts=m.vert_parts
+ if variants then
+ local c=character
+ for i=1,#variants do
+ local un=variants[i]
+ c.next=un
+ c=characters[un]
+ end
+ c.vert_variants=parts
+ elseif parts then
+ character.vert_variants=parts
+ end
+ local italic_correction=m.vert_italic_correction
+ if italic_correction then
+ character.vert_italic_correction=italic_correction
+ end
+ local top_accent=m.top_accent
+ if top_accent then
+ character.top_accent=top_accent
+ end
+ local kerns=m.kerns
+ if kerns then
+ character.mathkerns=kerns
+ end
+ end
+ end
+ end
+ local monospaced=metadata.isfixedpitch or (pfminfo.panose and pfminfo.panose.proportion=="Monospaced")
+ local charwidth=pfminfo.avgwidth
+ local italicangle=metadata.italicangle
+ local charxheight=pfminfo.os2_xheight and pfminfo.os2_xheight>0 and pfminfo.os2_xheight
+ properties.monospaced=monospaced
+ parameters.italicangle=italicangle
+ parameters.charwidth=charwidth
+ parameters.charxheight=charxheight
+ local space=0x0020
+ local emdash=0x2014
+ if monospaced then
+ if descriptions[space] then
+ spaceunits,spacer=descriptions[space].width,"space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits,spacer=descriptions[emdash].width,"emdash"
+ end
+ if not spaceunits and charwidth then
+ spaceunits,spacer=charwidth,"charwidth"
+ end
+ else
+ if descriptions[space] then
+ spaceunits,spacer=descriptions[space].width,"space"
+ end
+ if not spaceunits and descriptions[emdash] then
+ spaceunits,spacer=descriptions[emdash].width/2,"emdash/2"
+ end
+ if not spaceunits and charwidth then
+ spaceunits,spacer=charwidth,"charwidth"
+ end
+ end
+ spaceunits=tonumber(spaceunits) or 500
+ local filename=constructors.checkedfilename(resources)
+ local fontname=metadata.fontname
+ local fullname=metadata.fullname or fontname
+ local units=metadata.units_per_em or 1000
+ if units==0 then
+ units=1000
+ metadata.units_per_em=1000
+ end
+ parameters.slant=0
+ parameters.space=spaceunits
+ parameters.space_stretch=units/2
+ parameters.space_shrink=1*units/3
+ parameters.x_height=2*units/5
+ parameters.quad=units
+ if spaceunits<2*units/5 then
+ end
+ if italicangle then
+ parameters.italicangle=italicangle
+ parameters.italicfactor=math.cos(math.rad(90+italicangle))
+ parameters.slant=- math.round(math.tan(italicangle*math.pi/180))
+ end
+ if monospaced then
+ parameters.space_stretch=0
+ parameters.space_shrink=0
+ elseif syncspace then
+ parameters.space_stretch=spaceunits/2
+ parameters.space_shrink=spaceunits/3
+ end
+ parameters.extra_space=parameters.space_shrink
+ if charxheight then
+ parameters.x_height=charxheight
+ else
+ local x=0x78
+ if x then
+ local x=descriptions[x]
+ if x then
+ parameters.x_height=x.height
+ end
+ end
+ end
+ parameters.designsize=(designsize/10)*65536
+ parameters.ascender=abs(metadata.ascent or 0)
+ parameters.descender=abs(metadata.descent or 0)
+ parameters.units=units
+ properties.space=spacer
+ properties.encodingbytes=2
+ properties.format=data.format or fonts.formats[filename] or "opentype"
+ properties.noglyphnames=true
+ properties.filename=filename
+ properties.fontname=fontname
+ properties.fullname=fullname
+ properties.psname=fontname or fullname
+ properties.name=filename or fullname
+ return {
+ characters=characters,
+ descriptions=descriptions,
+ parameters=parameters,
+ mathparameters=mathparameters,
+ resources=resources,
+ properties=properties,
+ goodies=goodies,
+ }
+ end
end
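-- A minimal sketch of the space/quad parameter derivation in copytotfm above, with
-- invented input values: stretch and shrink default to fractions of the em (units),
-- monospaced fonts get none, and with syncspace set they follow the space width
-- instead. This helper is only an illustration and not part of this patch.
local function spaceparameters(units,spaceunits,monospaced,syncspace)
  local p = {
    space         = spaceunits,
    space_stretch = units/2,
    space_shrink  = units/3,
    x_height      = 2*units/5,
    quad          = units,
  }
  if monospaced then
    p.space_stretch, p.space_shrink = 0, 0
  elseif syncspace then
    p.space_stretch, p.space_shrink = spaceunits/2, spaceunits/3
  end
  p.extra_space = p.space_shrink
  return p
end
local p = spaceparameters(1000,500,false,true)
print(p.space, p.space_stretch, p.space_shrink, p.quad) -- 500, 250, ~166.7, 1000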
-
local function otftotfm(specification)
- local cache_id = specification.hash
- local tfmdata = containers.read(constructors.cache,cache_id)
- if not tfmdata then
- local name = specification.name
- local sub = specification.sub
- local filename = specification.filename
- local format = specification.format
- local features = specification.features.normal
- local rawdata = otf.load(filename,format,sub,features and features.featurefile)
- if rawdata and next(rawdata) then
- rawdata.lookuphash = { }
- tfmdata = copytotfm(rawdata,cache_id)
- if tfmdata and next(tfmdata) then
- -- at this moment no characters are assigned yet, only empty slots
- local features = constructors.checkedfeatures("otf",features)
- local shared = tfmdata.shared
- if not shared then
- shared = { }
- tfmdata.shared = shared
- end
- shared.rawdata = rawdata
- -- shared.features = features -- default
- shared.dynamics = { }
- -- shared.processes = { }
- tfmdata.changed = { }
- shared.features = features
- shared.processes = otf.setfeatures(tfmdata,features)
- end
- end
- containers.write(constructors.cache,cache_id,tfmdata)
- end
- return tfmdata
+ local cache_id=specification.hash
+ local tfmdata=containers.read(constructors.cache,cache_id)
+ if not tfmdata then
+ local name=specification.name
+ local sub=specification.sub
+ local filename=specification.filename
+ local format=specification.format
+ local features=specification.features.normal
+ local rawdata=otf.load(filename,format,sub,features and features.featurefile)
+ if rawdata and next(rawdata) then
+ rawdata.lookuphash={}
+ tfmdata=copytotfm(rawdata,cache_id)
+ if tfmdata and next(tfmdata) then
+ local features=constructors.checkedfeatures("otf",features)
+ local shared=tfmdata.shared
+ if not shared then
+ shared={}
+ tfmdata.shared=shared
+ end
+ shared.rawdata=rawdata
+ shared.dynamics={}
+ tfmdata.changed={}
+ shared.features=features
+ shared.processes=otf.setfeatures(tfmdata,features)
+ end
+ end
+ containers.write(constructors.cache,cache_id,tfmdata)
+ end
+ return tfmdata
end
-
local function read_from_otf(specification)
- local tfmdata = otftotfm(specification)
- if tfmdata then
- -- this late ? .. needs checking
- tfmdata.properties.name = specification.name
- tfmdata.properties.sub = specification.sub
- --
- tfmdata = constructors.scale(tfmdata,specification)
- local allfeatures = tfmdata.shared.features or specification.features.normal
- constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf)
- constructors.setname(tfmdata,specification) -- only otf?
- fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification)
- end
- return tfmdata
+ local tfmdata=otftotfm(specification)
+ if tfmdata then
+ tfmdata.properties.name=specification.name
+ tfmdata.properties.sub=specification.sub
+ tfmdata=constructors.scale(tfmdata,specification)
+ local allfeatures=tfmdata.shared.features or specification.features.normal
+ constructors.applymanipulators("otf",tfmdata,allfeatures,trace_features,report_otf)
+ constructors.setname(tfmdata,specification)
+ fonts.loggers.register(tfmdata,file.suffix(specification.filename),specification)
+ end
+ return tfmdata
end
-
local function checkmathsize(tfmdata,mathsize)
- local mathdata = tfmdata.shared.rawdata.metadata.math
- local mathsize = tonumber(mathsize)
- if mathdata then -- we cannot use mathparameters as luatex will complain
- local parameters = tfmdata.parameters
- parameters.scriptpercentage = mathdata.ScriptPercentScaleDown
- parameters.scriptscriptpercentage = mathdata.ScriptScriptPercentScaleDown
- parameters.mathsize = mathsize
- end
+ local mathdata=tfmdata.shared.rawdata.metadata.math
+ local mathsize=tonumber(mathsize)
+ if mathdata then
+ local parameters=tfmdata.parameters
+ parameters.scriptpercentage=mathdata.ScriptPercentScaleDown
+ parameters.scriptscriptpercentage=mathdata.ScriptScriptPercentScaleDown
+ parameters.mathsize=mathsize
+ end
end
-
registerotffeature {
- name = "mathsize",
- description = "apply mathsize as specified in the font",
- initializers = {
- base = checkmathsize,
- node = checkmathsize,
- }
+ name="mathsize",
+ description="apply mathsize as specified in the font",
+ initializers={
+ base=checkmathsize,
+ node=checkmathsize,
+ }
}
-
--- helpers
-
function otf.collectlookups(rawdata,kind,script,language)
- local sequences = rawdata.resources.sequences
- if sequences then
- local featuremap, featurelist = { }, { }
- for s=1,#sequences do
- local sequence = sequences[s]
- local features = sequence.features
- features = features and features[kind]
- features = features and (features[script] or features[default] or features[wildcard])
- features = features and (features[language] or features[default] or features[wildcard])
- if features then
- local subtables = sequence.subtables
- if subtables then
- for s=1,#subtables do
- local ss = subtables[s]
- if not featuremap[s] then
- featuremap[ss] = true
- featurelist[#featurelist+1] = ss
- end
- end
- end
- end
- end
- if #featurelist > 0 then
- return featuremap, featurelist
- end
- end
- return nil, nil
+ local sequences=rawdata.resources.sequences
+ if sequences then
+ local featuremap,featurelist={},{}
+ for s=1,#sequences do
+ local sequence=sequences[s]
+ local features=sequence.features
+ features=features and features[kind]
+ features=features and (features[script] or features[default] or features[wildcard])
+ features=features and (features[language] or features[default] or features[wildcard])
+ if features then
+ local subtables=sequence.subtables
+ if subtables then
+ for s=1,#subtables do
+ local ss=subtables[s]
+ if not featuremap[s] then
+ featuremap[ss]=true
+ featurelist[#featurelist+1]=ss
+ end
+ end
+ end
+ end
+ end
+ if #featurelist>0 then
+ return featuremap,featurelist
+ end
+ end
+ return nil,nil
end
-
--- readers
-
local function check_otf(forced,specification,suffix,what)
- local name = specification.name
- if forced then
- name = file.addsuffix(name,suffix,true)
- end
- local fullname = findbinfile(name,suffix) or ""
- if fullname == "" then
- fullname = fonts.names.getfilename(name,suffix) or ""
- end
- if fullname ~= "" then
- specification.filename = fullname
- specification.format = what
- return read_from_otf(specification)
- end
+ local name=specification.name
+ if forced then
+ name=file.addsuffix(name,suffix,true)
+ end
+ local fullname=findbinfile(name,suffix) or ""
+ if fullname=="" then
+ fullname=fonts.names.getfilename(name,suffix) or ""
+ end
+ if fullname~="" then
+ specification.filename=fullname
+ specification.format=what
+ return read_from_otf(specification)
+ end
end
-
local function opentypereader(specification,suffix,what)
- local forced = specification.forced or ""
- if forced == "otf" then
- return check_otf(true,specification,forced,"opentype")
- elseif forced == "ttf" or forced == "ttc" or forced == "dfont" then
- return check_otf(true,specification,forced,"truetype")
- else
- return check_otf(false,specification,suffix,what)
- end
-end
-
-readers.opentype = opentypereader
-
-local formats = fonts.formats
-
-formats.otf = "opentype"
-formats.ttf = "truetype"
-formats.ttc = "truetype"
-formats.dfont = "truetype"
-
-function readers.otf (specification) return opentypereader(specification,"otf",formats.otf ) end
-function readers.ttf (specification) return opentypereader(specification,"ttf",formats.ttf ) end
-function readers.ttc (specification) return opentypereader(specification,"ttf",formats.ttc ) end
+ local forced=specification.forced or ""
+ if forced=="otf" then
+ return check_otf(true,specification,forced,"opentype")
+ elseif forced=="ttf" or forced=="ttc" or forced=="dfont" then
+ return check_otf(true,specification,forced,"truetype")
+ else
+ return check_otf(false,specification,suffix,what)
+ end
+end
+readers.opentype=opentypereader
+local formats=fonts.formats
+formats.otf="opentype"
+formats.ttf="truetype"
+formats.ttc="truetype"
+formats.dfont="truetype"
+function readers.otf (specification) return opentypereader(specification,"otf",formats.otf ) end
+function readers.ttf (specification) return opentypereader(specification,"ttf",formats.ttf ) end
+function readers.ttc (specification) return opentypereader(specification,"ttf",formats.ttc ) end
function readers.dfont(specification) return opentypereader(specification,"ttf",formats.dfont) end
-
--- this will be overloaded
-
function otf.scriptandlanguage(tfmdata,attr)
- local properties = tfmdata.properties
- return properties.script or "dflt", properties.language or "dflt"
+ local properties=tfmdata.properties
+ return properties.script or "dflt",properties.language or "dflt"
end
end -- closure
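
The reader closure that just ended is, at its core, a suffix-driven dispatch: a forced suffix overrides whatever the lookup started with, the name is resolved to a file, and the specification is tagged with a format before being handed to the OpenType loader. A minimal standalone sketch of that flow, with placeholder resolvers (findfile and loadfont stand in for findbinfile, fonts.names.getfilename and read_from_otf, which are not reproduced here):

    -- sketch only: a tiny suffix-driven reader dispatch
    local formats = { otf = "opentype", ttf = "truetype", ttc = "truetype", dfont = "truetype" }

    local function findfile(name,suffix)
        -- pretend every name resolves; the real resolver searches the TeX tree
        return name .. "." .. suffix
    end

    local function loadfont(specification)
        return { filename = specification.filename, format = specification.format }
    end

    local function reader(specification,suffix)
        local forced = specification.forced or ""
        if formats[forced] then
            suffix = forced                          -- an explicit request wins
        end
        local fullname = findfile(specification.name,suffix)
        if fullname and fullname ~= "" then
            specification.filename = fullname
            specification.format   = formats[suffix]
            return loadfont(specification)
        end
    end

    print(reader({ name = "lmroman10-regular", forced = "otf" }).format)  -- opentype
    print(reader({ name = "texgyrepagella", forced = "" }, "ttf").format) -- truetype
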
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['font-otb'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['font-otb']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-local concat = table.concat
-local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
-local type, next, tonumber, tostring = type, next, tonumber, tostring
-local lpegmatch = lpeg.match
-local utfchar = utf.char
-
-local trace_baseinit = false trackers.register("otf.baseinit", function(v) trace_baseinit = v end)
-local trace_singles = false trackers.register("otf.singles", function(v) trace_singles = v end)
-local trace_multiples = false trackers.register("otf.multiples", function(v) trace_multiples = v end)
-local trace_alternatives = false trackers.register("otf.alternatives", function(v) trace_alternatives = v end)
-local trace_ligatures = false trackers.register("otf.ligatures", function(v) trace_ligatures = v end)
-local trace_ligatures_detail = false trackers.register("otf.ligatures.detail", function(v) trace_ligatures_detail = v end)
-local trace_kerns = false trackers.register("otf.kerns", function(v) trace_kerns = v end)
-local trace_preparing = false trackers.register("otf.preparing", function(v) trace_preparing = v end)
-
-local report_prepare = logs.reporter("fonts","otf prepare")
-
-local fonts = fonts
-local otf = fonts.handlers.otf
-
-local otffeatures = otf.features
-local registerotffeature = otffeatures.register
-
-otf.defaultbasealternate = "none" -- first last
-
-local wildcard = "*"
-local default = "dflt"
-
+local concat=table.concat
+local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
+local type,next,tonumber,tostring=type,next,tonumber,tostring
+local lpegmatch=lpeg.match
+local utfchar=utf.char
+local trace_baseinit=false trackers.register("otf.baseinit",function(v) trace_baseinit=v end)
+local trace_singles=false trackers.register("otf.singles",function(v) trace_singles=v end)
+local trace_multiples=false trackers.register("otf.multiples",function(v) trace_multiples=v end)
+local trace_alternatives=false trackers.register("otf.alternatives",function(v) trace_alternatives=v end)
+local trace_ligatures=false trackers.register("otf.ligatures",function(v) trace_ligatures=v end)
+local trace_ligatures_detail=false trackers.register("otf.ligatures.detail",function(v) trace_ligatures_detail=v end)
+local trace_kerns=false trackers.register("otf.kerns",function(v) trace_kerns=v end)
+local trace_preparing=false trackers.register("otf.preparing",function(v) trace_preparing=v end)
+local report_prepare=logs.reporter("fonts","otf prepare")
+local fonts=fonts
+local otf=fonts.handlers.otf
+local otffeatures=otf.features
+local registerotffeature=otffeatures.register
+otf.defaultbasealternate="none"
+local wildcard="*"
+local default="dflt"
local function gref(descriptions,n)
- if type(n) == "number" then
- local name = descriptions[n].name
- if name then
- return format("U+%05X (%s)",n,name)
- else
- return format("U+%05X")
- end
- elseif n then
- local num, nam = { }, { }
- for i=2,#n do -- first is likely a key
- local ni = n[i]
- num[i-1] = format("U+%05X",ni)
- nam[i-1] = descriptions[ni].name or "?"
- end
- return format("%s (%s)",concat(num," "), concat(nam," "))
+ if type(n)=="number" then
+ local name=descriptions[n].name
+ if name then
+ return format("U+%05X (%s)",n,name)
else
- return "?"
+ return format("U+%05X")
+ end
+ elseif n then
+ local num,nam={},{}
+ for i=2,#n do
+ local ni=n[i]
+ num[i-1]=format("U+%05X",ni)
+ nam[i-1]=descriptions[ni].name or "?"
end
+ return format("%s (%s)",concat(num," "),concat(nam," "))
+ else
+ return "?"
+ end
end
-
local function cref(feature,lookupname)
- if lookupname then
- return format("feature %s, lookup %s",feature,lookupname)
- else
- return format("feature %s",feature)
- end
+ if lookupname then
+ return format("feature %s, lookup %s",feature,lookupname)
+ else
+ return format("feature %s",feature)
+ end
end
-
local function report_alternate(feature,lookupname,descriptions,unicode,replacement,value,comment)
- report_prepare("%s: base alternate %s => %s (%s => %s)",cref(feature,lookupname),
- gref(descriptions,unicode),replacement and gref(descriptions,replacement) or "-",
- tostring(value),comment)
+ report_prepare("%s: base alternate %s => %s (%s => %s)",cref(feature,lookupname),
+ gref(descriptions,unicode),replacement and gref(descriptions,replacement) or "-",
+ tostring(value),comment)
end
-
local function report_substitution(feature,lookupname,descriptions,unicode,substitution)
- report_prepare("%s: base substitution %s => %s",cref(feature,lookupname),
- gref(descriptions,unicode),gref(descriptions,substitution))
+ report_prepare("%s: base substitution %s => %s",cref(feature,lookupname),
+ gref(descriptions,unicode),gref(descriptions,substitution))
end
-
local function report_ligature(feature,lookupname,descriptions,unicode,ligature)
- report_prepare("%s: base ligature %s => %s",cref(feature,lookupname),
- gref(descriptions,ligature),gref(descriptions,unicode))
+ report_prepare("%s: base ligature %s => %s",cref(feature,lookupname),
+ gref(descriptions,ligature),gref(descriptions,unicode))
end
-
-local basemethods = { }
-local basemethod = "<unset>"
-
+local basemethods={}
+local basemethod="<unset>"
local function applybasemethod(what,...)
- local m = basemethods[basemethod][what]
- if m then
- return m(...)
- end
+ local m=basemethods[basemethod][what]
+ if m then
+ return m(...)
+ end
end
-
--- We need to make sure that luatex sees the difference between
--- base fonts that have different glyphs in the same slots in fonts
--- that have the same fullname (or filename). LuaTeX will merge fonts
--- eventually (and subset later on). If needed we can use a more
--- verbose name as long as we don't use <()<>[]{}/%> and the length
--- is < 128.
-
-local basehash, basehashes, applied = { }, 1, { }
-
+local basehash,basehashes,applied={},1,{}
local function registerbasehash(tfmdata)
- local properties = tfmdata.properties
- local hash = concat(applied," ")
- local base = basehash[hash]
- if not base then
- basehashes = basehashes + 1
- base = basehashes
- basehash[hash] = base
- end
- properties.basehash = base
- properties.fullname = properties.fullname .. "-" .. base
- -- report_prepare("fullname base hash: '%s', featureset '%s'",tfmdata.properties.fullname,hash)
- applied = { }
+ local properties=tfmdata.properties
+ local hash=concat(applied," ")
+ local base=basehash[hash]
+ if not base then
+ basehashes=basehashes+1
+ base=basehashes
+ basehash[hash]=base
+ end
+ properties.basehash=base
+ properties.fullname=properties.fullname.."-"..base
+ applied={}
end
-
local function registerbasefeature(feature,value)
- applied[#applied+1] = feature .. "=" .. tostring(value)
+ applied[#applied+1]=feature.."="..tostring(value)
end
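
registerbasefeature and registerbasehash above implement a small trick: every applied feature=value pair is collected, the joined string is mapped to a small integer, and that integer is appended to the fullname so that LuaTeX treats base fonts with different feature sets as different fonts. A minimal sketch of the same idea (the counter starts at zero here, unlike the original, so the first hash comes out as 1):

    -- sketch only: hash the applied base features into the fullname
    local basehash, basehashes, applied = { }, 0, { }

    local function registerfeature(feature,value)
        applied[#applied+1] = feature .. "=" .. tostring(value)
    end

    local function registerhash(properties)
        local key  = table.concat(applied," ")
        local base = basehash[key]
        if not base then
            basehashes    = basehashes + 1
            base          = basehashes
            basehash[key] = base
        end
        properties.fullname = properties.fullname .. "-" .. base
        applied = { }                                -- reset for the next font instance
        return properties.fullname
    end

    registerfeature("liga",true)
    registerfeature("kern",true)
    print(registerhash({ fullname = "LMRoman10-Regular" })) -- LMRoman10-Regular-1
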
-
--- The original basemode ligature builder used the names of components
--- and did some expression juggling to get the chain right. The current
--- variant starts with unicodes but still uses names to make the chain.
--- This is needed because we have to create intermediates when needed
--- but use predefined snippets when available. To some extend the
--- current builder is more stupid but I don't worry that much about it
--- as ligatures are rather predicatable.
---
--- Personally I think that an ff + i == ffi rule as used in for instance
--- latin modern is pretty weird as no sane person will key that in and
--- expect a glyph for that ligature plus the following character. Anyhow,
--- as we need to deal with this, we do, but no guarantes are given.
---
--- latin modern dejavu
---
--- f+f 102 102 102 102
--- f+i 102 105 102 105
--- f+l 102 108 102 108
--- f+f+i 102 102 105
--- f+f+l 102 102 108 102 102 108
--- ff+i 64256 105 64256 105
--- ff+l 64256 108
---
--- As you can see here, latin modern is less complete than dejavu but
--- in practice one will not notice it.
---
--- The while loop is needed because we need to resolve for instance
--- pseudo names like hyphen_hyphen to endash so in practice we end
--- up with a bit too many definitions but the overhead is neglectable.
---
--- Todo: if changed[first] or changed[second] then ... end
-
-local trace = false
-
+local trace=false
local function finalize_ligatures(tfmdata,ligatures)
- local nofligatures = #ligatures
- if nofligatures > 0 then
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local unicodes = resources.unicodes
- local private = resources.private
- local alldone = false
- while not alldone do
- local done = 0
- for i=1,nofligatures do
- local ligature = ligatures[i]
- if ligature then
- local unicode, lookupdata = ligature[1], ligature[2]
- if trace then
- trace_ligatures_detail("building %q into %q",concat(lookupdata," "),unicode)
- end
- local size = #lookupdata
- local firstcode = lookupdata[1] -- [2]
- local firstdata = characters[firstcode]
- local okay = false
- if firstdata then
- local firstname = "ctx_" .. firstcode
- for i=1,size-1 do -- for i=2,size-1 do
- local firstdata = characters[firstcode]
- if not firstdata then
- firstcode = private
- if trace then
- trace_ligatures_detail("defining %q as %q",firstname,firstcode)
- end
- unicodes[firstname] = firstcode
- firstdata = { intermediate = true, ligatures = { } }
- characters[firstcode] = firstdata
- descriptions[firstcode] = { name = firstname }
- private = private + 1
- end
- local target
- local secondcode = lookupdata[i+1]
- local secondname = firstname .. "_" .. secondcode
- if i == size - 1 then
- target = unicode
- if not unicodes[secondname] then
- unicodes[secondname] = unicode -- map final ligature onto intermediates
- end
- okay = true
- else
- target = unicodes[secondname]
- if not target then
- break
- end
- end
- if trace then
- trace_ligatures_detail("codes (%s,%s) + (%s,%s) -> %s",firstname,firstcode,secondname,secondcode,target)
- end
- local firstligs = firstdata.ligatures
- if firstligs then
- firstligs[secondcode] = { char = target }
- else
- firstdata.ligatures = { [secondcode] = { char = target } }
- end
- firstcode = target
- firstname = secondname
- end
- end
- if okay then
- ligatures[i] = false
- done = done + 1
- end
- end
- end
- alldone = done == 0
- end
- if trace then
- for k, v in next, characters do
- if v.ligatures then table.print(v,k) end
- end
- end
- tfmdata.resources.private = private
- end
+ local nofligatures=#ligatures
+ if nofligatures>0 then
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local unicodes=resources.unicodes
+ local private=resources.private
+ local alldone=false
+ while not alldone do
+ local done=0
+ for i=1,nofligatures do
+ local ligature=ligatures[i]
+ if ligature then
+ local unicode,lookupdata=ligature[1],ligature[2]
+ if trace then
+ trace_ligatures_detail("building %q into %q",concat(lookupdata," "),unicode)
+ end
+ local size=#lookupdata
+ local firstcode=lookupdata[1]
+ local firstdata=characters[firstcode]
+ local okay=false
+ if firstdata then
+ local firstname="ctx_"..firstcode
+ for i=1,size-1 do
+ local firstdata=characters[firstcode]
+ if not firstdata then
+ firstcode=private
+ if trace then
+ trace_ligatures_detail("defining %q as %q",firstname,firstcode)
+ end
+ unicodes[firstname]=firstcode
+ firstdata={ intermediate=true,ligatures={} }
+ characters[firstcode]=firstdata
+ descriptions[firstcode]={ name=firstname }
+ private=private+1
+ end
+ local target
+ local secondcode=lookupdata[i+1]
+ local secondname=firstname.."_"..secondcode
+ if i==size-1 then
+ target=unicode
+ if not unicodes[secondname] then
+ unicodes[secondname]=unicode
+ end
+ okay=true
+ else
+ target=unicodes[secondname]
+ if not target then
+ break
+ end
+ end
+ if trace then
+ trace_ligatures_detail("codes (%s,%s) + (%s,%s) -> %s",firstname,firstcode,secondname,secondcode,target)
+ end
+ local firstligs=firstdata.ligatures
+ if firstligs then
+ firstligs[secondcode]={ char=target }
+ else
+ firstdata.ligatures={ [secondcode]={ char=target } }
+ end
+ firstcode=target
+ firstname=secondname
+ end
+ end
+ if okay then
+ ligatures[i]=false
+ done=done+1
+ end
+ end
+ end
+ alldone=done==0
+ end
+ if trace then
+ for k,v in next,characters do
+ if v.ligatures then table.print(v,k) end
+ end
+ end
+ tfmdata.resources.private=private
+ end
end
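
finalize_ligatures above rewrites multi-glyph ligatures into the pairwise ligature tables that base mode can use, inventing private intermediate glyphs when a chain has no predefined two-glyph step. A simplified sketch of that chain building, assuming a made-up private slot 0xF0000 and skipping the name-based reuse of existing intermediates that the original performs:

    -- sketch only: turn f + f + i -> ffi into pairwise ligature steps
    local characters, private = { [102] = { }, [105] = { } }, 0xF0000

    local function addchain(components,ligature)
        local current = components[1]
        for i = 2, #components do
            local target
            if i == #components then
                target = ligature                    -- the real ligature glyph
            else
                target  = private                    -- an invented intermediate glyph
                private = private + 1
                characters[target] = { intermediate = true }
            end
            local char = characters[current]
            char.ligatures = char.ligatures or { }
            char.ligatures[components[i]] = { char = target }
            current = target
        end
    end

    addchain({ 102, 102, 105 }, 0xFB03)              -- f + f + i -> ffi
    print(characters[102].ligatures[102].char)       -- the private intermediate (983040)
    print(characters[0xF0000].ligatures[105].char)   -- the real ffi glyph (64259)
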
-
local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local changed = tfmdata.changed
- local unicodes = resources.unicodes
- local lookuphash = resources.lookuphash
- local lookuptypes = resources.lookuptypes
-
- local ligatures = { }
- local alternate = tonumber(value)
- local defaultalt = otf.defaultbasealternate
-
- local trace_singles = trace_baseinit and trace_singles
- local trace_alternatives = trace_baseinit and trace_alternatives
- local trace_ligatures = trace_baseinit and trace_ligatures
-
- local actions = {
- substitution = function(lookupdata,lookupname,description,unicode)
- if trace_singles then
- report_substitution(feature,lookupname,descriptions,unicode,lookupdata)
- end
- changed[unicode] = lookupdata
- end,
- alternate = function(lookupdata,lookupname,description,unicode)
- local replacement = lookupdata[alternate]
- if replacement then
- changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
- end
- elseif defaultalt == "first" then
- replacement = lookupdata[1]
- changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- elseif defaultalt == "last" then
- replacement = lookupdata[#data]
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- else
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
- end
- end
- end,
- ligature = function(lookupdata,lookupname,description,unicode)
- if trace_ligatures then
- report_ligature(feature,lookupname,descriptions,unicode,lookupdata)
- end
- ligatures[#ligatures+1] = { unicode, lookupdata }
- end,
- }
-
- for unicode, character in next, characters do
- local description = descriptions[unicode]
- local lookups = description.slookups
- if lookups then
- for l=1,#lookuplist do
- local lookupname = lookuplist[l]
- local lookupdata = lookups[lookupname]
- if lookupdata then
- local lookuptype = lookuptypes[lookupname]
- local action = actions[lookuptype]
- if action then
- action(lookupdata,lookupname,description,unicode)
- end
- end
- end
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local changed=tfmdata.changed
+ local unicodes=resources.unicodes
+ local lookuphash=resources.lookuphash
+ local lookuptypes=resources.lookuptypes
+ local ligatures={}
+ local alternate=tonumber(value)
+ local defaultalt=otf.defaultbasealternate
+ local trace_singles=trace_baseinit and trace_singles
+ local trace_alternatives=trace_baseinit and trace_alternatives
+ local trace_ligatures=trace_baseinit and trace_ligatures
+ local actions={
+ substitution=function(lookupdata,lookupname,description,unicode)
+ if trace_singles then
+ report_substitution(feature,lookupname,descriptions,unicode,lookupdata)
+ end
+ changed[unicode]=lookupdata
+ end,
+ alternate=function(lookupdata,lookupname,description,unicode)
+ local replacement=lookupdata[alternate]
+ if replacement then
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
end
- local lookups = description.mlookups
- if lookups then
- for l=1,#lookuplist do
- local lookupname = lookuplist[l]
- local lookuplist = lookups[lookupname]
- if lookuplist then
- local lookuptype = lookuptypes[lookupname]
- local action = actions[lookuptype]
- if action then
- for i=1,#lookuplist do
- action(lookuplist[i],lookupname,description,unicode)
- end
- end
- end
- end
+ elseif defaultalt=="first" then
+ replacement=lookupdata[1]
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
end
- end
-
- finalize_ligatures(tfmdata,ligatures)
-end
-
-local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist) -- todo what kind of kerns, currently all
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local unicodes = resources.unicodes
- local sharedkerns = { }
- local traceindeed = trace_baseinit and trace_kerns
- for unicode, character in next, characters do
- local description = descriptions[unicode]
- local rawkerns = description.kerns -- shared
- if rawkerns then
- local s = sharedkerns[rawkerns]
- if s == false then
- -- skip
- elseif s then
- character.kerns = s
- else
- local newkerns = character.kerns
- local done = false
- for l=1,#lookuplist do
- local lookup = lookuplist[l]
- local kerns = rawkerns[lookup]
- if kerns then
- for otherunicode, value in next, kerns do
- if value == 0 then
- -- maybe no 0 test here
- elseif not newkerns then
- newkerns = { [otherunicode] = value }
- done = true
- if traceindeed then
- report_prepare("%s: base kern %s + %s => %s",cref(feature,lookup),
- gref(descriptions,unicode),gref(descriptions,otherunicode),value)
- end
- elseif not newkerns[otherunicode] then -- first wins
- newkerns[otherunicode] = value
- done = true
- if traceindeed then
- report_prepare("%s: base kern %s + %s => %s",cref(feature,lookup),
- gref(descriptions,unicode),gref(descriptions,otherunicode),value)
- end
- end
- end
- end
- end
- if done then
- sharedkerns[rawkerns] = newkerns
- character.kerns = newkerns -- no empty assignments
- else
- sharedkerns[rawkerns] = false
- end
- end
+ elseif defaultalt=="last" then
+ replacement=lookupdata[#data]
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ else
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
+ end
+ end
+ end,
+ ligature=function(lookupdata,lookupname,description,unicode)
+ if trace_ligatures then
+ report_ligature(feature,lookupname,descriptions,unicode,lookupdata)
+ end
+ ligatures[#ligatures+1]={ unicode,lookupdata }
+ end,
+ }
+ for unicode,character in next,characters do
+ local description=descriptions[unicode]
+ local lookups=description.slookups
+ if lookups then
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookupdata=lookups[lookupname]
+ if lookupdata then
+ local lookuptype=lookuptypes[lookupname]
+ local action=actions[lookuptype]
+ if action then
+ action(lookupdata,lookupname,description,unicode)
+ end
+ end
+ end
+ end
+ local lookups=description.mlookups
+ if lookups then
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookuplist=lookups[lookupname]
+ if lookuplist then
+ local lookuptype=lookuptypes[lookupname]
+ local action=actions[lookuptype]
+ if action then
+ for i=1,#lookuplist do
+ action(lookuplist[i],lookupname,description,unicode)
+ end
+ end
+ end
+ end
+ end
+ end
+ finalize_ligatures(tfmdata,ligatures)
+end
+local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local unicodes=resources.unicodes
+ local sharedkerns={}
+ local traceindeed=trace_baseinit and trace_kerns
+ for unicode,character in next,characters do
+ local description=descriptions[unicode]
+ local rawkerns=description.kerns
+ if rawkerns then
+ local s=sharedkerns[rawkerns]
+ if s==false then
+ elseif s then
+ character.kerns=s
+ else
+ local newkerns=character.kerns
+ local done=false
+ for l=1,#lookuplist do
+ local lookup=lookuplist[l]
+ local kerns=rawkerns[lookup]
+ if kerns then
+ for otherunicode,value in next,kerns do
+ if value==0 then
+ elseif not newkerns then
+ newkerns={ [otherunicode]=value }
+ done=true
+ if traceindeed then
+ report_prepare("%s: base kern %s + %s => %s",cref(feature,lookup),
+ gref(descriptions,unicode),gref(descriptions,otherunicode),value)
+ end
+ elseif not newkerns[otherunicode] then
+ newkerns[otherunicode]=value
+ done=true
+ if traceindeed then
+ report_prepare("%s: base kern %s + %s => %s",cref(feature,lookup),
+ gref(descriptions,unicode),gref(descriptions,otherunicode),value)
+ end
+ end
+ end
+ end
+ end
+ if done then
+ sharedkerns[rawkerns]=newkerns
+ character.kerns=newkerns
+ else
+ sharedkerns[rawkerns]=false
end
+ end
end
+ end
end
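
The preparepositionings variant above leans on the fact that raw kern tables are shared between glyph descriptions: the merged result is computed once per raw table and cached, with false recording that nothing useful came out of it. A minimal sketch of that memoization, using a made-up lookup name:

    -- sketch only: memoize merged kern tables per shared raw table
    local sharedkerns = { }

    local function prepare(rawkerns,active)
        local cached = sharedkerns[rawkerns]
        if cached ~= nil then
            return cached or nil                     -- false means: nothing to apply
        end
        local merged = nil
        for i = 1, #active do
            local kerns = rawkerns[active[i]]
            if kerns then
                for other, value in pairs(kerns) do
                    if value ~= 0 and (not merged or merged[other] == nil) then
                        merged = merged or { }
                        merged[other] = value        -- first lookup wins
                    end
                end
            end
        end
        sharedkerns[rawkerns] = merged or false
        return merged
    end

    local raw = { kern_lookup_1 = { [0x41] = -30, [0x56] = 0 } }
    print(prepare(raw, { "kern_lookup_1" })[0x41])   -- -30
    print(prepare(raw, { "kern_lookup_1" })[0x41])   -- -30 again, from the cache
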
-
-basemethods.independent = {
- preparesubstitutions = preparesubstitutions,
- preparepositionings = preparepositionings,
+basemethods.independent={
+ preparesubstitutions=preparesubstitutions,
+ preparepositionings=preparepositionings,
}
-
local function makefake(tfmdata,name,present)
- local resources = tfmdata.resources
- local private = resources.private
- local character = { intermediate = true, ligatures = { } }
- resources.unicodes[name] = private
- tfmdata.characters[private] = character
- tfmdata.descriptions[private] = { name = name }
- resources.private = private + 1
- present[name] = private
- return character
+ local resources=tfmdata.resources
+ local private=resources.private
+ local character={ intermediate=true,ligatures={} }
+ resources.unicodes[name]=private
+ tfmdata.characters[private]=character
+ tfmdata.descriptions[private]={ name=name }
+ resources.private=private+1
+ present[name]=private
+ return character
end
-
local function make_1(present,tree,name)
- for k, v in next, tree do
- if k == "ligature" then
- present[name] = v
- else
- make_1(present,v,name .. "_" .. k)
- end
+ for k,v in next,tree do
+ if k=="ligature" then
+ present[name]=v
+ else
+ make_1(present,v,name.."_"..k)
end
+ end
end
-
local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookupname)
- for k, v in next, tree do
- if k == "ligature" then
- local character = characters[preceding]
- if not character then
- if trace_baseinit then
- report_prepare("weird ligature in lookup %s: U+%05X (%s), preceding U+%05X (%s)",lookupname,v,utfchar(v),preceding,utfchar(preceding))
- end
- character = makefake(tfmdata,name,present)
- end
- local ligatures = character.ligatures
- if ligatures then
- ligatures[unicode] = { char = v }
- else
- character.ligatures = { [unicode] = { char = v } }
- end
- if done then
- local d = done[lookupname]
- if not d then
- done[lookupname] = { "dummy", v }
- else
- d[#d+1] = v
- end
- end
+ for k,v in next,tree do
+ if k=="ligature" then
+ local character=characters[preceding]
+ if not character then
+ if trace_baseinit then
+ report_prepare("weird ligature in lookup %s: U+%05X (%s), preceding U+%05X (%s)",lookupname,v,utfchar(v),preceding,utfchar(preceding))
+ end
+ character=makefake(tfmdata,name,present)
+ end
+ local ligatures=character.ligatures
+ if ligatures then
+ ligatures[unicode]={ char=v }
+ else
+ character.ligatures={ [unicode]={ char=v } }
+ end
+ if done then
+ local d=done[lookupname]
+ if not d then
+ done[lookupname]={ "dummy",v }
else
- local code = present[name] or unicode
- local name = name .. "_" .. k
- make_2(present,tfmdata,characters,v,name,code,k,done,lookupname)
+ d[#d+1]=v
end
+ end
+ else
+ local code=present[name] or unicode
+ local name=name.."_"..k
+ make_2(present,tfmdata,characters,v,name,code,k,done,lookupname)
end
+ end
end
-
local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist)
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local changed = tfmdata.changed
- local lookuphash = resources.lookuphash
- local lookuptypes = resources.lookuptypes
-
- local ligatures = { }
- local alternate = tonumber(value)
- local defaultalt = otf.defaultbasealternate
-
- local trace_singles = trace_baseinit and trace_singles
- local trace_alternatives = trace_baseinit and trace_alternatives
- local trace_ligatures = trace_baseinit and trace_ligatures
-
- for l=1,#lookuplist do
- local lookupname = lookuplist[l]
- local lookupdata = lookuphash[lookupname]
- local lookuptype = lookuptypes[lookupname]
- for unicode, data in next, lookupdata do
- if lookuptype == "substitution" then
- if trace_singles then
- report_substitution(feature,lookupname,descriptions,unicode,data)
- end
- changed[unicode] = data
- elseif lookuptype == "alternate" then
- local replacement = data[alternate]
- if replacement then
- changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
- end
- elseif defaultalt == "first" then
- replacement = data[1]
- changed[unicode] = replacement
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- elseif defaultalt == "last" then
- replacement = data[#data]
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
- end
- else
- if trace_alternatives then
- report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
- end
- end
- elseif lookuptype == "ligature" then
- ligatures[#ligatures+1] = { unicode, data, lookupname }
- if trace_ligatures then
- report_ligature(feature,lookupname,descriptions,unicode,data)
- end
- end
- end
- end
-
- local nofligatures = #ligatures
-
- if nofligatures > 0 then
-
- local characters = tfmdata.characters
- local present = { }
- local done = trace_baseinit and trace_ligatures and { }
-
- for i=1,nofligatures do
- local ligature = ligatures[i]
- local unicode, tree = ligature[1], ligature[2]
- make_1(present,tree,"ctx_"..unicode)
- end
-
- for i=1,nofligatures do
- local ligature = ligatures[i]
- local unicode, tree, lookupname = ligature[1], ligature[2], ligature[3]
- make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookupname)
- end
-
- end
-
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local changed=tfmdata.changed
+ local lookuphash=resources.lookuphash
+ local lookuptypes=resources.lookuptypes
+ local ligatures={}
+ local alternate=tonumber(value)
+ local defaultalt=otf.defaultbasealternate
+ local trace_singles=trace_baseinit and trace_singles
+ local trace_alternatives=trace_baseinit and trace_alternatives
+ local trace_ligatures=trace_baseinit and trace_ligatures
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookupdata=lookuphash[lookupname]
+ local lookuptype=lookuptypes[lookupname]
+ for unicode,data in next,lookupdata do
+ if lookuptype=="substitution" then
+ if trace_singles then
+ report_substitution(feature,lookupname,descriptions,unicode,data)
+ end
+ changed[unicode]=data
+ elseif lookuptype=="alternate" then
+ local replacement=data[alternate]
+ if replacement then
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal")
+ end
+ elseif defaultalt=="first" then
+ replacement=data[1]
+ changed[unicode]=replacement
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ elseif defaultalt=="last" then
+ replacement=data[#data]
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt)
+ end
+ else
+ if trace_alternatives then
+ report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown")
+ end
+ end
+ elseif lookuptype=="ligature" then
+ ligatures[#ligatures+1]={ unicode,data,lookupname }
+ if trace_ligatures then
+ report_ligature(feature,lookupname,descriptions,unicode,data)
+ end
+ end
+ end
+ end
+ local nofligatures=#ligatures
+ if nofligatures>0 then
+ local characters=tfmdata.characters
+ local present={}
+ local done=trace_baseinit and trace_ligatures and {}
+ for i=1,nofligatures do
+ local ligature=ligatures[i]
+ local unicode,tree=ligature[1],ligature[2]
+ make_1(present,tree,"ctx_"..unicode)
+ end
+ for i=1,nofligatures do
+ local ligature=ligatures[i]
+ local unicode,tree,lookupname=ligature[1],ligature[2],ligature[3]
+ make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookupname)
+ end
+ end
end
-
local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist)
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local resources = tfmdata.resources
- local lookuphash = resources.lookuphash
- local traceindeed = trace_baseinit and trace_kerns
-
- -- check out this sharedkerns trickery
-
- for l=1,#lookuplist do
- local lookupname = lookuplist[l]
- local lookupdata = lookuphash[lookupname]
- for unicode, data in next, lookupdata do
- local character = characters[unicode]
- local kerns = character.kerns
- if not kerns then
- kerns = { }
- character.kerns = kerns
- end
- if traceindeed then
- for otherunicode, kern in next, data do
- if not kerns[otherunicode] and kern ~= 0 then
- kerns[otherunicode] = kern
- report_prepare("%s: base kern %s + %s => %s",cref(feature,lookup),
- gref(descriptions,unicode),gref(descriptions,otherunicode),kern)
- end
- end
- else
- for otherunicode, kern in next, data do
- if not kerns[otherunicode] and kern ~= 0 then
- kerns[otherunicode] = kern
- end
- end
- end
- end
- end
-
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local resources=tfmdata.resources
+ local lookuphash=resources.lookuphash
+ local traceindeed=trace_baseinit and trace_kerns
+ for l=1,#lookuplist do
+ local lookupname=lookuplist[l]
+ local lookupdata=lookuphash[lookupname]
+ for unicode,data in next,lookupdata do
+ local character=characters[unicode]
+ local kerns=character.kerns
+ if not kerns then
+ kerns={}
+ character.kerns=kerns
+ end
+ if traceindeed then
+ for otherunicode,kern in next,data do
+ if not kerns[otherunicode] and kern~=0 then
+ kerns[otherunicode]=kern
+ report_prepare("%s: base kern %s + %s => %s",cref(feature,lookup),
+ gref(descriptions,unicode),gref(descriptions,otherunicode),kern)
+ end
+ end
+ else
+ for otherunicode,kern in next,data do
+ if not kerns[otherunicode] and kern~=0 then
+ kerns[otherunicode]=kern
+ end
+ end
+ end
+ end
+ end
end
-
local function initializehashes(tfmdata)
- nodeinitializers.features(tfmdata)
+ nodeinitializers.features(tfmdata)
end
-
-basemethods.shared = {
- initializehashes = initializehashes,
- preparesubstitutions = preparesubstitutions,
- preparepositionings = preparepositionings,
+basemethods.shared={
+ initializehashes=initializehashes,
+ preparesubstitutions=preparesubstitutions,
+ preparepositionings=preparepositionings,
}
-
-basemethod = "independent"
-
+basemethod="independent"
local function featuresinitializer(tfmdata,value)
- if true then -- value then
- local t = trace_preparing and os.clock()
- local features = tfmdata.shared.features
- if features then
- applybasemethod("initializehashes",tfmdata)
- local collectlookups = otf.collectlookups
- local rawdata = tfmdata.shared.rawdata
- local properties = tfmdata.properties
- local script = properties.script
- local language = properties.language
- local basesubstitutions = rawdata.resources.features.gsub
- local basepositionings = rawdata.resources.features.gpos
- if basesubstitutions then
- for feature, data in next, basesubstitutions do
- local value = features[feature]
- if value then
- local validlookups, lookuplist = collectlookups(rawdata,feature,script,language)
- if validlookups then
- applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
- registerbasefeature(feature,value)
- end
- end
- end
- end
- if basepositions then
- for feature, data in next, basepositions do
- local value = features[feature]
- if value then
- local validlookups, lookuplist = collectlookups(rawdata,feature,script,language)
- if validlookups then
- applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist)
- registerbasefeature(feature,value)
- end
- end
- end
- end
- registerbasehash(tfmdata)
- end
- if trace_preparing then
- report_prepare("preparation time is %0.3f seconds for %s",os.clock()-t,tfmdata.properties.fullname or "?")
- end
- end
+ if true then
+ local t=trace_preparing and os.clock()
+ local features=tfmdata.shared.features
+ if features then
+ applybasemethod("initializehashes",tfmdata)
+ local collectlookups=otf.collectlookups
+ local rawdata=tfmdata.shared.rawdata
+ local properties=tfmdata.properties
+ local script=properties.script
+ local language=properties.language
+ local basesubstitutions=rawdata.resources.features.gsub
+ local basepositionings=rawdata.resources.features.gpos
+ if basesubstitutions then
+ for feature,data in next,basesubstitutions do
+ local value=features[feature]
+ if value then
+ local validlookups,lookuplist=collectlookups(rawdata,feature,script,language)
+ if validlookups then
+ applybasemethod("preparesubstitutions",tfmdata,feature,value,validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ end
+ end
+ end
+ end
+ if basepositions then
+ for feature,data in next,basepositions do
+ local value=features[feature]
+ if value then
+ local validlookups,lookuplist=collectlookups(rawdata,feature,script,language)
+ if validlookups then
+ applybasemethod("preparepositionings",tfmdata,feature,features[feature],validlookups,lookuplist)
+ registerbasefeature(feature,value)
+ end
+ end
+ end
+ end
+ registerbasehash(tfmdata)
+ end
+ if trace_preparing then
+ report_prepare("preparation time is %0.3f seconds for %s",os.clock()-t,tfmdata.properties.fullname or "?")
+ end
+ end
end
-
registerotffeature {
- name = "features",
- description = "features",
- default = true,
- initializers = {
- -- position = 1, -- after setscript (temp hack ... we need to force script / language to 1
- base = featuresinitializer,
- }
+ name="features",
+ description="features",
+ default=true,
+ initializers={
+ base=featuresinitializer,
+ }
}
-
--- independent : collect lookups independently (takes more runtime ... neglectable)
--- shared : shares lookups with node mode (takes more memory unless also a node mode variant is used ... noticeable)
-
-directives.register("fonts.otf.loader.basemethod", function(v)
- if basemethods[v] then
- basemethod = v
- end
+directives.register("fonts.otf.loader.basemethod",function(v)
+ if basemethods[v] then
+ basemethod=v
+ end
end)
end -- closure
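
The font-otb closure that just ended routes all preparation through a method table: handlers are grouped per method name ("independent" or "shared"), a directive switches the active one, and applybasemethod looks the handler up at call time. A minimal sketch of that dispatch, with dummy handlers in place of the real preparation functions:

    -- sketch only: directive-style switching between handler sets
    local basemethods = {
        independent = { preparesubstitutions = function() return "independent gsub" end },
        shared      = { preparesubstitutions = function() return "shared gsub"      end },
    }
    local basemethod = "independent"

    local function setbasemethod(v)
        if basemethods[v] then
            basemethod = v                           -- unknown names are silently ignored
        end
    end

    local function apply(what,...)
        local m = basemethods[basemethod][what]
        if m then
            return m(...)
        end
    end

    print(apply("preparesubstitutions"))             -- independent gsub
    setbasemethod("shared")
    print(apply("preparesubstitutions"))             -- shared gsub
    setbasemethod("nosuchmethod")                    -- ignored, shared stays active
    print(apply("preparesubstitutions"))             -- shared gsub
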
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['node-inj'] = {
- version = 1.001,
- comment = "companion to node-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
+if not modules then modules={} end modules ['node-inj']={
+ version=1.001,
+ comment="companion to node-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
}
-
--- This is very experimental (this will change when we have luatex > .50 and
--- a few pending thingies are available. Also, Idris needs to make a few more
--- test fonts. Btw, future versions of luatex will have extended glyph properties
--- that can be of help. Some optimizations can go away when we have faster machines.
-
-local next = next
-local utfchar = utf.char
-
-local trace_injections = false trackers.register("nodes.injections", function(v) trace_injections = v end)
-
-local report_injections = logs.reporter("nodes","injections")
-
-local attributes, nodes, node = attributes, nodes, node
-
-fonts = fonts
-local fontdata = fonts.hashes.identifiers
-
-nodes.injections = nodes.injections or { }
-local injections = nodes.injections
-
-local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
-local nodepool = nodes.pool
-local newkern = nodepool.kern
-
-local traverse_id = node.traverse_id
-local insert_node_before = node.insert_before
-local insert_node_after = node.insert_after
-
-local a_kernpair = attributes.private('kernpair')
-local a_ligacomp = attributes.private('ligacomp')
-local a_markbase = attributes.private('markbase')
-local a_markmark = attributes.private('markmark')
-local a_markdone = attributes.private('markdone')
-local a_cursbase = attributes.private('cursbase')
-local a_curscurs = attributes.private('curscurs')
-local a_cursdone = attributes.private('cursdone')
-
--- This injector has been tested by Idris Samawi Hamid (several arabic fonts as well as
--- the rather demanding Husayni font), Khaled Hosny (latin and arabic) and Kaj Eigner
--- (arabic, hebrew and thai) and myself (whatever font I come across). I'm pretty sure
--- that this code is not 100% okay but examples are needed to figure things out.
-
+local next=next
+local utfchar=utf.char
+local trace_injections=false trackers.register("nodes.injections",function(v) trace_injections=v end)
+local report_injections=logs.reporter("nodes","injections")
+local attributes,nodes,node=attributes,nodes,node
+fonts=fonts
+local fontdata=fonts.hashes.identifiers
+nodes.injections=nodes.injections or {}
+local injections=nodes.injections
+local nodecodes=nodes.nodecodes
+local glyph_code=nodecodes.glyph
+local nodepool=nodes.pool
+local newkern=nodepool.kern
+local traverse_id=node.traverse_id
+local insert_node_before=node.insert_before
+local insert_node_after=node.insert_after
+local a_kernpair=attributes.private('kernpair')
+local a_ligacomp=attributes.private('ligacomp')
+local a_markbase=attributes.private('markbase')
+local a_markmark=attributes.private('markmark')
+local a_markdone=attributes.private('markdone')
+local a_cursbase=attributes.private('cursbase')
+local a_curscurs=attributes.private('curscurs')
+local a_cursdone=attributes.private('cursdone')
function injections.installnewkern(nk)
- newkern = nk or newkern
+ newkern=nk or newkern
end
-
-local cursives = { }
-local marks = { }
-local kerns = { }
-
--- Currently we do gpos/kern in a bit inofficial way but when we have the extra fields in
--- glyphnodes to manipulate ht/dp/wd explicitly I will provide an alternative; also, we
--- can share tables.
-
--- For the moment we pass the r2l key ... volt/arabtype tests .. idris: this needs
--- checking with husayni (volt and fontforge).
-
+local cursives={}
+local marks={}
+local kerns={}
function injections.setcursive(start,nxt,factor,rlmode,exit,entry,tfmstart,tfmnext)
- local dx, dy = factor*(exit[1]-entry[1]), factor*(exit[2]-entry[2])
- local ws, wn = tfmstart.width, tfmnext.width
- local bound = #cursives + 1
- start[a_cursbase] = bound
- nxt[a_curscurs] = bound
- cursives[bound] = { rlmode, dx, dy, ws, wn }
- return dx, dy, bound
+ local dx,dy=factor*(exit[1]-entry[1]),factor*(exit[2]-entry[2])
+ local ws,wn=tfmstart.width,tfmnext.width
+ local bound=#cursives+1
+ start[a_cursbase]=bound
+ nxt[a_curscurs]=bound
+ cursives[bound]={ rlmode,dx,dy,ws,wn }
+ return dx,dy,bound
end
-
function injections.setpair(current,factor,rlmode,r2lflag,spec,tfmchr)
- local x, y, w, h = factor*spec[1], factor*spec[2], factor*spec[3], factor*spec[4]
- -- dy = y - h
- if x ~= 0 or w ~= 0 or y ~= 0 or h ~= 0 then
- local bound = current[a_kernpair]
- if bound then
- local kb = kerns[bound]
- -- inefficient but singles have less, but weird anyway, needs checking
- kb[2], kb[3], kb[4], kb[5] = (kb[2] or 0) + x, (kb[3] or 0) + y, (kb[4] or 0)+ w, (kb[5] or 0) + h
- else
- bound = #kerns + 1
- current[a_kernpair] = bound
- kerns[bound] = { rlmode, x, y, w, h, r2lflag, tfmchr.width }
- end
- return x, y, w, h, bound
+ local x,y,w,h=factor*spec[1],factor*spec[2],factor*spec[3],factor*spec[4]
+ if x~=0 or w~=0 or y~=0 or h~=0 then
+ local bound=current[a_kernpair]
+ if bound then
+ local kb=kerns[bound]
+ kb[2],kb[3],kb[4],kb[5]=(kb[2] or 0)+x,(kb[3] or 0)+y,(kb[4] or 0)+w,(kb[5] or 0)+h
+ else
+ bound=#kerns+1
+ current[a_kernpair]=bound
+ kerns[bound]={ rlmode,x,y,w,h,r2lflag,tfmchr.width }
end
- return x, y, w, h -- no bound
+ return x,y,w,h,bound
+ end
+ return x,y,w,h
end
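
injections.setkern and injections.setpair above do little more than scale design-unit values by the font factor, append the result to a registry, and tag the node with the resulting index through an attribute. A minimal sketch of the setkern side, using a plain table field instead of a node attribute and an illustrative factor value:

    -- sketch only: register a scaled kern and tag the glyph with its index
    local kerns = { }

    local function setkern(node,factor,rlmode,x)
        local dx = factor * x
        if dx ~= 0 then
            local bound = #kerns + 1
            node.kernpair = bound                    -- the real code sets a node attribute
            kerns[bound] = { rlmode, dx }
            return dx, bound
        else
            return 0, 0
        end
    end

    local glyph = { char = 0x41 }
    local dx, bound = setkern(glyph, 655360/1000, 1, -30)  -- illustrative factor, not from the source
    print(dx, bound, kerns[glyph.kernpair][2])              -- -19660.8  1  -19660.8
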
-
function injections.setkern(current,factor,rlmode,x,tfmchr)
- local dx = factor*x
- if dx ~= 0 then
- local bound = #kerns + 1
- current[a_kernpair] = bound
- kerns[bound] = { rlmode, dx }
- return dx, bound
+ local dx=factor*x
+ if dx~=0 then
+ local bound=#kerns+1
+ current[a_kernpair]=bound
+ kerns[bound]={ rlmode,dx }
+ return dx,bound
+ else
+ return 0,0
+ end
+end
+function injections.setmark(start,base,factor,rlmode,ba,ma,index)
+ local dx,dy=factor*(ba[1]-ma[1]),factor*(ba[2]-ma[2])
+ local bound=base[a_markbase]
+local index=1
+ if bound then
+ local mb=marks[bound]
+ if mb then
+index=#mb+1
+ mb[index]={ dx,dy,rlmode }
+ start[a_markmark]=bound
+ start[a_markdone]=index
+ return dx,dy,bound
else
- return 0, 0
+ report_injections("possible problem, U+%05X is base mark without data (id: %s)",base.char,bound)
end
+ end
+ index=index or 1
+ bound=#marks+1
+ base[a_markbase]=bound
+ start[a_markmark]=bound
+ start[a_markdone]=index
+ marks[bound]={ [index]={ dx,dy,rlmode } }
+ return dx,dy,bound
end
-
-function injections.setmark(start,base,factor,rlmode,ba,ma,index) -- ba=baseanchor, ma=markanchor
- local dx, dy = factor*(ba[1]-ma[1]), factor*(ba[2]-ma[2]) -- the index argument is no longer used but when this
- local bound = base[a_markbase] -- fails again we should pass it
-local index = 1
- if bound then
- local mb = marks[bound]
- if mb then
- -- if not index then index = #mb + 1 end
-index = #mb + 1
- mb[index] = { dx, dy, rlmode }
- start[a_markmark] = bound
- start[a_markdone] = index
- return dx, dy, bound
- else
- report_injections("possible problem, U+%05X is base mark without data (id: %s)",base.char,bound)
- end
- end
--- index = index or 1
- index = index or 1
- bound = #marks + 1
- base[a_markbase] = bound
- start[a_markmark] = bound
- start[a_markdone] = index
- marks[bound] = { [index] = { dx, dy, rlmode } }
- return dx, dy, bound
-end
-
local function dir(n)
- return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
+ return (n and n<0 and "r-to-l") or (n and n>0 and "l-to-r") or "unset"
end
-
local function trace(head)
- report_injections("begin run")
- for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- local kp = n[a_kernpair]
- local mb = n[a_markbase]
- local mm = n[a_markmark]
- local md = n[a_markdone]
- local cb = n[a_cursbase]
- local cc = n[a_curscurs]
- local char = n.char
- report_injections("char U+%05X, font %s, glyph %s",char,n.font,utfchar(char))
- if kp then
- local k = kerns[kp]
- if k[3] then
- report_injections(" pairkern: dir=%s, x=%s, y=%s, w=%s, h=%s",dir(k[1]),k[2] or "?",k[3] or "?",k[4] or "?",k[5] or "?")
- else
- report_injections(" kern: dir=%s, dx=%s",dir(k[1]),k[2] or "?")
- end
- end
- if mb then
- report_injections(" markbase: bound=%s",mb)
- end
- if mm then
- local m = marks[mm]
- if mb then
- local m = m[mb]
- if m then
- report_injections(" markmark: bound=%s, index=%s, dx=%s, dy=%s",mm,md or "?",m[1] or "?",m[2] or "?")
- else
- report_injections(" markmark: bound=%s, missing index",mm)
- end
- else
- m = m[1]
- report_injections(" markmark: bound=%s, dx=%s, dy=%s",mm,m and m[1] or "?",m and m[2] or "?")
- end
- end
- if cb then
- report_injections(" cursbase: bound=%s",cb)
- end
- if cc then
- local c = cursives[cc]
- report_injections(" curscurs: bound=%s, dir=%s, dx=%s, dy=%s",cc,dir(c[1]),c[2] or "?",c[3] or "?")
- end
+ report_injections("begin run")
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype<256 then
+ local kp=n[a_kernpair]
+ local mb=n[a_markbase]
+ local mm=n[a_markmark]
+ local md=n[a_markdone]
+ local cb=n[a_cursbase]
+ local cc=n[a_curscurs]
+ local char=n.char
+ report_injections("char U+%05X, font %s, glyph %s",char,n.font,utfchar(char))
+ if kp then
+ local k=kerns[kp]
+ if k[3] then
+ report_injections(" pairkern: dir=%s, x=%s, y=%s, w=%s, h=%s",dir(k[1]),k[2] or "?",k[3] or "?",k[4] or "?",k[5] or "?")
+ else
+ report_injections(" kern: dir=%s, dx=%s",dir(k[1]),k[2] or "?")
+ end
+ end
+ if mb then
+ report_injections(" markbase: bound=%s",mb)
+ end
+ if mm then
+ local m=marks[mm]
+ if mb then
+ local m=m[mb]
+ if m then
+ report_injections(" markmark: bound=%s, index=%s, dx=%s, dy=%s",mm,md or "?",m[1] or "?",m[2] or "?")
+ else
+ report_injections(" markmark: bound=%s, missing index",mm)
+ end
+ else
+ m=m[1]
+ report_injections(" markmark: bound=%s, dx=%s, dy=%s",mm,m and m[1] or "?",m and m[2] or "?")
end
+ end
+ if cb then
+ report_injections(" cursbase: bound=%s",cb)
+ end
+ if cc then
+ local c=cursives[cc]
+ report_injections(" curscurs: bound=%s, dir=%s, dx=%s, dy=%s",cc,dir(c[1]),c[2] or "?",c[3] or "?")
+ end
end
- report_injections("end run")
+ end
+ report_injections("end run")
end
-
--- todo: reuse tables (i.e. no collection), but will be extra fields anyway
--- todo: check for attribute
-
--- We can have a fast test on a font being processed, so we can check faster for marks etc
--- but I'll make a context variant anyway.
-
function injections.handler(head,where,keep)
- local has_marks, has_cursives, has_kerns = next(marks), next(cursives), next(kerns)
- if has_marks or has_cursives then
- if trace_injections then
- trace(head)
- end
- -- in the future variant we will not copy items but refs to tables
- local done, ky, rl, valid, cx, wx, mk, nofvalid = false, { }, { }, { }, { }, { }, { }, 0
- if has_kerns then -- move outside loop
- local nf, tm = nil, nil
- for n in traverse_id(glyph_code,head) do -- only needed for relevant fonts
- if n.subtype < 256 then
- nofvalid = nofvalid + 1
- valid[nofvalid] = n
- if n.font ~= nf then
- nf = n.font
- tm = fontdata[nf].resources.marks
- end
- if tm then
- mk[n] = tm[n.char]
- end
- local k = n[a_kernpair]
+ local has_marks,has_cursives,has_kerns=next(marks),next(cursives),next(kerns)
+ if has_marks or has_cursives then
+ if trace_injections then
+ trace(head)
+ end
+ local done,ky,rl,valid,cx,wx,mk,nofvalid=false,{},{},{},{},{},{},0
+ if has_kerns then
+ local nf,tm=nil,nil
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype<256 then
+ nofvalid=nofvalid+1
+ valid[nofvalid]=n
+ if n.font~=nf then
+ nf=n.font
+ tm=fontdata[nf].resources.marks
+ end
+ if tm then
+ mk[n]=tm[n.char]
+ end
+ local k=n[a_kernpair]
+ if k then
+ local kk=kerns[k]
+ if kk then
+ local x,y,w,h=kk[2] or 0,kk[3] or 0,kk[4] or 0,kk[5] or 0
+ local dy=y-h
+ if dy~=0 then
+ ky[n]=dy
+ end
+ if w~=0 or x~=0 then
+ wx[n]=kk
+ end
+ rl[n]=kk[1]
+ end
+ end
+ end
+ end
+ else
+ local nf,tm=nil,nil
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype<256 then
+ nofvalid=nofvalid+1
+ valid[nofvalid]=n
+ if n.font~=nf then
+ nf=n.font
+ tm=fontdata[nf].resources.marks
+ end
+ if tm then
+ mk[n]=tm[n.char]
+ end
+ end
+ end
+ end
+ if nofvalid>0 then
+ local cx={}
+ if has_kerns and next(ky) then
+ for n,k in next,ky do
+ n.yoffset=k
+ end
+ end
+ if has_cursives then
+ local p_cursbase,p=nil,nil
+ local t,d,maxt={},{},0
+ for i=1,nofvalid do
+ local n=valid[i]
+ if not mk[n] then
+ local n_cursbase=n[a_cursbase]
+ if p_cursbase then
+ local n_curscurs=n[a_curscurs]
+ if p_cursbase==n_curscurs then
+ local c=cursives[n_curscurs]
+ if c then
+ local rlmode,dx,dy,ws,wn=c[1],c[2],c[3],c[4],c[5]
+ if rlmode>=0 then
+ dx=dx-ws
+ else
+ dx=dx+wn
+ end
+ if dx~=0 then
+ cx[n]=dx
+ rl[n]=rlmode
+ end
+ dy=-dy
+ maxt=maxt+1
+ t[maxt]=p
+ d[maxt]=dy
+ else
+ maxt=0
+ end
+ end
+ elseif maxt>0 then
+ local ny=n.yoffset
+ for i=maxt,1,-1 do
+ ny=ny+d[i]
+ local ti=t[i]
+ ti.yoffset=ti.yoffset+ny
+ end
+ maxt=0
+ end
+ if not n_cursbase and maxt>0 then
+ local ny=n.yoffset
+ for i=maxt,1,-1 do
+ ny=ny+d[i]
+ local ti=t[i]
+ ti.yoffset=ny
+ end
+ maxt=0
+ end
+ p_cursbase,p=n_cursbase,n
+ end
+ end
+ if maxt>0 then
+ local ny=n.yoffset
+ for i=maxt,1,-1 do
+ ny=ny+d[i]
+ local ti=t[i]
+ ti.yoffset=ny
+ end
+ maxt=0
+ end
+ if not keep then
+ cursives={}
+ end
+ end
+ if has_marks then
+ for i=1,nofvalid do
+ local p=valid[i]
+ local p_markbase=p[a_markbase]
+ if p_markbase then
+ local mrks=marks[p_markbase]
+ local nofmarks=#mrks
+ for n in traverse_id(glyph_code,p.next) do
+ local n_markmark=n[a_markmark]
+ if p_markbase==n_markmark then
+ local index=n[a_markdone] or 1
+ local d=mrks[index]
+ if d then
+ local rlmode=d[3]
+ if rlmode and rlmode>=0 then
+ local k=wx[p]
if k then
- local kk = kerns[k]
- if kk then
- local x, y, w, h = kk[2] or 0, kk[3] or 0, kk[4] or 0, kk[5] or 0
- local dy = y - h
- if dy ~= 0 then
- ky[n] = dy
- end
- if w ~= 0 or x ~= 0 then
- wx[n] = kk
- end
- rl[n] = kk[1] -- could move in test
- end
- end
- end
- end
- else
- local nf, tm = nil, nil
- for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- nofvalid = nofvalid + 1
- valid[nofvalid] = n
- if n.font ~= nf then
- nf = n.font
- tm = fontdata[nf].resources.marks
- end
- if tm then
- mk[n] = tm[n.char]
+ n.xoffset=p.xoffset-p.width+d[1]-k[2]
+ else
+ n.xoffset=p.xoffset-p.width+d[1]
end
+ else
+ local k=wx[p]
+ if k then
+ n.xoffset=p.xoffset-d[1]-k[2]
+ else
+ n.xoffset=p.xoffset-d[1]
+ end
+ end
+ if mk[p] then
+ n.yoffset=p.yoffset+d[2]
+ else
+ n.yoffset=n.yoffset+p.yoffset+d[2]
+ end
+ if nofmarks==1 then
+ break
+ else
+ nofmarks=nofmarks-1
+ end
end
+ else
+ end
end
+ end
end
- if nofvalid > 0 then
- -- we can assume done == true because we have cursives and marks
- local cx = { }
- if has_kerns and next(ky) then
- for n, k in next, ky do
- n.yoffset = k
- end
- end
- -- todo: reuse t and use maxt
- if has_cursives then
- local p_cursbase, p = nil, nil
- -- since we need valid[n+1] we can also use a "while true do"
- local t, d, maxt = { }, { }, 0
- for i=1,nofvalid do -- valid == glyphs
- local n = valid[i]
- if not mk[n] then
- local n_cursbase = n[a_cursbase]
- if p_cursbase then
- local n_curscurs = n[a_curscurs]
- if p_cursbase == n_curscurs then
- local c = cursives[n_curscurs]
- if c then
- local rlmode, dx, dy, ws, wn = c[1], c[2], c[3], c[4], c[5]
- if rlmode >= 0 then
- dx = dx - ws
- else
- dx = dx + wn
- end
- if dx ~= 0 then
- cx[n] = dx
- rl[n] = rlmode
- end
- -- if rlmode and rlmode < 0 then
- dy = -dy
- -- end
- maxt = maxt + 1
- t[maxt] = p
- d[maxt] = dy
- else
- maxt = 0
- end
- end
- elseif maxt > 0 then
- local ny = n.yoffset
- for i=maxt,1,-1 do
- ny = ny + d[i]
- local ti = t[i]
- ti.yoffset = ti.yoffset + ny
- end
- maxt = 0
- end
- if not n_cursbase and maxt > 0 then
- local ny = n.yoffset
- for i=maxt,1,-1 do
- ny = ny + d[i]
- local ti = t[i]
- ti.yoffset = ny
- end
- maxt = 0
- end
- p_cursbase, p = n_cursbase, n
- end
- end
- if maxt > 0 then
- local ny = n.yoffset
- for i=maxt,1,-1 do
- ny = ny + d[i]
- local ti = t[i]
- ti.yoffset = ny
- end
- maxt = 0
- end
- if not keep then
- cursives = { }
- end
- end
- if has_marks then
- for i=1,nofvalid do
- local p = valid[i]
- local p_markbase = p[a_markbase]
- if p_markbase then
- local mrks = marks[p_markbase]
- local nofmarks = #mrks
- for n in traverse_id(glyph_code,p.next) do
- local n_markmark = n[a_markmark]
- if p_markbase == n_markmark then
- local index = n[a_markdone] or 1
- local d = mrks[index]
- if d then
- local rlmode = d[3]
- if rlmode and rlmode >= 0 then
- -- new per 2010-10-06, width adapted per 2010-02-03
- -- we used to negate the width of marks because in tfm
- -- that makes sense but we no longer do that so as a
- -- consequence the sign of p.width was changed
- local k = wx[p]
- if k then
- -- brill roman: A\char"0300 (but ugly anyway)
- n.xoffset = p.xoffset - p.width + d[1] - k[2] -- was + p.width
- else
- -- lucida: U\char"032F (default+mark)
- n.xoffset = p.xoffset - p.width + d[1] -- 01-05-2011
- end
- else
- local k = wx[p]
- if k then
- n.xoffset = p.xoffset - d[1] - k[2]
- else
- n.xoffset = p.xoffset - d[1]
- end
- end
- if mk[p] then
- n.yoffset = p.yoffset + d[2]
- else
- n.yoffset = n.yoffset + p.yoffset + d[2]
- end
- if nofmarks == 1 then
- break
- else
- nofmarks = nofmarks - 1
- end
- end
- else
- -- KE: there can be <mark> <mkmk> <mark> sequences in ligatures
- end
- end
- end
- end
- if not keep then
- marks = { }
- end
- end
- -- todo : combine
- if next(wx) then
- for n, k in next, wx do
- -- only w can be nil (kernclasses), can be sped up when w == nil
- local x, w = k[2] or 0, k[4]
- if w then
- local rl = k[1] -- r2l = k[6]
- local wx = w - x
- if rl < 0 then -- KE: don't use r2l here
- if wx ~= 0 then
- insert_node_before(head,n,newkern(wx))
- end
- if x ~= 0 then
- insert_node_after (head,n,newkern(x))
- end
- else
- if x ~= 0 then
- insert_node_before(head,n,newkern(x))
- end
- if wx ~= 0 then
- insert_node_after(head,n,newkern(wx))
- end
- end
- elseif x ~= 0 then
- -- this needs checking for rl < 0 but it is unlikely that a r2l script
- -- uses kernclasses between glyphs so we're probably safe (KE has a
- -- problematic font where marks interfere with rl < 0 in the previous
- -- case)
- insert_node_before(head,n,newkern(x))
- end
- end
- end
- if next(cx) then
- for n, k in next, cx do
- if k ~= 0 then
- local rln = rl[n]
- if rln and rln < 0 then
- insert_node_before(head,n,newkern(-k))
- else
- insert_node_before(head,n,newkern(k))
- end
- end
- end
- end
- if not keep then
- kerns = { }
- end
- return head, true
- elseif not keep then
- kerns, cursives, marks = { }, { }, { }
- end
- elseif has_kerns then
- if trace_injections then
- trace(head)
- end
- for n in traverse_id(glyph_code,head) do
- if n.subtype < 256 then
- local k = n[a_kernpair]
- if k then
- local kk = kerns[k]
- if kk then
- local rl, x, y, w = kk[1], kk[2] or 0, kk[3], kk[4]
- if y and y ~= 0 then
- n.yoffset = y -- todo: h ?
- end
- if w then
- -- copied from above
- -- local r2l = kk[6]
- local wx = w - x
- if rl < 0 then -- KE: don't use r2l here
- if wx ~= 0 then
- insert_node_before(head,n,newkern(wx))
- end
- if x ~= 0 then
- insert_node_after (head,n,newkern(x))
- end
- else
- if x ~= 0 then
- insert_node_before(head,n,newkern(x))
- end
- if wx ~= 0 then
- insert_node_after(head,n,newkern(wx))
- end
- end
- else
- -- simple (e.g. kernclass kerns)
- if x ~= 0 then
- insert_node_before(head,n,newkern(x))
- end
- end
- end
- end
+ if not keep then
+ marks={}
+ end
+ end
+ if next(wx) then
+ for n,k in next,wx do
+ local x,w=k[2] or 0,k[4]
+ if w then
+ local rl=k[1]
+ local wx=w-x
+ if rl<0 then
+ if wx~=0 then
+ insert_node_before(head,n,newkern(wx))
+ end
+ if x~=0 then
+ insert_node_after (head,n,newkern(x))
+ end
+ else
+ if x~=0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ if wx~=0 then
+ insert_node_after(head,n,newkern(wx))
+ end
+ end
+ elseif x~=0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ end
+ end
+ if next(cx) then
+ for n,k in next,cx do
+ if k~=0 then
+ local rln=rl[n]
+ if rln and rln<0 then
+ insert_node_before(head,n,newkern(-k))
+ else
+ insert_node_before(head,n,newkern(k))
end
+ end
end
- if not keep then
- kerns = { }
+ end
+ if not keep then
+ kerns={}
+ end
+ return head,true
+ elseif not keep then
+ kerns,cursives,marks={},{},{}
+ end
+ elseif has_kerns then
+ if trace_injections then
+ trace(head)
+ end
+ for n in traverse_id(glyph_code,head) do
+ if n.subtype<256 then
+ local k=n[a_kernpair]
+ if k then
+ local kk=kerns[k]
+ if kk then
+ local rl,x,y,w=kk[1],kk[2] or 0,kk[3],kk[4]
+ if y and y~=0 then
+ n.yoffset=y
+ end
+ if w then
+ local wx=w-x
+ if rl<0 then
+ if wx~=0 then
+ insert_node_before(head,n,newkern(wx))
+ end
+ if x~=0 then
+ insert_node_after (head,n,newkern(x))
+ end
+ else
+ if x~=0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ if wx~=0 then
+ insert_node_after(head,n,newkern(wx))
+ end
+ end
+ else
+ if x~=0 then
+ insert_node_before(head,n,newkern(x))
+ end
+ end
+ end
end
- return head, true
- else
- -- no tracing needed
+ end
end
- return head, false
+ if not keep then
+ kerns={}
+ end
+ return head,true
+ else
+ end
+ return head,false
end
end -- closure
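-- A minimal standalone sketch (not part of the merged file) of the pair-kern split
-- used in the injection code above: a kern record supplies x (k[2]) and w (k[4]),
-- wx = w - x, and for rl < 0 the before/after kerns are swapped. The function below
-- only mirrors that arithmetic with mocked values.
local function splitkern(rl,x,w)
  local wx = w - x
  if rl < 0 then
    return wx, x -- kern inserted before the glyph, kern inserted after (r2l)
  else
    return x, wx -- kern inserted before the glyph, kern inserted after (l2r)
  end
end
print(splitkern( 1,100,400)) --> 100  300
print(splitkern(-1,100,400)) --> 300  100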
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['font-ota'] = {
- version = 1.001,
- comment = "companion to font-otf.lua (analysing)",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['font-ota']={
+ version=1.001,
+ comment="companion to font-otf.lua (analysing)",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
--- this might become scrp-*.lua
-
-local type = type
-
-if not trackers then trackers = { register = function() end } end
-
-local trace_analyzing = false trackers.register("otf.analyzing", function(v) trace_analyzing = v end)
-
-local fonts, nodes, node = fonts, nodes, node
-
-local allocate = utilities.storage.allocate
-
-local otf = fonts.handlers.otf
-
-local analyzers = fonts.analyzers
-local initializers = allocate()
-local methods = allocate()
-
-analyzers.initializers = initializers
-analyzers.methods = methods
-analyzers.useunicodemarks = false
-
-local a_state = attributes.private('state')
-
-local nodecodes = nodes.nodecodes
-local glyph_code = nodecodes.glyph
-
-local traverse_id = node.traverse_id
-local traverse_node_list = node.traverse
-
-local fontdata = fonts.hashes.identifiers
-local categories = characters and characters.categories or { } -- sorry, only in context
-
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
-
---[[ldx--
-<p>Analyzers run per script and/or language and are needed in order to
-process features right.</p>
---ldx]]--
-
--- never use these numbers directly
-
-local s_init = 1 local s_rphf = 7
-local s_medi = 2 local s_half = 8
-local s_fina = 3 local s_pref = 9
-local s_isol = 4 local s_blwf = 10
-local s_mark = 5 local s_pstf = 11
-local s_rest = 6
-
-local states = {
- init = s_init,
- medi = s_medi,
- fina = s_fina,
- isol = s_isol,
- mark = s_mark,
- rest = s_rest,
- rphf = s_rphf,
- half = s_half,
- pref = s_pref,
- blwf = s_blwf,
- pstf = s_pstf,
+local type=type
+if not trackers then trackers={ register=function() end } end
+local trace_analyzing=false trackers.register("otf.analyzing",function(v) trace_analyzing=v end)
+local fonts,nodes,node=fonts,nodes,node
+local allocate=utilities.storage.allocate
+local otf=fonts.handlers.otf
+local analyzers=fonts.analyzers
+local initializers=allocate()
+local methods=allocate()
+analyzers.initializers=initializers
+analyzers.methods=methods
+analyzers.useunicodemarks=false
+local a_state=attributes.private('state')
+local nodecodes=nodes.nodecodes
+local glyph_code=nodecodes.glyph
+local math_code=nodecodes.math
+local traverse_id=node.traverse_id
+local traverse_node_list=node.traverse
+local endofmath=nodes.endofmath
+local fontdata=fonts.hashes.identifiers
+local categories=characters and characters.categories or {}
+local otffeatures=fonts.constructors.newfeatures("otf")
+local registerotffeature=otffeatures.register
+local s_init=1 local s_rphf=7
+local s_medi=2 local s_half=8
+local s_fina=3 local s_pref=9
+local s_isol=4 local s_blwf=10
+local s_mark=5 local s_pstf=11
+local s_rest=6
+local states={
+ init=s_init,
+ medi=s_medi,
+ fina=s_fina,
+ isol=s_isol,
+ mark=s_mark,
+ rest=s_rest,
+ rphf=s_rphf,
+ half=s_half,
+ pref=s_pref,
+ blwf=s_blwf,
+ pstf=s_pstf,
}
-
-local features = {
- init = s_init,
- medi = s_medi,
- fina = s_fina,
- isol = s_isol,
- -- mark = s_mark,
+local features={
+ init=s_init,
+ medi=s_medi,
+ fina=s_fina,
+ isol=s_isol,
}
-
-analyzers.states = states
-analyzers.features = features
-
--- todo: analyzers per script/lang, cross font, so we need a font id hash -> script
--- e.g. latin -> hyphenate, arab -> 1/2/3 analyze -- its own namespace
-
+analyzers.states=states
+analyzers.features=features
function analyzers.setstate(head,font)
- local useunicodemarks = analyzers.useunicodemarks
- local tfmdata = fontdata[font]
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
- local first, last, current, n, done = nil, nil, head, 0, false -- maybe make n boolean
- while current do
- local id = current.id
- if id == glyph_code and current.font == font then
- local char = current.char
- local d = descriptions[char]
- if d then
- if d.class == "mark" or (useunicodemarks and categories[char] == "mn") then
- done = true
- current[a_state] = s_mark
- elseif n == 0 then
- first, last, n = current, current, 1
- current[a_state] = s_init
- else
- last, n = current, n+1
- current[a_state] = s_medi
- end
- else -- finish
- if first and first == last then
- last[a_state] = s_isol
- elseif last then
- last[a_state] = s_fina
- end
- first, last, n = nil, nil, 0
- end
- elseif id == disc_code then
- -- always in the middle
- current[a_state] = s_midi
- last = current
- else -- finish
- if first and first == last then
- last[a_state] = s_isol
- elseif last then
- last[a_state] = s_fina
- end
- first, last, n = nil, nil, 0
- end
- current = current.next
- end
- if first and first == last then
- last[a_state] = s_isol
- elseif last then
- last[a_state] = s_fina
- end
- return head, done
-end
-
--- in the future we will use language/script attributes instead of the
--- font related value, but then we also need dynamic features which is
--- somewhat slower; and .. we need a chain of them
-
-local function analyzeinitializer(tfmdata,value) -- attr
- local script, language = otf.scriptandlanguage(tfmdata) -- attr
- local action = initializers[script]
- if not action then
- -- skip
- elseif type(action) == "function" then
- return action(tfmdata,value)
- else
- local action = action[language]
- if action then
- return action(tfmdata,value)
- end
- end
+ local useunicodemarks=analyzers.useunicodemarks
+ local tfmdata=fontdata[font]
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ local first,last,current,n,done=nil,nil,head,0,false
+ while current do
+ local id=current.id
+ if id==glyph_code and current.font==font then
+ local char=current.char
+ local d=descriptions[char]
+ if d then
+ if d.class=="mark" or (useunicodemarks and categories[char]=="mn") then
+ done=true
+ current[a_state]=s_mark
+ elseif n==0 then
+ first,last,n=current,current,1
+ current[a_state]=s_init
+ else
+ last,n=current,n+1
+ current[a_state]=s_medi
+ end
+ else
+ if first and first==last then
+ last[a_state]=s_isol
+ elseif last then
+ last[a_state]=s_fina
+ end
+ first,last,n=nil,nil,0
+ end
+ elseif id==disc_code then
+ current[a_state]=s_midi
+ last=current
+ else
+ if first and first==last then
+ last[a_state]=s_isol
+ elseif last then
+ last[a_state]=s_fina
+ end
+ first,last,n=nil,nil,0
+ end
+ current=current.next
+ end
+ if first and first==last then
+ last[a_state]=s_isol
+ elseif last then
+ last[a_state]=s_fina
+ end
+ return head,done
+end
+local function analyzeinitializer(tfmdata,value)
+ local script,language=otf.scriptandlanguage(tfmdata)
+ local action=initializers[script]
+ if not action then
+ elseif type(action)=="function" then
+ return action(tfmdata,value)
+ else
+ local action=action[language]
+ if action then
+ return action(tfmdata,value)
+ end
+ end
end
-
local function analyzeprocessor(head,font,attr)
- local tfmdata = fontdata[font]
- local script, language = otf.scriptandlanguage(tfmdata,attr)
- local action = methods[script]
- if not action then
- -- skip
- elseif type(action) == "function" then
- return action(head,font,attr)
- else
- action = action[language]
- if action then
- return action(head,font,attr)
- end
- end
- return head, false
+ local tfmdata=fontdata[font]
+ local script,language=otf.scriptandlanguage(tfmdata,attr)
+ local action=methods[script]
+ if not action then
+ elseif type(action)=="function" then
+ return action(head,font,attr)
+ else
+ action=action[language]
+ if action then
+ return action(head,font,attr)
+ end
+ end
+ return head,false
end
-
registerotffeature {
- name = "analyze",
- description = "analysis of (for instance) character classes",
- default = true,
- initializers = {
- node = analyzeinitializer,
- },
- processors = {
- position = 1,
- node = analyzeprocessor,
- }
+ name="analyze",
+ description="analysis of (for instance) character classes",
+ default=true,
+ initializers={
+ node=analyzeinitializer,
+ },
+ processors={
+ position=1,
+ node=analyzeprocessor,
+ }
}
-
--- latin
-
-methods.latn = analyzers.setstate
-
--- this info eventually will go into char-def and we will have a state
--- table for generic then
-
-local zwnj = 0x200C
-local zwj = 0x200D
-
-local isol = {
- [0x0600] = true, [0x0601] = true, [0x0602] = true, [0x0603] = true,
- [0x0608] = true, [0x060B] = true, [0x0621] = true, [0x0674] = true,
- [0x06DD] = true, [zwnj] = true,
+methods.latn=analyzers.setstate
+local zwnj=0x200C
+local zwj=0x200D
+local isol={
+ [0x0600]=true,[0x0601]=true,[0x0602]=true,[0x0603]=true,
+ [0x0608]=true,[0x060B]=true,[0x0621]=true,[0x0674]=true,
+ [0x06DD]=true,[zwnj]=true,
}
-
-local isol_fina = {
- [0x0622] = true, [0x0623] = true, [0x0624] = true, [0x0625] = true,
- [0x0627] = true, [0x0629] = true, [0x062F] = true, [0x0630] = true,
- [0x0631] = true, [0x0632] = true, [0x0648] = true, [0x0671] = true,
- [0x0672] = true, [0x0673] = true, [0x0675] = true, [0x0676] = true,
- [0x0677] = true, [0x0688] = true, [0x0689] = true, [0x068A] = true,
- [0x068B] = true, [0x068C] = true, [0x068D] = true, [0x068E] = true,
- [0x068F] = true, [0x0690] = true, [0x0691] = true, [0x0692] = true,
- [0x0693] = true, [0x0694] = true, [0x0695] = true, [0x0696] = true,
- [0x0697] = true, [0x0698] = true, [0x0699] = true, [0x06C0] = true,
- [0x06C3] = true, [0x06C4] = true, [0x06C5] = true, [0x06C6] = true,
- [0x06C7] = true, [0x06C8] = true, [0x06C9] = true, [0x06CA] = true,
- [0x06CB] = true, [0x06CD] = true, [0x06CF] = true, [0x06D2] = true,
- [0x06D3] = true, [0x06D5] = true, [0x06EE] = true, [0x06EF] = true,
- [0x0759] = true, [0x075A] = true, [0x075B] = true, [0x076B] = true,
- [0x076C] = true, [0x0771] = true, [0x0773] = true, [0x0774] = true,
- [0x0778] = true, [0x0779] = true, [0xFEF5] = true, [0xFEF7] = true,
- [0xFEF9] = true, [0xFEFB] = true,
-
- -- syriac
-
- [0x0710] = true, [0x0715] = true, [0x0716] = true, [0x0717] = true,
- [0x0718] = true, [0x0719] = true, [0x0728] = true, [0x072A] = true,
- [0x072C] = true, [0x071E] = true,
+local isol_fina={
+ [0x0622]=true,[0x0623]=true,[0x0624]=true,[0x0625]=true,
+ [0x0627]=true,[0x0629]=true,[0x062F]=true,[0x0630]=true,
+ [0x0631]=true,[0x0632]=true,[0x0648]=true,[0x0671]=true,
+ [0x0672]=true,[0x0673]=true,[0x0675]=true,[0x0676]=true,
+ [0x0677]=true,[0x0688]=true,[0x0689]=true,[0x068A]=true,
+ [0x068B]=true,[0x068C]=true,[0x068D]=true,[0x068E]=true,
+ [0x068F]=true,[0x0690]=true,[0x0691]=true,[0x0692]=true,
+ [0x0693]=true,[0x0694]=true,[0x0695]=true,[0x0696]=true,
+ [0x0697]=true,[0x0698]=true,[0x0699]=true,[0x06C0]=true,
+ [0x06C3]=true,[0x06C4]=true,[0x06C5]=true,[0x06C6]=true,
+ [0x06C7]=true,[0x06C8]=true,[0x06C9]=true,[0x06CA]=true,
+ [0x06CB]=true,[0x06CD]=true,[0x06CF]=true,[0x06D2]=true,
+ [0x06D3]=true,[0x06D5]=true,[0x06EE]=true,[0x06EF]=true,
+ [0x0759]=true,[0x075A]=true,[0x075B]=true,[0x076B]=true,
+ [0x076C]=true,[0x0771]=true,[0x0773]=true,[0x0774]=true,
+ [0x0778]=true,[0x0779]=true,[0xFEF5]=true,[0xFEF7]=true,
+ [0xFEF9]=true,[0xFEFB]=true,
+ [0x0710]=true,[0x0715]=true,[0x0716]=true,[0x0717]=true,
+ [0x0718]=true,[0x0719]=true,[0x0728]=true,[0x072A]=true,
+ [0x072C]=true,[0x071E]=true,
}
-
-local isol_fina_medi_init = {
- [0x0626] = true, [0x0628] = true, [0x062A] = true, [0x062B] = true,
- [0x062C] = true, [0x062D] = true, [0x062E] = true, [0x0633] = true,
- [0x0634] = true, [0x0635] = true, [0x0636] = true, [0x0637] = true,
- [0x0638] = true, [0x0639] = true, [0x063A] = true, [0x063B] = true,
- [0x063C] = true, [0x063D] = true, [0x063E] = true, [0x063F] = true,
- [0x0640] = true, [0x0641] = true, [0x0642] = true, [0x0643] = true,
- [0x0644] = true, [0x0645] = true, [0x0646] = true, [0x0647] = true,
- [0x0649] = true, [0x064A] = true, [0x066E] = true, [0x066F] = true,
- [0x0678] = true, [0x0679] = true, [0x067A] = true, [0x067B] = true,
- [0x067C] = true, [0x067D] = true, [0x067E] = true, [0x067F] = true,
- [0x0680] = true, [0x0681] = true, [0x0682] = true, [0x0683] = true,
- [0x0684] = true, [0x0685] = true, [0x0686] = true, [0x0687] = true,
- [0x069A] = true, [0x069B] = true, [0x069C] = true, [0x069D] = true,
- [0x069E] = true, [0x069F] = true, [0x06A0] = true, [0x06A1] = true,
- [0x06A2] = true, [0x06A3] = true, [0x06A4] = true, [0x06A5] = true,
- [0x06A6] = true, [0x06A7] = true, [0x06A8] = true, [0x06A9] = true,
- [0x06AA] = true, [0x06AB] = true, [0x06AC] = true, [0x06AD] = true,
- [0x06AE] = true, [0x06AF] = true, [0x06B0] = true, [0x06B1] = true,
- [0x06B2] = true, [0x06B3] = true, [0x06B4] = true, [0x06B5] = true,
- [0x06B6] = true, [0x06B7] = true, [0x06B8] = true, [0x06B9] = true,
- [0x06BA] = true, [0x06BB] = true, [0x06BC] = true, [0x06BD] = true,
- [0x06BE] = true, [0x06BF] = true, [0x06C1] = true, [0x06C2] = true,
- [0x06CC] = true, [0x06CE] = true, [0x06D0] = true, [0x06D1] = true,
- [0x06FA] = true, [0x06FB] = true, [0x06FC] = true, [0x06FF] = true,
- [0x0750] = true, [0x0751] = true, [0x0752] = true, [0x0753] = true,
- [0x0754] = true, [0x0755] = true, [0x0756] = true, [0x0757] = true,
- [0x0758] = true, [0x075C] = true, [0x075D] = true, [0x075E] = true,
- [0x075F] = true, [0x0760] = true, [0x0761] = true, [0x0762] = true,
- [0x0763] = true, [0x0764] = true, [0x0765] = true, [0x0766] = true,
- [0x0767] = true, [0x0768] = true, [0x0769] = true, [0x076A] = true,
- [0x076D] = true, [0x076E] = true, [0x076F] = true, [0x0770] = true,
- [0x0772] = true, [0x0775] = true, [0x0776] = true, [0x0777] = true,
- [0x077A] = true, [0x077B] = true, [0x077C] = true, [0x077D] = true,
- [0x077E] = true, [0x077F] = true,
-
- -- syriac
-
- [0x0712] = true, [0x0713] = true, [0x0714] = true, [0x071A] = true,
- [0x071B] = true, [0x071C] = true, [0x071D] = true, [0x071F] = true,
- [0x0720] = true, [0x0721] = true, [0x0722] = true, [0x0723] = true,
- [0x0724] = true, [0x0725] = true, [0x0726] = true, [0x0727] = true,
- [0x0729] = true, [0x072B] = true,
-
- -- also
-
- [zwj] = true,
+local isol_fina_medi_init={
+ [0x0626]=true,[0x0628]=true,[0x062A]=true,[0x062B]=true,
+ [0x062C]=true,[0x062D]=true,[0x062E]=true,[0x0633]=true,
+ [0x0634]=true,[0x0635]=true,[0x0636]=true,[0x0637]=true,
+ [0x0638]=true,[0x0639]=true,[0x063A]=true,[0x063B]=true,
+ [0x063C]=true,[0x063D]=true,[0x063E]=true,[0x063F]=true,
+ [0x0640]=true,[0x0641]=true,[0x0642]=true,[0x0643]=true,
+ [0x0644]=true,[0x0645]=true,[0x0646]=true,[0x0647]=true,
+ [0x0649]=true,[0x064A]=true,[0x066E]=true,[0x066F]=true,
+ [0x0678]=true,[0x0679]=true,[0x067A]=true,[0x067B]=true,
+ [0x067C]=true,[0x067D]=true,[0x067E]=true,[0x067F]=true,
+ [0x0680]=true,[0x0681]=true,[0x0682]=true,[0x0683]=true,
+ [0x0684]=true,[0x0685]=true,[0x0686]=true,[0x0687]=true,
+ [0x069A]=true,[0x069B]=true,[0x069C]=true,[0x069D]=true,
+ [0x069E]=true,[0x069F]=true,[0x06A0]=true,[0x06A1]=true,
+ [0x06A2]=true,[0x06A3]=true,[0x06A4]=true,[0x06A5]=true,
+ [0x06A6]=true,[0x06A7]=true,[0x06A8]=true,[0x06A9]=true,
+ [0x06AA]=true,[0x06AB]=true,[0x06AC]=true,[0x06AD]=true,
+ [0x06AE]=true,[0x06AF]=true,[0x06B0]=true,[0x06B1]=true,
+ [0x06B2]=true,[0x06B3]=true,[0x06B4]=true,[0x06B5]=true,
+ [0x06B6]=true,[0x06B7]=true,[0x06B8]=true,[0x06B9]=true,
+ [0x06BA]=true,[0x06BB]=true,[0x06BC]=true,[0x06BD]=true,
+ [0x06BE]=true,[0x06BF]=true,[0x06C1]=true,[0x06C2]=true,
+ [0x06CC]=true,[0x06CE]=true,[0x06D0]=true,[0x06D1]=true,
+ [0x06FA]=true,[0x06FB]=true,[0x06FC]=true,[0x06FF]=true,
+ [0x0750]=true,[0x0751]=true,[0x0752]=true,[0x0753]=true,
+ [0x0754]=true,[0x0755]=true,[0x0756]=true,[0x0757]=true,
+ [0x0758]=true,[0x075C]=true,[0x075D]=true,[0x075E]=true,
+ [0x075F]=true,[0x0760]=true,[0x0761]=true,[0x0762]=true,
+ [0x0763]=true,[0x0764]=true,[0x0765]=true,[0x0766]=true,
+ [0x0767]=true,[0x0768]=true,[0x0769]=true,[0x076A]=true,
+ [0x076D]=true,[0x076E]=true,[0x076F]=true,[0x0770]=true,
+ [0x0772]=true,[0x0775]=true,[0x0776]=true,[0x0777]=true,
+ [0x077A]=true,[0x077B]=true,[0x077C]=true,[0x077D]=true,
+ [0x077E]=true,[0x077F]=true,
+ [0x0712]=true,[0x0713]=true,[0x0714]=true,[0x071A]=true,
+ [0x071B]=true,[0x071C]=true,[0x071D]=true,[0x071F]=true,
+ [0x0720]=true,[0x0721]=true,[0x0722]=true,[0x0723]=true,
+ [0x0724]=true,[0x0725]=true,[0x0726]=true,[0x0727]=true,
+ [0x0729]=true,[0x072B]=true,
+ [zwj]=true,
}
-
-local arab_warned = { }
-
--- todo: gref
-
+local arab_warned={}
local function warning(current,what)
- local char = current.char
- if not arab_warned[char] then
- log.report("analyze","arab: character %s (U+%05X) has no %s class", char, char, what)
- arab_warned[char] = true
- end
+ local char=current.char
+ if not arab_warned[char] then
+ log.report("analyze","arab: character %s (U+%05X) has no %s class",char,char,what)
+ arab_warned[char]=true
+ end
end
-
local function finish(first,last)
- if last then
- if first == last then
- local fc = first.char
- if isol_fina_medi_init[fc] or isol_fina[fc] then
- first[a_state] = s_isol
- else
- warning(first,"isol")
- first[a_state] = s_error
- end
- else
- local lc = last.char
- if isol_fina_medi_init[lc] or isol_fina[lc] then -- why isol here ?
- -- if laststate == 1 or laststate == 2 or laststate == 4 then
- last[a_state] = s_fina
- else
- warning(last,"fina")
- last[a_state] = s_error
- end
- end
- first, last = nil, nil
- elseif first then
- -- first and last are either both set or both nil, so we never come here
- local fc = first.char
- if isol_fina_medi_init[fc] or isol_fina[fc] then
- first[a_state] = s_isol
- else
- warning(first,"isol")
- first[a_state] = s_error
- end
- first = nil
- end
- return first, last
-end
-
-function methods.arab(head,font,attr) -- maybe make a special version with no trace
- local useunicodemarks = analyzers.useunicodemarks
- local tfmdata = fontdata[font]
- local marks = tfmdata.resources.marks
- local first, last, current, done = nil, nil, head, false
- while current do
- if current.id == glyph_code and current.font == font and current.subtype<256 and not current[a_state] then
- done = true
- local char = current.char
- if marks[char] or (useunicodemarks and categories[char] == "mn") then
- current[a_state] = s_mark
- elseif isol[char] then -- can be zwj or zwnj too
- first, last = finish(first,last)
- current[a_state] = s_isol
- first, last = nil, nil
- elseif not first then
- if isol_fina_medi_init[char] then
- current[a_state] = s_init
- first, last = first or current, current
- elseif isol_fina[char] then
- current[a_state] = s_isol
- first, last = nil, nil
- else -- no arab
- first, last = finish(first,last)
- end
- elseif isol_fina_medi_init[char] then
- first, last = first or current, current
- current[a_state] = s_medi
- elseif isol_fina[char] then
- if not last[a_state] == s_init then
- -- tricky, we need to check what last may be !
- last[a_state] = s_medi
- end
- current[a_state] = s_fina
- first, last = nil, nil
- elseif char >= 0x0600 and char <= 0x06FF then
- current[a_state] = s_rest
- first, last = finish(first,last)
- else --no
- first, last = finish(first,last)
- end
- else
- first, last = finish(first,last)
- end
- current = current.next
+ if last then
+ if first==last then
+ local fc=first.char
+ if isol_fina_medi_init[fc] or isol_fina[fc] then
+ first[a_state]=s_isol
+ else
+ warning(first,"isol")
+ first[a_state]=s_error
+ end
+ else
+ local lc=last.char
+ if isol_fina_medi_init[lc] or isol_fina[lc] then
+ last[a_state]=s_fina
+ else
+ warning(last,"fina")
+ last[a_state]=s_error
+ end
+ end
+ first,last=nil,nil
+ elseif first then
+ local fc=first.char
+ if isol_fina_medi_init[fc] or isol_fina[fc] then
+ first[a_state]=s_isol
+ else
+ warning(first,"isol")
+ first[a_state]=s_error
+ end
+ first=nil
+ end
+ return first,last
+end
+function methods.arab(head,font,attr)
+ local useunicodemarks=analyzers.useunicodemarks
+ local tfmdata=fontdata[font]
+ local marks=tfmdata.resources.marks
+ local first,last,current,done=nil,nil,head,false
+ while current do
+ if current.id==glyph_code and current.font==font and current.subtype<256 and not current[a_state] then
+ done=true
+ local char=current.char
+ if marks[char] or (useunicodemarks and categories[char]=="mn") then
+ current[a_state]=s_mark
+ elseif isol[char] then
+ first,last=finish(first,last)
+ current[a_state]=s_isol
+ first,last=nil,nil
+ elseif not first then
+ if isol_fina_medi_init[char] then
+ current[a_state]=s_init
+ first,last=first or current,current
+ elseif isol_fina[char] then
+ current[a_state]=s_isol
+ first,last=nil,nil
+ else
+ first,last=finish(first,last)
+ end
+ elseif isol_fina_medi_init[char] then
+ first,last=first or current,current
+ current[a_state]=s_medi
+ elseif isol_fina[char] then
+ if not last[a_state]==s_init then
+ last[a_state]=s_medi
+ end
+ current[a_state]=s_fina
+ first,last=nil,nil
+ elseif char>=0x0600 and char<=0x06FF then
+ current[a_state]=s_rest
+ first,last=finish(first,last)
+ else
+ first,last=finish(first,last)
+ end
+ else
+ first,last=finish(first,last)
end
- first, last = finish(first,last)
- return head, done
+ current=current.next
+ end
+ first,last=finish(first,last)
+ return head,done
end
-
-methods.syrc = methods.arab
-
+methods.syrc=methods.arab
directives.register("otf.analyze.useunicodemarks",function(v)
- analyzers.useunicodemarks = v
+ analyzers.useunicodemarks=v
end)
end -- closure
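-- A minimal standalone mock (not part of the merged file) of the dispatch used by
-- analyzeinitializer and analyzeprocessor above: an entry in the methods table can
-- be a function or a table indexed by language, as with methods.latn and methods.arab.
-- The "test" script tag and the string arguments are hypothetical.
local methods = {
  test = { dflt = function(head,font,attr) return head, true end },
}
local function dispatch(script,language,head,font,attr)
  local action = methods[script]
  if not action then
    -- no analyzer registered for this script
  elseif type(action) == "function" then
    return action(head,font,attr)
  else
    action = action[language]
    if action then
      return action(head,font,attr)
    end
  end
  return head, false
end
print(dispatch("test","dflt","head",1,nil)) --> head   true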
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['font-otn'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files",
+if not modules then modules={} end modules ['font-otn']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files",
}
-
--- preprocessors = { "nodes" }
-
--- this is still somewhat preliminary and it will get better in due time;
--- much functionality could only be implemented thanks to the husayni font
--- of Idris Samawi Hamid, to whom we dedicate this module.
-
--- in retrospect it always looks easy but believe it or not, it took a lot
--- of work to get proper open type support done: buggy fonts, fuzzy specs,
--- special made testfonts, many skype sessions between taco, idris and me,
--- torture tests etc etc ... unfortunately the code does not show how much
--- time it took ...
-
--- todo:
---
--- kerning is probably not yet ok for latin around disc nodes
--- extension infrastructure (for usage out of context)
--- sorting features according to vendors/renderers
--- alternative loop quitters
--- check cursive and r2l
--- find out where ignore-mark-classes went
--- default features (per language, script)
--- handle positions (we need example fonts)
--- handle gpos_single (we might want an extra width field in glyph nodes because adding kerns might interfere)
--- mark (to mark) code is still not what it should be (too messy but we need some more extreme husayni tests)
--- remove some optimizations (when I have a faster machine)
-
-
---[[ldx--
-<p>This module is a bit more split up than I'd like but since we also want to test
-with plain <l n='tex'/> it has to be so. This module is part of <l n='context'/>
-and discussion about improvements and functionality mostly happens on the
-<l n='context'/> mailing list.</p>
-
-<p>The specification of OpenType is kind of vague. Apart from the lack of a proper
-free specification, there's also the problem that Microsoft and Adobe
-may have their own interpretation of how and in what order to apply features.
-In general the Microsoft website has more detailed specifications and is a
-better reference. There is also some information in the FontForge help files.</p>
-
-<p>Because there is so much possible, fonts might contain bugs and/or be made to
-work with certain renderers. These may evolve over time, which may have the side
-effect that suddenly fonts behave differently.</p>
-
-<p>After a lot of experiments (mostly by Taco, me and Idris) we're now at yet another
-implementation. Of course all errors are mine and of course the code can be
-improved. There are quite some optimizations going on here and processing speed
-is currently acceptable. Not all functions are implemented yet, often because I
-lack the fonts for testing. Many scripts are not yet supported either, but I will
-look into them as soon as <l n='context'/> users ask for it.</p>
-
-<p>Because there are different interpretations possible, I will extend the code
-with more (configurable) variants. I can also add hooks for users so that they can
-write their own extensions.</p>
-
-<p>Glyphs are indexed not by unicode but in their own way. This is because there is no
-relationship with unicode at all, apart from the fact that a font might cover certain
-ranges of characters. One character can have multiple shapes. However, at the
-<l n='tex'/> end we use unicode so and all extra glyphs are mapped into a private
-space. This is needed because we need to access them and <l n='tex'/> has to include
-then in the output eventually.</p>
-
-<p>The raw table as it comes from <l n='fontforge'/> gets reorganized to fit our needs.
-In <l n='context'/> that table is packed (similar tables are shared) and cached on disk
-so that successive runs can use the optimized table (after loading the table is
-unpacked). The flattening code used later is a prelude to an even more compact table
-format (and as such it keeps evolving).</p>
-
-<p>This module is sparsely documented because it is a moving target. The table format
-of the reader changes and we experiment a lot with different methods for supporting
-features.</p>
-
-<p>As with the <l n='afm'/> code, we may decide to store more information in the
-<l n='otf'/> table.</p>
-
-<p>Incrementing the version number will force a re-cache. We jump the number by one
-when there's a fix in the <l n='fontforge'/> library or <l n='lua'/> code that
-results in different tables.</p>
---ldx]]--
-
--- action handler chainproc chainmore comment
---
--- gsub_single ok ok ok
--- gsub_multiple ok ok not implemented yet
--- gsub_alternate ok ok not implemented yet
--- gsub_ligature ok ok ok
--- gsub_context ok --
--- gsub_contextchain ok --
--- gsub_reversecontextchain ok --
--- chainsub -- ok
--- reversesub -- ok
--- gpos_mark2base ok ok
--- gpos_mark2ligature ok ok
--- gpos_mark2mark ok ok
--- gpos_cursive ok untested
--- gpos_single ok ok
--- gpos_pair ok ok
--- gpos_context ok --
--- gpos_contextchain ok --
---
--- todo: contextpos and contextsub and class stuff
---
--- actions:
---
--- handler : actions triggered by lookup
--- chainproc : actions triggered by contextual lookup
--- chainmore : multiple substitutions triggered by contextual lookup (e.g. fij -> f + ij)
---
--- remark: the 'not implemented yet' variants will be done when we have fonts that use them
--- remark: we need to check what to do with discretionaries
-
--- We used to have independent hashes for lookups but as the tags are unique
--- we now use only one hash. If needed we can have multiple again but in that
--- case I will probably prefix (i.e. rename) the lookups in the cached font file.
-
--- Todo: make plugin feature that operates on char/glyphnode arrays
-
-local concat, insert, remove = table.concat, table.insert, table.remove
-local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip
-local type, next, tonumber, tostring = type, next, tonumber, tostring
-local lpegmatch = lpeg.match
-local random = math.random
-
-local logs, trackers, nodes, attributes = logs, trackers, nodes, attributes
-
-local registertracker = trackers.register
-
-local fonts = fonts
-local otf = fonts.handlers.otf
-
-local trace_lookups = false registertracker("otf.lookups", function(v) trace_lookups = v end)
-local trace_singles = false registertracker("otf.singles", function(v) trace_singles = v end)
-local trace_multiples = false registertracker("otf.multiples", function(v) trace_multiples = v end)
-local trace_alternatives = false registertracker("otf.alternatives", function(v) trace_alternatives = v end)
-local trace_ligatures = false registertracker("otf.ligatures", function(v) trace_ligatures = v end)
-local trace_contexts = false registertracker("otf.contexts", function(v) trace_contexts = v end)
-local trace_marks = false registertracker("otf.marks", function(v) trace_marks = v end)
-local trace_kerns = false registertracker("otf.kerns", function(v) trace_kerns = v end)
-local trace_cursive = false registertracker("otf.cursive", function(v) trace_cursive = v end)
-local trace_preparing = false registertracker("otf.preparing", function(v) trace_preparing = v end)
-local trace_bugs = false registertracker("otf.bugs", function(v) trace_bugs = v end)
-local trace_details = false registertracker("otf.details", function(v) trace_details = v end)
-local trace_applied = false registertracker("otf.applied", function(v) trace_applied = v end)
-local trace_steps = false registertracker("otf.steps", function(v) trace_steps = v end)
-local trace_skips = false registertracker("otf.skips", function(v) trace_skips = v end)
-local trace_directions = false registertracker("otf.directions", function(v) trace_directions = v end)
-
-local report_direct = logs.reporter("fonts","otf direct")
-local report_subchain = logs.reporter("fonts","otf subchain")
-local report_chain = logs.reporter("fonts","otf chain")
-local report_process = logs.reporter("fonts","otf process")
-local report_prepare = logs.reporter("fonts","otf prepare")
-local report_warning = logs.reporter("fonts","otf warning")
-
-registertracker("otf.verbose_chain", function(v) otf.setcontextchain(v and "verbose") end)
-registertracker("otf.normal_chain", function(v) otf.setcontextchain(v and "normal") end)
-
-registertracker("otf.replacements", "otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
+local concat,insert,remove=table.concat,table.insert,table.remove
+local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip
+local type,next,tonumber,tostring=type,next,tonumber,tostring
+local lpegmatch=lpeg.match
+local random=math.random
+local logs,trackers,nodes,attributes=logs,trackers,nodes,attributes
+local registertracker=trackers.register
+local fonts=fonts
+local otf=fonts.handlers.otf
+local trace_lookups=false registertracker("otf.lookups",function(v) trace_lookups=v end)
+local trace_singles=false registertracker("otf.singles",function(v) trace_singles=v end)
+local trace_multiples=false registertracker("otf.multiples",function(v) trace_multiples=v end)
+local trace_alternatives=false registertracker("otf.alternatives",function(v) trace_alternatives=v end)
+local trace_ligatures=false registertracker("otf.ligatures",function(v) trace_ligatures=v end)
+local trace_contexts=false registertracker("otf.contexts",function(v) trace_contexts=v end)
+local trace_marks=false registertracker("otf.marks",function(v) trace_marks=v end)
+local trace_kerns=false registertracker("otf.kerns",function(v) trace_kerns=v end)
+local trace_cursive=false registertracker("otf.cursive",function(v) trace_cursive=v end)
+local trace_preparing=false registertracker("otf.preparing",function(v) trace_preparing=v end)
+local trace_bugs=false registertracker("otf.bugs",function(v) trace_bugs=v end)
+local trace_details=false registertracker("otf.details",function(v) trace_details=v end)
+local trace_applied=false registertracker("otf.applied",function(v) trace_applied=v end)
+local trace_steps=false registertracker("otf.steps",function(v) trace_steps=v end)
+local trace_skips=false registertracker("otf.skips",function(v) trace_skips=v end)
+local trace_directions=false registertracker("otf.directions",function(v) trace_directions=v end)
+local report_direct=logs.reporter("fonts","otf direct")
+local report_subchain=logs.reporter("fonts","otf subchain")
+local report_chain=logs.reporter("fonts","otf chain")
+local report_process=logs.reporter("fonts","otf process")
+local report_prepare=logs.reporter("fonts","otf prepare")
+local report_warning=logs.reporter("fonts","otf warning")
+registertracker("otf.verbose_chain",function(v) otf.setcontextchain(v and "verbose") end)
+registertracker("otf.normal_chain",function(v) otf.setcontextchain(v and "normal") end)
+registertracker("otf.replacements","otf.singles,otf.multiples,otf.alternatives,otf.ligatures")
registertracker("otf.positions","otf.marks,otf.kerns,otf.cursive")
registertracker("otf.actions","otf.replacements,otf.positions")
registertracker("otf.injections","nodes.injections")
-
registertracker("*otf.sample","otf.steps,otf.actions,otf.analyzing")
-
-local insert_node_after = node.insert_after
-local delete_node = nodes.delete
-local copy_node = node.copy
-local find_node_tail = node.tail or node.slide
-local flush_node_list = node.flush_list
-
-local setmetatableindex = table.setmetatableindex
-
-local zwnj = 0x200C
-local zwj = 0x200D
-local wildcard = "*"
-local default = "dflt"
-
-local nodecodes = nodes.nodecodes
-local whatcodes = nodes.whatcodes
-local glyphcodes = nodes.glyphcodes
-
-local glyph_code = nodecodes.glyph
-local glue_code = nodecodes.glue
-local disc_code = nodecodes.disc
-local whatsit_code = nodecodes.whatsit
-
-local dir_code = whatcodes.dir
-local localpar_code = whatcodes.localpar
-
-local ligature_code = glyphcodes.ligature
-
-local privateattribute = attributes.private
-
--- Something is messed up: we have two mark / ligature indices, one at the injection
--- end and one here ... this is bases in KE's patches but there is something fishy
--- there as I'm pretty sure that for husayni we need some connection (as it's much
--- more complex than an average font) but I need proper examples of all cases, not
--- of only some.
-
-local a_state = privateattribute('state')
-local a_markbase = privateattribute('markbase')
-local a_markmark = privateattribute('markmark')
-local a_markdone = privateattribute('markdone') -- assigned at the injection end
-local a_cursbase = privateattribute('cursbase')
-local a_curscurs = privateattribute('curscurs')
-local a_cursdone = privateattribute('cursdone')
-local a_kernpair = privateattribute('kernpair')
-local a_ligacomp = privateattribute('ligacomp') -- assigned here (ideally it should be combined)
-
-local injections = nodes.injections
-local setmark = injections.setmark
-local setcursive = injections.setcursive
-local setkern = injections.setkern
-local setpair = injections.setpair
-
-local markonce = true
-local cursonce = true
-local kernonce = true
-
-local fonthashes = fonts.hashes
-local fontdata = fonthashes.identifiers
-
-local otffeatures = fonts.constructors.newfeatures("otf")
-local registerotffeature = otffeatures.register
-
-local onetimemessage = fonts.loggers.onetimemessage
-
-otf.defaultnodealternate = "none" -- first last
-
--- we share some vars here, after all, we have no nested lookups and less code
-
-local tfmdata = false
-local characters = false
-local descriptions = false
-local resources = false
-local marks = false
-local currentfont = false
-local lookuptable = false
-local anchorlookups = false
-local lookuptypes = false
-local handlers = { }
-local rlmode = 0
-local featurevalue = false
-
--- head is always a whatsit so we can safely assume that head is not changed
-
--- we use this for special testing and documentation
-
-local checkstep = (nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end
-local registerstep = (nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end
-local registermessage = (nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end
-
+local insert_node_after=node.insert_after
+local delete_node=nodes.delete
+local copy_node=node.copy
+local find_node_tail=node.tail or node.slide
+local flush_node_list=node.flush_list
+local endofmath=nodes.endofmath
+local setmetatableindex=table.setmetatableindex
+local zwnj=0x200C
+local zwj=0x200D
+local wildcard="*"
+local default="dflt"
+local nodecodes=nodes.nodecodes
+local whatcodes=nodes.whatcodes
+local glyphcodes=nodes.glyphcodes
+local glyph_code=nodecodes.glyph
+local glue_code=nodecodes.glue
+local disc_code=nodecodes.disc
+local whatsit_code=nodecodes.whatsit
+local math_code=nodecodes.math
+local dir_code=whatcodes.dir
+local localpar_code=whatcodes.localpar
+local ligature_code=glyphcodes.ligature
+local privateattribute=attributes.private
+local a_state=privateattribute('state')
+local a_markbase=privateattribute('markbase')
+local a_markmark=privateattribute('markmark')
+local a_markdone=privateattribute('markdone')
+local a_cursbase=privateattribute('cursbase')
+local a_curscurs=privateattribute('curscurs')
+local a_cursdone=privateattribute('cursdone')
+local a_kernpair=privateattribute('kernpair')
+local a_ligacomp=privateattribute('ligacomp')
+local injections=nodes.injections
+local setmark=injections.setmark
+local setcursive=injections.setcursive
+local setkern=injections.setkern
+local setpair=injections.setpair
+local markonce=true
+local cursonce=true
+local kernonce=true
+local fonthashes=fonts.hashes
+local fontdata=fonthashes.identifiers
+local otffeatures=fonts.constructors.newfeatures("otf")
+local registerotffeature=otffeatures.register
+local onetimemessage=fonts.loggers.onetimemessage
+otf.defaultnodealternate="none"
+local tfmdata=false
+local characters=false
+local descriptions=false
+local resources=false
+local marks=false
+local currentfont=false
+local lookuptable=false
+local anchorlookups=false
+local lookuptypes=false
+local handlers={}
+local rlmode=0
+local featurevalue=false
+local checkstep=(nodes and nodes.tracers and nodes.tracers.steppers.check) or function() end
+local registerstep=(nodes and nodes.tracers and nodes.tracers.steppers.register) or function() end
+local registermessage=(nodes and nodes.tracers and nodes.tracers.steppers.message) or function() end
local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_direct(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_direct(...)
end
-
local function logwarning(...)
- report_direct(...)
+ report_direct(...)
end
-
local function gref(n)
- if type(n) == "number" then
- local description = descriptions[n]
- local name = description and description.name
- if name then
- return format("U+%05X (%s)",n,name)
- else
- return format("U+%05X",n)
- end
- elseif not n then
- return "<error in tracing>"
+ if type(n)=="number" then
+ local description=descriptions[n]
+ local name=description and description.name
+ if name then
+ return format("U+%05X (%s)",n,name)
else
- local num, nam = { }, { }
- for i=1,#n do
- local ni = n[i]
- if tonumber(ni) then -- later we will start at 2
- local di = descriptions[ni]
- num[i] = format("U+%05X",ni)
- nam[i] = di and di.name or "?"
- end
- end
- return format("%s (%s)",concat(num," "), concat(nam," "))
- end
+ return format("U+%05X",n)
+ end
+ elseif not n then
+ return "<error in tracing>"
+ else
+ local num,nam={},{}
+ for i=1,#n do
+ local ni=n[i]
+ if tonumber(ni) then
+ local di=descriptions[ni]
+ num[i]=format("U+%05X",ni)
+ nam[i]=di and di.name or "?"
+ end
+ end
+ return format("%s (%s)",concat(num," "),concat(nam," "))
+ end
end
-
local function cref(kind,chainname,chainlookupname,lookupname,index)
- if index then
- return format("feature %s, chain %s, sub %s, lookup %s, index %s",kind,chainname,chainlookupname,lookupname,index)
- elseif lookupname then
- return format("feature %s, chain %s, sub %s, lookup %s",kind,chainname or "?",chainlookupname or "?",lookupname)
- elseif chainlookupname then
- return format("feature %s, chain %s, sub %s",kind,chainname or "?",chainlookupname)
- elseif chainname then
- return format("feature %s, chain %s",kind,chainname)
- else
- return format("feature %s",kind)
- end
+ if index then
+ return format("feature %s, chain %s, sub %s, lookup %s, index %s",kind,chainname,chainlookupname,lookupname,index)
+ elseif lookupname then
+ return format("feature %s, chain %s, sub %s, lookup %s",kind,chainname or "?",chainlookupname or "?",lookupname)
+ elseif chainlookupname then
+ return format("feature %s, chain %s, sub %s",kind,chainname or "?",chainlookupname)
+ elseif chainname then
+ return format("feature %s, chain %s",kind,chainname)
+ else
+ return format("feature %s",kind)
+ end
end
-
local function pref(kind,lookupname)
- return format("feature %s, lookup %s",kind,lookupname)
-end
-
--- We can assume that languages that use marks are not hyphenated. We can also assume
--- that at most one discretionary is present.
-
--- We do need components in funny kerning mode but maybe I can better reconstruct them
--- as we do have the font components info available; removing components makes the
--- previous code much simpler. Also, later on copying and freeing becomes easier.
--- However, for arabic we need to keep them around for the sake of mark placement
--- and indices.
-
-local function copy_glyph(g) -- next and prev are untouched !
- local components = g.components
- if components then
- g.components = nil
- local n = copy_node(g)
- g.components = components
- return n
- else
- return copy_node(g)
- end
+ return format("feature %s, lookup %s",kind,lookupname)
+end
+local function copy_glyph(g)
+ local components=g.components
+ if components then
+ g.components=nil
+ local n=copy_node(g)
+ g.components=components
+ return n
+ else
+ return copy_node(g)
+ end
end
-
--- start is a mark and we need to keep that one
-
--- local function markstoligature(kind,lookupname,start,stop,char)
--- -- [start]..[stop]
--- local keep = start
--- local prev = start.prev
--- local next = stop.next
--- local base = copy_glyph(start)
--- local current, start = insert_node_after(start,start,base)
--- -- [current][start]..[stop]
--- current.next = next
--- if next then
--- next.prev = current
--- end
--- start.prev = nil
--- stop.next = nil
--- current.char = char
--- current.subtype = ligature_code
--- current.components = start
--- return keep
--- end
-
local function markstoligature(kind,lookupname,start,stop,char)
- if start == stop and start.char == char then
- return start
- else
- local prev = start.prev
- local next = stop.next
- start.prev = nil
- stop.next = nil
- local base = copy_glyph(start)
- base.char = char
- base.subtype = ligature_code
- base.components = start
- if prev then
- prev.next = base
- end
- if next then
- next.prev = base
- end
- base.next = next
- base.prev = prev
- return base
- end
-end
-
--- The next code is somewhat complicated by the fact that some fonts can have ligatures made
--- from ligatures that themselves have marks. This was identified by Kai in for instance
--- arabtype: KAF LAM SHADDA ALEF FATHA (0x0643 0x0644 0x0651 0x0627 0x064E). This becomes
--- KAF LAM-ALEF with a SHADDA on the first and a FATHA on the second component. In a next
--- iteration this becomes a KAF-LAM-ALEF with a SHADDA on the second and a FATHA on the
--- third component.
-
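-- A minimal standalone sketch (mock tables, not real luatex nodes) of the counting
-- rule that getcomponentindex below implements: marks contribute 0, a plain base
-- glyph contributes 1, and a ligature contributes the sum over its components, so
-- indices stay consistent when ligatures are built from earlier ligatures as in the
-- arabtype case described above.
local function componentindex(g)
  if g.ismark then
    return 0 -- marks do not advance the component index
  elseif g.components then
    local i = 0
    for _,c in ipairs(g.components) do
      i = i + componentindex(c) -- recurse into nested ligatures
    end
    return i
  else
    return 1 -- a plain base glyph counts as one component
  end
end
local lamalef = { components = { { char = "lam" }, { char = "alef" } } }
print(componentindex({ char = "kaf" }), componentindex(lamalef)) --> 1   2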
-local function getcomponentindex(start)
- if start.id ~= glyph_code then
- return 0
- elseif start.subtype == ligature_code then
- local i = 0
- local components = start.components
- while components do
- i = i + getcomponentindex(components)
- components = components.next
- end
- return i
- elseif not marks[start.char] then
- return 1
- else
- return 0
- end
-end
-
-local function toligature(kind,lookupname,start,stop,char,markflag,discfound) -- brr head
- if start == stop and start.char == char then
- start.char = char
- return start
- end
- local prev = start.prev
- local next = stop.next
- start.prev = nil
- stop.next = nil
- local base = copy_glyph(start)
- base.char = char
- base.subtype = ligature_code
- base.components = start -- start can have components
+ if start==stop and start.char==char then
+ return start
+ else
+ local prev=start.prev
+ local next=stop.next
+ start.prev=nil
+ stop.next=nil
+ local base=copy_glyph(start)
+ base.char=char
+ base.subtype=ligature_code
+ base.components=start
if prev then
- prev.next = base
+ prev.next=base
end
if next then
- next.prev = base
- end
- base.next = next
- base.prev = prev
- if not discfound then
- local deletemarks = markflag ~= "mark"
- local components = start
- local baseindex = 0
- local componentindex = 0
- local head = base
- local current = base
- while start do
- local char = start.char
- if not marks[char] then
- baseindex = baseindex + componentindex
- componentindex = getcomponentindex(start)
- elseif not deletemarks then -- quite fishy
- start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex)
- if trace_marks then
- logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
- end
- head, current = insert_node_after(head,current,copy_node(start)) -- unlikely that mark has components
- end
- start = start.next
- end
- local start = components
- while start and start.id == glyph_code do -- hm, is id test needed ?
- local char = start.char
- if marks[char] then
- start[a_ligacomp] = baseindex + (start[a_ligacomp] or componentindex)
- if trace_marks then
- logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
- end
- else
- break
- end
- start = start.next
- end
+ next.prev=base
end
+ base.next=next
+ base.prev=prev
return base
+ end
+end
+local function getcomponentindex(start)
+ if start.id~=glyph_code then
+ return 0
+ elseif start.subtype==ligature_code then
+ local i=0
+ local components=start.components
+ while components do
+ i=i+getcomponentindex(components)
+ components=components.next
+ end
+ return i
+ elseif not marks[start.char] then
+ return 1
+ else
+ return 0
+ end
+end
+local function toligature(kind,lookupname,start,stop,char,markflag,discfound)
+ if start==stop and start.char==char then
+ start.char=char
+ return start
+ end
+ local prev=start.prev
+ local next=stop.next
+ start.prev=nil
+ stop.next=nil
+ local base=copy_glyph(start)
+ base.char=char
+ base.subtype=ligature_code
+ base.components=start
+ if prev then
+ prev.next=base
+ end
+ if next then
+ next.prev=base
+ end
+ base.next=next
+ base.prev=prev
+ if not discfound then
+ local deletemarks=markflag~="mark"
+ local components=start
+ local baseindex=0
+ local componentindex=0
+ local head=base
+ local current=base
+ while start do
+ local char=start.char
+ if not marks[char] then
+ baseindex=baseindex+componentindex
+ componentindex=getcomponentindex(start)
+ elseif not deletemarks then
+ start[a_ligacomp]=baseindex+(start[a_ligacomp] or componentindex)
+ if trace_marks then
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ end
+ head,current=insert_node_after(head,current,copy_node(start))
+ end
+ start=start.next
+ end
+ local start=components
+ while start and start.id==glyph_code do
+ local char=start.char
+ if marks[char] then
+ start[a_ligacomp]=baseindex+(start[a_ligacomp] or componentindex)
+ if trace_marks then
+ logwarning("%s: keep mark %s, gets index %s",pref(kind,lookupname),gref(char),start[a_ligacomp])
+ end
+ else
+ break
+ end
+ start=start.next
+ end
+ end
+ return base
end
-
function handlers.gsub_single(head,start,kind,lookupname,replacement)
- if trace_singles then
- logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement))
- end
- start.char = replacement
- return head, start, true
+ if trace_singles then
+ logprocess("%s: replacing %s by single %s",pref(kind,lookupname),gref(start.char),gref(replacement))
+ end
+ start.char=replacement
+ return head,start,true
end
-
local function get_alternative_glyph(start,alternatives,value)
- -- needs checking: (global value, brrr)
- local choice = nil
- local n = #alternatives
- local char = start.char
- --
- if value == "random" then
- local r = random(1,n)
- value, choice = format("random, choice %s",r), alternatives[r]
- elseif value == "first" then
- value, choice = format("first, choice %s",1), alternatives[1]
- elseif value == "last" then
- value, choice = format("last, choice %s",n), alternatives[n]
- else
- value = tonumber(value)
- if type(value) ~= "number" then
- value, choice = "default, choice 1", alternatives[1]
- elseif value > n then
- local defaultalt = otf.defaultnodealternate
- if defaultalt == "first" then
- value, choice = format("no %s variants, taking %s",value,n), alternatives[n]
- elseif defaultalt == "last" then
- value, choice = format("no %s variants, taking %s",value,1), alternatives[1]
- else
- value, choice = format("no %s variants, ignoring",value), false
- end
- elseif value == 0 then
- value, choice = format("choice %s (no change)",value), char
- elseif value < 1 then
- value, choice = format("no %s variants, taking %s",value,1), alternatives[1]
- else
- value, choice = format("choice %s",value), alternatives[value]
- end
- end
- return choice
-end
-
-local function multiple_glyphs(head,start,multiple) -- marks ?
- local nofmultiples = #multiple
- if nofmultiples > 0 then
- start.char = multiple[1]
- if nofmultiples > 1 then
- local sn = start.next
- for k=2,nofmultiples do -- todo: use insert_node
- local n = copy_node(start) -- ignore components
- n.char = multiple[k]
- n.next = sn
- n.prev = start
- if sn then
- sn.prev = n
- end
- start.next = n
- start = n
- end
- end
- return head, start, true
+ local choice=nil
+ local n=#alternatives
+ local char=start.char
+ if value=="random" then
+ local r=random(1,n)
+ value,choice=format("random, choice %s",r),alternatives[r]
+ elseif value=="first" then
+ value,choice=format("first, choice %s",1),alternatives[1]
+ elseif value=="last" then
+ value,choice=format("last, choice %s",n),alternatives[n]
+ else
+ value=tonumber(value)
+ if type(value)~="number" then
+ value,choice="default, choice 1",alternatives[1]
+ elseif value>n then
+ local defaultalt=otf.defaultnodealternate
+ if defaultalt=="first" then
+ value,choice=format("no %s variants, taking %s",value,n),alternatives[n]
+ elseif defaultalt=="last" then
+ value,choice=format("no %s variants, taking %s",value,1),alternatives[1]
+ else
+ value,choice=format("no %s variants, ignoring",value),false
+ end
+ elseif value==0 then
+ value,choice=format("choice %s (no change)",value),char
+ elseif value<1 then
+ value,choice=format("no %s variants, taking %s",value,1),alternatives[1]
else
- if trace_multiples then
- logprocess("no multiple for %s",gref(start.char))
- end
- return head, start, false
+ value,choice=format("choice %s",value),alternatives[value]
+ end
+ end
+ return choice
+end
+local function multiple_glyphs(head,start,multiple)
+ local nofmultiples=#multiple
+ if nofmultiples>0 then
+ start.char=multiple[1]
+ if nofmultiples>1 then
+ local sn=start.next
+ for k=2,nofmultiples do
+ local n=copy_node(start)
+ n.char=multiple[k]
+ n.next=sn
+ n.prev=start
+ if sn then
+ sn.prev=n
+ end
+ start.next=n
+ start=n
+ end
+ end
+ return head,start,true
+ else
+ if trace_multiples then
+ logprocess("no multiple for %s",gref(start.char))
end
+ return head,start,false
+ end
end
-
function handlers.gsub_alternate(head,start,kind,lookupname,alternative,sequence)
- local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
- local choice = get_alternative_glyph(start,alternative,value)
- if choice then
- if trace_alternatives then
- logprocess("%s: replacing %s by alternative %s (%s)",pref(kind,lookupname),gref(start.char),gref(choice),choice)
- end
- start.char = choice
- else
- if trace_alternatives then
- logwarning("%s: no variant %s for %s",pref(kind,lookupname),tostring(value),gref(start.char))
- end
+ local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue
+ local choice=get_alternative_glyph(start,alternative,value)
+ if choice then
+ if trace_alternatives then
+ logprocess("%s: replacing %s by alternative %s (%s)",pref(kind,lookupname),gref(start.char),gref(choice),choice)
+ end
+ start.char=choice
+ else
+ if trace_alternatives then
+ logwarning("%s: no variant %s for %s",pref(kind,lookupname),tostring(value),gref(start.char))
end
- return head, start, true
+ end
+ return head,start,true
end
-
function handlers.gsub_multiple(head,start,kind,lookupname,multiple)
- if trace_multiples then
- logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple))
- end
- return multiple_glyphs(head,start,multiple)
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple %s",pref(kind,lookupname),gref(start.char),gref(multiple))
+ end
+ return multiple_glyphs(head,start,multiple)
end
-
function handlers.gsub_ligature(head,start,kind,lookupname,ligature,sequence)
- local s, stop, discfound = start.next, nil, false
- local startchar = start.char
- if marks[startchar] then
- while s do
- local id = s.id
- if id == glyph_code and s.font == currentfont and s.subtype<256 then
- local lg = ligature[s.char]
- if lg then
- stop = s
- ligature = lg
- s = s.next
- else
- break
- end
- else
- break
- end
- end
- if stop then
- local lig = ligature.ligature
- if lig then
- if trace_ligatures then
- local stopchar = stop.char
- start = markstoligature(kind,lookupname,start,stop,lig)
- logprocess("%s: replacing %s upto %s by ligature %s",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
- else
- start = markstoligature(kind,lookupname,start,stop,lig)
- end
- return head, start, true
- else
- -- ok, goto next lookup
- end
- end
- else
- local skipmark = sequence.flags[1]
- while s do
- local id = s.id
- if id == glyph_code and s.subtype<256 then
- if s.font == currentfont then
- local char = s.char
- if skipmark and marks[char] then
- s = s.next
- else
- local lg = ligature[char]
- if lg then
- stop = s
- ligature = lg
- s = s.next
- else
- break
- end
- end
- else
- break
- end
- elseif id == disc_code then
- discfound = true
- s = s.next
- else
- break
- end
- end
- if stop then
- local lig = ligature.ligature
- if lig then
- if trace_ligatures then
- local stopchar = stop.char
- start = toligature(kind,lookupname,start,stop,lig,skipmark,discfound)
- logprocess("%s: replacing %s upto %s by ligature %s",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
- else
- start = toligature(kind,lookupname,start,stop,lig,skipmark,discfound)
- end
- return head, start, true
+ local s,stop,discfound=start.next,nil,false
+ local startchar=start.char
+ if marks[startchar] then
+ while s do
+ local id=s.id
+ if id==glyph_code and s.font==currentfont and s.subtype<256 then
+ local lg=ligature[s.char]
+ if lg then
+ stop=s
+ ligature=lg
+ s=s.next
+ else
+ break
+ end
+ else
+ break
+ end
+ end
+ if stop then
+ local lig=ligature.ligature
+ if lig then
+ if trace_ligatures then
+ local stopchar=stop.char
+ start=markstoligature(kind,lookupname,start,stop,lig)
+ logprocess("%s: replacing %s upto %s by ligature %s",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ else
+ start=markstoligature(kind,lookupname,start,stop,lig)
+ end
+ return head,start,true
+ else
+ end
+ end
+ else
+ local skipmark=sequence.flags[1]
+ while s do
+ local id=s.id
+ if id==glyph_code and s.subtype<256 then
+ if s.font==currentfont then
+ local char=s.char
+ if skipmark and marks[char] then
+ s=s.next
+ else
+ local lg=ligature[char]
+ if lg then
+ stop=s
+ ligature=lg
+ s=s.next
else
- -- ok, goto next lookup
+ break
end
+ end
+ else
+ break
+ end
+ elseif id==disc_code then
+ discfound=true
+ s=s.next
+ else
+ break
+ end
+ end
+ if stop then
+ local lig=ligature.ligature
+ if lig then
+ if trace_ligatures then
+ local stopchar=stop.char
+ start=toligature(kind,lookupname,start,stop,lig,skipmark,discfound)
+ logprocess("%s: replacing %s upto %s by ligature %s",pref(kind,lookupname),gref(startchar),gref(stopchar),gref(start.char))
+ else
+ start=toligature(kind,lookupname,start,stop,lig,skipmark,discfound)
end
+ return head,start,true
+ else
+ end
end
- return head, start, false
+ end
+ return head,start,false
end
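The ligature handler above walks a per-lookup tree: ligature[s.char] descends one level per matched character, and a node that completes a ligature carries a ligature field holding the replacement glyph. A minimal standalone sketch of that shape and walk; the codepoints are illustrative and the helper name find_ligature is mine, not part of the handler:

-- toy ligature tree in the shape walked above (codepoints are illustrative)
local tree = {
  [0x66] = {                                     -- f
    [0x66] = { ligature = 0xFB00,                -- f f   -> ff
               [0x69] = { ligature = 0xFB03 } }, -- f f i -> ffi
    [0x69] = { ligature = 0xFB01 },              -- f i   -> fi
  },
}

local function find_ligature(codes,i)            -- toy helper, not part of the handler
  local t = tree[codes[i]]
  local j = i
  while t do
    local n = t[codes[j+1]]
    if n then
      t, j = n, j + 1                            -- keep extending the match
    else
      break
    end
  end
  if t and t.ligature and j > i then
    return t.ligature, j                         -- replacement and last consumed index
  end
end

print(find_ligature({ 0x66, 0x66, 0x69 }, 1))    --> 64259  3  (ffi)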
-
---[[ldx--
-<p>We get hits on a mark, but we're not sure if it has to be applied, so
-we need to explicitly test for basechar, baselig and basemark entries.</p>
---ldx]]--
-
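The paragraph above refers to anchor records: the mark side comes from markanchors[anchor], the base side from descriptions[basechar].anchors under the keys basechar, baselig or basemark, and anchorlookups[lookupname] limits which anchor names a lookup may use. A rough sketch of that shape and of the matching loop; the anchor names, coordinates and the offset subtraction are illustrative only, the real positioning is done by setmark:

-- illustrative anchor data (names, units and values are made up)
local baseanchors = { basechar = { top = { 310, 520 }, bottom = { 300, -20 } } }
local markanchors = { top = { 150, 0 } }
local allowed     = { top = true }          -- stands in for anchorlookups[lookupname]

local function match_anchor(bases,marks)
  for anchor, ba in next, bases do          -- same traversal as the handlers below
    if allowed[anchor] then
      local ma = marks[anchor]
      if ma then
        -- the handler would now call setmark(start,base,factor,rlmode,ba,ma);
        -- the subtraction is only a naive illustration of the offset idea
        return anchor, ba[1] - ma[1], ba[2] - ma[2]
      end
    end
  end
end

print(match_anchor(baseanchors.basechar,markanchors)) --> top  160  520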
function handlers.gpos_mark2base(head,start,kind,lookupname,markanchors,sequence)
- local markchar = start.char
- if marks[markchar] then
- local base = start.prev -- [glyph] [start=mark]
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- local basechar = base.char
- if marks[basechar] then
- while true do
- base = base.prev
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- basechar = base.char
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
- end
- end
- end
- local baseanchors = descriptions[basechar]
- if baseanchors then
- baseanchors = baseanchors.anchors
+ local markchar=start.char
+ if marks[markchar] then
+ local base=start.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ if marks[basechar] then
+ while true do
+ base=base.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ basechar=base.char
+ if not marks[basechar] then
+ break
end
- if baseanchors then
- local baseanchors = baseanchors['basechar']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%s,%s)",
- pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head, start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar))
- end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+ end
+ end
+ end
+ local baseanchors=descriptions[basechar]
+ if baseanchors then
+ baseanchors=baseanchors.anchors
+ end
+ if baseanchors then
+ local baseanchors=baseanchors['basechar']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%s,%s)",
+ pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
end
- else -- if trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ return head,start,true
+ end
end
- elseif trace_bugs then
- logwarning("%s: prev node is no char",pref(kind,lookupname))
+ end
+ if trace_bugs then
+ logwarning("%s, no matching anchors for mark %s and base %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
end
+ else
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ logwarning("%s: prev node is no char",pref(kind,lookupname))
end
- return head, start, false
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
end
-
function handlers.gpos_mark2ligature(head,start,kind,lookupname,markanchors,sequence)
- -- check chainpos variant
- local markchar = start.char
- if marks[markchar] then
- local base = start.prev -- [glyph] [optional marks] [start=mark]
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- local basechar = base.char
- if marks[basechar] then
- while true do
- base = base.prev
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- basechar = base.char
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
- end
- end
+ local markchar=start.char
+ if marks[markchar] then
+ local base=start.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ if marks[basechar] then
+ while true do
+ base=base.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ basechar=base.char
+ if not marks[basechar] then
+ break
end
- local index = start[a_ligacomp]
- local baseanchors = descriptions[basechar]
- if baseanchors then
- baseanchors = baseanchors.anchors
- if baseanchors then
- local baseanchors = baseanchors['baselig']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- ba = ba[index]
- if ba then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index
- if trace_marks then
- logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%s,%s)",
- pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
- end
- return head, start, true
- end
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- else -- if trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+ end
+ end
+ end
+ local index=start[a_ligacomp]
+ local baseanchors=descriptions[basechar]
+ if baseanchors then
+ baseanchors=baseanchors.anchors
+ if baseanchors then
+ local baseanchors=baseanchors['baselig']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ ba=ba[index]
+ if ba then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%s,%s)",
+ pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
end
- elseif trace_bugs then
- logwarning("%s: prev node is no char",pref(kind,lookupname))
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s",pref(kind,lookupname),gref(markchar),gref(basechar))
+ end
+ end
end
+ else
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ logwarning("%s: prev node is no char",pref(kind,lookupname))
end
- return head, start, false
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
end
-
function handlers.gpos_mark2mark(head,start,kind,lookupname,markanchors,sequence)
- local markchar = start.char
- if marks[markchar] then
- local base = start.prev -- [glyph] [basemark] [start=mark]
- local slc = start[a_ligacomp]
- if slc then -- a rather messy loop ... needs checking with husayni
- while base do
- local blc = base[a_ligacomp]
- if blc and blc ~= slc then
- base = base.prev
- else
- break
- end
+ local markchar=start.char
+ if marks[markchar] then
+ local base=start.prev
+ local slc=start[a_ligacomp]
+ if slc then
+ while base do
+ local blc=base[a_ligacomp]
+ if blc and blc~=slc then
+ base=base.prev
+ else
+ break
+ end
+ end
+ end
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ local baseanchors=descriptions[basechar]
+ if baseanchors then
+ baseanchors=baseanchors.anchors
+ if baseanchors then
+ baseanchors=baseanchors['basemark']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%s,%s)",
+ pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
end
- end
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go
- local basechar = base.char
- local baseanchors = descriptions[basechar]
- if baseanchors then
- baseanchors = baseanchors.anchors
- if baseanchors then
- baseanchors = baseanchors['basemark']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%s,%s)",
- pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head, start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- else -- if trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar))
- onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and basemark %s",pref(kind,lookupname),gref(markchar),gref(basechar))
end
- elseif trace_bugs then
- logwarning("%s: prev node is no mark",pref(kind,lookupname))
+ end
end
+ else
+ onetimemessage(currentfont,basechar,"no base anchors",report_fonts)
+ end
elseif trace_bugs then
- logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
-end
-
-function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence) -- to be checked
- local alreadydone = cursonce and start[a_cursbase]
- if not alreadydone then
- local done = false
- local startchar = start.char
- if marks[startchar] then
- if trace_cursive then
- logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
- end
- else
- local nxt = start.next
- while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do
- local nextchar = nxt.char
- if marks[nextchar] then
- -- should not happen (maybe warning)
- nxt = nxt.next
- else
- local entryanchors = descriptions[nextchar]
- if entryanchors then
- entryanchors = entryanchors.anchors
- if entryanchors then
- entryanchors = entryanchors['centry']
- if entryanchors then
- local al = anchorlookups[lookupname]
- for anchor, entry in next, entryanchors do
- if al[anchor] then
- local exit = exitanchors[anchor]
- if exit then
- local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
- if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
- end
- done = true
- break
- end
- end
- end
- end
- end
- else -- if trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
- onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
- end
- break
- end
- end
- end
- return head, start, done
+ logwarning("%s: prev node is no mark",pref(kind,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+end
+function handlers.gpos_cursive(head,start,kind,lookupname,exitanchors,sequence)
+ local alreadydone=cursonce and start[a_cursbase]
+ if not alreadydone then
+ local done=false
+ local startchar=start.char
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
+ end
else
- if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
- end
- return head, start, false
- end
+ local nxt=start.next
+ while not done and nxt and nxt.id==glyph_code and nxt.font==currentfont and nxt.subtype<256 do
+ local nextchar=nxt.char
+ if marks[nextchar] then
+ nxt=nxt.next
+ else
+ local entryanchors=descriptions[nextchar]
+ if entryanchors then
+ entryanchors=entryanchors.anchors
+ if entryanchors then
+ entryanchors=entryanchors['centry']
+ if entryanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,entry in next,entryanchors do
+ if al[anchor] then
+ local exit=exitanchors[anchor]
+ if exit then
+ local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
+ end
+ done=true
+ break
+ end
+ end
+ end
+ end
+ end
+ else
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
+ end
+ break
+ end
+ end
+ end
+ return head,start,done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ end
+ return head,start,false
+ end
end
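The cursive handler above pairs the exit anchors stored with the lookup against the centry anchors in the next glyph's description; setcursive is only called when an anchor name exists on both sides and is enabled for the lookup. A small data-shape sketch with made-up names and values:

-- made-up anchor names and coordinates
local exitanchors  = { cursive_1 = { 820, 240 } }   -- carried by the lookup
local entryanchors = { cursive_1 = {  10, 230 } }   -- like descriptions[nextchar].anchors.centry
local allowed      = { cursive_1 = true }           -- like anchorlookups[lookupname]

for anchor, entry in next, entryanchors do
  if allowed[anchor] then
    local exit = exitanchors[anchor]
    if exit then
      -- the handler would now call setcursive(start,nxt,factor,rlmode,exit,entry,...)
      print("connect via", anchor, exit[1], exit[2], "->", entry[1], entry[2])
      break
    end
  end
end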
-
function handlers.gpos_single(head,start,kind,lookupname,kerns,sequence)
- local startchar = start.char
- local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting single %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
- end
- return head, start, false
+ local startchar=start.char
+ local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),dx,dy,w,h)
+ end
+ return head,start,false
end
-
function handlers.gpos_pair(head,start,kind,lookupname,kerns,sequence)
- -- todo: kerns in disc nodes: pre, post, replace -> loop over disc too
- -- todo: kerns in components of ligatures
- local snext = start.next
- if not snext then
- return head, start, false
- else
- local prev, done = start, false
- local factor = tfmdata.parameters.factor
- local lookuptype = lookuptypes[lookupname]
- while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do
- local nextchar = snext.char
- local krn = kerns[nextchar]
- if not krn and marks[nextchar] then
- prev = snext
- snext = snext.next
- else
- local krn = kerns[nextchar]
- if not krn then
- -- skip
- elseif type(krn) == "table" then
- if lookuptype == "pair" then -- probably not needed
- local a, b = krn[2], krn[3]
- if a and #a > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting first of pair %s and %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- if b and #b > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
- if trace_kerns then
- logprocess("%s: shifting second of pair %s and %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- else -- wrong ... position has different entries
- report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
- -- local a, b = krn[2], krn[6]
- -- if a and a ~= 0 then
- -- local k = setkern(snext,factor,rlmode,a)
- -- if trace_kerns then
- -- logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
- -- end
- -- end
- -- if b and b ~= 0 then
- -- logwarning("%s: ignoring second kern xoff %s",pref(kind,lookupname),b*factor)
- -- end
- end
- done = true
- elseif krn ~= 0 then
- local k = setkern(snext,factor,rlmode,krn)
- if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
- end
- done = true
- end
- break
- end
- end
- return head, start, done
- end
-end
-
---[[ldx--
-<p>I will implement multiple chain replacements once I run into a font that uses
-it. It's not that complex to handle.</p>
---ldx]]--
-
-local chainmores = { }
-local chainprocs = { }
-
+ local snext=start.next
+ if not snext then
+ return head,start,false
+ else
+ local prev,done=start,false
+ local factor=tfmdata.parameters.factor
+ local lookuptype=lookuptypes[lookupname]
+ while snext and snext.id==glyph_code and snext.font==currentfont and snext.subtype<256 do
+ local nextchar=snext.char
+ local krn=kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev=snext
+ snext=snext.next
+ else
+ local krn=kerns[nextchar]
+ if not krn then
+ elseif type(krn)=="table" then
+ if lookuptype=="pair" then
+ local a,b=krn[2],krn[3]
+ if a and #a>0 then
+ local startchar=start.char
+ local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b>0 then
+ local startchar=start.char
+ local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else
+ report_process("%s: check this out (old kern stuff)",pref(kind,lookupname))
+ end
+ done=true
+ elseif krn~=0 then
+ local k=setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar))
+ end
+ done=true
+ end
+ break
+ end
+ end
+ return head,start,done
+ end
+end
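Both gpos_single and gpos_pair above read a kerns table: a plain number is a simple kern handed to setkern, while the table form carries value records in krn[2] and krn[3] for the first and second glyph of the pair, handed to setpair. A hedged sketch of the two shapes; the codepoints, values and the field order inside the value records are assumptions for illustration:

-- kerns as read per first character of a pair (illustrative data)
local kerns = {
  [0x56] = {                                 -- "V"
    [0x41] = -80,                            -- V A : plain kern, goes to setkern
    [0x2E] = { [2] = { -60, 0, -60, 0 },     -- V . : pair form, krn[2] adjusts the first glyph
               [3] = nil },                  --       nothing for the second glyph
  },
}

for second, krn in next, kerns[0x56] do
  if type(krn) == "table" then
    print(("U+%04X: pair positioning, first record present: %s"):format(second, tostring(krn[2] ~= nil)))
  elseif krn ~= 0 then
    print(("U+%04X: simple kern %d"):format(second, krn))
  end
end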
+local chainmores={}
+local chainprocs={}
local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_subchain(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_subchain(...)
end
-
-local logwarning = report_subchain
-
+local logwarning=report_subchain
local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_chain(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_chain(...)
end
-
-local logwarning = report_chain
-
--- We could share functions but that would lead to extra function calls with many
--- arguments, redundant tests and confusing messages.
-
+local logwarning=report_chain
function chainprocs.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname)
- logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
- return head, start, false
+ logwarning("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
+ return head,start,false
end
-
function chainmores.chainsub(head,start,stop,kind,chainname,currentcontext,lookuphash,lookuplist,chainlookupname,n)
- logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
- return head, start, false
+ logprocess("%s: a direct call to chainsub cannot happen",cref(kind,chainname,chainlookupname))
+ return head,start,false
end
-
--- The reversesub is a special case, which is why we need to store the replacements
--- in a bit weird way. There is no lookup and the replacement comes from the lookup
--- itself. It is meant mostly for dealing with Urdu.
-
function chainprocs.reversesub(head,start,stop,kind,chainname,currentcontext,lookuphash,replacements)
- local char = start.char
- local replacement = replacements[char]
- if replacement then
- if trace_singles then
- logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
- end
- start.char = replacement
- return head, start, true
- else
- return head, start, false
- end
-end
-
---[[ldx--
-<p>This chain stuff is somewhat tricky since we can have a sequence of actions to be
-applied: single, alternate, multiple or ligature where ligature can be an invalid
-one in the sense that it will replace multiple by one but not necessarily one that
-looks like the combination (i.e. it is the counterpart of multiple then). For
-example, the following is valid:</p>
-
-<typing>
-<line>xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E] xxxABCDExxx</line>
-</typing>
-
-<p>Therefore we don't really do the replacement here yet unless we have the
-single lookup case. The efficiency of the replacements can be improved by deleting
-as little as needed, but that would also make the code even messier.</p>
---ldx]]--
-
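As a throwaway illustration of the sequence described above (a single, a multiple and a ligature replacement acting on one matched run), here is a toy string version; the real handlers of course operate on node lists, not strings:

-- toy string rendering of: xxxabcdexxx [single a->A][multiple b->BCD][ligature cde->E]
local function toy_chain(s)
  s = s:gsub("a","A",1)     -- gsub_single
  s = s:gsub("b","BCD",1)   -- gsub_multiple
  s = s:gsub("cde","E",1)   -- gsub_ligature
  return s
end
assert(toy_chain("xxxabcdexxx") == "xxxABCDExxx")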
-local function delete_till_stop(start,stop,ignoremarks) -- keeps start
- local n = 1
- if start == stop then
- -- done
- elseif ignoremarks then
- repeat -- start x x m x x stop => start m
- local next = start.next
- if not marks[next.char] then
- local components = next.components
- if components then -- probably not needed
- flush_node_list(components)
- end
- delete_node(start,next)
- end
- n = n + 1
- until next == stop
- else -- start x x x stop => start
- repeat
- local next = start.next
- local components = next.components
- if components then -- probably not needed
- flush_node_list(components)
- end
- delete_node(start,next)
- n = n + 1
- until next == stop
- end
- return n
+ local char=start.char
+ local replacement=replacements[char]
+ if replacement then
+ if trace_singles then
+ logprocess("%s: single reverse replacement of %s by %s",cref(kind,chainname),gref(char),gref(replacement))
+ end
+ start.char=replacement
+ return head,start,true
+ else
+ return head,start,false
+ end
+end
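The replacements table consumed by reversesub above is a plain character-to-character map stored with the reverse chain lookup itself, which is why no lookuphash indirection is needed there. A minimal sketch with made-up codepoints:

-- illustrative map (codepoints made up): character -> its reverse chain replacement
local replacements = { [0x06CC] = 0x064A }

local char = 0x06CC
local replacement = replacements[char]
if replacement then
  -- the handler would now simply set start.char = replacement
  print(("replace U+%04X by U+%04X"):format(char,replacement))
end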
+local function delete_till_stop(start,stop,ignoremarks)
+ local n=1
+ if start==stop then
+ elseif ignoremarks then
+ repeat
+ local next=start.next
+ if not marks[next.char] then
+ local components=next.components
+ if components then
+ flush_node_list(components)
+ end
+ delete_node(start,next)
+ end
+ n=n+1
+ until next==stop
+ else
+ repeat
+ local next=start.next
+ local components=next.components
+ if components then
+ flush_node_list(components)
+ end
+ delete_node(start,next)
+ n=n+1
+ until next==stop
+ end
+ return n
end
-
---[[ldx--
-<p>Here we replace start by a single variant, First we delete the rest of the
-match.</p>
---ldx]]--
-
function chainprocs.gsub_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
- -- todo: marks ?
- local current = start
- local subtables = currentlookup.subtables
- if #subtables > 1 then
- logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
- end
- while current do
- if current.id == glyph_code then
- local currentchar = current.char
- local lookupname = subtables[1] -- only 1
- local replacement = lookuphash[lookupname]
- if not replacement then
- if trace_bugs then
- logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
- end
- else
- replacement = replacement[currentchar]
- if not replacement or replacement == "" then
- if trace_bugs then
- logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar))
- end
- else
- if trace_singles then
- logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
- end
- current.char = replacement
- end
- end
- return head, start, true
- elseif current == stop then
- break
+ local current=start
+ local subtables=currentlookup.subtables
+ if #subtables>1 then
+ logwarning("todo: check if we need to loop over the replacements: %s",concat(subtables," "))
+ end
+ while current do
+ if current.id==glyph_code then
+ local currentchar=current.char
+ local lookupname=subtables[1]
+ local replacement=lookuphash[lookupname]
+ if not replacement then
+ if trace_bugs then
+ logwarning("%s: no single hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ end
+ else
+ replacement=replacement[currentchar]
+ if not replacement or replacement=="" then
+ if trace_bugs then
+ logwarning("%s: no single for %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar))
+ end
else
- current = current.next
- end
+ if trace_singles then
+ logprocess("%s: replacing single %s by %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(currentchar),gref(replacement))
+ end
+ current.char=replacement
+ end
+ end
+ return head,start,true
+ elseif current==stop then
+ break
+ else
+ current=current.next
end
- return head, start, false
+ end
+ return head,start,false
end
-
-chainmores.gsub_single = chainprocs.gsub_single
-
---[[ldx--
-<p>Here we replace start by a sequence of new glyphs. First we delete the rest of
-the match.</p>
---ldx]]--
-
+chainmores.gsub_single=chainprocs.gsub_single
function chainprocs.gsub_multiple(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- delete_till_stop(start,stop) -- we could pass ignoremarks as #3 ..
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local replacements = lookuphash[lookupname]
- if not replacements then
- if trace_bugs then
- logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname))
- end
+ delete_till_stop(start,stop)
+ local startchar=start.char
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local replacements=lookuphash[lookupname]
+ if not replacements then
+ if trace_bugs then
+ logwarning("%s: no multiple hits",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ else
+ replacements=replacements[startchar]
+ if not replacements or replacements=="" then
+ if trace_bugs then
+ logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar))
+ end
else
- replacements = replacements[startchar]
- if not replacements or replacement == "" then
- if trace_bugs then
- logwarning("%s: no multiple for %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar))
- end
- else
- if trace_multiples then
- logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements))
- end
- return multiple_glyphs(head,start,replacements)
- end
+ if trace_multiples then
+ logprocess("%s: replacing %s by multiple characters %s",cref(kind,chainname,chainlookupname,lookupname),gref(startchar),gref(replacements))
+ end
+ return multiple_glyphs(head,start,replacements)
end
- return head, start, false
+ end
+ return head,start,false
end
-
-chainmores.gsub_multiple = chainprocs.gsub_multiple
-
---[[ldx--
-<p>Here we replace start by a new glyph. First we delete the rest of the match.</p>
---ldx]]--
-
--- char_1 mark_1 -> char_x mark_1 (ignore marks)
--- char_1 mark_1 -> char_x
-
--- to be checked: do we always have just one glyph?
--- we can also have alternates for marks
--- marks come last anyway
--- are there cases where we need to delete the mark
-
+chainmores.gsub_multiple=chainprocs.gsub_multiple
function chainprocs.gsub_alternate(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local current = start
- local subtables = currentlookup.subtables
- local value = featurevalue == true and tfmdata.shared.features[kind] or featurevalue
- while current do
- if current.id == glyph_code then -- is this check needed?
- local currentchar = current.char
- local lookupname = subtables[1]
- local alternatives = lookuphash[lookupname]
- if not alternatives then
- if trace_bugs then
- logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname))
- end
- else
- alternatives = alternatives[currentchar]
- if alternatives then
- local choice = get_alternative_glyph(current,alternatives,value)
- if choice then
- if trace_alternatives then
- logprocess("%s: replacing %s by alternative %s (%s)",cref(kind,chainname,chainlookupname,lookupname),gref(char),gref(choice),choice)
- end
- start.char = choice
- else
- if trace_alternatives then
- logwarning("%s: no variant %s for %s",cref(kind,chainname,chainlookupname,lookupname),tostring(value),gref(char))
- end
- end
- elseif trace_bugs then
- logwarning("%s: no alternative for %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar))
- end
- end
- return head, start, true
- elseif current == stop then
- break
- else
- current = current.next
+ local current=start
+ local subtables=currentlookup.subtables
+ local value=featurevalue==true and tfmdata.shared.features[kind] or featurevalue
+ while current do
+ if current.id==glyph_code then
+ local currentchar=current.char
+ local lookupname=subtables[1]
+ local alternatives=lookuphash[lookupname]
+ if not alternatives then
+ if trace_bugs then
+ logwarning("%s: no alternative hit",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ else
+ alternatives=alternatives[currentchar]
+ if alternatives then
+ local choice=get_alternative_glyph(current,alternatives,value)
+ if choice then
+ if trace_alternatives then
+ logprocess("%s: replacing %s by alternative %s (%s)",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar),gref(choice),choice)
+ end
+ start.char=choice
+ else
+ if trace_alternatives then
+ logwarning("%s: no variant %s for %s",cref(kind,chainname,chainlookupname,lookupname),tostring(value),gref(currentchar))
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: no alternative for %s",cref(kind,chainname,chainlookupname,lookupname),gref(currentchar))
end
+ end
+ return head,start,true
+ elseif current==stop then
+ break
+ else
+ current=current.next
end
- return head, start, false
+ end
+ return head,start,false
end
-
-chainmores.gsub_alternate = chainprocs.gsub_alternate
-
---[[ldx--
-<p>When we replace ligatures we use a helper that handles the marks. I might change
-this function (move code inline and handle the marks by a separate function). We
-assume rather stupid ligatures (no complex disc nodes).</p>
---ldx]]--
-
+chainmores.gsub_alternate=chainprocs.gsub_alternate
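gsub_alternate above resolves the feature value (a number, or true meaning take the feature's own value from tfmdata.shared.features) and passes it with the alternatives list to get_alternative_glyph. The selection rule sketched below, value as a 1-based index with "random" picking any entry, is an assumption about that helper rather than a quote of it:

-- illustrative alternatives table: character -> list of alternate glyph slots
local alternatives = { [0x0061] = { 0xE001, 0xE002, 0xE003 } }

local function pick_alternative(list,value)  -- hypothetical stand-in for get_alternative_glyph
  if value == "random" then
    return list[math.random(#list)]
  elseif type(value) == "number" and list[value] then
    return list[value]
  else
    return list[1]
  end
end

print(pick_alternative(alternatives[0x0061],2)) --> 57346 (0xE002)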
function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex)
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local ligatures = lookuphash[lookupname]
+ local startchar=start.char
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local ligatures=lookuphash[lookupname]
+ if not ligatures then
+ if trace_bugs then
+ logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
+ end
+ else
+ ligatures=ligatures[startchar]
if not ligatures then
- if trace_bugs then
- logwarning("%s: no ligature hits",cref(kind,chainname,chainlookupname,lookupname,chainindex))
- end
+ if trace_bugs then
+ logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
+ end
else
- ligatures = ligatures[startchar]
- if not ligatures then
- if trace_bugs then
- logwarning("%s: no ligatures starting with %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
- end
+ local s=start.next
+ local discfound=false
+ local last=stop
+ local nofreplacements=0
+ local skipmark=currentlookup.flags[1]
+ while s do
+ local id=s.id
+ if id==disc_code then
+ s=s.next
+ discfound=true
else
- local s = start.next
- local discfound = false
- local last = stop
- local nofreplacements = 0
- local skipmark = currentlookup.flags[1]
- while s do
- local id = s.id
- if id == disc_code then
- s = s.next
- discfound = true
- else
- local schar = s.char
- if skipmark and marks[schar] then -- marks
- s = s.next
- else
- local lg = ligatures[schar]
- if lg then
- ligatures, last, nofreplacements = lg, s, nofreplacements + 1
- if s == stop then
- break
- else
- s = s.next
- end
- else
- break
- end
- end
- end
- end
- local l2 = ligatures.ligature
- if l2 then
- if chainindex then
- stop = last
- end
- if trace_ligatures then
- if start == stop then
- logprocess("%s: replacing character %s by ligature %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
- else
- logprocess("%s: replacing character %s upto %s by ligature %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2))
- end
- end
- start = toligature(kind,lookupname,start,stop,l2,currentlookup.flags[1],discfound)
- return head, start, true, nofreplacements
- elseif trace_bugs then
- if start == stop then
- logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
- else
- logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char))
- end
- end
+ local schar=s.char
+ if skipmark and marks[schar] then
+ s=s.next
+ else
+ local lg=ligatures[schar]
+ if lg then
+ ligatures,last,nofreplacements=lg,s,nofreplacements+1
+ if s==stop then
+ break
+ else
+ s=s.next
+ end
+ else
+ break
+ end
+ end
+ end
+ end
+ local l2=ligatures.ligature
+ if l2 then
+ if chainindex then
+ stop=last
+ end
+ if trace_ligatures then
+ if start==stop then
+ logprocess("%s: replacing character %s by ligature %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(l2))
+ else
+ logprocess("%s: replacing character %s upto %s by ligature %s",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char),gref(l2))
+ end
+ end
+ start=toligature(kind,lookupname,start,stop,l2,currentlookup.flags[1],discfound)
+ return head,start,true,nofreplacements
+ elseif trace_bugs then
+ if start==stop then
+ logwarning("%s: replacing character %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar))
+ else
+ logwarning("%s: replacing character %s upto %s by ligature fails",cref(kind,chainname,chainlookupname,lookupname,chainindex),gref(startchar),gref(stop.char))
end
+ end
end
- return head, start, false, 0
+ end
+ return head,start,false,0
end
-
-chainmores.gsub_ligature = chainprocs.gsub_ligature
-
+chainmores.gsub_ligature=chainprocs.gsub_ligature
function chainprocs.gpos_mark2base(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = start.char
- if marks[markchar] then
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = lookuphash[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = start.prev -- [glyph] [start=mark]
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- local basechar = base.char
- if marks[basechar] then
- while true do
- base = base.prev
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- basechar = base.char
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
- end
- return head, start, false
- end
- end
- end
- local baseanchors = descriptions[basechar].anchors
- if baseanchors then
- local baseanchors = baseanchors['basechar']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%s,%s)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head, start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname))
+ local markchar=start.char
+ if marks[markchar] then
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local markanchors=lookuphash[lookupname]
+ if markanchors then
+ markanchors=markanchors[markchar]
+ end
+ if markanchors then
+ local base=start.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ if marks[basechar] then
+ while true do
+ base=base.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ basechar=base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",pref(kind,lookupname),gref(markchar))
+ end
+ return head,start,false
+ end
+ end
+ end
+ local baseanchors=descriptions[basechar].anchors
+ if baseanchors then
+ local baseanchors=baseanchors['basechar']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%s,%s)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
end
- elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ if trace_bugs then
+ logwarning("%s, no matching anchors for mark %s and base %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no char",cref(kind,chainname,chainlookupname,lookupname))
+ end
elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
end
- return head, start, false
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head,start,false
end
-
function chainprocs.gpos_mark2ligature(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = start.char
- if marks[markchar] then
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = lookuphash[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = start.prev -- [glyph] [optional marks] [start=mark]
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- local basechar = base.char
- if marks[basechar] then
- while true do
- base = base.prev
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then
- basechar = base.char
- if not marks[basechar] then
- break
- end
- else
- if trace_bugs then
- logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar)
- end
- return head, start, false
- end
- end
- end
- -- todo: like marks a ligatures hash
- local index = start[a_ligacomp]
- local baseanchors = descriptions[basechar].anchors
- if baseanchors then
- local baseanchors = baseanchors['baselig']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- ba = ba[index]
- if ba then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma) -- index
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%s,%s)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy)
- end
- return head, start, true
- end
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname)
+ local markchar=start.char
+ if marks[markchar] then
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local markanchors=lookuphash[lookupname]
+ if markanchors then
+ markanchors=markanchors[markchar]
+ end
+ if markanchors then
+ local base=start.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ if marks[basechar] then
+ while true do
+ base=base.prev
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ basechar=base.char
+ if not marks[basechar] then
+ break
+ end
+ else
+ if trace_bugs then
+ logwarning("%s: no base for mark %s",cref(kind,chainname,chainlookupname,lookupname),markchar)
+ end
+ return head,start,false
+ end
+ end
+ end
+ local index=start[a_ligacomp]
+ local baseanchors=descriptions[basechar].anchors
+ if baseanchors then
+ local baseanchors=baseanchors['baselig']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ ba=ba[index]
+ if ba then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%s,%s)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),index,dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
end
- elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and baselig %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
end
+ elseif trace_bugs then
+ logwarning("feature %s, lookup %s: prev node is no char",kind,lookupname)
+ end
elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
end
- return head, start, false
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head,start,false
end
-
function chainprocs.gpos_mark2mark(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local markchar = start.char
- if marks[markchar] then
---~ local alreadydone = markonce and start[a_markmark]
---~ if not alreadydone then
- -- local markanchors = descriptions[markchar].anchors markanchors = markanchors and markanchors.mark
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local markanchors = lookuphash[lookupname]
- if markanchors then
- markanchors = markanchors[markchar]
- end
- if markanchors then
- local base = start.prev -- [glyph] [basemark] [start=mark]
- local slc = start[a_ligacomp]
- if slc then -- a rather messy loop ... needs checking with husayni
- while base do
- local blc = base[a_ligacomp]
- if blc and blc ~= slc then
- base = base.prev
- else
- break
- end
- end
- end
- if base and base.id == glyph_code and base.font == currentfont and base.subtype<256 then -- subtype test can go
- local basechar = base.char
- local baseanchors = descriptions[basechar].anchors
- if baseanchors then
- baseanchors = baseanchors['basemark']
- if baseanchors then
- local al = anchorlookups[lookupname]
- for anchor,ba in next, baseanchors do
- if al[anchor] then
- local ma = markanchors[anchor]
- if ma then
- local dx, dy, bound = setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
- if trace_marks then
- logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%s,%s)",
- cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
- end
- return head, start, true
- end
- end
- end
- if trace_bugs then
- logwarning("%s: no matching anchors for mark %s and basemark %s",gref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
- end
- end
- end
- elseif trace_bugs then
- logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
- end
- elseif trace_bugs then
- logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ local markchar=start.char
+ if marks[markchar] then
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local markanchors=lookuphash[lookupname]
+ if markanchors then
+ markanchors=markanchors[markchar]
+ end
+ if markanchors then
+ local base=start.prev
+ local slc=start[a_ligacomp]
+ if slc then
+ while base do
+ local blc=base[a_ligacomp]
+ if blc and blc~=slc then
+ base=base.prev
+ else
+ break
end
---~ elseif trace_marks and trace_details then
---~ logprocess("%s, mark %s is already bound (n=%s), ignoring mark2mark",pref(kind,lookupname),gref(markchar),alreadydone)
---~ end
- elseif trace_bugs then
- logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
- end
- return head, start, false
+ end
+ end
+ if base and base.id==glyph_code and base.font==currentfont and base.subtype<256 then
+ local basechar=base.char
+ local baseanchors=descriptions[basechar].anchors
+ if baseanchors then
+ baseanchors=baseanchors['basemark']
+ if baseanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,ba in next,baseanchors do
+ if al[anchor] then
+ local ma=markanchors[anchor]
+ if ma then
+ local dx,dy,bound=setmark(start,base,tfmdata.parameters.factor,rlmode,ba,ma)
+ if trace_marks then
+ logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%s,%s)",
+ cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy)
+ end
+ return head,start,true
+ end
+ end
+ end
+ if trace_bugs then
+ logwarning("%s: no matching anchors for mark %s and basemark %s",cref(kind,chainname,chainlookupname,lookupname),gref(markchar),gref(basechar))
+ end
+ end
+ end
+ elseif trace_bugs then
+ logwarning("%s: prev node is no mark",cref(kind,chainname,chainlookupname,lookupname))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s has no anchors",cref(kind,chainname,chainlookupname,lookupname),gref(markchar))
+ end
+ elseif trace_bugs then
+ logwarning("%s: mark %s is no mark",cref(kind,chainname,chainlookupname),gref(markchar))
+ end
+ return head,start,false
end
-
function chainprocs.gpos_cursive(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname)
- local alreadydone = cursonce and start[a_cursbase]
- if not alreadydone then
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local exitanchors = lookuphash[lookupname]
- if exitanchors then
- exitanchors = exitanchors[startchar]
- end
- if exitanchors then
- local done = false
- if marks[startchar] then
- if trace_cursive then
- logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
- end
- else
- local nxt = start.next
- while not done and nxt and nxt.id == glyph_code and nxt.font == currentfont and nxt.subtype<256 do
- local nextchar = nxt.char
- if marks[nextchar] then
- -- should not happen (maybe warning)
- nxt = nxt.next
- else
- local entryanchors = descriptions[nextchar]
- if entryanchors then
- entryanchors = entryanchors.anchors
- if entryanchors then
- entryanchors = entryanchors['centry']
- if entryanchors then
- local al = anchorlookups[lookupname]
- for anchor, entry in next, entryanchors do
- if al[anchor] then
- local exit = exitanchors[anchor]
- if exit then
- local dx, dy, bound = setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
- if trace_cursive then
- logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
- end
- done = true
- break
- end
- end
- end
- end
- end
- else -- if trace_bugs then
- -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar))
- onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
+ local alreadydone=cursonce and start[a_cursbase]
+ if not alreadydone then
+ local startchar=start.char
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local exitanchors=lookuphash[lookupname]
+ if exitanchors then
+ exitanchors=exitanchors[startchar]
+ end
+ if exitanchors then
+ local done=false
+ if marks[startchar] then
+ if trace_cursive then
+ logprocess("%s: ignoring cursive for mark %s",pref(kind,lookupname),gref(startchar))
+ end
+ else
+ local nxt=start.next
+ while not done and nxt and nxt.id==glyph_code and nxt.font==currentfont and nxt.subtype<256 do
+ local nextchar=nxt.char
+ if marks[nextchar] then
+ nxt=nxt.next
+ else
+ local entryanchors=descriptions[nextchar]
+ if entryanchors then
+ entryanchors=entryanchors.anchors
+ if entryanchors then
+ entryanchors=entryanchors['centry']
+ if entryanchors then
+ local al=anchorlookups[lookupname]
+ for anchor,entry in next,entryanchors do
+ if al[anchor] then
+ local exit=exitanchors[anchor]
+ if exit then
+ local dx,dy,bound=setcursive(start,nxt,tfmdata.parameters.factor,rlmode,exit,entry,characters[startchar],characters[nextchar])
+ if trace_cursive then
+ logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode)
end
+ done=true
break
+ end
end
+ end
end
+ end
+ else
+ onetimemessage(currentfont,startchar,"no entry anchors",report_fonts)
end
- return head, start, done
- else
- if trace_cursive and trace_details then
- logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
- end
- return head, start, false
+ break
+ end
end
+ end
+ return head,start,done
+ else
+ if trace_cursive and trace_details then
+ logprocess("%s, cursive %s is already done",pref(kind,lookupname),gref(start.char),alreadydone)
+ end
+ return head,start,false
end
- return head, start, false
+ end
+ return head,start,false
end
-
function chainprocs.gpos_single(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
- -- untested .. needs checking for the new model
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local kerns = lookuphash[lookupname]
+ local startchar=start.char
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local kerns=lookuphash[lookupname]
+ if kerns then
+ kerns=kerns[startchar]
if kerns then
- kerns = kerns[startchar] -- needed ?
- if kerns then
- local dx, dy, w, h = setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting single %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
- end
- end
+ local dx,dy,w,h=setpair(start,tfmdata.parameters.factor,rlmode,sequence.flags[4],kerns,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting single %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h)
+ end
end
- return head, start, false
+ end
+ return head,start,false
end
-
--- when machines become faster i will make a shared function
-
function chainprocs.gpos_pair(head,start,stop,kind,chainname,currentcontext,lookuphash,currentlookup,chainlookupname,chainindex,sequence)
--- logwarning("%s: gpos_pair not yet supported",cref(kind,chainname,chainlookupname))
- local snext = start.next
- if snext then
- local startchar = start.char
- local subtables = currentlookup.subtables
- local lookupname = subtables[1]
- local kerns = lookuphash[lookupname]
- if kerns then
- kerns = kerns[startchar]
- if kerns then
- local lookuptype = lookuptypes[lookupname]
- local prev, done = start, false
- local factor = tfmdata.parameters.factor
- while snext and snext.id == glyph_code and snext.font == currentfont and snext.subtype<256 do
- local nextchar = snext.char
- local krn = kerns[nextchar]
- if not krn and marks[nextchar] then
- prev = snext
- snext = snext.next
- else
- if not krn then
- -- skip
- elseif type(krn) == "table" then
- if lookuptype == "pair" then
- local a, b = krn[2], krn[3]
- if a and #a > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
- if trace_kerns then
- logprocess("%s: shifting first of pair %s and %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- if b and #b > 0 then
- local startchar = start.char
- local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
- if trace_kerns then
- logprocess("%s: shifting second of pair %s and %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
- end
- end
- else
- report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
- local a, b = krn[2], krn[6]
- if a and a ~= 0 then
- local k = setkern(snext,factor,rlmode,a)
- if trace_kerns then
- logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
- end
- end
- if b and b ~= 0 then
- logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor)
- end
- end
- done = true
- elseif krn ~= 0 then
- local k = setkern(snext,factor,rlmode,krn)
- if trace_kerns then
- logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
- end
- done = true
- end
- break
- end
- end
- return head, start, done
+ local snext=start.next
+ if snext then
+ local startchar=start.char
+ local subtables=currentlookup.subtables
+ local lookupname=subtables[1]
+ local kerns=lookuphash[lookupname]
+ if kerns then
+ kerns=kerns[startchar]
+ if kerns then
+ local lookuptype=lookuptypes[lookupname]
+ local prev,done=start,false
+ local factor=tfmdata.parameters.factor
+ while snext and snext.id==glyph_code and snext.font==currentfont and snext.subtype<256 do
+ local nextchar=snext.char
+ local krn=kerns[nextchar]
+ if not krn and marks[nextchar] then
+ prev=snext
+ snext=snext.next
+ else
+ if not krn then
+ elseif type(krn)=="table" then
+ if lookuptype=="pair" then
+ local a,b=krn[2],krn[3]
+ if a and #a>0 then
+ local startchar=start.char
+ local x,y,w,h=setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar])
+ if trace_kerns then
+ logprocess("%s: shifting first of pair %s and %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ if b and #b>0 then
+ local startchar=start.char
+ local x,y,w,h=setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar])
+ if trace_kerns then
+ logprocess("%s: shifting second of pair %s and %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h)
+ end
+ end
+ else
+ report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname))
+ local a,b=krn[2],krn[6]
+ if a and a~=0 then
+ local k=setkern(snext,factor,rlmode,a)
+ if trace_kerns then
+ logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
+ end
+ end
+ if b and b~=0 then
+ logwarning("%s: ignoring second kern xoff %s",cref(kind,chainname,chainlookupname),b*factor)
+ end
+ end
+ done=true
+ elseif krn~=0 then
+ local k=setkern(snext,factor,rlmode,krn)
+ if trace_kerns then
+ logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar))
+ end
+ done=true
end
+ break
+ end
end
+ return head,start,done
+ end
end
- return head, start, false
+ end
+ return head,start,false
end
-
--- what pointer to return, spec says stop
--- to be discussed ... is bidi changer a space?
--- elseif char == zwnj and sequence[n][32] then -- brrr
-
--- somehow l or f is global
--- we don't need to pass the currentcontext, saves a bit
--- make a slow variant then can be activated but with more tracing
-
local function show_skip(kind,chainname,char,ck,class)
- if ck[9] then
- logwarning("%s: skipping char %s (%s) in rule %s, lookuptype %s (%s=>%s)",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
- else
- logwarning("%s: skipping char %s (%s) in rule %s, lookuptype %s",cref(kind,chainname),gref(char),class,ck[1],ck[2])
- end
+ if ck[9] then
+ logwarning("%s: skipping char %s (%s) in rule %s, lookuptype %s (%s=>%s)",cref(kind,chainname),gref(char),class,ck[1],ck[2],ck[9],ck[10])
+ else
+ logwarning("%s: skipping char %s (%s) in rule %s, lookuptype %s",cref(kind,chainname),gref(char),class,ck[1],ck[2])
+ end
end
-
local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash)
- -- local rule, lookuptype, sequence, f, l, lookups = ck[1], ck[2] ,ck[3], ck[4], ck[5], ck[6]
- local flags = sequence.flags
- local done = false
- local skipmark = flags[1]
- local skipligature = flags[2]
- local skipbase = flags[3]
- local someskip = skipmark or skipligature or skipbase -- could be stored in flags for a fast test (hm, flags could be false !)
- local markclass = sequence.markclass -- todo, first we need a proper test
- local skipped = false
- for k=1,#contexts do
- local match = true
- local current = start
- local last = start
- local ck = contexts[k]
- local seq = ck[3]
- local s = #seq
- -- f..l = mid string
- if s == 1 then
- -- never happens
- match = current.id == glyph_code and current.font == currentfont and current.subtype<256 and seq[1][current.char]
+ local flags=sequence.flags
+ local done=false
+ local skipmark=flags[1]
+ local skipligature=flags[2]
+ local skipbase=flags[3]
+ local someskip=skipmark or skipligature or skipbase
+ local markclass=sequence.markclass
+ local skipped=false
+ for k=1,#contexts do
+ local match=true
+ local current=start
+ local last=start
+ local ck=contexts[k]
+ local seq=ck[3]
+ local s=#seq
+ if s==1 then
+ match=current.id==glyph_code and current.font==currentfont and current.subtype<256 and seq[1][current.char]
+ else
+ local f,l=ck[4],ck[5]
+ if f==1 and f==l then
+ else
+ if f==l then
else
- -- maybe we need a better space check (maybe check for glue or category or combination)
- -- we cannot optimize for n=2 because there can be disc nodes
- local f, l = ck[4], ck[5]
- -- current match
- if f == 1 and f == l then -- current only
- -- already a hit
- -- match = true
- else -- before/current/after | before/current | current/after
- -- no need to test first hit (to be optimized)
- if f == l then -- new, else last out of sync (f is > 1)
- -- match = true
- else
- local n = f + 1
- last = last.next
- while n <= l do
- if last then
- local id = last.id
- if id == glyph_code then
- if last.font == currentfont and last.subtype<256 then
- local char = last.char
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- last = last.next
- elseif seq[n][char] then
- if n < l then
- last = last.next
- end
- n = n + 1
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- elseif id == disc_code then
- last = last.next
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- end
- end
- end
- -- before
- if match and f > 1 then
- local prev = start.prev
- if prev then
- local n = f-1
- while n >= 1 do
- if prev then
- local id = prev.id
- if id == glyph_code then
- if prev.font == currentfont and prev.subtype<256 then -- normal char
- local char = prev.char
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- elseif seq[n][char] then
- n = n -1
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- elseif id == disc_code then
- -- skip 'm
- elseif seq[n][32] then
- n = n -1
- else
- match = false
- break
- end
- prev = prev.prev
- elseif seq[n][32] then -- somewhat special, as zapfino can have many preceding spaces
- n = n -1
- else
- match = false
- break
- end
+ local n=f+1
+ last=last.next
+ while n<=l do
+ if last then
+ local id=last.id
+ if id==glyph_code then
+ if last.font==currentfont and last.subtype<256 then
+ local char=last.char
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ last=last.next
+ elseif seq[n][char] then
+ if n<l then
+ last=last.next
+ end
+ n=n+1
+ else
+ match=false
+ break
end
- elseif f == 2 then
- match = seq[1][32]
+ else
+ match=false
+ break
+ end
else
- for n=f-1,1 do
- if not seq[n][32] then
- match = false
- break
- end
- end
+ match=false
+ break
end
+ elseif id==disc_code then
+ last=last.next
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
end
- -- after
- if match and s > l then
- local current = last and last.next
- if current then
- -- removed optimization for s-l == 1, we have to deal with marks anyway
- local n = l + 1
- while n <= s do
- if current then
- local id = current.id
- if id == glyph_code then
- if current.font == currentfont and current.subtype<256 then -- normal char
- local char = current.char
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- skipped = true
- if trace_skips then
- show_skip(kind,chainname,char,ck,class)
- end
- elseif seq[n][char] then
- n = n + 1
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- else
- match = false
- break
- end
- elseif id == disc_code then
- -- skip 'm
- elseif seq[n][32] then -- brrr
- n = n + 1
- else
- match = false
- break
- end
- current = current.next
- elseif seq[n][32] then
- n = n + 1
- else
- match = false
- break
- end
+ end
+ end
+ end
+ if match and f>1 then
+ local prev=start.prev
+ if prev then
+ local n=f-1
+ while n>=1 do
+ if prev then
+ local id=prev.id
+ if id==glyph_code then
+ if prev.font==currentfont and prev.subtype<256 then
+ local char=prev.char
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n=n -1
+ else
+ match=false
+ break
end
- elseif s-l == 1 then
- match = seq[s][32]
+ else
+ match=false
+ break
+ end
else
- for n=l+1,s do
- if not seq[n][32] then
- match = false
- break
- end
- end
- end
+ match=false
+ break
+ end
+ elseif id==disc_code then
+ elseif seq[n][32] then
+ n=n -1
+ else
+ match=false
+ break
+ end
+ prev=prev.prev
+ elseif seq[n][32] then
+ n=n -1
+ else
+ match=false
+ break
end
- end
- if match then
- -- ck == currentcontext
- if trace_contexts then
- local rule, lookuptype, f, l = ck[1], ck[2], ck[4], ck[5]
- local char = start.char
- if ck[9] then
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %s (%s=>%s)",
- cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
+ end
+ elseif f==2 then
+ match=seq[1][32]
+ else
+ for n=f-1,1 do
+ if not seq[n][32] then
+ match=false
+ break
+ end
+ end
+ end
+ end
+ if match and s>l then
+ local current=last and last.next
+ if current then
+ local n=l+1
+ while n<=s do
+ if current then
+ local id=current.id
+ if id==glyph_code then
+ if current.font==currentfont and current.subtype<256 then
+ local char=current.char
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ skipped=true
+ if trace_skips then
+ show_skip(kind,chainname,char,ck,class)
+ end
+ elseif seq[n][char] then
+ n=n+1
+ else
+ match=false
+ break
+ end
+ else
+ match=false
+ break
+ end
else
- logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %s",
- cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
- end
+ match=false
+ break
+ end
+ elseif id==disc_code then
+ elseif seq[n][32] then
+ n=n+1
+ else
+ match=false
+ break
+ end
+ current=current.next
+ elseif seq[n][32] then
+ n=n+1
+ else
+ match=false
+ break
end
- local chainlookups = ck[6]
- if chainlookups then
- local nofchainlookups = #chainlookups
- -- we can speed this up if needed
- if nofchainlookups == 1 then
- local chainlookupname = chainlookups[1]
- local chainlookup = lookuptable[chainlookupname]
- if chainlookup then
- local cp = chainprocs[chainlookup.type]
- if cp then
- head, start, done = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
- else
- logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
- end
- else -- shouldn't happen
- logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname))
- end
- else
- local i = 1
- repeat
- if skipped then
- while true do
- local char = start.char
- local ccd = descriptions[char]
- if ccd then
- local class = ccd.class
- if class == skipmark or class == skipligature or class == skipbase or (markclass and class == "mark" and not markclass[char]) then
- start = start.next
- else
- break
- end
- else
- break
- end
- end
- end
- local chainlookupname = chainlookups[i]
- local chainlookup = lookuptable[chainlookupname] -- can be false (n matches, <n replacement)
- local cp = chainlookup and chainmores[chainlookup.type]
- if cp then
- local ok, n
- head, start, ok, n = cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
- -- messy since last can be changed !
- if ok then
- done = true
- -- skip next one(s) if ligature
- i = i + (n or 1)
- else
- i = i + 1
- end
- else
- -- is valid
- -- logprocess("%s: multiple subchains for %s are not yet supported",cref(kind,chainname,chainlookupname),chainlookup and chainlookup.type or "?")
- i = i + 1
- end
- start = start.next
- until i > nofchainlookups
- end
+ end
+ elseif s-l==1 then
+ match=seq[s][32]
+ else
+ for n=l+1,s do
+ if not seq[n][32] then
+ match=false
+ break
+ end
+ end
+ end
+ end
+ end
+ if match then
+ if trace_contexts then
+ local rule,lookuptype,f,l=ck[1],ck[2],ck[4],ck[5]
+ local char=start.char
+ if ck[9] then
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %s (%s=>%s)",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype,ck[9],ck[10])
+ else
+ logwarning("%s: rule %s matches at char %s for (%s,%s,%s) chars, lookuptype %s",
+ cref(kind,chainname),rule,gref(char),f-1,l-f+1,s-l,lookuptype)
+ end
+ end
+ local chainlookups=ck[6]
+ if chainlookups then
+ local nofchainlookups=#chainlookups
+ if nofchainlookups==1 then
+ local chainlookupname=chainlookups[1]
+ local chainlookup=lookuptable[chainlookupname]
+ if chainlookup then
+ local cp=chainprocs[chainlookup.type]
+ if cp then
+ head,start,done=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,nil,sequence)
else
- local replacements = ck[7]
- if replacements then
- head, start, done = chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements) -- sequence
+ logprocess("%s: %s is not yet supported",cref(kind,chainname,chainlookupname),chainlookup.type)
+ end
+ else
+ logprocess("%s is not yet supported",cref(kind,chainname,chainlookupname))
+ end
+ else
+ local i=1
+ repeat
+ if skipped then
+ while true do
+ local char=start.char
+ local ccd=descriptions[char]
+ if ccd then
+ local class=ccd.class
+ if class==skipmark or class==skipligature or class==skipbase or (markclass and class=="mark" and not markclass[char]) then
+ start=start.next
+ else
+ break
+ end
else
- done = true -- can be meant to be skipped
- if trace_contexts then
- logprocess("%s: skipping match",cref(kind,chainname))
- end
- end
+ break
+ end
+ end
+ end
+ local chainlookupname=chainlookups[i]
+ local chainlookup=lookuptable[chainlookupname]
+ local cp=chainlookup and chainmores[chainlookup.type]
+ if cp then
+ local ok,n
+ head,start,ok,n=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence)
+ if ok then
+ done=true
+ i=i+(n or 1)
+ else
+ i=i+1
+ end
+ else
+ i=i+1
end
+ start=start.next
+ until i>nofchainlookups
end
+ else
+ local replacements=ck[7]
+ if replacements then
+ head,start,done=chainprocs.reversesub(head,start,last,kind,chainname,ck,lookuphash,replacements)
+ else
+ done=true
+ if trace_contexts then
+ logprocess("%s: skipping match",cref(kind,chainname))
+ end
+ end
+ end
end
- return head, start, done
+ end
+ return head,start,done
end
-
--- Because we want to keep this elsewhere (an because speed is less an issue) we
--- pass the font id so that the verbose variant can access the relevant helper tables.
-
-local verbose_handle_contextchain = function(font,...)
- logwarning("no verbose handler installed, reverting to 'normal'")
- otf.setcontextchain()
- return normal_handle_contextchain(...)
+local verbose_handle_contextchain=function(font,...)
+ logwarning("no verbose handler installed, reverting to 'normal'")
+ otf.setcontextchain()
+ return normal_handle_contextchain(...)
end
-
-otf.chainhandlers = {
- normal = normal_handle_contextchain,
- verbose = verbose_handle_contextchain,
+otf.chainhandlers={
+ normal=normal_handle_contextchain,
+ verbose=verbose_handle_contextchain,
}
-
function otf.setcontextchain(method)
- if not method or method == "normal" or not otf.chainhandlers[method] then
- if handlers.contextchain then -- no need for a message while making the format
- logwarning("installing normal contextchain handler")
- end
- handlers.contextchain = normal_handle_contextchain
- else
- logwarning("installing contextchain handler '%s'",method)
- local handler = otf.chainhandlers[method]
- handlers.contextchain = function(...)
- return handler(currentfont,...) -- hm, get rid of ...
- end
- end
- handlers.gsub_context = handlers.contextchain
- handlers.gsub_contextchain = handlers.contextchain
- handlers.gsub_reversecontextchain = handlers.contextchain
- handlers.gpos_contextchain = handlers.contextchain
- handlers.gpos_context = handlers.contextchain
+ if not method or method=="normal" or not otf.chainhandlers[method] then
+ if handlers.contextchain then
+ logwarning("installing normal contextchain handler")
+ end
+ handlers.contextchain=normal_handle_contextchain
+ else
+ logwarning("installing contextchain handler '%s'",method)
+ local handler=otf.chainhandlers[method]
+ handlers.contextchain=function(...)
+ return handler(currentfont,...)
+ end
+ end
+ handlers.gsub_context=handlers.contextchain
+ handlers.gsub_contextchain=handlers.contextchain
+ handlers.gsub_reversecontextchain=handlers.contextchain
+ handlers.gpos_contextchain=handlers.contextchain
+ handlers.gpos_context=handlers.contextchain
end
-
otf.setcontextchain()
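
The block above keeps the pluggable contextchain dispatch: otf.chainhandlers maps a name to a handler, and otf.setcontextchain(method) wires the chosen handler into every gsub/gpos contextchain entry point, falling back to the normal handler for unknown names. As a hedged sketch (the handler name and log line are hypothetical, and it only works inside the same closure where normal_handle_contextchain and logwarning are visible), an extra handler could be plugged in like this:

    local function traced_handle_contextchain(font,...)
        -- hypothetical variant: report which font triggers the pass, then defer
        logwarning("contextchain pass for font %s",font)
        return normal_handle_contextchain(...)
    end

    otf.chainhandlers.traced = traced_handle_contextchain
    otf.setcontextchain("traced") -- the installed wrapper passes currentfont as first argument
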
-
-local missing = { } -- we only report once
-
+local missing={}
local function logprocess(...)
- if trace_steps then
- registermessage(...)
- end
- report_process(...)
+ if trace_steps then
+ registermessage(...)
+ end
+ report_process(...)
end
-
-local logwarning = report_process
-
+local logwarning=report_process
local function report_missing_cache(typ,lookup)
- local f = missing[currentfont] if not f then f = { } missing[currentfont] = f end
- local t = f[typ] if not t then t = { } f[typ] = t end
- if not t[lookup] then
- t[lookup] = true
- logwarning("missing cache for lookup %s of type %s in font %s (%s)",lookup,typ,currentfont,tfmdata.properties.fullname)
- end
-end
-
-local resolved = { } -- we only resolve a font,script,language pair once
-
--- todo: pass all these 'locals' in a table
-
-local lookuphashes = { }
-
-setmetatableindex(lookuphashes, function(t,font)
- local lookuphash = fontdata[font].resources.lookuphash
- if not lookuphash or not next(lookuphash) then
- lookuphash = false
- end
- t[font] = lookuphash
- return lookuphash
+ local f=missing[currentfont] if not f then f={} missing[currentfont]=f end
+ local t=f[typ] if not t then t={} f[typ]=t end
+ if not t[lookup] then
+ t[lookup]=true
+ logwarning("missing cache for lookup %s of type %s in font %s (%s)",lookup,typ,currentfont,tfmdata.properties.fullname)
+ end
+end
+local resolved={}
+local lookuphashes={}
+setmetatableindex(lookuphashes,function(t,font)
+ local lookuphash=fontdata[font].resources.lookuphash
+ if not lookuphash or not next(lookuphash) then
+ lookuphash=false
+ end
+ t[font]=lookuphash
+ return lookuphash
end)
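
The lazy cache above answers with either a lookuphash table or false (when a font has no prepared lookups), so callers can test it cheaply on every run. A minimal usage sketch, mirroring how featuresprocessor below bails out (head and font stand for the usual arguments):

    -- the metatable fills lookuphashes[font] on first access
    local lookuphash = lookuphashes[font]
    if not lookuphash then
        return head, false -- nothing to process for this font
    end
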
-
--- fonts.hashes.lookups = lookuphashes
-
-local autofeatures = fonts.analyzers.features -- was: constants
-
+local autofeatures=fonts.analyzers.features
local function initialize(sequence,script,language,enabled)
- local features = sequence.features
- if features then
- for kind, scripts in next, features do
- local valid = enabled[kind]
- if valid then
- local languages = scripts[script] or scripts[wildcard]
- if languages and (languages[language] or languages[wildcard]) then
- return { valid, autofeatures[kind] or false, sequence.chain or 0, kind, sequence }
- end
- end
- end
- end
- return false
-end
-
-function otf.dataset(tfmdata,font) -- generic variant, overloaded in context
- local shared = tfmdata.shared
- local properties = tfmdata.properties
- local language = properties.language or "dflt"
- local script = properties.script or "dflt"
- local enabled = shared.features
- local res = resolved[font]
- if not res then
- res = { }
- resolved[font] = res
- end
- local rs = res[script]
- if not rs then
- rs = { }
- res[script] = rs
- end
- local rl = rs[language]
- if not rl then
- rl = {
- -- indexed but we can also add specific data by key
- }
- rs[language] = rl
- local sequences = tfmdata.resources.sequences
- setmetatableindex(rl, function(t,k)
- if type(k) == "number" then
- local v = enabled and initialize(sequences[k],script,language,enabled)
- t[k] = v
- return v
- end
- end)
- end
- return rl
+ local features=sequence.features
+ if features then
+ for kind,scripts in next,features do
+ local valid=enabled[kind]
+ if valid then
+ local languages=scripts[script] or scripts[wildcard]
+ if languages and (languages[language] or languages[wildcard]) then
+ return { valid,autofeatures[kind] or false,sequence.chain or 0,kind,sequence }
+ end
+ end
+ end
+ end
+ return false
+end
+function otf.dataset(tfmdata,font)
+ local shared=tfmdata.shared
+ local properties=tfmdata.properties
+ local language=properties.language or "dflt"
+ local script=properties.script or "dflt"
+ local enabled=shared.features
+ local res=resolved[font]
+ if not res then
+ res={}
+ resolved[font]=res
+ end
+ local rs=res[script]
+ if not rs then
+ rs={}
+ res[script]=rs
+ end
+ local rl=rs[language]
+ if not rl then
+ rl={
+ }
+ rs[language]=rl
+ local sequences=tfmdata.resources.sequences
+ setmetatableindex(rl,function(t,k)
+ if type(k)=="number" then
+ local v=enabled and initialize(sequences[k],script,language,enabled)
+ t[k]=v
+ return v
+ end
+ end)
+ end
+ return rl
end
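
otf.dataset resolves a font/script/language combination once and then hands back a table whose numeric slots are filled lazily: indexing slot s calls initialize on sequences[s] and caches either false or a small record. A hedged usage sketch (variable names are illustrative; the record layout follows the return value of initialize above):

    -- each resolved slot is { featurevalue, analyzer attribute or false, chain, kind, sequence }
    local datasets = otf.dataset(tfmdata,font)
    for s=1,#tfmdata.resources.sequences do
        local dataset = datasets[s] -- first access runs initialize(), later accesses hit the cache
        if dataset then
            -- dataset[1] is the feature value, dataset[3] the chain flag, dataset[5] the sequence
        end
    end
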
-
--- elseif id == glue_code then
--- if p[5] then -- chain
--- local pc = pp[32]
--- if pc then
--- start, ok = start, false -- p[1](start,kind,p[2],pc,p[3],p[4])
--- if ok then
--- done = true
--- end
--- if start then start = start.next end
--- else
--- start = start.next
--- end
--- else
--- start = start.next
--- end
-
--- there will be a new direction parser (pre-parsed etc)
-
local function featuresprocessor(head,font,attr)
-
- local lookuphash = lookuphashes[font] -- we can also check sequences here
-
- if not lookuphash then
- return head, false
- end
-
- if trace_steps then
- checkstep(head)
- end
-
- tfmdata = fontdata[font]
- descriptions = tfmdata.descriptions
- characters = tfmdata.characters
- resources = tfmdata.resources
-
- marks = resources.marks
- anchorlookups = resources.lookup_to_anchor
- lookuptable = resources.lookups
- lookuptypes = resources.lookuptypes
-
- currentfont = font
- rlmode = 0
-
- local sequences = resources.sequences
- local done = false
- local datasets = otf.dataset(tfmdata,font,attr)
-
- local dirstack = { } -- could move outside function
-
- -- We could work on sub start-stop ranges instead but I wonder if there is that
- -- much speed gain (experiments showed that it made not much sense) and we need
- -- to keep track of directions anyway. Also at some point I want to play with
- -- font interactions and then we do need the full sweeps.
-
- -- Keeping track of the headnode is needed for devanagari (I generalized it a bit
- -- so that multiple cases are also covered.)
-
- for s=1,#sequences do
- local dataset = datasets[s]
- if dataset then
- featurevalue = dataset[1] -- todo: pass to function instead of using a global
- if featurevalue then
- local sequence = sequences[s] -- also dataset[5]
- local rlparmode = 0
- local topstack = 0
- local success = false
- local attribute = dataset[2]
- local chain = dataset[3] -- sequence.chain or 0
- local typ = sequence.type
- local subtables = sequence.subtables
- if chain < 0 then
- -- this is a limited case, no special treatments like 'init' etc
- local handler = handlers[typ]
- -- we need to get rid of this slide! probably no longer needed in latest luatex
- local start = find_node_tail(head) -- slow (we can store tail because there's always a skip at the end): todo
- while start do
- local id = start.id
- if id == glyph_code then
- if start.font == font and start.subtype<256 then
- local a = start[0]
- if a then
- a = a == attr
- else
- a = true
- end
- if a then
- for i=1,#subtables do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- head, start, success = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
- if success then
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- if start then start = start.prev end
- else
- start = start.prev
- end
- else
- start = start.prev
- end
- else
- start = start.prev
- end
- end
+ local lookuphash=lookuphashes[font]
+ if not lookuphash then
+ return head,false
+ end
+ if trace_steps then
+ checkstep(head)
+ end
+ tfmdata=fontdata[font]
+ descriptions=tfmdata.descriptions
+ characters=tfmdata.characters
+ resources=tfmdata.resources
+ marks=resources.marks
+ anchorlookups=resources.lookup_to_anchor
+ lookuptable=resources.lookups
+ lookuptypes=resources.lookuptypes
+ currentfont=font
+ rlmode=0
+ local sequences=resources.sequences
+ local done=false
+ local datasets=otf.dataset(tfmdata,font,attr)
+ local dirstack={}
+ for s=1,#sequences do
+ local dataset=datasets[s]
+ if dataset then
+ featurevalue=dataset[1]
+ if featurevalue then
+ local sequence=sequences[s]
+ local rlparmode=0
+ local topstack=0
+ local success=false
+ local attribute=dataset[2]
+ local chain=dataset[3]
+ local typ=sequence.type
+ local subtables=sequence.subtables
+ if chain<0 then
+ local handler=handlers[typ]
+ local start=find_node_tail(head)
+ while start do
+ local id=start.id
+ if id==glyph_code then
+ if start.font==font and start.subtype<256 then
+ local a=start[0]
+ if a then
+ a=a==attr
else
- local handler = handlers[typ]
- local ns = #subtables
- local start = head -- local ?
- rlmode = 0 -- to be checked ?
- if ns == 1 then -- happens often
- local lookupname = subtables[1]
- local lookupcache = lookuphash[lookupname]
- if not lookupcache then -- also check for empty cache
- report_missing_cache(typ,lookupname)
- else
- while start do
- local id = start.id
- if id == glyph_code then
- if start.font == font and start.subtype<256 then
- local a = start[0]
- if a then
- a = (a == attr) and (not attribute or start[a_state] == attribute)
- else
- a = not attribute or start[a_state] == attribute
- end
- if a then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- -- sequence kan weg
- local ok
- head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
- if ok then
- success = true
- end
- end
- if start then start = start.next end
- else
- start = start.next
- end
- else
- start = start.next
- end
- elseif id == whatsit_code then -- will be function
- local subtype = start.subtype
- if subtype == dir_code then
- local dir = start.dir
- if dir == "+TRT" or dir == "+TLT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- elseif dir == "-TRT" or dir == "-TLT" then
- topstack = topstack - 1
- end
- local newdir = dirstack[topstack]
- if newdir == "+TRT" then
- rlmode = -1
- elseif newdir == "+TLT" then
- rlmode = 1
- else
- rlmode = rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %s: txtdir=%s:%s, parmode=%s, txtmode=%s",dir,topstack,newdir or "unset",rlparmode,rlmode)
- end
- elseif subtype == localpar_code then
- local dir = start.dir
- if dir == "TRT" then
- rlparmode = -1
- elseif dir == "TLT" then
- rlparmode = 1
- else
- rlparmode = 0
- end
- rlmode = rlparmode
- if trace_directions then
- report_process("directions after pardir %s: parmode=%s, txtmode=%s",dir,rlparmode,rlmode)
- end
- end
- start = start.next
- else
- start = start.next
- end
- end
+ a=true
+ end
+ if a then
+ for i=1,#subtables do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ head,start,success=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if success then
+ break
end
+ end
else
- while start do
- local id = start.id
- if id == glyph_code then
- if start.font == font and start.subtype<256 then
- local a = start[0]
- if a then
- a = (a == attr) and (not attribute or start[a_state] == attribute)
- else
- a = not attribute or start[a_state] == attribute
- end
- if a then
- for i=1,ns do
- local lookupname = subtables[i]
- local lookupcache = lookuphash[lookupname]
- if lookupcache then
- local lookupmatch = lookupcache[start.char]
- if lookupmatch then
- -- we could move all code inline but that makes things even more unreadable
- local ok
- head, start, ok = handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
- if ok then
- success = true
- break
- end
- end
- else
- report_missing_cache(typ,lookupname)
- end
- end
- if start then start = start.next end
- else
- start = start.next
- end
- else
- start = start.next
- end
- elseif id == whatsit_code then
- local subtype = start.subtype
- if subtype == dir_code then
- local dir = start.dir
- if dir == "+TRT" or dir == "+TLT" then
- topstack = topstack + 1
- dirstack[topstack] = dir
- elseif dir == "-TRT" or dir == "-TLT" then
- topstack = topstack - 1
- end
- local newdir = dirstack[topstack]
- if newdir == "+TRT" then
- rlmode = -1
- elseif newdir == "+TLT" then
- rlmode = 1
- else
- rlmode = rlparmode
- end
- if trace_directions then
- report_process("directions after txtdir %s: txtdir=%s:%s, parmode=%s, txtmode=%s",dir,topstack,newdir or "unset",rlparmode,rlmode)
- end
- elseif subtype == localpar_code then
- local dir = start.dir
- if dir == "TRT" then
- rlparmode = -1
- elseif dir == "TLT" then
- rlparmode = 1
- else
- rlparmode = 0
- end
- rlmode = rlparmode
- if trace_directions then
- report_process("directions after pardir %s: parmode=%s, txtmode=%s",dir,rlparmode,rlmode)
- end
- end
- start = start.next
- else
- start = start.next
- end
- end
+ report_missing_cache(typ,lookupname)
end
+ end
+ if start then start=start.prev end
+ else
+ start=start.prev
end
- if success then
- done = true
- end
- if trace_steps then -- ?
- registerstep(head)
- end
- end
- end
- end
- return head, done
-end
-
-local function generic(lookupdata,lookupname,unicode,lookuphash)
- local target = lookuphash[lookupname]
- if target then
- target[unicode] = lookupdata
- else
- lookuphash[lookupname] = { [unicode] = lookupdata }
- end
-end
-
-local action = {
-
- substitution = generic,
- multiple = generic,
- alternate = generic,
- position = generic,
-
- ligature = function(lookupdata,lookupname,unicode,lookuphash)
- local target = lookuphash[lookupname]
- if not target then
- target = { }
- lookuphash[lookupname] = target
- end
- for i=1,#lookupdata do
- local li = lookupdata[i]
- local tu = target[li]
- if not tu then
- tu = { }
- target[li] = tu
+ else
+ start=start.prev
+ end
+ else
+ start=start.prev
end
- target = tu
- end
- target.ligature = unicode
- end,
-
- pair = function(lookupdata,lookupname,unicode,lookuphash)
- local target = lookuphash[lookupname]
- if not target then
- target = { }
- lookuphash[lookupname] = target
- end
- local others = target[unicode]
- local paired = lookupdata[1]
- if others then
- others[paired] = lookupdata
+ end
else
- others = { [paired] = lookupdata }
- target[unicode] = others
- end
- end,
-
-}
-
-local function prepare_lookups(tfmdata)
-
- local rawdata = tfmdata.shared.rawdata
- local resources = rawdata.resources
- local lookuphash = resources.lookuphash
- local anchor_to_lookup = resources.anchor_to_lookup
- local lookup_to_anchor = resources.lookup_to_anchor
- local lookuptypes = resources.lookuptypes
- local characters = tfmdata.characters
- local descriptions = tfmdata.descriptions
-
- -- we cannot free the entries in the descriptions as sometimes we access
- -- then directly (for instance anchors) ... selectively freeing does save
- -- much memory as it's only a reference to a table and the slot in the
- -- description hash is not freed anyway
-
- for unicode, character in next, characters do -- we cannot loop over descriptions !
-
- local description = descriptions[unicode]
-
- if description then
-
- local lookups = description.slookups
- if lookups then
- for lookupname, lookupdata in next, lookups do
- action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash)
- end
- end
-
- local lookups = description.mlookups
- if lookups then
- for lookupname, lookuplist in next, lookups do
- local lookuptype = lookuptypes[lookupname]
- for l=1,#lookuplist do
- local lookupdata = lookuplist[l]
- action[lookuptype](lookupdata,lookupname,unicode,lookuphash)
+ local handler=handlers[typ]
+ local ns=#subtables
+ local start=head
+ rlmode=0
+ if ns==1 then
+ local lookupname=subtables[1]
+ local lookupcache=lookuphash[lookupname]
+ if not lookupcache then
+ report_missing_cache(typ,lookupname)
+ else
+ while start do
+ local id=start.id
+ if id==glyph_code then
+ if start.font==font and start.subtype<256 then
+ local a=start[0]
+ if a then
+ a=(a==attr) and (not attribute or start[a_state]==attribute)
+ else
+ a=not attribute or start[a_state]==attribute
+ end
+ if a then
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,1)
+ if ok then
+ success=true
+ end
+ end
+ if start then start=start.next end
+ else
+ start=start.next
+ end
+elseif id==math_code then
+ start=endofmath(start).next
+ else
+ start=start.next
+ end
+ elseif id==whatsit_code then
+ local subtype=start.subtype
+ if subtype==dir_code then
+ local dir=start.dir
+ if dir=="+TRT" or dir=="+TLT" then
+ topstack=topstack+1
+ dirstack[topstack]=dir
+ elseif dir=="-TRT" or dir=="-TLT" then
+ topstack=topstack-1
+ end
+ local newdir=dirstack[topstack]
+ if newdir=="+TRT" then
+ rlmode=-1
+ elseif newdir=="+TLT" then
+ rlmode=1
+ else
+ rlmode=rlparmode
end
- end
- end
-
- local list = description.kerns
- if list then
- for lookup, krn in next, list do -- ref to glyph, saves lookup
- local target = lookuphash[lookup]
- if target then
- target[unicode] = krn
+ if trace_directions then
+ report_process("directions after txtdir %s: txtdir=%s:%s, parmode=%s, txtmode=%s",dir,topstack,newdir or "unset",rlparmode,rlmode)
+ end
+ elseif subtype==localpar_code then
+ local dir=start.dir
+ if dir=="TRT" then
+ rlparmode=-1
+ elseif dir=="TLT" then
+ rlparmode=1
else
- lookuphash[lookup] = { [unicode] = krn }
+ rlparmode=0
end
- end
- end
-
- local list = description.anchors
- if list then
- for typ, anchors in next, list do -- types
- if typ == "mark" or typ == "cexit" then -- or entry?
- for name, anchor in next, anchors do
- local lookups = anchor_to_lookup[name]
- if lookups then
- for lookup, _ in next, lookups do
- local target = lookuphash[lookup]
- if target then
- target[unicode] = anchors
- else
- lookuphash[lookup] = { [unicode] = anchors }
- end
- end
- end
- end
+ rlmode=rlparmode
+ if trace_directions then
+ report_process("directions after pardir %s: parmode=%s, txtmode=%s",dir,rlparmode,rlmode)
end
- end
- end
-
- end
-
- end
-
+ end
+ start=start.next
+elseif id==math_code then
+ start=endofmath(start).next
+ else
+ start=start.next
+ end
+ end
+ end
+ else
+ while start do
+ local id=start.id
+ if id==glyph_code then
+ if start.font==font and start.subtype<256 then
+ local a=start[0]
+ if a then
+ a=(a==attr) and (not attribute or start[a_state]==attribute)
+ else
+ a=not attribute or start[a_state]==attribute
+ end
+ if a then
+ for i=1,ns do
+ local lookupname=subtables[i]
+ local lookupcache=lookuphash[lookupname]
+ if lookupcache then
+ local lookupmatch=lookupcache[start.char]
+ if lookupmatch then
+ local ok
+ head,start,ok=handler(head,start,dataset[4],lookupname,lookupmatch,sequence,lookuphash,i)
+ if ok then
+ success=true
+ break
+ end
+ end
+ else
+ report_missing_cache(typ,lookupname)
+ end
+ end
+ if start then start=start.next end
+ else
+ start=start.next
+ end
+ else
+ start=start.next
+ end
+ elseif id==whatsit_code then
+ local subtype=start.subtype
+ if subtype==dir_code then
+ local dir=start.dir
+ if dir=="+TRT" or dir=="+TLT" then
+ topstack=topstack+1
+ dirstack[topstack]=dir
+ elseif dir=="-TRT" or dir=="-TLT" then
+ topstack=topstack-1
+ end
+ local newdir=dirstack[topstack]
+ if newdir=="+TRT" then
+ rlmode=-1
+ elseif newdir=="+TLT" then
+ rlmode=1
+ else
+ rlmode=rlparmode
+ end
+ if trace_directions then
+ report_process("directions after txtdir %s: txtdir=%s:%s, parmode=%s, txtmode=%s",dir,topstack,newdir or "unset",rlparmode,rlmode)
+ end
+ elseif subtype==localpar_code then
+ local dir=start.dir
+ if dir=="TRT" then
+ rlparmode=-1
+ elseif dir=="TLT" then
+ rlparmode=1
+ else
+ rlparmode=0
+ end
+ rlmode=rlparmode
+ if trace_directions then
+ report_process("directions after pardir %s: parmode=%s, txtmode=%s",dir,rlparmode,rlmode)
+ end
+ end
+ start=start.next
+elseif id==math_code then
+ start=endofmath(start).next
+ else
+ start=start.next
+ end
+ end
+ end
+ end
+ if success then
+ done=true
+ end
+ if trace_steps then
+ registerstep(head)
+ end
+ end
+ end
+ end
+ return head,done
end
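
Apart from the comment and whitespace stripping, this featuresprocessor hunk carries a functional change: all three traversal loops gain an "elseif id==math_code" branch that jumps from a math-on node straight past the matching math-off node via endofmath, so embedded formulas are no longer walked glyph by glyph. A minimal sketch of that skip, assuming math_code and endofmath are provided by the node helpers earlier in the merged file:

    -- walk a node list and hop over inline math in one step
    local n = head
    while n do
        if n.id == math_code then
            n = endofmath(n).next -- endofmath returns the closing math node
        else
            n = n.next
        end
    end
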
-
-local function split(replacement,original)
- local result = { }
- for i=1,#replacement do
- result[original[i]] = replacement[i]
+local function generic(lookupdata,lookupname,unicode,lookuphash)
+ local target=lookuphash[lookupname]
+ if target then
+ target[unicode]=lookupdata
+ else
+ lookuphash[lookupname]={ [unicode]=lookupdata }
+ end
+end
+local action={
+ substitution=generic,
+ multiple=generic,
+ alternate=generic,
+ position=generic,
+ ligature=function(lookupdata,lookupname,unicode,lookuphash)
+ local target=lookuphash[lookupname]
+ if not target then
+ target={}
+ lookuphash[lookupname]=target
+ end
+ for i=1,#lookupdata do
+ local li=lookupdata[i]
+ local tu=target[li]
+ if not tu then
+ tu={}
+ target[li]=tu
+ end
+ target=tu
+ end
+ target.ligature=unicode
+ end,
+ pair=function(lookupdata,lookupname,unicode,lookuphash)
+ local target=lookuphash[lookupname]
+ if not target then
+ target={}
+ lookuphash[lookupname]=target
end
- return result
+ local others=target[unicode]
+ local paired=lookupdata[1]
+ if others then
+ others[paired]=lookupdata
+ else
+ others={ [paired]=lookupdata }
+ target[unicode]=others
+ end
+ end,
+}
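
The ligature action above turns each component list into a small trie inside the per-lookup hash: every component indexes one level deeper and the final node records the ligature's codepoint. A hedged illustration (lookup name and codepoints are made up; action is a local of this closure):

    local lookuphash = { }
    -- components f (0x66) and i (0x69) forming a hypothetical fi ligature at 0xFB01
    action.ligature({ 0x66, 0x69 }, "ls_l_1", 0xFB01, lookuphash)
    -- afterwards: lookuphash.ls_l_1[0x66][0x69].ligature == 0xFB01
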
+local function prepare_lookups(tfmdata)
+ local rawdata=tfmdata.shared.rawdata
+ local resources=rawdata.resources
+ local lookuphash=resources.lookuphash
+ local anchor_to_lookup=resources.anchor_to_lookup
+ local lookup_to_anchor=resources.lookup_to_anchor
+ local lookuptypes=resources.lookuptypes
+ local characters=tfmdata.characters
+ local descriptions=tfmdata.descriptions
+ for unicode,character in next,characters do
+ local description=descriptions[unicode]
+ if description then
+ local lookups=description.slookups
+ if lookups then
+ for lookupname,lookupdata in next,lookups do
+ action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash)
+ end
+ end
+ local lookups=description.mlookups
+ if lookups then
+ for lookupname,lookuplist in next,lookups do
+ local lookuptype=lookuptypes[lookupname]
+ for l=1,#lookuplist do
+ local lookupdata=lookuplist[l]
+ action[lookuptype](lookupdata,lookupname,unicode,lookuphash)
+ end
+ end
+ end
+ local list=description.kerns
+ if list then
+ for lookup,krn in next,list do
+ local target=lookuphash[lookup]
+ if target then
+ target[unicode]=krn
+ else
+ lookuphash[lookup]={ [unicode]=krn }
+ end
+ end
+ end
+ local list=description.anchors
+ if list then
+ for typ,anchors in next,list do
+ if typ=="mark" or typ=="cexit" then
+ for name,anchor in next,anchors do
+ local lookups=anchor_to_lookup[name]
+ if lookups then
+ for lookup,_ in next,lookups do
+ local target=lookuphash[lookup]
+ if target then
+ target[unicode]=anchors
+ else
+ lookuphash[lookup]={ [unicode]=anchors }
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+ end
end
-
-local valid = {
- coverage = { chainsub = true, chainpos = true, contextsub = true },
- reversecoverage = { reversesub = true },
- glyphs = { chainsub = true, chainpos = true },
+local function split(replacement,original)
+ local result={}
+ for i=1,#replacement do
+ result[original[i]]=replacement[i]
+ end
+ return result
+end
+local valid={
+ coverage={ chainsub=true,chainpos=true,contextsub=true },
+ reversecoverage={ reversesub=true },
+ glyphs={ chainsub=true,chainpos=true },
}
-
local function prepare_contextchains(tfmdata)
- local rawdata = tfmdata.shared.rawdata
- local resources = rawdata.resources
- local lookuphash = resources.lookuphash
- local lookups = rawdata.lookups
- if lookups then
- for lookupname, lookupdata in next, rawdata.lookups do
- local lookuptype = lookupdata.type
- if lookuptype then
- local rules = lookupdata.rules
- if rules then
- local format = lookupdata.format
- local validformat = valid[format]
- if not validformat then
- report_prepare("unsupported format %s",format)
- elseif not validformat[lookuptype] then
- -- todo: dejavu-serif has one (but i need to see what use it has)
- report_prepare("unsupported %s %s for %s",format,lookuptype,lookupname)
- else
- local contexts = lookuphash[lookupname]
- if not contexts then
- contexts = { }
- lookuphash[lookupname] = contexts
- end
- local t, nt = { }, 0
- for nofrules=1,#rules do
- local rule = rules[nofrules]
- local current = rule.current
- local before = rule.before
- local after = rule.after
- local replacements = rule.replacements
- local sequence = { }
- local nofsequences = 0
- -- Eventually we can store start, stop and sequence in the cached file
- -- but then less sharing takes place so best not do that without a lot
- -- of profiling so let's forget about it.
- if before then
- for n=1,#before do
- nofsequences = nofsequences + 1
- sequence[nofsequences] = before[n]
- end
- end
- local start = nofsequences + 1
- for n=1,#current do
- nofsequences = nofsequences + 1
- sequence[nofsequences] = current[n]
- end
- local stop = nofsequences
- if after then
- for n=1,#after do
- nofsequences = nofsequences + 1
- sequence[nofsequences] = after[n]
- end
- end
- if sequence[1] then
- -- Replacements only happen with reverse lookups as they are single only. We
- -- could pack them into current (replacement value instead of true) and then
- -- use sequence[start] instead but it's somewhat ugly.
- nt = nt + 1
- t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements }
- for unic, _ in next, sequence[start] do
- local cu = contexts[unic]
- if not cu then
- contexts[unic] = t
- end
- end
- end
- end
- end
- else
- -- no rules
- end
- else
- report_prepare("missing lookuptype for %s",lookupname)
- end
+ local rawdata=tfmdata.shared.rawdata
+ local resources=rawdata.resources
+ local lookuphash=resources.lookuphash
+ local lookups=rawdata.lookups
+ if lookups then
+ for lookupname,lookupdata in next,rawdata.lookups do
+ local lookuptype=lookupdata.type
+ if lookuptype then
+ local rules=lookupdata.rules
+ if rules then
+ local format=lookupdata.format
+ local validformat=valid[format]
+ if not validformat then
+ report_prepare("unsupported format %s",format)
+ elseif not validformat[lookuptype] then
+ report_prepare("unsupported %s %s for %s",format,lookuptype,lookupname)
+ else
+ local contexts=lookuphash[lookupname]
+ if not contexts then
+ contexts={}
+ lookuphash[lookupname]=contexts
+ end
+ local t,nt={},0
+ for nofrules=1,#rules do
+ local rule=rules[nofrules]
+ local current=rule.current
+ local before=rule.before
+ local after=rule.after
+ local replacements=rule.replacements
+ local sequence={}
+ local nofsequences=0
+ if before then
+ for n=1,#before do
+ nofsequences=nofsequences+1
+ sequence[nofsequences]=before[n]
+ end
+ end
+ local start=nofsequences+1
+ for n=1,#current do
+ nofsequences=nofsequences+1
+ sequence[nofsequences]=current[n]
+ end
+ local stop=nofsequences
+ if after then
+ for n=1,#after do
+ nofsequences=nofsequences+1
+ sequence[nofsequences]=after[n]
+ end
+ end
+ if sequence[1] then
+ nt=nt+1
+ t[nt]={ nofrules,lookuptype,sequence,start,stop,rule.lookups,replacements }
+ for unic,_ in next,sequence[start] do
+ local cu=contexts[unic]
+ if not cu then
+ contexts[unic]=t
+ end
+ end
+ end
+ end
+ end
+ else
end
+ else
+ report_prepare("missing lookuptype for %s",lookupname)
+ end
end
+ end
end
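
prepare_contextchains flattens each rule's before/current/after coverage into one sequence and stores a compact record per rule; these records are the ck tables that normal_handle_contextchain indexes earlier in this file. As a reading aid (comments only, the field meanings follow from the constructor of t[nt] above):

    -- ck[1] rule number within the lookup   ck[5] stop: index of the last 'current' slot (l)
    -- ck[2] lookuptype                      ck[6] rule.lookups, the chained lookups to apply
    -- ck[3] flattened coverage sequence     ck[7] replacements, only for reverse substitutions
    -- ck[4] start: index of the first 'current' slot (f)
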
-
--- we can consider lookuphash == false (initialized but empty) vs lookuphash == table
-
local function featuresinitializer(tfmdata,value)
- if true then -- value then
- -- beware we need to use the topmost properties table
- local rawdata = tfmdata.shared.rawdata
- local properties = rawdata.properties
- if not properties.initialized then
- local starttime = trace_preparing and os.clock()
- local resources = rawdata.resources
- resources.lookuphash = resources.lookuphash or { }
- prepare_contextchains(tfmdata)
- prepare_lookups(tfmdata)
- properties.initialized = true
- if trace_preparing then
- report_prepare("preparation time is %0.3f seconds for %s",os.clock()-starttime,tfmdata.properties.fullname or "?")
- end
- end
- end
+ if true then
+ local rawdata=tfmdata.shared.rawdata
+ local properties=rawdata.properties
+ if not properties.initialized then
+ local starttime=trace_preparing and os.clock()
+ local resources=rawdata.resources
+ resources.lookuphash=resources.lookuphash or {}
+ prepare_contextchains(tfmdata)
+ prepare_lookups(tfmdata)
+ properties.initialized=true
+ if trace_preparing then
+ report_prepare("preparation time is %0.3f seconds for %s",os.clock()-starttime,tfmdata.properties.fullname or "?")
+ end
+ end
+ end
end
-
registerotffeature {
- name = "features",
- description = "features",
- default = true,
- initializers = {
- position = 1,
- node = featuresinitializer,
- },
- processors = {
- node = featuresprocessor,
- }
+ name="features",
+ description="features",
+ default=true,
+ initializers={
+ position=1,
+ node=featuresinitializer,
+ },
+ processors={
+ node=featuresprocessor,
+ }
}
-
--- This can be used for extra handlers, but should be used with care!
-
-otf.handlers = handlers
+otf.handlers=handlers
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['luatex-fonts-chr'] = {
- version = 1.001,
- comment = "companion to luatex-fonts.lua",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['luatex-fonts-lua']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
end
-
-characters = characters or { }
-characters.categories = {
- [0x0300]="mn",
- [0x0301]="mn",
- [0x0302]="mn",
- [0x0303]="mn",
- [0x0304]="mn",
- [0x0305]="mn",
- [0x0306]="mn",
- [0x0307]="mn",
- [0x0308]="mn",
- [0x0309]="mn",
- [0x030A]="mn",
- [0x030B]="mn",
- [0x030C]="mn",
- [0x030D]="mn",
- [0x030E]="mn",
- [0x030F]="mn",
- [0x0310]="mn",
- [0x0311]="mn",
- [0x0312]="mn",
- [0x0313]="mn",
- [0x0314]="mn",
- [0x0315]="mn",
- [0x0316]="mn",
- [0x0317]="mn",
- [0x0318]="mn",
- [0x0319]="mn",
- [0x031A]="mn",
- [0x031B]="mn",
- [0x031C]="mn",
- [0x031D]="mn",
- [0x031E]="mn",
- [0x031F]="mn",
- [0x0320]="mn",
- [0x0321]="mn",
- [0x0322]="mn",
- [0x0323]="mn",
- [0x0324]="mn",
- [0x0325]="mn",
- [0x0326]="mn",
- [0x0327]="mn",
- [0x0328]="mn",
- [0x0329]="mn",
- [0x032A]="mn",
- [0x032B]="mn",
- [0x032C]="mn",
- [0x032D]="mn",
- [0x032E]="mn",
- [0x032F]="mn",
- [0x0330]="mn",
- [0x0331]="mn",
- [0x0332]="mn",
- [0x0333]="mn",
- [0x0334]="mn",
- [0x0335]="mn",
- [0x0336]="mn",
- [0x0337]="mn",
- [0x0338]="mn",
- [0x0339]="mn",
- [0x033A]="mn",
- [0x033B]="mn",
- [0x033C]="mn",
- [0x033D]="mn",
- [0x033E]="mn",
- [0x033F]="mn",
- [0x0340]="mn",
- [0x0341]="mn",
- [0x0342]="mn",
- [0x0343]="mn",
- [0x0344]="mn",
- [0x0345]="mn",
- [0x0346]="mn",
- [0x0347]="mn",
- [0x0348]="mn",
- [0x0349]="mn",
- [0x034A]="mn",
- [0x034B]="mn",
- [0x034C]="mn",
- [0x034D]="mn",
- [0x034E]="mn",
- [0x034F]="mn",
- [0x0350]="mn",
- [0x0351]="mn",
- [0x0352]="mn",
- [0x0353]="mn",
- [0x0354]="mn",
- [0x0355]="mn",
- [0x0356]="mn",
- [0x0357]="mn",
- [0x0358]="mn",
- [0x0359]="mn",
- [0x035A]="mn",
- [0x035B]="mn",
- [0x035C]="mn",
- [0x035D]="mn",
- [0x035E]="mn",
- [0x035F]="mn",
- [0x0360]="mn",
- [0x0361]="mn",
- [0x0362]="mn",
- [0x0363]="mn",
- [0x0364]="mn",
- [0x0365]="mn",
- [0x0366]="mn",
- [0x0367]="mn",
- [0x0368]="mn",
- [0x0369]="mn",
- [0x036A]="mn",
- [0x036B]="mn",
- [0x036C]="mn",
- [0x036D]="mn",
- [0x036E]="mn",
- [0x036F]="mn",
- [0x0483]="mn",
- [0x0484]="mn",
- [0x0485]="mn",
- [0x0486]="mn",
- [0x0591]="mn",
- [0x0592]="mn",
- [0x0593]="mn",
- [0x0594]="mn",
- [0x0595]="mn",
- [0x0596]="mn",
- [0x0597]="mn",
- [0x0598]="mn",
- [0x0599]="mn",
- [0x059A]="mn",
- [0x059B]="mn",
- [0x059C]="mn",
- [0x059D]="mn",
- [0x059E]="mn",
- [0x059F]="mn",
- [0x05A0]="mn",
- [0x05A1]="mn",
- [0x05A2]="mn",
- [0x05A3]="mn",
- [0x05A4]="mn",
- [0x05A5]="mn",
- [0x05A6]="mn",
- [0x05A7]="mn",
- [0x05A8]="mn",
- [0x05A9]="mn",
- [0x05AA]="mn",
- [0x05AB]="mn",
- [0x05AC]="mn",
- [0x05AD]="mn",
- [0x05AE]="mn",
- [0x05AF]="mn",
- [0x05B0]="mn",
- [0x05B1]="mn",
- [0x05B2]="mn",
- [0x05B3]="mn",
- [0x05B4]="mn",
- [0x05B5]="mn",
- [0x05B6]="mn",
- [0x05B7]="mn",
- [0x05B8]="mn",
- [0x05B9]="mn",
- [0x05BA]="mn",
- [0x05BB]="mn",
- [0x05BC]="mn",
- [0x05BD]="mn",
- [0x05BF]="mn",
- [0x05C1]="mn",
- [0x05C2]="mn",
- [0x05C4]="mn",
- [0x05C5]="mn",
- [0x05C7]="mn",
- [0x0610]="mn",
- [0x0611]="mn",
- [0x0612]="mn",
- [0x0613]="mn",
- [0x0614]="mn",
- [0x0615]="mn",
- [0x064B]="mn",
- [0x064C]="mn",
- [0x064D]="mn",
- [0x064E]="mn",
- [0x064F]="mn",
- [0x0650]="mn",
- [0x0651]="mn",
- [0x0652]="mn",
- [0x0653]="mn",
- [0x0654]="mn",
- [0x0655]="mn",
- [0x0656]="mn",
- [0x0657]="mn",
- [0x0658]="mn",
- [0x0659]="mn",
- [0x065A]="mn",
- [0x065B]="mn",
- [0x065C]="mn",
- [0x065D]="mn",
- [0x065E]="mn",
- [0x0670]="mn",
- [0x06D6]="mn",
- [0x06D7]="mn",
- [0x06D8]="mn",
- [0x06D9]="mn",
- [0x06DA]="mn",
- [0x06DB]="mn",
- [0x06DC]="mn",
- [0x06DF]="mn",
- [0x06E0]="mn",
- [0x06E1]="mn",
- [0x06E2]="mn",
- [0x06E3]="mn",
- [0x06E4]="mn",
- [0x06E7]="mn",
- [0x06E8]="mn",
- [0x06EA]="mn",
- [0x06EB]="mn",
- [0x06EC]="mn",
- [0x06ED]="mn",
- [0x0711]="mn",
- [0x0730]="mn",
- [0x0731]="mn",
- [0x0732]="mn",
- [0x0733]="mn",
- [0x0734]="mn",
- [0x0735]="mn",
- [0x0736]="mn",
- [0x0737]="mn",
- [0x0738]="mn",
- [0x0739]="mn",
- [0x073A]="mn",
- [0x073B]="mn",
- [0x073C]="mn",
- [0x073D]="mn",
- [0x073E]="mn",
- [0x073F]="mn",
- [0x0740]="mn",
- [0x0741]="mn",
- [0x0742]="mn",
- [0x0743]="mn",
- [0x0744]="mn",
- [0x0745]="mn",
- [0x0746]="mn",
- [0x0747]="mn",
- [0x0748]="mn",
- [0x0749]="mn",
- [0x074A]="mn",
- [0x07A6]="mn",
- [0x07A7]="mn",
- [0x07A8]="mn",
- [0x07A9]="mn",
- [0x07AA]="mn",
- [0x07AB]="mn",
- [0x07AC]="mn",
- [0x07AD]="mn",
- [0x07AE]="mn",
- [0x07AF]="mn",
- [0x07B0]="mn",
- [0x07EB]="mn",
- [0x07EC]="mn",
- [0x07ED]="mn",
- [0x07EE]="mn",
- [0x07EF]="mn",
- [0x07F0]="mn",
- [0x07F1]="mn",
- [0x07F2]="mn",
- [0x07F3]="mn",
- [0x0901]="mn",
- [0x0902]="mn",
- [0x093C]="mn",
- [0x0941]="mn",
- [0x0942]="mn",
- [0x0943]="mn",
- [0x0944]="mn",
- [0x0945]="mn",
- [0x0946]="mn",
- [0x0947]="mn",
- [0x0948]="mn",
- [0x094D]="mn",
- [0x0951]="mn",
- [0x0952]="mn",
- [0x0953]="mn",
- [0x0954]="mn",
- [0x0962]="mn",
- [0x0963]="mn",
- [0x0981]="mn",
- [0x09BC]="mn",
- [0x09C1]="mn",
- [0x09C2]="mn",
- [0x09C3]="mn",
- [0x09C4]="mn",
- [0x09CD]="mn",
- [0x09E2]="mn",
- [0x09E3]="mn",
- [0x0A01]="mn",
- [0x0A02]="mn",
- [0x0A3C]="mn",
- [0x0A41]="mn",
- [0x0A42]="mn",
- [0x0A47]="mn",
- [0x0A48]="mn",
- [0x0A4B]="mn",
- [0x0A4C]="mn",
- [0x0A4D]="mn",
- [0x0A70]="mn",
- [0x0A71]="mn",
- [0x0A81]="mn",
- [0x0A82]="mn",
- [0x0ABC]="mn",
- [0x0AC1]="mn",
- [0x0AC2]="mn",
- [0x0AC3]="mn",
- [0x0AC4]="mn",
- [0x0AC5]="mn",
- [0x0AC7]="mn",
- [0x0AC8]="mn",
- [0x0ACD]="mn",
- [0x0AE2]="mn",
- [0x0AE3]="mn",
- [0x0B01]="mn",
- [0x0B3C]="mn",
- [0x0B3F]="mn",
- [0x0B41]="mn",
- [0x0B42]="mn",
- [0x0B43]="mn",
- [0x0B4D]="mn",
- [0x0B56]="mn",
- [0x0B82]="mn",
- [0x0BC0]="mn",
- [0x0BCD]="mn",
- [0x0C3E]="mn",
- [0x0C3F]="mn",
- [0x0C40]="mn",
- [0x0C46]="mn",
- [0x0C47]="mn",
- [0x0C48]="mn",
- [0x0C4A]="mn",
- [0x0C4B]="mn",
- [0x0C4C]="mn",
- [0x0C4D]="mn",
- [0x0C55]="mn",
- [0x0C56]="mn",
- [0x0CBC]="mn",
- [0x0CBF]="mn",
- [0x0CC6]="mn",
- [0x0CCC]="mn",
- [0x0CCD]="mn",
- [0x0CE2]="mn",
- [0x0CE3]="mn",
- [0x0D41]="mn",
- [0x0D42]="mn",
- [0x0D43]="mn",
- [0x0D4D]="mn",
- [0x0DCA]="mn",
- [0x0DD2]="mn",
- [0x0DD3]="mn",
- [0x0DD4]="mn",
- [0x0DD6]="mn",
- [0x0E31]="mn",
- [0x0E34]="mn",
- [0x0E35]="mn",
- [0x0E36]="mn",
- [0x0E37]="mn",
- [0x0E38]="mn",
- [0x0E39]="mn",
- [0x0E3A]="mn",
- [0x0E47]="mn",
- [0x0E48]="mn",
- [0x0E49]="mn",
- [0x0E4A]="mn",
- [0x0E4B]="mn",
- [0x0E4C]="mn",
- [0x0E4D]="mn",
- [0x0E4E]="mn",
- [0x0EB1]="mn",
- [0x0EB4]="mn",
- [0x0EB5]="mn",
- [0x0EB6]="mn",
- [0x0EB7]="mn",
- [0x0EB8]="mn",
- [0x0EB9]="mn",
- [0x0EBB]="mn",
- [0x0EBC]="mn",
- [0x0EC8]="mn",
- [0x0EC9]="mn",
- [0x0ECA]="mn",
- [0x0ECB]="mn",
- [0x0ECC]="mn",
- [0x0ECD]="mn",
- [0x0F18]="mn",
- [0x0F19]="mn",
- [0x0F35]="mn",
- [0x0F37]="mn",
- [0x0F39]="mn",
- [0x0F71]="mn",
- [0x0F72]="mn",
- [0x0F73]="mn",
- [0x0F74]="mn",
- [0x0F75]="mn",
- [0x0F76]="mn",
- [0x0F77]="mn",
- [0x0F78]="mn",
- [0x0F79]="mn",
- [0x0F7A]="mn",
- [0x0F7B]="mn",
- [0x0F7C]="mn",
- [0x0F7D]="mn",
- [0x0F7E]="mn",
- [0x0F80]="mn",
- [0x0F81]="mn",
- [0x0F82]="mn",
- [0x0F83]="mn",
- [0x0F84]="mn",
- [0x0F86]="mn",
- [0x0F87]="mn",
- [0x0F90]="mn",
- [0x0F91]="mn",
- [0x0F92]="mn",
- [0x0F93]="mn",
- [0x0F94]="mn",
- [0x0F95]="mn",
- [0x0F96]="mn",
- [0x0F97]="mn",
- [0x0F99]="mn",
- [0x0F9A]="mn",
- [0x0F9B]="mn",
- [0x0F9C]="mn",
- [0x0F9D]="mn",
- [0x0F9E]="mn",
- [0x0F9F]="mn",
- [0x0FA0]="mn",
- [0x0FA1]="mn",
- [0x0FA2]="mn",
- [0x0FA3]="mn",
- [0x0FA4]="mn",
- [0x0FA5]="mn",
- [0x0FA6]="mn",
- [0x0FA7]="mn",
- [0x0FA8]="mn",
- [0x0FA9]="mn",
- [0x0FAA]="mn",
- [0x0FAB]="mn",
- [0x0FAC]="mn",
- [0x0FAD]="mn",
- [0x0FAE]="mn",
- [0x0FAF]="mn",
- [0x0FB0]="mn",
- [0x0FB1]="mn",
- [0x0FB2]="mn",
- [0x0FB3]="mn",
- [0x0FB4]="mn",
- [0x0FB5]="mn",
- [0x0FB6]="mn",
- [0x0FB7]="mn",
- [0x0FB8]="mn",
- [0x0FB9]="mn",
- [0x0FBA]="mn",
- [0x0FBB]="mn",
- [0x0FBC]="mn",
- [0x0FC6]="mn",
- [0x102D]="mn",
- [0x102E]="mn",
- [0x102F]="mn",
- [0x1030]="mn",
- [0x1032]="mn",
- [0x1036]="mn",
- [0x1037]="mn",
- [0x1039]="mn",
- [0x1058]="mn",
- [0x1059]="mn",
- [0x135F]="mn",
- [0x1712]="mn",
- [0x1713]="mn",
- [0x1714]="mn",
- [0x1732]="mn",
- [0x1733]="mn",
- [0x1734]="mn",
- [0x1752]="mn",
- [0x1753]="mn",
- [0x1772]="mn",
- [0x1773]="mn",
- [0x17B7]="mn",
- [0x17B8]="mn",
- [0x17B9]="mn",
- [0x17BA]="mn",
- [0x17BB]="mn",
- [0x17BC]="mn",
- [0x17BD]="mn",
- [0x17C6]="mn",
- [0x17C9]="mn",
- [0x17CA]="mn",
- [0x17CB]="mn",
- [0x17CC]="mn",
- [0x17CD]="mn",
- [0x17CE]="mn",
- [0x17CF]="mn",
- [0x17D0]="mn",
- [0x17D1]="mn",
- [0x17D2]="mn",
- [0x17D3]="mn",
- [0x17DD]="mn",
- [0x180B]="mn",
- [0x180C]="mn",
- [0x180D]="mn",
- [0x18A9]="mn",
- [0x1920]="mn",
- [0x1921]="mn",
- [0x1922]="mn",
- [0x1927]="mn",
- [0x1928]="mn",
- [0x1932]="mn",
- [0x1939]="mn",
- [0x193A]="mn",
- [0x193B]="mn",
- [0x1A17]="mn",
- [0x1A18]="mn",
- [0x1B00]="mn",
- [0x1B01]="mn",
- [0x1B02]="mn",
- [0x1B03]="mn",
- [0x1B34]="mn",
- [0x1B36]="mn",
- [0x1B37]="mn",
- [0x1B38]="mn",
- [0x1B39]="mn",
- [0x1B3A]="mn",
- [0x1B3C]="mn",
- [0x1B42]="mn",
- [0x1B6B]="mn",
- [0x1B6C]="mn",
- [0x1B6D]="mn",
- [0x1B6E]="mn",
- [0x1B6F]="mn",
- [0x1B70]="mn",
- [0x1B71]="mn",
- [0x1B72]="mn",
- [0x1B73]="mn",
- [0x1DC0]="mn",
- [0x1DC1]="mn",
- [0x1DC2]="mn",
- [0x1DC3]="mn",
- [0x1DC4]="mn",
- [0x1DC5]="mn",
- [0x1DC6]="mn",
- [0x1DC7]="mn",
- [0x1DC8]="mn",
- [0x1DC9]="mn",
- [0x1DCA]="mn",
- [0x1DFE]="mn",
- [0x1DFF]="mn",
- [0x20D0]="mn",
- [0x20D1]="mn",
- [0x20D2]="mn",
- [0x20D3]="mn",
- [0x20D4]="mn",
- [0x20D5]="mn",
- [0x20D6]="mn",
- [0x20D7]="mn",
- [0x20D8]="mn",
- [0x20D9]="mn",
- [0x20DA]="mn",
- [0x20DB]="mn",
- [0x20DC]="mn",
- [0x20E1]="mn",
- [0x20E5]="mn",
- [0x20E6]="mn",
- [0x20E7]="mn",
- [0x20E8]="mn",
- [0x20E9]="mn",
- [0x20EA]="mn",
- [0x20EB]="mn",
- [0x20EC]="mn",
- [0x20ED]="mn",
- [0x20EE]="mn",
- [0x20EF]="mn",
- [0x302A]="mn",
- [0x302B]="mn",
- [0x302C]="mn",
- [0x302D]="mn",
- [0x302E]="mn",
- [0x302F]="mn",
- [0x3099]="mn",
- [0x309A]="mn",
- [0xA806]="mn",
- [0xA80B]="mn",
- [0xA825]="mn",
- [0xA826]="mn",
- [0xFB1E]="mn",
- [0xFE00]="mn",
- [0xFE01]="mn",
- [0xFE02]="mn",
- [0xFE03]="mn",
- [0xFE04]="mn",
- [0xFE05]="mn",
- [0xFE06]="mn",
- [0xFE07]="mn",
- [0xFE08]="mn",
- [0xFE09]="mn",
- [0xFE0A]="mn",
- [0xFE0B]="mn",
- [0xFE0C]="mn",
- [0xFE0D]="mn",
- [0xFE0E]="mn",
- [0xFE0F]="mn",
- [0xFE20]="mn",
- [0xFE21]="mn",
- [0xFE22]="mn",
- [0xFE23]="mn",
-}
-
-end -- closure
-
-do -- begin closure to overcome local limits and interference
-
-if not modules then modules = { } end modules ['luatex-fonts-lua'] = {
- version = 1.001,
- comment = "companion to luatex-*.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
-}
-
-if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
-end
-
-local fonts = fonts
-fonts.formats.lua = "lua"
-
+local fonts=fonts
+fonts.formats.lua="lua"
function fonts.readers.lua(specification)
- local fullname = specification.filename or ""
- if fullname == "" then
- local forced = specification.forced or ""
- if forced ~= "" then
- fullname = specification.name .. "." .. forced
- else
- fullname = specification.name
- end
- end
- local fullname = resolvers.findfile(fullname) or ""
- if fullname ~= "" then
- local loader = loadfile(fullname)
- loader = loader and loader()
- return loader and loader(specification)
+ local fullname=specification.filename or ""
+ if fullname=="" then
+ local forced=specification.forced or ""
+ if forced~="" then
+ fullname=specification.name.."."..forced
+ else
+ fullname=specification.name
end
+ end
+ local fullname=resolvers.findfile(fullname) or ""
+ if fullname~="" then
+ local loader=loadfile(fullname)
+ loader=loader and loader()
+ return loader and loader(specification)
+ end
end
end -- closure
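-- For illustration: fonts.readers.lua above expects "<name>.lua" to be a file on
-- the search path that returns a constructor; the constructor receives the
-- specification and must return a (tfm-like) font table. A minimal sketch of such
-- a file follows; the name "demo-font.lua" and all metrics are made up.
--
-- demo-font.lua:
return function(specification)
  return {
    name       = "demo-font",
    parameters = { size = specification.size or 655360, designsize = 655360 },
    characters = { [0x41] = { width = 400000, height = 430000 } }, -- just "A"
    properties = { },
  }
end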
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['font-def'] = {
- version = 1.001,
- comment = "companion to font-ini.mkiv",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['font-def']={
+ version=1.001,
+ comment="companion to font-ini.mkiv",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
--- We can overload some of the definers.functions so we don't local them.
-
-local format, gmatch, match, find, lower, gsub = string.format, string.gmatch, string.match, string.find, string.lower, string.gsub
-local tostring, next = tostring, next
-local lpegmatch = lpeg.match
-
-local allocate = utilities.storage.allocate
-
-local trace_defining = false trackers .register("fonts.defining", function(v) trace_defining = v end)
-local directive_embedall = false directives.register("fonts.embedall", function(v) directive_embedall = v end)
-
-trackers.register("fonts.loading", "fonts.defining", "otf.loading", "afm.loading", "tfm.loading")
-trackers.register("fonts.all", "fonts.*", "otf.*", "afm.*", "tfm.*")
-
-local report_defining = logs.reporter("fonts","defining")
-
---[[ldx--
-<p>Here we deal with defining fonts. We do so by intercepting the
-default loader that only handles <l n='tfm'/>.</p>
---ldx]]--
-
-local fonts = fonts
-local fontdata = fonts.hashes.identifiers
-local readers = fonts.readers
-local definers = fonts.definers
-local specifiers = fonts.specifiers
-local constructors = fonts.constructors
-local fontgoodies = fonts.goodies
-
-readers.sequence = allocate { 'otf', 'ttf', 'afm', 'tfm', 'lua' } -- dfont ttc
-
-local variants = allocate()
-specifiers.variants = variants
-
-definers.methods = definers.methods or { }
-
-local internalized = allocate() -- internal tex numbers (private)
-
-local loadedfonts = constructors.loadedfonts
-local designsizes = constructors.designsizes
-
--- not in generic (some day I'll make two defs, one for context, one for generic)
-
-local resolvefile = fontgoodies and fontgoodies.filenames and fontgoodies.filenames.resolve or function(s) return s end
-
---[[ldx--
-<p>We hardly gain anything when we cache the final (pre scaled)
-<l n='tfm'/> table. But it can be handy for debugging, so we no
-longer carry this code along. Also, we now have quite some reference
-to other tables so we would end up with lots of catches.</p>
---ldx]]--
-
---[[ldx--
-<p>We can prefix a font specification by <type>name:</type> or
-<type>file:</type>. The first case will result in a lookup in the
-synonym table.</p>
-
-<typing>
-[ name: | file: ] identifier [ separator [ specification ] ]
-</typing>
-
-<p>The following function split the font specification into components
-and prepares a table that will move along as we proceed.</p>
---ldx]]--
-
--- beware, we discard additional specs
---
--- method:name method:name(sub) method:name(sub)*spec method:name*spec
--- name name(sub) name(sub)*spec name*spec
--- name@spec*oeps
-
-local splitter, splitspecifiers = nil, "" -- not so nice
-
-local P, C, S, Cc = lpeg.P, lpeg.C, lpeg.S, lpeg.Cc
-
-local left = P("(")
-local right = P(")")
-local colon = P(":")
-local space = P(" ")
-
-definers.defaultlookup = "file"
-
-local prefixpattern = P(false)
-
+local format,gmatch,match,find,lower,gsub=string.format,string.gmatch,string.match,string.find,string.lower,string.gsub
+local tostring,next=tostring,next
+local lpegmatch=lpeg.match
+local allocate=utilities.storage.allocate
+local trace_defining=false trackers .register("fonts.defining",function(v) trace_defining=v end)
+local directive_embedall=false directives.register("fonts.embedall",function(v) directive_embedall=v end)
+trackers.register("fonts.loading","fonts.defining","otf.loading","afm.loading","tfm.loading")
+trackers.register("fonts.all","fonts.*","otf.*","afm.*","tfm.*")
+local report_defining=logs.reporter("fonts","defining")
+local fonts=fonts
+local fontdata=fonts.hashes.identifiers
+local readers=fonts.readers
+local definers=fonts.definers
+local specifiers=fonts.specifiers
+local constructors=fonts.constructors
+local fontgoodies=fonts.goodies
+readers.sequence=allocate { 'otf','ttf','afm','tfm','lua' }
+local variants=allocate()
+specifiers.variants=variants
+definers.methods=definers.methods or {}
+local internalized=allocate()
+local loadedfonts=constructors.loadedfonts
+local designsizes=constructors.designsizes
+local resolvefile=fontgoodies and fontgoodies.filenames and fontgoodies.filenames.resolve or function(s) return s end
+local splitter,splitspecifiers=nil,""
+local P,C,S,Cc=lpeg.P,lpeg.C,lpeg.S,lpeg.Cc
+local left=P("(")
+local right=P(")")
+local colon=P(":")
+local space=P(" ")
+definers.defaultlookup="file"
+local prefixpattern=P(false)
local function addspecifier(symbol)
- splitspecifiers = splitspecifiers .. symbol
- local method = S(splitspecifiers)
- local lookup = C(prefixpattern) * colon
- local sub = left * C(P(1-left-right-method)^1) * right
- local specification = C(method) * C(P(1)^1)
- local name = C((1-sub-specification)^1)
- splitter = P((lookup + Cc("")) * name * (sub + Cc("")) * (specification + Cc("")))
+ splitspecifiers=splitspecifiers..symbol
+ local method=S(splitspecifiers)
+ local lookup=C(prefixpattern)*colon
+ local sub=left*C(P(1-left-right-method)^1)*right
+ local specification=C(method)*C(P(1)^1)
+ local name=C((1-sub-specification)^1)
+ splitter=P((lookup+Cc(""))*name*(sub+Cc(""))*(specification+Cc("")))
end
-
local function addlookup(str,default)
- prefixpattern = prefixpattern + P(str)
+ prefixpattern=prefixpattern+P(str)
end
-
-definers.addlookup = addlookup
-
+definers.addlookup=addlookup
addlookup("file")
addlookup("name")
addlookup("spec")
-
local function getspecification(str)
- return lpegmatch(splitter,str)
+ return lpegmatch(splitter,str)
end
-
-definers.getspecification = getspecification
-
+definers.getspecification=getspecification
function definers.registersplit(symbol,action,verbosename)
- addspecifier(symbol)
- variants[symbol] = action
- if verbosename then
- variants[verbosename] = action
- end
+ addspecifier(symbol)
+ variants[symbol]=action
+ if verbosename then
+ variants[verbosename]=action
+ end
end
-
local function makespecification(specification,lookup,name,sub,method,detail,size)
- size = size or 655360
- if trace_defining then
- report_defining("%s -> lookup: %s, name: %s, sub: %s, method: %s, detail: %s",
- specification, lookup ~= "" and lookup or "[file]", name ~= "" and name or "-",
- sub ~= "" and sub or "-", method ~= "" and method or "-", detail ~= "" and detail or "-")
- end
- if not lookup or lookup == "" then
- lookup = definers.defaultlookup
- end
- local t = {
- lookup = lookup, -- forced type
- specification = specification, -- full specification
- size = size, -- size in scaled points or -1000*n
- name = name, -- font or filename
- sub = sub, -- subfont (eg in ttc)
- method = method, -- specification method
- detail = detail, -- specification
- resolved = "", -- resolved font name
- forced = "", -- forced loader
- features = { }, -- preprocessed features
- }
- return t
-end
-
-
-definers.makespecification = makespecification
-
-function definers.analyze(specification, size)
- -- can be optimized with locals
- local lookup, name, sub, method, detail = getspecification(specification or "")
- return makespecification(specification, lookup, name, sub, method, detail, size)
-end
-
---[[ldx--
-<p>We can resolve the filename using the next function:</p>
---ldx]]--
-
-definers.resolvers = definers.resolvers or { }
-local resolvers = definers.resolvers
-
--- todo: reporter
-
+ size=size or 655360
+ if trace_defining then
+ report_defining("%s -> lookup: %s, name: %s, sub: %s, method: %s, detail: %s",
+ specification,lookup~="" and lookup or "[file]",name~="" and name or "-",
+ sub~="" and sub or "-",method~="" and method or "-",detail~="" and detail or "-")
+ end
+ if not lookup or lookup=="" then
+ lookup=definers.defaultlookup
+ end
+ local t={
+ lookup=lookup,
+ specification=specification,
+ size=size,
+ name=name,
+ sub=sub,
+ method=method,
+ detail=detail,
+ resolved="",
+ forced="",
+ features={},
+ }
+ return t
+end
+definers.makespecification=makespecification
+function definers.analyze(specification,size)
+ local lookup,name,sub,method,detail=getspecification(specification or "")
+ return makespecification(specification,lookup,name,sub,method,detail,size)
+end
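-- For illustration: the table that makespecification() builds and that the later
-- stages (resolve, loadfont, read) pass around; the request below is made up and
-- the shown values are only indicative. In the generic loader, luatex-font-def.lua
-- further down replaces getspecification, so the colon syntax is handled there.
local spec = fonts.definers.analyze("lmroman10-regular", 10*65536)
-- spec = {
--   lookup        = "file",              -- lookup type, defaults to definers.defaultlookup
--   specification = "lmroman10-regular", -- the full, unparsed request
--   size          = 655360,              -- scaled points, or -1000*n for "scaled n"
--   name          = "lmroman10-regular", -- font or file name
--   sub           = "",                  -- subfont, e.g. inside a ttc
--   method        = "",  detail = "",    -- optional specifier and its payload
--   resolved      = "",  forced  = "",   -- filled in later (resolvers, forced loader)
--   features      = { },                 -- preprocessed features
-- }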
+definers.resolvers=definers.resolvers or {}
+local resolvers=definers.resolvers
function resolvers.file(specification)
- local name = resolvefile(specification.name) -- catch for renames
- local suffix = file.suffix(name)
- if fonts.formats[suffix] then
- specification.forced = suffix
- specification.name = file.removesuffix(name)
- else
- specification.name = name -- cna be resolved
- end
+ local name=resolvefile(specification.name)
+ local suffix=file.suffix(name)
+ if fonts.formats[suffix] then
+ specification.forced=suffix
+ specification.name=file.removesuffix(name)
+ else
+ specification.name=name
+ end
end
-
function resolvers.name(specification)
- local resolve = fonts.names.resolve
- if resolve then
- local resolved, sub = resolve(specification.name,specification.sub,specification) -- we pass specification for overloaded versions
- if resolved then
- specification.resolved = resolved
- specification.sub = sub
- local suffix = file.suffix(resolved)
- if fonts.formats[suffix] then
- specification.forced = suffix
- specification.name = file.removesuffix(resolved)
- else
- specification.name = resolved
- end
- end
- else
- resolvers.file(specification)
- end
+ local resolve=fonts.names.resolve
+ if resolve then
+ local resolved,sub=resolve(specification.name,specification.sub,specification)
+ if resolved then
+ specification.resolved=resolved
+ specification.sub=sub
+ local suffix=file.suffix(resolved)
+ if fonts.formats[suffix] then
+ specification.forced=suffix
+ specification.name=file.removesuffix(resolved)
+ else
+ specification.name=resolved
+ end
+ end
+ else
+ resolvers.file(specification)
+ end
end
-
function resolvers.spec(specification)
- local resolvespec = fonts.names.resolvespec
- if resolvespec then
- local resolved, sub = resolvespec(specification.name,specification.sub,specification) -- we pass specification for overloaded versions
- if resolved then
- specification.resolved = resolved
- specification.sub = sub
- specification.forced = file.suffix(resolved)
- specification.name = file.removesuffix(resolved)
- end
- else
- resolvers.name(specification)
- end
+ local resolvespec=fonts.names.resolvespec
+ if resolvespec then
+ local resolved,sub=resolvespec(specification.name,specification.sub,specification)
+ if resolved then
+ specification.resolved=resolved
+ specification.sub=sub
+ specification.forced=file.suffix(resolved)
+ specification.name=file.removesuffix(resolved)
+ end
+ else
+ resolvers.name(specification)
+ end
end
-
function definers.resolve(specification)
- if not specification.resolved or specification.resolved == "" then -- resolved itself not per se in mapping hash
- local r = resolvers[specification.lookup]
- if r then
- r(specification)
- end
- end
- if specification.forced == "" then
- specification.forced = nil
- else
- specification.forced = specification.forced
- end
- specification.hash = lower(specification.name .. ' @ ' .. constructors.hashfeatures(specification))
- if specification.sub and specification.sub ~= "" then
- specification.hash = specification.sub .. ' @ ' .. specification.hash
- end
- return specification
+ if not specification.resolved or specification.resolved=="" then
+ local r=resolvers[specification.lookup]
+ if r then
+ r(specification)
+ end
+ end
+ if specification.forced=="" then
+ specification.forced=nil
+ else
+ specification.forced=specification.forced
+ end
+ specification.hash=lower(specification.name..' @ '..constructors.hashfeatures(specification))
+ if specification.sub and specification.sub~="" then
+ specification.hash=specification.sub..' @ '..specification.hash
+ end
+ return specification
end
-
---[[ldx--
-<p>The main read function either uses a forced reader (as determined by
-a lookup) or tries to resolve the name using the list of readers.</p>
-
-<p>We need to cache when possible. We do cache raw tfm data (from <l
-n='tfm'/>, <l n='afm'/> or <l n='otf'/>). After that we can cache based
-on specificstion (name) and size, that is, <l n='tex'/> only needs a number
-for an already loaded fonts. However, it may make sense to cache fonts
-before they're scaled as well (store <l n='tfm'/>'s with applied methods
-and features). However, there may be a relation between the size and
-features (esp in virtual fonts) so let's not do that now.</p>
-
-<p>Watch out, here we do load a font, but we don't prepare the
-specification yet.</p>
---ldx]]--
-
--- very experimental:
-
function definers.applypostprocessors(tfmdata)
- local postprocessors = tfmdata.postprocessors
- if postprocessors then
- local properties = tfmdata.properties
- for i=1,#postprocessors do
- local extrahash = postprocessors[i](tfmdata) -- after scaling etc
- if type(extrahash) == "string" and extrahash ~= "" then
- -- e.g. a reencoding needs this
- extrahash = gsub(lower(extrahash),"[^a-z]","-")
- properties.fullname = format("%s-%s",properties.fullname,extrahash)
- end
- end
- end
- return tfmdata
+ local postprocessors=tfmdata.postprocessors
+ if postprocessors then
+ local properties=tfmdata.properties
+ for i=1,#postprocessors do
+ local extrahash=postprocessors[i](tfmdata)
+ if type(extrahash)=="string" and extrahash~="" then
+ extrahash=gsub(lower(extrahash),"[^a-z]","-")
+ properties.fullname=format("%s-%s",properties.fullname,extrahash)
+ end
+ end
+ end
+ return tfmdata
end
-
--- function definers.applypostprocessors(tfmdata)
--- return tfmdata
--- end
-
local function checkembedding(tfmdata)
- local properties = tfmdata.properties
- local embedding
- if directive_embedall then
- embedding = "full"
- elseif properties and properties.filename and constructors.dontembed[properties.filename] then
- embedding = "no"
- else
- embedding = "subset"
- end
- if properties then
- properties.embedding = embedding
- else
- tfmdata.properties = { embedding = embedding }
- end
- tfmdata.embedding = embedding
+ local properties=tfmdata.properties
+ local embedding
+ if directive_embedall then
+ embedding="full"
+ elseif properties and properties.filename and constructors.dontembed[properties.filename] then
+ embedding="no"
+ else
+ embedding="subset"
+ end
+ if properties then
+ properties.embedding=embedding
+ else
+ tfmdata.properties={ embedding=embedding }
+ end
+ tfmdata.embedding=embedding
end
-
function definers.loadfont(specification)
- local hash = constructors.hashinstance(specification)
- local tfmdata = loadedfonts[hash] -- hashes by size !
- if not tfmdata then
- local forced = specification.forced or ""
- if forced ~= "" then
- local reader = readers[lower(forced)]
- tfmdata = reader and reader(specification)
- if not tfmdata then
- report_defining("forced type %s of %s not found",forced,specification.name)
- end
- else
- local sequence = readers.sequence -- can be overloaded so only a shortcut here
- for s=1,#sequence do
- local reader = sequence[s]
- if readers[reader] then -- we skip not loaded readers
- if trace_defining then
- report_defining("trying (reader sequence driven) type %s for %s with file %s",reader,specification.name,specification.filename or "unknown")
- end
- tfmdata = readers[reader](specification)
- if tfmdata then
- break
- else
- specification.filename = nil
- end
- end
- end
- end
- if tfmdata then
- tfmdata = definers.applypostprocessors(tfmdata)
- checkembedding(tfmdata) -- todo: general postprocessor
- loadedfonts[hash] = tfmdata
- designsizes[specification.hash] = tfmdata.parameters.designsize
+ local hash=constructors.hashinstance(specification)
+ local tfmdata=loadedfonts[hash]
+ if not tfmdata then
+ local forced=specification.forced or ""
+ if forced~="" then
+ local reader=readers[lower(forced)]
+ tfmdata=reader and reader(specification)
+ if not tfmdata then
+ report_defining("forced type %s of %s not found",forced,specification.name)
+ end
+ else
+ local sequence=readers.sequence
+ for s=1,#sequence do
+ local reader=sequence[s]
+ if readers[reader] then
+ if trace_defining then
+ report_defining("trying (reader sequence driven) type %s for %s with file %s",reader,specification.name,specification.filename or "unknown")
+ end
+ tfmdata=readers[reader](specification)
+ if tfmdata then
+ break
+ else
+ specification.filename=nil
+ end
end
+ end
end
- if not tfmdata then
- report_defining("font with asked name '%s' is not found using lookup '%s'",specification.name,specification.lookup)
- end
- return tfmdata
-end
-
---[[ldx--
-<p>For virtual fonts we need a slightly different approach:</p>
---ldx]]--
-
-function constructors.readanddefine(name,size) -- no id -- maybe a dummy first
- local specification = definers.analyze(name,size)
- local method = specification.method
- if method and variants[method] then
- specification = variants[method](specification)
- end
- specification = definers.resolve(specification)
- local hash = constructors.hashinstance(specification)
- local id = definers.registered(hash)
- if not id then
- local tfmdata = definers.loadfont(specification)
- if tfmdata then
- tfmdata.properties.hash = hash
- id = font.define(tfmdata)
- definers.register(tfmdata,id)
- else
- id = 0 -- signal
- end
+ if tfmdata then
+ tfmdata=definers.applypostprocessors(tfmdata)
+ checkembedding(tfmdata)
+ loadedfonts[hash]=tfmdata
+ designsizes[specification.hash]=tfmdata.parameters.designsize
+ end
+ end
+ if not tfmdata then
+ report_defining("font with asked name '%s' is not found using lookup '%s'",specification.name,specification.lookup)
+ end
+ return tfmdata
+end
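-- For illustration: loadfont() either uses a forced reader or walks
-- readers.sequence ('otf','ttf','afm','tfm','lua') until one reader returns a
-- font. A hypothetical extra format could hook in like this; "myfmt" and its
-- reader are made up, and returning false hands over to the next reader:
fonts.formats.myfmt = "myfmt"
fonts.readers.myfmt = function(specification)
  return false
end
table.insert(fonts.readers.sequence, "myfmt")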
+function constructors.readanddefine(name,size)
+ local specification=definers.analyze(name,size)
+ local method=specification.method
+ if method and variants[method] then
+ specification=variants[method](specification)
+ end
+ specification=definers.resolve(specification)
+ local hash=constructors.hashinstance(specification)
+ local id=definers.registered(hash)
+ if not id then
+ local tfmdata=definers.loadfont(specification)
+ if tfmdata then
+ tfmdata.properties.hash=hash
+ id=font.define(tfmdata)
+ definers.register(tfmdata,id)
+ else
+ id=0
end
- return fontdata[id], id
+ end
+ return fontdata[id],id
end
-
---[[ldx--
-<p>So far the specifiers. Now comes the real definer. Here we cache
-based on id's. Here we also intercept the virtual font handler. Since
-it evolved stepwise I may rewrite this bit (combine code).</p>
-
-In the previously defined reader (the one resulting in a <l n='tfm'/>
-table) we cached the (scaled) instances. Here we cache them again, but
-this time based on id. We could combine this in one cache but this does
-not gain much. By the way, passing id's back to in the callback was
-introduced later in the development.</p>
---ldx]]--
-
-local lastdefined = nil -- we don't want this one to end up in s-tra-02
-local internalized = { }
-
-function definers.current() -- or maybe current
- return lastdefined
+local lastdefined=nil
+local internalized={}
+function definers.current()
+ return lastdefined
end
-
function definers.registered(hash)
- local id = internalized[hash]
- return id, id and fontdata[id]
+ local id=internalized[hash]
+ return id,id and fontdata[id]
end
-
function definers.register(tfmdata,id)
- if tfmdata and id then
- local hash = tfmdata.properties.hash
- if not internalized[hash] then
- internalized[hash] = id
- if trace_defining then
- report_defining("registering font, id: %s, hash: %s",id or "?",hash or "?")
- end
- fontdata[id] = tfmdata
- end
+ if tfmdata and id then
+ local hash=tfmdata.properties.hash
+ if not internalized[hash] then
+ internalized[hash]=id
+ if trace_defining then
+ report_defining("registering font, id: %s, hash: %s",id or "?",hash or "?")
+ end
+ fontdata[id]=tfmdata
+ end
+ end
+end
+function definers.read(specification,size,id)
+ statistics.starttiming(fonts)
+ if type(specification)=="string" then
+ specification=definers.analyze(specification,size)
+ end
+ local method=specification.method
+ if method and variants[method] then
+ specification=variants[method](specification)
+ end
+ specification=definers.resolve(specification)
+ local hash=constructors.hashinstance(specification)
+ local tfmdata=definers.registered(hash)
+ if tfmdata then
+ if trace_defining then
+ report_defining("already hashed: %s",hash)
end
-end
-
-function definers.read(specification,size,id) -- id can be optional, name can already be table
- statistics.starttiming(fonts)
- if type(specification) == "string" then
- specification = definers.analyze(specification,size)
- end
- local method = specification.method
- if method and variants[method] then
- specification = variants[method](specification)
- end
- specification = definers.resolve(specification)
- local hash = constructors.hashinstance(specification)
- local tfmdata = definers.registered(hash) -- id
+ else
+ tfmdata=definers.loadfont(specification)
if tfmdata then
- if trace_defining then
- report_defining("already hashed: %s",hash)
- end
+ if trace_defining then
+ report_defining("loaded and hashed: %s",hash)
+ end
+ tfmdata.properties.hash=hash
+ if id then
+ definers.register(tfmdata,id)
+ end
else
- tfmdata = definers.loadfont(specification) -- can be overloaded
- if tfmdata then
- if trace_defining then
- report_defining("loaded and hashed: %s",hash)
- end
- --~ constructors.checkvirtualid(tfmdata) -- interferes
- tfmdata.properties.hash = hash
- if id then
- definers.register(tfmdata,id)
- end
- else
- if trace_defining then
- report_defining("not loaded and hashed: %s",hash)
- end
- end
- end
- lastdefined = tfmdata or id -- todo ! ! ! ! !
- if not tfmdata then -- or id?
- report_defining( "unknown font %s, loading aborted",specification.name)
- elseif trace_defining and type(tfmdata) == "table" then
- local properties = tfmdata.properties or { }
- local parameters = tfmdata.parameters or { }
- report_defining("using %s font with id %s, name:%s size:%s bytes:%s encoding:%s fullname:%s filename:%s",
- properties.format or "unknown",
- id or "?",
- properties.name or "?",
- parameters.size or "default",
- properties.encodingbytes or "?",
- properties.encodingname or "unicode",
- properties.fullname or "?",
- file.basename(properties.filename or "?"))
- end
- statistics.stoptiming(fonts)
- return tfmdata
-end
-
---[[ldx--
-<p>We overload the <l n='tfm'/> reader.</p>
---ldx]]--
-
-callbacks.register('define_font', definers.read, "definition of fonts (tfmdata preparation)")
+ if trace_defining then
+ report_defining("not loaded and hashed: %s",hash)
+ end
+ end
+ end
+ lastdefined=tfmdata or id
+ if not tfmdata then
+ report_defining("unknown font %s, loading aborted",specification.name)
+ elseif trace_defining and type(tfmdata)=="table" then
+ local properties=tfmdata.properties or {}
+ local parameters=tfmdata.parameters or {}
+ report_defining("using %s font with id %s, name:%s size:%s bytes:%s encoding:%s fullname:%s filename:%s",
+ properties.format or "unknown",
+ id or "?",
+ properties.name or "?",
+ parameters.size or "default",
+ properties.encodingbytes or "?",
+ properties.encodingname or "unicode",
+ properties.fullname or "?",
+ file.basename(properties.filename or "?"))
+ end
+ statistics.stoptiming(fonts)
+ return tfmdata
+end
+callbacks.register('define_font',definers.read,"definition of fonts (tfmdata preparation)")
end -- closure
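-- For illustration: with definers.read acting as the define_font callback, a
-- plain TeX definition such as
--
--   \font\test=file:lmroman10-regular:+liga at 10pt
--
-- reaches Lua roughly as definers.read("file:lmroman10-regular:+liga", 655360, <id>);
-- the colon part is parsed by the generic code in the next closure. The font name
-- is just an example, and the same entry point can be called directly once the
-- loader is fully set up:
local tfmdata = fonts.definers.read("file:lmroman10-regular", 10*65536)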
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['luatex-font-def'] = {
- version = 1.001,
- comment = "companion to luatex-*.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['luatex-font-def']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
end
-
-local fonts = fonts
-
--- A bit of tuning for definitions.
-
-fonts.constructors.namemode = "specification" -- somehow latex needs this (changed name!) => will change into an overload
-
--- tricky: we sort of bypass the parser and directly feed all into
--- the sub parser
-
+local fonts=fonts
+fonts.constructors.namemode="specification"
function fonts.definers.getspecification(str)
- return "", str, "", ":", str
+ return "",str,"",":",str
+end
+local list={}
+local function issome () list.lookup='name' end
+local function isfile () list.lookup='file' end
+local function isname () list.lookup='name' end
+local function thename(s) list.name=s end
+local function issub (v) list.sub=v end
+local function iscrap (s) list.crap=string.lower(s) end
+local function iskey (k,v) list[k]=v end
+local function istrue (s) list[s]=true end
+local function isfalse(s) list[s]=false end
+local P,S,R,C=lpeg.P,lpeg.S,lpeg.R,lpeg.C
+local spaces=P(" ")^0
+local namespec=(1-S("/:("))^0
+local crapspec=spaces*P("/")*(((1-P(":"))^0)/iscrap)*spaces
+local filename_1=P("file:")/isfile*(namespec/thename)
+local filename_2=P("[")*P(true)/isname*(((1-P("]"))^0)/thename)*P("]")
+local fontname_1=P("name:")/isname*(namespec/thename)
+local fontname_2=P(true)/issome*(namespec/thename)
+local sometext=(R("az","AZ","09")+S("+-."))^1
+local truevalue=P("+")*spaces*(sometext/istrue)
+local falsevalue=P("-")*spaces*(sometext/isfalse)
+local keyvalue=(C(sometext)*spaces*P("=")*spaces*C(sometext))/iskey
+local somevalue=sometext/istrue
+local subvalue=P("(")*(C(P(1-S("()"))^1)/issub)*P(")")
+local option=spaces*(keyvalue+falsevalue+truevalue+somevalue)*spaces
+local options=P(":")*spaces*(P(";")^0*option)^0
+local pattern=(filename_1+filename_2+fontname_1+fontname_2)*subvalue^0*crapspec^0*options^0
+local function colonized(specification)
+ list={}
+ lpeg.match(pattern,specification.specification)
+ list.crap=nil
+ if list.name then
+ specification.name=list.name
+ list.name=nil
+ end
+ if list.lookup then
+ specification.lookup=list.lookup
+ list.lookup=nil
+ end
+ if list.sub then
+ specification.sub=list.sub
+ list.sub=nil
+ end
+ specification.features.normal=fonts.handlers.otf.features.normalize(list)
+ return specification
end
-
--- the generic name parser (different from context!)
-
-local list = { }
-
-local function issome () list.lookup = 'name' end -- xetex mode prefers name (not in context!)
-local function isfile () list.lookup = 'file' end
-local function isname () list.lookup = 'name' end
-local function thename(s) list.name = s end
-local function issub (v) list.sub = v end
-local function iscrap (s) list.crap = string.lower(s) end
-local function iskey (k,v) list[k] = v end
-local function istrue (s) list[s] = true end
-local function isfalse(s) list[s] = false end
-
-local P, S, R, C = lpeg.P, lpeg.S, lpeg.R, lpeg.C
-
-local spaces = P(" ")^0
-local namespec = (1-S("/:("))^0 -- was: (1-S("/: ("))^0
-local crapspec = spaces * P("/") * (((1-P(":"))^0)/iscrap) * spaces
-local filename_1 = P("file:")/isfile * (namespec/thename)
-local filename_2 = P("[") * P(true)/isname * (((1-P("]"))^0)/thename) * P("]")
-local fontname_1 = P("name:")/isname * (namespec/thename)
-local fontname_2 = P(true)/issome * (namespec/thename)
-local sometext = (R("az","AZ","09") + S("+-."))^1
-local truevalue = P("+") * spaces * (sometext/istrue)
-local falsevalue = P("-") * spaces * (sometext/isfalse)
-local keyvalue = (C(sometext) * spaces * P("=") * spaces * C(sometext))/iskey
-local somevalue = sometext/istrue
-local subvalue = P("(") * (C(P(1-S("()"))^1)/issub) * P(")") -- for Kim
-local option = spaces * (keyvalue + falsevalue + truevalue + somevalue) * spaces
-local options = P(":") * spaces * (P(";")^0 * option)^0
-
-local pattern = (filename_1 + filename_2 + fontname_1 + fontname_2) * subvalue^0 * crapspec^0 * options^0
-
-local function colonized(specification) -- xetex mode
- list = { }
- lpeg.match(pattern,specification.specification)
- list.crap = nil -- style not supported, maybe some day
- if list.name then
- specification.name = list.name
- list.name = nil
- end
- if list.lookup then
- specification.lookup = list.lookup
- list.lookup = nil
- end
- if list.sub then
- specification.sub = list.sub
- list.sub = nil
- end
- specification.features.normal = fonts.handlers.otf.features.normalize(list)
- return specification
-end
-
fonts.definers.registersplit(":",colonized,"cryptic")
-fonts.definers.registersplit("", colonized,"more cryptic") -- catches \font\text=[names]
-
+fonts.definers.registersplit("",colonized,"more cryptic")
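-- For illustration of the colon syntax parsed by colonized() above (the font name
-- and feature values are made up): a request such as
--
--   file:texgyrepagella-regular:+liga;-tlig;mode=node;script=latn
--
-- ends up roughly as
--
--   specification.lookup = "file"
--   specification.name   = "texgyrepagella-regular"
--   specification.features.normal = { liga=true, tlig=false, mode="node", script="latn" }
--
-- after fonts.handlers.otf.features.normalize() has seen the collected list.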
function fonts.definers.applypostprocessors(tfmdata)
- local postprocessors = tfmdata.postprocessors
- if postprocessors then
- for i=1,#postprocessors do
- local extrahash = postprocessors[i](tfmdata) -- after scaling etc
- if type(extrahash) == "string" and extrahash ~= "" then
- -- e.g. a reencoding needs this
- extrahash = string.gsub(lower(extrahash),"[^a-z]","-")
- tfmdata.properties.fullname = format("%s-%s",tfmdata.properties.fullname,extrahash)
- end
- end
+ local postprocessors=tfmdata.postprocessors
+ if postprocessors then
+ for i=1,#postprocessors do
+ local extrahash=postprocessors[i](tfmdata)
+ if type(extrahash)=="string" and extrahash~="" then
+ extrahash=string.gsub(lower(extrahash),"[^a-z]","-")
+ tfmdata.properties.fullname=format("%s-%s",tfmdata.properties.fullname,extrahash)
+ end
end
- return tfmdata
+ end
+ return tfmdata
end
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['luatex-fonts-ext'] = {
- version = 1.001,
- comment = "companion to luatex-*.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['luatex-fonts-ext']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
end
-
-local fonts = fonts
-local otffeatures = fonts.constructors.newfeatures("otf")
-
--- A few generic extensions.
-
+local fonts=fonts
+local otffeatures=fonts.constructors.newfeatures("otf")
local function initializeitlc(tfmdata,value)
- if value then
- -- the magic 40 and it formula come from Dohyun Kim but we might need another guess
- local parameters = tfmdata.parameters
- local italicangle = parameters.italicangle
- if italicangle and italicangle ~= 0 then
- local properties = tfmdata.properties
- local factor = tonumber(value) or 1
- properties.hasitalics = true
- properties.autoitalicamount = factor * (parameters.uwidth or 40)/2
- end
+ if value then
+ local parameters=tfmdata.parameters
+ local italicangle=parameters.italicangle
+ if italicangle and italicangle~=0 then
+ local properties=tfmdata.properties
+ local factor=tonumber(value) or 1
+ properties.hasitalics=true
+ properties.autoitalicamount=factor*(parameters.uwidth or 40)/2
end
+ end
end
-
otffeatures.register {
- name = "itlc",
- description = "italic correction",
- initializers = {
- base = initializeitlc,
- node = initializeitlc,
- }
+ name="itlc",
+ description="italic correction",
+ initializers={
+ base=initializeitlc,
+ node=initializeitlc,
+ }
}
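-- For illustration: "itlc" fakes an italic correction from the italic angle by
-- setting autoitalicamount = factor * (parameters.uwidth or 40)/2. It is requested
-- per font in the feature list; the font name below is made up:
--
--   \font\demoit=file:lmroman10-italic:+itlc at 10pt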
-
--- slant and extend
-
local function initializeslant(tfmdata,value)
- value = tonumber(value)
- if not value then
- value = 0
- elseif value > 1 then
- value = 1
- elseif value < -1 then
- value = -1
- end
- tfmdata.parameters.slantfactor = value
+ value=tonumber(value)
+ if not value then
+ value=0
+ elseif value>1 then
+ value=1
+ elseif value<-1 then
+ value=-1
+ end
+ tfmdata.parameters.slantfactor=value
end
-
otffeatures.register {
- name = "slant",
- description = "slant glyphs",
- initializers = {
- base = initializeslant,
- node = initializeslant,
- }
+ name="slant",
+ description="slant glyphs",
+ initializers={
+ base=initializeslant,
+ node=initializeslant,
+ }
}
-
local function initializeextend(tfmdata,value)
- value = tonumber(value)
- if not value then
- value = 0
- elseif value > 10 then
- value = 10
- elseif value < -10 then
- value = -10
- end
- tfmdata.parameters.extendfactor = value
+ value=tonumber(value)
+ if not value then
+ value=0
+ elseif value>10 then
+ value=10
+ elseif value<-10 then
+ value=-10
+ end
+ tfmdata.parameters.extendfactor=value
end
-
otffeatures.register {
- name = "extend",
- description = "scale glyphs horizontally",
- initializers = {
- base = initializeextend,
- node = initializeextend,
- }
+ name="extend",
+ description="scale glyphs horizontally",
+ initializers={
+ base=initializeextend,
+ node=initializeextend,
+ }
}
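-- For illustration: slant and extend are plain multipliers handed to the backend;
-- the initializers above clamp them to [-1,1] and [-10,10] respectively. A made-up
-- request for a slanted, slightly widened instance:
--
--   \font\demoslant=file:lmroman10-regular:slant=0.2;extend=1.05 at 10pt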
-
--- expansion and protrusion
-
-fonts.protrusions = fonts.protrusions or { }
-fonts.protrusions.setups = fonts.protrusions.setups or { }
-
-local setups = fonts.protrusions.setups
-
+fonts.protrusions=fonts.protrusions or {}
+fonts.protrusions.setups=fonts.protrusions.setups or {}
+local setups=fonts.protrusions.setups
local function initializeprotrusion(tfmdata,value)
- if value then
- local setup = setups[value]
- if setup then
- local factor, left, right = setup.factor or 1, setup.left or 1, setup.right or 1
- local emwidth = tfmdata.parameters.quad
- tfmdata.parameters.protrusion = {
- auto = true,
- }
- for i, chr in next, tfmdata.characters do
- local v, pl, pr = setup[i], nil, nil
- if v then
- pl, pr = v[1], v[2]
- end
- if pl and pl ~= 0 then chr.left_protruding = left *pl*factor end
- if pr and pr ~= 0 then chr.right_protruding = right*pr*factor end
- end
+ if value then
+ local setup=setups[value]
+ if setup then
+ local factor,left,right=setup.factor or 1,setup.left or 1,setup.right or 1
+ local emwidth=tfmdata.parameters.quad
+ tfmdata.parameters.protrusion={
+ auto=true,
+ }
+ for i,chr in next,tfmdata.characters do
+ local v,pl,pr=setup[i],nil,nil
+ if v then
+ pl,pr=v[1],v[2]
end
+ if pl and pl~=0 then chr.left_protruding=left*pl*factor end
+ if pr and pr~=0 then chr.right_protruding=right*pr*factor end
+ end
end
+ end
end
-
otffeatures.register {
- name = "protrusion",
- description = "shift characters into the left and or right margin",
- initializers = {
- base = initializeprotrusion,
- node = initializeprotrusion,
- }
+ name="protrusion",
+ description="shift characters into the left and or right margin",
+ initializers={
+ base=initializeprotrusion,
+ node=initializeprotrusion,
+ }
}
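-- For illustration: protrusion setups are plain tables indexed by unicode with
-- { left, right } pairs, scaled by factor/left/right; a made-up custom vector:
fonts.protrusions.setups["demo"] = {
  factor = 1, left = 1, right = 1,
  [0x002D] = { 0, 1   }, -- hyphen fully into the right margin
  [0x002E] = { 0, 0.7 }, -- period, a bit less
}
-- requested with protrusion=demo in the feature list (combined with the engine's
-- \protrudechars setting).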
-
-fonts.expansions = fonts.expansions or { }
-fonts.expansions.setups = fonts.expansions.setups or { }
-
-local setups = fonts.expansions.setups
-
+fonts.expansions=fonts.expansions or {}
+fonts.expansions.setups=fonts.expansions.setups or {}
+local setups=fonts.expansions.setups
local function initializeexpansion(tfmdata,value)
- if value then
- local setup = setups[value]
- if setup then
- local factor = setup.factor or 1
- tfmdata.parameters.expansion = {
- stretch = 10 * (setup.stretch or 0),
- shrink = 10 * (setup.shrink or 0),
- step = 10 * (setup.step or 0),
- auto = true,
- }
- for i, chr in next, tfmdata.characters do
- local v = setup[i]
- if v and v ~= 0 then
- chr.expansion_factor = v*factor
- else -- can be option
- chr.expansion_factor = factor
- end
- end
- end
- end
+ if value then
+ local setup=setups[value]
+ if setup then
+ local factor=setup.factor or 1
+ tfmdata.parameters.expansion={
+ stretch=10*(setup.stretch or 0),
+ shrink=10*(setup.shrink or 0),
+ step=10*(setup.step or 0),
+ auto=true,
+ }
+ for i,chr in next,tfmdata.characters do
+ local v=setup[i]
+ if v and v~=0 then
+ chr.expansion_factor=v*factor
+ else
+ chr.expansion_factor=factor
+ end
+ end
+ end
+ end
end
-
otffeatures.register {
- name = "expansion",
- description = "apply hz optimization",
- initializers = {
- base = initializeexpansion,
- node = initializeexpansion,
- }
+ name="expansion",
+ description="apply hz optimization",
+ initializers={
+ base=initializeexpansion,
+ node=initializeexpansion,
+ }
}
-
--- left over
-
function fonts.loggers.onetimemessage() end
-
--- example vectors
-
-local byte = string.byte
-
-fonts.expansions.setups['default'] = {
-
- stretch = 2, shrink = 2, step = .5, factor = 1,
-
- [byte('A')] = 0.5, [byte('B')] = 0.7, [byte('C')] = 0.7, [byte('D')] = 0.5, [byte('E')] = 0.7,
- [byte('F')] = 0.7, [byte('G')] = 0.5, [byte('H')] = 0.7, [byte('K')] = 0.7, [byte('M')] = 0.7,
- [byte('N')] = 0.7, [byte('O')] = 0.5, [byte('P')] = 0.7, [byte('Q')] = 0.5, [byte('R')] = 0.7,
- [byte('S')] = 0.7, [byte('U')] = 0.7, [byte('W')] = 0.7, [byte('Z')] = 0.7,
- [byte('a')] = 0.7, [byte('b')] = 0.7, [byte('c')] = 0.7, [byte('d')] = 0.7, [byte('e')] = 0.7,
- [byte('g')] = 0.7, [byte('h')] = 0.7, [byte('k')] = 0.7, [byte('m')] = 0.7, [byte('n')] = 0.7,
- [byte('o')] = 0.7, [byte('p')] = 0.7, [byte('q')] = 0.7, [byte('s')] = 0.7, [byte('u')] = 0.7,
- [byte('w')] = 0.7, [byte('z')] = 0.7,
- [byte('2')] = 0.7, [byte('3')] = 0.7, [byte('6')] = 0.7, [byte('8')] = 0.7, [byte('9')] = 0.7,
+local byte=string.byte
+fonts.expansions.setups['default']={
+ stretch=2,shrink=2,step=.5,factor=1,
+ [byte('A')]=0.5,[byte('B')]=0.7,[byte('C')]=0.7,[byte('D')]=0.5,[byte('E')]=0.7,
+ [byte('F')]=0.7,[byte('G')]=0.5,[byte('H')]=0.7,[byte('K')]=0.7,[byte('M')]=0.7,
+ [byte('N')]=0.7,[byte('O')]=0.5,[byte('P')]=0.7,[byte('Q')]=0.5,[byte('R')]=0.7,
+ [byte('S')]=0.7,[byte('U')]=0.7,[byte('W')]=0.7,[byte('Z')]=0.7,
+ [byte('a')]=0.7,[byte('b')]=0.7,[byte('c')]=0.7,[byte('d')]=0.7,[byte('e')]=0.7,
+ [byte('g')]=0.7,[byte('h')]=0.7,[byte('k')]=0.7,[byte('m')]=0.7,[byte('n')]=0.7,
+ [byte('o')]=0.7,[byte('p')]=0.7,[byte('q')]=0.7,[byte('s')]=0.7,[byte('u')]=0.7,
+ [byte('w')]=0.7,[byte('z')]=0.7,
+ [byte('2')]=0.7,[byte('3')]=0.7,[byte('6')]=0.7,[byte('8')]=0.7,[byte('9')]=0.7,
}
-
-fonts.protrusions.setups['default'] = {
-
- factor = 1, left = 1, right = 1,
-
- [0x002C] = { 0, 1 }, -- comma
- [0x002E] = { 0, 1 }, -- period
- [0x003A] = { 0, 1 }, -- colon
- [0x003B] = { 0, 1 }, -- semicolon
- [0x002D] = { 0, 1 }, -- hyphen
- [0x2013] = { 0, 0.50 }, -- endash
- [0x2014] = { 0, 0.33 }, -- emdash
- [0x3001] = { 0, 1 }, -- ideographic comma 、
- [0x3002] = { 0, 1 }, -- ideographic full stop 。
- [0x060C] = { 0, 1 }, -- arabic comma ،
- [0x061B] = { 0, 1 }, -- arabic semicolon ؛
- [0x06D4] = { 0, 1 }, -- arabic full stop ۔
-
+fonts.protrusions.setups['default']={
+ factor=1,left=1,right=1,
+ [0x002C]={ 0,1 },
+ [0x002E]={ 0,1 },
+ [0x003A]={ 0,1 },
+ [0x003B]={ 0,1 },
+ [0x002D]={ 0,1 },
+ [0x2013]={ 0,0.50 },
+ [0x2014]={ 0,0.33 },
+ [0x3001]={ 0,1 },
+ [0x3002]={ 0,1 },
+ [0x060C]={ 0,1 },
+ [0x061B]={ 0,1 },
+ [0x06D4]={ 0,1 },
}
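-- Similarly, a made-up expansion (hz) vector; the per-character numbers are
-- sensitivity factors and stretch/shrink/step are multiplied by 10 by
-- initializeexpansion before they reach the font:
fonts.expansions.setups["demo"] = {
  stretch = 2, shrink = 2, step = .5, factor = 1,
  [string.byte("o")] = 0.8,
  [string.byte("m")] = 0.8,
}
-- requested with expansion=demo in the feature list (combined with the engine's
-- \adjustspacing setting).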
-
--- normalizer
-
-fonts.handlers.otf.features.normalize = function(t)
- if t.rand then
- t.rand = "random"
- end
- return t
+fonts.handlers.otf.features.normalize=function(t)
+ if t.rand then
+ t.rand="random"
+ end
+ return t
end
-
--- bonus
-
function fonts.helpers.nametoslot(name)
- local t = type(name)
- if t == "string" then
- local tfmdata = fonts.hashes.identifiers[currentfont()]
- local shared = tfmdata and tfmdata.shared
- local fntdata = shared and shared.rawdata
- return fntdata and fntdata.resources.unicodes[name]
- elseif t == "number" then
- return n
- end
+ local t=type(name)
+ if t=="string" then
+ local tfmdata=fonts.hashes.identifiers[currentfont()]
+ local shared=tfmdata and tfmdata.shared
+ local fntdata=shared and shared.rawdata
+ return fntdata and fntdata.resources.unicodes[name]
+ elseif t=="number" then
+ return name
+ end
end
-
--- \font\test=file:somefont:reencode=mymessup
---
--- fonts.encodings.reencodings.mymessup = {
--- [109] = 110, -- m
--- [110] = 109, -- n
--- }
-
-fonts.encodings = fonts.encodings or { }
-local reencodings = { }
-fonts.encodings.reencodings = reencodings
-
+fonts.encodings=fonts.encodings or {}
+local reencodings={}
+fonts.encodings.reencodings=reencodings
local function specialreencode(tfmdata,value)
- -- we forget about kerns as we assume symbols and we
- -- could issue a message if ther are kerns but it's
- -- a hack anyway so we odn't care too much here
- local encoding = value and reencodings[value]
- if encoding then
- local temp = { }
- local char = tfmdata.characters
- for k, v in next, encoding do
- temp[k] = char[v]
- end
- for k, v in next, temp do
- char[k] = temp[k]
- end
- -- if we use the font otherwise luatex gets confused so
- -- we return an additional hash component for fullname
- return string.format("reencoded:%s",value)
+ local encoding=value and reencodings[value]
+ if encoding then
+ local temp={}
+ local char=tfmdata.characters
+ for k,v in next,encoding do
+ temp[k]=char[v]
+ end
+ for k,v in next,temp do
+ char[k]=temp[k]
end
+ return string.format("reencoded:%s",value)
+ end
end
-
local function reencode(tfmdata,value)
- tfmdata.postprocessors = tfmdata.postprocessors or { }
- table.insert(tfmdata.postprocessors,
- function(tfmdata)
- return specialreencode(tfmdata,value)
- end
- )
+ tfmdata.postprocessors=tfmdata.postprocessors or {}
+ table.insert(tfmdata.postprocessors,
+ function(tfmdata)
+ return specialreencode(tfmdata,value)
+ end
+ )
end
-
otffeatures.register {
- name = "reencode",
- description = "reencode characters",
- manipulators = {
- base = reencode,
- node = reencode,
- }
+ name="reencode",
+ description="reencode characters",
+ manipulators={
+ base=reencode,
+ node=reencode,
+ }
}
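-- For illustration (the font and table names are placeholders): a reencoding swaps
-- character slots via a postprocessor, and the returned string extends the
-- fullname hash so the reencoded instance stays distinct:
fonts.encodings.reencodings.mymessup = {
  [109] = 110, -- m
  [110] = 109, -- n
}
-- \font\test=file:somefont:reencode=mymessup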
end -- closure
do -- begin closure to overcome local limits and interference
-if not modules then modules = { } end modules ['luatex-fonts-cbk'] = {
- version = 1.001,
- comment = "companion to luatex-*.tex",
- author = "Hans Hagen, PRAGMA-ADE, Hasselt NL",
- copyright = "PRAGMA ADE / ConTeXt Development Team",
- license = "see context related readme files"
+if not modules then modules={} end modules ['luatex-fonts-cbk']={
+ version=1.001,
+ comment="companion to luatex-*.tex",
+ author="Hans Hagen, PRAGMA-ADE, Hasselt NL",
+ copyright="PRAGMA ADE / ConTeXt Development Team",
+ license="see context related readme files"
}
-
if context then
- texio.write_nl("fatal error: this module is not for context")
- os.exit()
+ texio.write_nl("fatal error: this module is not for context")
+ os.exit()
end
-
-local fonts = fonts
-local nodes = nodes
-
--- Fonts: (might move to node-gef.lua)
-
-local traverse_id = node.traverse_id
-local glyph_code = nodes.nodecodes.glyph
-
+local fonts=fonts
+local nodes=nodes
+local traverse_id=node.traverse_id
+local glyph_code=nodes.nodecodes.glyph
function nodes.handlers.characters(head)
- local fontdata = fonts.hashes.identifiers
- if fontdata then
- local usedfonts, done, prevfont = { }, false, nil
- for n in traverse_id(glyph_code,head) do
- local font = n.font
- if font ~= prevfont then
- prevfont = font
- local used = usedfonts[font]
- if not used then
- local tfmdata = fontdata[font] --
- if tfmdata then
- local shared = tfmdata.shared -- we need to check shared, only when same features
- if shared then
- local processors = shared.processes
- if processors and #processors > 0 then
- usedfonts[font] = processors
- done = true
- end
- end
- end
- end
- end
- end
- if done then
- for font, processors in next, usedfonts do
- for i=1,#processors do
- local h, d = processors[i](head,font,0)
- head, done = h or head, done or d
- end
- end
- end
- return head, true
- else
- return head, false
- end
+ local fontdata=fonts.hashes.identifiers
+ if fontdata then
+ local usedfonts,done,prevfont={},false,nil
+ for n in traverse_id(glyph_code,head) do
+ local font=n.font
+ if font~=prevfont then
+ prevfont=font
+ local used=usedfonts[font]
+ if not used then
+ local tfmdata=fontdata[font]
+ if tfmdata then
+ local shared=tfmdata.shared
+ if shared then
+ local processors=shared.processes
+ if processors and #processors>0 then
+ usedfonts[font]=processors
+ done=true
+ end
+ end
+ end
+ end
+ end
+ end
+ if done then
+ for font,processors in next,usedfonts do
+ for i=1,#processors do
+ local h,d=processors[i](head,font,0)
+ head,done=h or head,done or d
+ end
+ end
+ end
+ return head,true
+ else
+ return head,false
+ end
end
-
function nodes.simple_font_handler(head)
--- lang.hyphenate(head)
- head = nodes.handlers.characters(head)
- nodes.injections.handler(head)
- nodes.handlers.protectglyphs(head)
- head = node.ligaturing(head)
- head = node.kerning(head)
- return head
+ head=nodes.handlers.characters(head)
+ nodes.injections.handler(head)
+ nodes.handlers.protectglyphs(head)
+ head=node.ligaturing(head)
+ head=node.kerning(head)
+ return head
end
end -- closure
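-- For illustration: in the plain/generic setup these handlers end up in the usual
-- callbacks (luatex-fonts.lua wires this up via generic_context further down),
-- roughly equivalent to:
--
--   callback.register("pre_linebreak_filter", nodes.simple_font_handler)
--   callback.register("hpack_filter",         nodes.simple_font_handler)
--   callback.register("define_font",          fonts.definers.read)
--
-- so each list gets the per-font processors, the injections, glyph protection and
-- the engine's ligaturing/kerning fallback, in that order.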
diff --git a/tex/generic/context/luatex/luatex-fonts.lua b/tex/generic/context/luatex/luatex-fonts.lua
index 6b502cd24..f0ff3e12d 100644
--- a/tex/generic/context/luatex/luatex-fonts.lua
+++ b/tex/generic/context/luatex/luatex-fonts.lua
@@ -6,21 +6,16 @@ if not modules then modules = { } end modules ['luatex-fonts'] = {
license = "see context related readme files"
}
--- The following code isolates the generic ConTeXt code from already
--- defined or to be defined namespaces. This is the reference loader
--- for plain, but the generic code is also used in luaotfload (which
--- is is a file meant for latex) and that is maintained by Khaled
+-- The following code isolates the generic context code from already defined or to be defined
+-- namespaces. This is the reference loader for plain, but the generic code is also used in
+-- luaotfload (which is a file meant for latex) and that used to be maintained by Khaled
-- Hosny. We do our best to keep the interface as clean as possible.
--
--- The code base is rather stable now, especially if you stay away from
--- the non generic code. All relevant data is organized in tables within
--- the main table of a font instance. There are a few places where in
--- context other code is plugged in, but this does not affect the core
--- code. Users can (given that their macro package provides this option)
--- access the font data (characters, descriptions, properties, parameters,
--- etc) of this main table.
---
--- Todo: all global namespaces in called modules will get local shortcuts.
+-- The code base is rather stable now, especially if you stay away from the non generic code. All
+-- relevant data is organized in tables within the main table of a font instance. There are a few
+-- places where in context other code is plugged in, but this does not affect the core code. Users
+-- can (given that their macro package provides this option) access the font data (characters,
+-- descriptions, properties, parameters, etc) of this main table.
utf = utf or unicode.utf8
@@ -67,20 +62,21 @@ end
local whatever = generic_context.push_namespaces()
--- We keep track of load time by storing the current time. That
--- way we cannot be accused of slowing down loading too much.
+-- We keep track of load time by storing the current time. That way we cannot be accused
+-- of slowing down loading too much. Anyhow, there is no reason for this library to perform
+-- slower in any other package than it does in context.
--
--- Please don't update to this version without proper testing. It
--- might be that this version lags behind stock context and the only
--- formal release takes place around tex live code freeze.
+-- Please don't update to this version without proper testing. It might be that this version
+-- lags behind stock context and the only formal release takes place around tex live code
+-- freeze.
local starttime = os.gettimeofday()
--- As we don't use the ConTeXt file searching, we need to
--- initialize the kpse library. As the progname can be anything
--- we will temporary switch to the ConTeXt namespace if needed.
--- Just adding the context paths to the path specification is
--- somewhat faster
+-- As we don't use the context file searching, we need to initialize the kpse library. As the
+-- progname can be anything, we will temporarily switch to the context namespace if needed. Just
+-- adding the context paths to the path specification is somewhat faster.
+--
+-- Now, with lua 5.2 being used we might create a special ENV for this.
-- kpse.set_program_name("luatex")
@@ -128,43 +124,51 @@ if fonts then
else
- -- The following helpers are a bit overkill but I don't want to
- -- mess up ConTeXt code for the sake of general generality. Around
- -- version 1.0 there will be an official api defined.
-
- loadmodule('l-lpeg.lua')
- loadmodule('l-function.lua')
- loadmodule('l-string.lua')
- loadmodule('l-table.lua')
- loadmodule('l-boolean.lua')
- loadmodule('l-math.lua')
- loadmodule('l-file.lua')
- loadmodule('l-io.lua')
-
- -- The following modules contain code that is either not used
- -- at all outside ConTeXt or will fail when enabled due to
- -- lack of other modules.
-
- -- First we load a few helper modules. This is about the miminum
- -- needed to let the font modules do their work. Don't depend on
- -- their functions as we might strip them in future versions of
- -- this generic variant.
+ -- The following helpers are a bit overkill but I don't want to mess up context code for the
+ -- sake of general generality. Around version 1.0 there will be an official api defined.
+ --
+ -- So, I will strip these libraries and see what is really needed so that we don't have this
+ -- overhead in the generic modules. The next section is only there for the packager, so stick
+ -- to using luatex-fonts with luatex-fonts-merged.lua and forget about the rest. The following
+ -- list might change without prior notice (for instance because we shuffled code around).
+
+ ----------("l-lua.lua")
+ loadmodule("l-lpeg.lua")
+ loadmodule("l-function.lua")
+ loadmodule("l-string.lua")
+ loadmodule("l-table.lua")
+ loadmodule("l-io.lua")
+ ----------("l-number.lua")
+ ----------("l-set.lua")
+ ----------("l-os.lua")
+ loadmodule("l-file.lua")
+ ----------("l-md5.lua")
+ ----------("l-url.lua")
+ ----------("l-dir.lua")
+ loadmodule("l-boolean.lua")
+ ----------("l-unicode.lua")
+ loadmodule("l-math.lua")
+
+
+ -- The following modules contain code that is either not used at all outside context or will fail
+ -- when enabled due to lack of other modules.
+
+ -- First we load a few helper modules. This is about the minimum needed to let the font modules do
+ -- their work. Don't depend on their functions as we might strip them in future versions of this
+ -- generic variant.
loadmodule('luatex-basics-gen.lua')
loadmodule('data-con.lua')
- -- We do need some basic node support. The code in there is not for
- -- general use as it might change.
+ -- We do need some basic node support. The code in there is not for general use as it might change.
loadmodule('luatex-basics-nod.lua')
- -- Now come the font modules that deal with traditional TeX fonts
- -- as well as open type fonts. We only support OpenType fonts here.
+ -- Now come the font modules that deal with traditional tex fonts as well as open type fonts. We only
+ -- support OpenType fonts here.
--
- -- The font database file (if used at all) must be put someplace
- -- visible for kpse and is not shared with ConTeXt. The mtx-fonts
- -- script can be used to genate this file (using the --names
- -- option).
+ -- The font database file (if used at all) must be put someplace visible for kpse and is not shared
+ -- with context. The mtx-fonts script can be used to generate this file (using the --names option).
loadmodule('font-ini.lua')
loadmodule('font-con.lua')
@@ -179,14 +183,14 @@ else
loadmodule('node-inj.lua') -- will be replaced (luatex >= .70)
loadmodule('font-ota.lua')
loadmodule('font-otn.lua')
- -- loadmodule('luatex-fonts-chr.lua')
+ ----------('luatex-fonts-chr.lua')
loadmodule('luatex-fonts-lua.lua')
loadmodule('font-def.lua')
loadmodule('luatex-fonts-def.lua')
loadmodule('luatex-fonts-ext.lua') -- some extensions
- -- We need to plug into a callback and the following module implements
- -- the handlers. Actual plugging in happens later.
+ -- We need to plug into a callback and the following module implements the handlers. Actual plugging
+ -- in happens later.
loadmodule('luatex-fonts-cbk.lua')
@@ -194,9 +198,8 @@ end
resolvers.loadmodule = loadmodule
--- In order to deal with the fonts we need to initialize some
--- callbacks. One can overload them later on if needed. First
--- a bit of abstraction.
+-- In order to deal with the fonts we need to initialize some callbacks. One can overload them later on if
+-- needed. First a bit of abstraction.
generic_context.callback_ligaturing = false
generic_context.callback_kerning = false
@@ -204,9 +207,10 @@ generic_context.callback_pre_linebreak_filter = nodes.simple_font_handler
generic_context.callback_hpack_filter = nodes.simple_font_handler
generic_context.callback_define_font = fonts.definers.read
--- The next ones can be done at a different moment if needed. You can create
--- a generic_context namespace and set no_callbacks_yet to true, load this
--- module, and enable the callbacks later.
+-- The next ones can be done at a different moment if needed. You can create a generic_context namespace
+-- and set no_callbacks_yet to true, load this module, and enable the callbacks later. So, there is really
+-- *no* need to create an alternative for luatex-fonts.lua and luatex-fonts-merged.lua: just load this one
+-- and overload if needed.
if not generic_context.no_callbacks_yet then