author    | Hans Hagen <pragma@wxs.nl> | 2009-12-30 23:52:00 +0100
committer | Hans Hagen <pragma@wxs.nl> | 2009-12-30 23:52:00 +0100
commit    | 3162ffe9a685e6a5f0d882681a03056eea9775c8 (patch)
tree      | 1814e67a292d9f6724c9c13235cb90af875ecd15
parent    | 5eb872dbc6bbc35e222d5b23fc783fb0e75d4a99 (diff)
download  | context-3162ffe9a685e6a5f0d882681a03056eea9775c8.tar.gz
beta 2009.12.30 23:52
35 files changed, 1306 insertions, 546 deletions
diff --git a/scripts/context/lua/luatools.lua b/scripts/context/lua/luatools.lua
index 1019439e0..69a7f488a 100644
--- a/scripts/context/lua/luatools.lua
+++ b/scripts/context/lua/luatools.lua
@@ -5318,6 +5318,23 @@ end
 
 local weird = lpeg.P(".")^1 + lpeg.anywhere(lpeg.S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
 
+--~ local l_forbidden = lpeg.S("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t")
+--~ local l_confusing = lpeg.P(" ")
+--~ local l_character = lpeg.utf8
+--~ local l_dangerous = lpeg.P(".")
+
+--~ local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * lpeg.P(-1)
+--~ ----- l_normal = l_normal * lpeg.Cc(true) + lpeg.Cc(false)
+
+--~ local function test(str)
+--~     print(str,lpeg.match(l_normal,str))
+--~ end
+
+--~ test("ヒラギノ明朝 Pro W3")
+--~ test("..ヒラギノ明朝 Pro W3")
+--~ test(":ヒラギノ明朝 Pro W3;")
+--~ test("ヒラギノ明朝 /Pro W3;")
+--~ test("ヒラギノ明朝 Pro W3")
+
 function resolvers.generators.tex(specification)
     local tag = specification
     if trace_locating then
@@ -5338,6 +5355,7 @@ function resolvers.generators.tex(specification)
         end
         for name in directory(full) do
             if not lpegmatch(weird,name) then
+            -- if lpegmatch(l_normal,name) then
                 local mode = attributes(full..name,'mode')
                 if mode == 'file' then
                     if path then
diff --git a/scripts/context/lua/mtx-fonts.lua b/scripts/context/lua/mtx-fonts.lua
index 03c3eb068..94e0063f9 100644
--- a/scripts/context/lua/mtx-fonts.lua
+++ b/scripts/context/lua/mtx-fonts.lua
@@ -119,7 +119,7 @@ local function fontweight(fw)
     end
 end
 
-local function list_specifications(t)
+local function list_specifications(t,info)
     if t then
         local s, w = table.sortedkeys(t), { 0, 0, 0 }
         for k,v in ipairs(s) do
@@ -134,6 +134,9 @@ local function list_specifications(t)
                 subfont(entry.subfont),
                 fontweight(entry.fontweight),
             }
+--~         if info then
+--~             showfeatures(v,t[v])
+--~         end
         end
         table.formatcolumns(s)
         for k,v in ipairs(s) do
@@ -142,7 +145,7 @@ local function list_specifications(t)
     end
 end
 
-local function list_matches(t)
+local function list_matches(t,info)
     if t then
         local s, w = table.sortedkeys(t), { 0, 0, 0 }
         if info then
@@ -181,12 +184,12 @@ function scripts.fonts.list()
     if environment.argument("name") then
         if pattern then
             --~ mtxrun --script font --list --name --pattern=*somename*
-            list_matches(fonts.names.list(string.topattern(pattern,true),reload,all))
+            list_matches(fonts.names.list(string.topattern(pattern,true),reload,all),info)
         elseif filter then
             logs.report("fontnames","not supported: --list --name --filter",name)
         elseif given then
             --~ mtxrun --script font --list --name somename
-            list_matches(fonts.names.list(given,reload,all))
+            list_matches(fonts.names.list(given,reload,all),info)
         else
             logs.report("fontnames","not supported: --list --name <no specification>",name)
         end
@@ -196,31 +199,31 @@ function scripts.fonts.list()
             logs.report("fontnames","not supported: --list --spec --pattern",name)
         elseif filter then
             --~ mtxrun --script font --list --spec --filter="fontname=somename"
-            list_specifications(fonts.names.getlookups(filter),nil,reload)
+            list_specifications(fonts.names.getlookups(filter),info)
         elseif given then
             --~ mtxrun --script font --list --spec somename
-            list_specifications(fonts.names.collectspec(given,reload,all))
+            list_specifications(fonts.names.collectspec(given,reload,all),info)
         else
             logs.report("fontnames","not supported: --list --spec <no specification>",name)
         end
     elseif environment.argument("file") then
         if pattern then
             --~ mtxrun --script font --list --file --pattern=*somename*
-            list_specifications(fonts.names.collectfiles(string.topattern(pattern,true),reload,all))
+            list_specifications(fonts.names.collectfiles(string.topattern(pattern,true),reload,all),info)
         elseif filter then
             logs.report("fontnames","not supported: --list --spec",name)
         elseif given then
             --~ mtxrun --script font --list --file somename
-            list_specifications(fonts.names.collectfiles(given,reload,all))
+            list_specifications(fonts.names.collectfiles(given,reload,all),info)
         else
             logs.report("fontnames","not supported: --list --file <no specification>",name)
         end
     elseif pattern then
         --~ mtxrun --script font --list --pattern=*somename*
-        list_matches(fonts.names.list(string.topattern(pattern,true),reload,all))
+        list_matches(fonts.names.list(string.topattern(pattern,true),reload,all),info)
     elseif given then
         --~ mtxrun --script font --list somename
-        list_matches(fonts.names.list(given,reload,all))
+        list_matches(fonts.names.list(given,reload,all),info)
     else
         logs.report("fontnames","not supported: --list <no specification>",name)
     end
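The listing calls above all funnel the shell-style --pattern=*somename* argument through string.topattern(pattern,true) before querying the font name database. Purely as an illustration of what such a conversion amounts to (this is a hypothetical stand-in, not ConTeXt's actual helper, whose details may differ):

-- hedged sketch of a glob-to-Lua-pattern conversion, assuming only
-- * needs translating; ConTeXt's string.topattern handles more cases
local function topattern(str,anchored)
    str = str:gsub("([%.%-%+%(%)%%])","%%%1") -- escape Lua pattern magics
    str = str:gsub("%*",".*")                 -- glob * becomes .*
    if anchored then
        str = "^" .. str .. "$"
    end
    return str
end

print(topattern("*pagella*",true)) -- ^.*pagella.*$
print(("texgyrepagella-bold.otf"):find(topattern("*pagella*",true)) and "match" or "no match")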
diff --git a/scripts/context/lua/mtxrun.lua b/scripts/context/lua/mtxrun.lua
index 84bbcb8cc..79fdaa759 100644
--- a/scripts/context/lua/mtxrun.lua
+++ b/scripts/context/lua/mtxrun.lua
@@ -4890,7 +4890,6 @@ local function apply_nodes(list,directive,nodes)
             if ltg then
                 local lns = ll.rn or ll.ns
                 local ok = ltg == ntg and lns == nns
---~ if lns ~= "" then logs.report("!",ltg .. " < " .. (lns or "?")) end
                 if directive then
                     if ok then
                         local llp = ll.__p__ ; if llp ~= p then p, m = llp, 1 else m = m + 1 end
@@ -4916,9 +4915,6 @@ local function apply_nodes(list,directive,nodes)
                 for n=1,maxn,3 do
                     local nns, ntg = nodes[n+1], nodes[n+2]
                     ok = (not ntg or ltg == ntg) and (not nns or lns == nns)
---~ if lns ~= "" and ntg == "mo" then
---~     logs.report("!",n .. "< ".. maxn .. " < ".. (lns or "?") .. ":" .. ltg .. "< " .. (nns or "?") .. ":" .. ntg .. "==>".. tostring(ok))
---~ end
                     if ok then
                         break
                     end
@@ -5070,6 +5066,31 @@ local template_f_n = [[
 
 --
 
+local register_self = { kind = "axis", axis = "self" } -- , apply = apply_axis["self"] }
+local register_parent = { kind = "axis", axis = "parent" } -- , apply = apply_axis["parent"] }
+local register_descendant = { kind = "axis", axis = "descendant" } -- , apply = apply_axis["descendant"] }
+local register_child = { kind = "axis", axis = "child" } -- , apply = apply_axis["child"] }
+local register_descendant_or_self = { kind = "axis", axis = "descendant-or-self" } -- , apply = apply_axis["descendant-or-self"] }
+local register_root = { kind = "axis", axis = "root" } -- , apply = apply_axis["root"] }
+local register_ancestor = { kind = "axis", axis = "ancestor" } -- , apply = apply_axis["ancestor"] }
+local register_ancestor_or_self = { kind = "axis", axis = "ancestor-or-self" } -- , apply = apply_axis["ancestor-or-self"] }
+local register_attribute = { kind = "axis", axis = "attribute" } -- , apply = apply_axis["attribute"] }
+local register_namespace = { kind = "axis", axis = "namespace" } -- , apply = apply_axis["namespace"] }
+local register_following = { kind = "axis", axis = "following" } -- , apply = apply_axis["following"] }
+local register_following_sibling = { kind = "axis", axis = "following-sibling" } -- , apply = apply_axis["following-sibling"] }
+local register_preceding = { kind = "axis", axis = "preceding" } -- , apply = apply_axis["preceding"] }
+local register_preceding_sibling = { kind = "axis", axis = "preceding-sibling" } -- , apply = apply_axis["preceding-sibling"] }
+local register_reverse_sibling = { kind = "axis", axis = "reverse-sibling" } -- , apply = apply_axis["reverse-sibling"] }
+
+local register_auto_descendant_or_self = { kind = "axis", axis = "auto-descendant-or-self" } -- , apply = apply_axis["auto-descendant-or-self"] }
+local register_auto_descendant = { kind = "axis", axis = "auto-descendant" } -- , apply = apply_axis["auto-descendant"] }
+local register_auto_self = { kind = "axis", axis = "auto-self" } -- , apply = apply_axis["auto-self"] }
+local register_auto_child = { kind = "axis", axis = "auto-child" } -- , apply = apply_axis["auto-child"] }
+
+local register_initial_child = { kind = "axis", axis = "initial-child" } -- , apply = apply_axis["initial-child"] }
+
+local register_all_nodes = { kind = "nodes", nodetest = true, nodes = { true, false, false } }
+
 local function errorrunner_e(str,cnv)
     logs.report("lpath","error in expression: %s => %s",str,cnv)
     return false
@@ -5116,39 +5137,22 @@ local arguments = P { "ar",
 
 -- todo: better arg parser
 
-local register_self = { kind = "axis", axis = "self" } -- , apply = apply_axis["self"] }
-local register_parent = { kind = "axis", axis = "parent" } -- , apply = apply_axis["parent"] }
-local register_descendant = { kind = "axis", axis = "descendant" } -- , apply = apply_axis["descendant"] }
-local register_child = { kind = "axis", axis = "child" } -- , apply = apply_axis["child"] }
-local register_descendant_or_self = { kind = "axis", axis = "descendant-or-self" } -- , apply = apply_axis["descendant-or-self"] }
-local register_root = { kind = "axis", axis = "root" } -- , apply = apply_axis["root"] }
-local register_ancestor = { kind = "axis", axis = "ancestor" } -- , apply = apply_axis["ancestor"] }
-local register_ancestor_or_self = { kind = "axis", axis = "ancestor-or-self" } -- , apply = apply_axis["ancestor-or-self"] }
-local register_attribute = { kind = "axis", axis = "attribute" } -- , apply = apply_axis["attribute"] }
-local register_namespace = { kind = "axis", axis = "namespace" } -- , apply = apply_axis["namespace"] }
-local register_following = { kind = "axis", axis = "following" } -- , apply = apply_axis["following"] }
-local register_following_sibling = { kind = "axis", axis = "following-sibling" } -- , apply = apply_axis["following-sibling"] }
-local register_preceding = { kind = "axis", axis = "preceding" } -- , apply = apply_axis["preceding"] }
-local register_preceding_sibling = { kind = "axis", axis = "preceding-sibling" } -- , apply = apply_axis["preceding-sibling"] }
-local register_reverse_sibling = { kind = "axis", axis = "reverse-sibling" } -- , apply = apply_axis["reverse-sibling"] }
-
-local register_auto_descendant_or_self = { kind = "axis", axis = "auto-descendant-or-self" } -- , apply = apply_axis["auto-descendant-or-self"] }
-local register_auto_descendant = { kind = "axis", axis = "auto-descendant" } -- , apply = apply_axis["auto-descendant"] }
-local register_auto_self = { kind = "axis", axis = "auto-self" } -- , apply = apply_axis["auto-self"] }
-local register_auto_child = { kind = "axis", axis = "auto-child" } -- , apply = apply_axis["auto-child"] }
-
-local register_initial_child = { kind = "axis", axis = "initial-child" } -- , apply = apply_axis["initial-child"] }
-
-local register_all_nodes = { kind = "nodes", nodetest = true, nodes = { true, false, false } }
-
 local function register_error(str)
-    return { kind = "error", comment = format("unparsed: %s",str) }
+    return { kind = "error", error = format("unparsed: %s",str) }
 end
 
+-- there is a difference in * and /*/ and so we need to catch a few special cases
+
+local special_1 = P("*") * Cc(register_auto_descendant) * Cc(register_all_nodes) -- last one not needed
+local special_2 = P("/") * Cc(register_auto_self)
+local special_3 = P("") * Cc(register_auto_self)
+
 local parser = Ct { "patterns", -- can be made a bit faster by moving pattern outside
 
-    patterns = spaces * V("protocol") * spaces * V("initial") * spaces * V("step") * spaces *
-               (P("/") * spaces * V("step") * spaces)^0,
+    patterns = spaces * V("protocol") * spaces * (
+                   ( V("special") * spaces * P(-1) ) +
+                   ( V("initial") * spaces * V("step") * spaces * (P("/") * spaces * V("step") * spaces)^0 )
+               ),
 
     protocol = Cg(V("letters"),"protocol") * P("://") + Cg(Cc(nil),"protocol"),
 
@@ -5161,6 +5165,8 @@ local parser = Ct { "patterns", -- can be made a bit faster by moving pattern ou
                V("reverse_sibling") + V("preceding_sibling") + V("preceding") + V("ancestor_or_self") +
                #(1-P(-1)) * Cc(register_auto_child),
 
+    special = special_1 + special_2 + special_3,
+
     initial = (P("/") * spaces * Cc(register_initial_child))^-1,
 
     error = (P(1)^1) / register_error,
 
@@ -5172,7 +5178,8 @@ local parser = Ct { "patterns", -- can be made a bit faster by moving pattern ou
     s_descendant_or_self = (P("***/") + P("/")) * Cc(register_descendant_or_self), -- *** is a bonus
     -- s_descendant_or_self = P("/") * Cc(register_descendant_or_self),
     s_descendant = P("**") * Cc(register_descendant),
-    s_child = P("*") * #(1-P(":")) * Cc(register_child ),
+    s_child = P("*") * #(1-P(":")) * Cc(register_child ),
+--  s_child = P("*") * #(P("/")+P(-1)) * Cc(register_child ),
     s_parent = P("..") * Cc(register_parent ),
     s_self = P("." ) * Cc(register_self ),
     s_root = P("^^") * Cc(register_root ),
@@ -5262,6 +5269,15 @@ end
 
 xml.lshow = lshow
 
+local function add_comment(p,str)
+    local pc = p.comment
+    if not pc then
+        p.comment = { str }
+    else
+        pc[#pc+1] = str
+    end
+end
+
 parse_pattern = function (pattern) -- the gain of caching is rather minimal
     lpathcalls = lpathcalls + 1
     if type(pattern) == "table" then
@@ -5280,18 +5296,32 @@ parse_pattern = function (pattern) -- the gain of caching is rather minimal
             logs.report("lpath","parsing error in '%s'",pattern)
             lshow(parsed)
         else
-            -- we could have done this with a more complex parsed but this
+            -- we could have done this with a more complex parser but this
             -- is cleaner
             local pi = parsed[1]
             if pi.axis == "auto-child" then
-                parsed.comment = "auto-child replaced by auto-descendant-or-self"
-                parsed[1] = register_auto_descendant_or_self
-                --~ parsed.comment = "auto-child replaced by auto-descendant"
-                --~ parsed[1] = register_auto_descendant
+                if false then
+                    add_comment(parsed, "auto-child replaced by auto-descendant-or-self")
+                    parsed[1] = register_auto_descendant_or_self
+                else
+                    add_comment(parsed, "auto-child replaced by auto-descendant")
+                    parsed[1] = register_auto_descendant
+                end
             elseif pi.axis == "initial-child" and np > 1 and parsed[2].axis then
-                parsed.comment = "initial-child removed" -- we could also make it a auto-self
+                add_comment(parsed, "initial-child removed") -- we could also make it a auto-self
                 remove(parsed,1)
             end
+local np = #parsed -- can have changed
+if np > 1 then
+    local pnp = parsed[np]
+    if pnp.kind == "nodes" and pnp.nodetest == true then
+        local nodes = pnp.nodes
+        if nodes[1] == true and nodes[2] == false and nodes[3] == false then
+            add_comment(parsed, "redundant final wildcard filter removed")
+            remove(parsed,np)
+        end
+    end
+end
         end
     else
         parsed = { pattern = pattern }
@@ -8146,6 +8176,23 @@ end
 
 local weird = lpeg.P(".")^1 + lpeg.anywhere(lpeg.S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
 
+--~ local l_forbidden = lpeg.S("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t")
+--~ local l_confusing = lpeg.P(" ")
+--~ local l_character = lpeg.utf8
+--~ local l_dangerous = lpeg.P(".")
+
+--~ local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * lpeg.P(-1)
+--~ ----- l_normal = l_normal * lpeg.Cc(true) + lpeg.Cc(false)
+
+--~ local function test(str)
+--~     print(str,lpeg.match(l_normal,str))
+--~ end
+
+--~ test("ヒラギノ明朝 Pro W3")
+--~ test("..ヒラギノ明朝 Pro W3")
+--~ test(":ヒラギノ明朝 Pro W3;")
+--~ test("ヒラギノ明朝 /Pro W3;")
+--~ test("ヒラギノ明朝 Pro W3")
+
 function resolvers.generators.tex(specification)
     local tag = specification
     if trace_locating then
@@ -8166,6 +8213,7 @@ function resolvers.generators.tex(specification)
         end
         for name in directory(full) do
             if not lpegmatch(weird,name) then
+            -- if lpegmatch(l_normal,name) then
                 local mode = attributes(full..name,'mode')
                 if mode == 'file' then
                     if path then
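Both luatools.lua and mtxrun.lua pick up the same filename-filter change here: directory scanning skips names that match weird, while the stricter whitelist (l_normal) stays commented out, apparently because names such as the Hiragino ones in the test calls contain spaces and non-ASCII bytes that must keep working. A self-contained sketch of the skip test (the anywhere helper is written out here; in these scripts it is provided as lpeg.anywhere):

local lpeg = require("lpeg")

-- match a pattern anywhere in the subject, as lpeg.anywhere does
local function anywhere(p)
    return (1 - p)^0 * p
end

local weird = lpeg.P(".")^1 + anywhere(lpeg.S("~`!#$%^&*()={}[]:;\"'|<>,?\n\r\t"))

for _, name in ipairs { "texgyrepagella.otf", "..", "odd:name.tfm", "ヒラギノ明朝 Pro W3" } do
    print(name, lpeg.match(weird,name) and "skipped" or "scanned")
end
-- the CJK name is scanned: spaces and UTF-8 bytes are not in the forbidden set,
-- which is exactly why the stricter l_normal variant is left disabled

Following this, the same two hunks are applied verbatim to the generated copies under scripts/context/stubs (mswin/luatools.lua, mswin/mtxrun.lua, unix/luatools, unix/mtxrun), which are byte-identical to the scripts shown above.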
diff --git a/tex/context/base/buff-ini.lua b/tex/context/base/buff-ini.lua
index 1199b47cf..d6b5211b1 100644
--- a/tex/context/base/buff-ini.lua
+++ b/tex/context/base/buff-ini.lua
@@ -20,7 +20,8 @@ buffers.visualizers = { }
 
 -- if needed we can make 'm local
 
-local trace_run = false  trackers.register("buffers.run",function(v) trace_run = v end)
+local trace_run       = false  trackers.register("buffers.run",       function(v) trace_run       = v end)
+local trace_visualize = false  trackers.register("buffers.visualize", function(v) trace_visualize = v end)
 
 local utf = unicode.utf8
 
@@ -28,7 +29,7 @@ local concat, texsprint, texprint, texwrite = table.concat, tex.sprint, tex.prin
 local utfbyte, utffind, utfgsub = utf.byte, utf.find, utf.gsub
 local type, next = type, next
 local huge = math.huge
-local byte, sub, find, char, gsub, rep, lower, format, gmatch = string.byte, string.sub, string.find, string.char, string.gsub, string.rep, string.lower, string.format, string.gmatch
+local byte, sub, find, char, gsub, rep, lower, format, gmatch, match = string.byte, string.sub, string.find, string.char, string.gsub, string.rep, string.lower, string.format, string.gmatch, string.match
 local utfcharacters, utfvalues = string.utfcharacters, string.utfvalues
 local ctxcatcodes = tex.ctxcatcodes
 local variables = interfaces.variables
@@ -324,23 +325,37 @@ end
 
 -- maybe just line(n,str) empty(n,str)
 
-visualizers.handlers = visualizers.handlers or { }
-visualizers.escapetoken = nil
-visualizers.tablength = 7
-visualizers.enabletab = true -- false
-visualizers.enableescape = false
-visualizers.obeyspace = true
+visualizers.handlers  = visualizers.handlers or { }
+visualizers.tablength = 7
+visualizers.enabletab = true -- false
+visualizers.obeyspace = true
 
 local handlers = visualizers.handlers
 
 function buffers.newvisualizer(name)
+    name = lower(name)
     local handler = { }
     handlers[name] = handler
     return handler
 end
 
 function buffers.getvisualizer(name)
-    return handlers[name]
+    name = lower(name)
+    return handlers[name] or buffers.loadvisualizer(name)
+end
+
+function buffers.loadvisualizer(name)
+    name = lower(name)
+    local hn = handlers[name]
+    if hn then
+        return hn
+    else
+        if trace_visualize then
+            logs.report("buffers","loading '%s' visualizer",name)
+        end
+        environment.loadluafile("pret-" .. name)
+        return handlers[name] or buffers.newvisualizer(name)
+    end
 end
 
 local default = buffers.newvisualizer("default")
@@ -621,6 +636,67 @@ function buffers.realign(name,forced_n) -- no, auto, <number>
     return d
 end
 
+-- escapes: buffers.set_escape("tex","/BTEX","/ETEX")
+
+local function flush_escaped_line(str,pattern,flushline)
+    while true do
+        local a, b, c = match(str,pattern)
+        if a and a ~= "" then
+            flushline(a)
+        end
+        if b and b ~= "" then
+            texsprint(ctxcatcodes,"{",b,"}")
+        end
+        if c then
+            if c == "" then
+                break
+            else
+                str = c
+            end
+        else
+            flushline(str)
+            break
+        end
+    end
+end
+
+function buffers.set_escape(name,pair)
+    if pair and pair ~= "" then
+        local visualizer = buffers.getvisualizer(name)
+        visualizer.normal_flush_line = visualizer.normal_flush_line or visualizer.flush_line
+        if pair == variables.no then
+            visualizer.flush_line = visualizer.normal_flush_line or visualizer.flush_line
+            if trace_visualize then
+                logs.report("buffers","resetting escape range for visualizer '%s'",name)
+            end
+        else
+            local start, stop
+            if pair == variables.yes then
+                start, stop = "/BTEX", "/ETEX"
+            else
+                pair = string.split(pair,",")
+                start, stop = string.esc(pair[1] or ""), string.esc(pair[2] or "")
+            end
+            if start ~= "" then
+                local pattern
+                if stop == "" then
+                    pattern = "^(.-)" .. start .. "(.*)(.*)$"
+                else
+                    pattern = "^(.-)" .. start .. "(.-)" .. stop .. "(.*)$"
+                end
+                function visualizer.flush_line(str)
+                    flush_escaped_line(str,pattern,visualizer.normal_flush_line)
+                end
+                if trace_visualize then
+                    logs.report("buffers","setting escape range for visualizer '%s' to %s -> %s",name,start,stop)
+                end
+            elseif trace_visualize then
+                logs.report("buffers","problematic escape specification '%s' for visualizer '%s'",pair,name)
+            end
+        end
+    end
+end
+
 -- THIS WILL BECOME A FRAMEWORK: the problem with prety printing is that
 -- we deal with snippets and therefore we need tolerant parsing
 
@@ -663,5 +739,3 @@ end
 
 --~ str = [[test 123 test $oeps$]]
 --~ lpegmatch(pattern,str)
-
-
diff --git a/tex/context/base/buff-ver.mkiv b/tex/context/base/buff-ver.mkiv
index 58dbd2a8b..8b9c584f1 100644
--- a/tex/context/base/buff-ver.mkiv
+++ b/tex/context/base/buff-ver.mkiv
@@ -71,8 +71,9 @@
    \edef\prettyidentifier{\executeifdefined{\??ty\??ty\ascii}{TEX}}%
    \begingroup
    % we can move this to lua
-   \lowercasestring \f!prettyprefix\prettyidentifier\to\filename
-   \doonlyonce\filename{\ctxloadluafile\filename\empty}%
+   % \lowercasestring \f!prettyprefix\prettyidentifier\to\filename
+   % \doonlyonce\filename{\ctxloadluafile\filename\empty}%
+   \ctxlua{buffers.loadvisualizer("\ascii")}%
    \endgroup}
 
 \def\setupprettytype
@@ -94,18 +95,19 @@
 \setvalue{\??tp:\c!option:\v!none }{\let\obeycharacters\relax}
 \setvalue{\??tp:\c!option:\empty }{\let\obeycharacters\relax}
 
-\setvalue{\??tp:\c!option:\v!color }{\setupprettiesintype{TEX}%
+\setvalue{\??tp:\c!option:\v!color }{\setupprettiesintype{\typingparameter\c!option}%
                                      \let\obeycharacters\setupprettytype
                                      \let\obeytabs\ignoretabs}
 \setvalue{\??tp:\c!option:\v!normal }{\let\obeycharacters\setupgroupedtype}
-\setvalue{\??tp:\c!option:\v!commands }{\def\obeycharacters{\setupcommandsintype}%
-                                        \let\obeytabs\ignoretabs}
 \setvalue{\??tp:\c!option:\v!slanted }{\let\obeycharacters\setupslantedtype
                                        \let\obeytabs\ignoretabs}
 \setvalue{\??tp:\c!option:\s!unknown }{\setupprettiesintype{\typingparameter\c!option}%
                                        \let\obeycharacters\setupprettytype
                                        \let\obeytabs\ignoretabs}
 
+%setvalue{\??tp:\c!option:\v!commands }{\def\obeycharacters{\setupcommandsintype}%
+%                                       \let\obeytabs\ignoretabs}
+
 \def\dosetverbatimfont
   {\redoconvertfont\dosetfontattribute{\currenttypingclass\currenttyping}\c!style
    \normalnoligatures\font}
@@ -164,12 +166,52 @@
 % \typeTEX{\example---oeps}. this---ligates---again.
 % \type {\example---oeps}. this---ligates---again.
 
-\def\setupcommandsintype % can also be \string\
-  {\ctxlua{
-      buffers.visualizers.enableescape = true
-      buffers.visualizers.escapetoken = \!!bs\typingparameter\c!escape\!!es
-   }%
-   \setevalue{\typingparameter\c!escape}{\typingparameter\c!escape}}
+%D \startbuffer
+%D \setuptyping[TEX][escape=yes]
+%D
+%D \startTEX
+%D /BTEX\em sometex/ETEX
+%D /BTEX\em sometex/ETEX \after
+%D \before /BTEX\em sometex/ETEX
+%D \before /BTEX\em sometex/ETEX \after
+%D \before /BTEX\em sometex/ETEX \inbetween /BTEX\em sometex/ETEX \after
+%D \before \after
+%D \stopTEX
+%D \stopbuffer
+%D
+%D \typebuffer \start \getbuffer \stop
+%D
+%D \startbuffer
+%D \setuptyping[TEX][escape={[[,]]}]
+%D
+%D \startTEX
+%D [[\em sometex]]
+%D [[\em sometex]] \after
+%D \before [[\em sometex]]
+%D \before [[\em sometex]] \after
+%D \before [[\em sometex]] \inbetween [[\em sometex]] \after
+%D \before \after
+%D \stopTEX
+%D \stopbuffer
+%D
+%D \typebuffer \start \getbuffer \stop
+%D
+%D \startbuffer
+%D \setuptyping[TEX][escape=//]
+%D
+%D \startTEX
+%D //\em sometex
+%D \before //\em sometex
+%D \stopTEX
+%D \stopbuffer
+%D
+%D \typebuffer \start \getbuffer \stop
+
+\def\setupcommandsintype
+  {\ctxlua{buffers.set_escape("\currenttyping",\!!bs\typingparameter\c!escape\!!es)}}
+
+\appendtoks
+    \setupcommandsintype
+\to \everyinitializeverbatim
 
 \def\setupslantedtype
   {\slantedtypepermittedtrue}
@@ -941,7 +983,7 @@
    \c!evenmargin=\!!zeropoint,
    \c!oddmargin=\!!zeropoint,
    \c!blank=\v!line,
-   \c!escape=/, % beware \string\ , should also be accepted
+   \c!escape=, % yes | no | START,STOP BTEX,ETEX
    \c!numbering=\v!no,
    \c!lines=,
    \c!empty=,
diff --git a/tex/context/base/colo-ini.mkiv b/tex/context/base/colo-ini.mkiv
index d4d8f7bd8..091ccc747 100644
--- a/tex/context/base/colo-ini.mkiv
+++ b/tex/context/base/colo-ini.mkiv
@@ -175,7 +175,7 @@
     \doactivatecolor{#1}}
 
 \unexpanded\def\popcolor
-  {\csname\??on:s:\number\currentcolornesting\endcsname
+  {\csname\??cl:s:\number\currentcolornesting\endcsname
    \global\advance\currentcolornesting\minusone}
 
 %D \macros
@@ -955,12 +955,7 @@
 \def\doinheritta#1{\csname(ta:\ifcsname(ta:\currentpalet#1)\endcsname\currentpalet#1\else\ifcsname(ta:#1)\endcsname#1\fi\fi)\endcsname}
 \def\doinheritts#1{\csname(ts:\ifcsname(ts:\currentpalet#1)\endcsname\currentpalet#1\else\ifcsname(ts:#1)\endcsname#1\fi\fi)\endcsname}
 
-%D Low level defs: ... \colorattribute#2
-
-% \def\colordefalc#1#2{\setevalue{(ca:#1)}{#2}\setevalue{(cs:#1)}{\dosetattribute{color}{#2}}}
-% \def\colordefagc#1#2{\setxvalue{(ca:#1)}{#2}\setevalue{(cs:#1)}{\dosetattribute{color}{#2}}}
-% \def\colordefalt#1#2{\setevalue{(ta:#1)}{#2}\setevalue{(ts:#1)}{\dosetattribute{transparency}{#2}}}
-% \def\colordefagt#1#2{\setxvalue{(ta:#1)}{#2}\setxvalue{(ts:#1)}{\dosetattribute{transparency}{#2}}}
+%D Low level defs:
 
 \def\colordefalc#1#2{\setevalue{(ca:#1)}{#2}\setvalue {(cs:#1)}{\attribute\colorattribute #2 }}
 \def\colordefagc#1#2{\setxvalue{(ca:#1)}{#2}\setvalue {(cs:#1)}{\attribute\colorattribute #2 }}
diff --git a/tex/context/base/cont-new.tex b/tex/context/base/cont-new.tex
index 9e7e2cd50..4a7212d0e 100644
--- a/tex/context/base/cont-new.tex
+++ b/tex/context/base/cont-new.tex
@@ -11,7 +11,7 @@
 %C therefore copyrighted by \PRAGMA. See mreadme.pdf for
 %C details.
 
-\newcontextversion{2009.12.29 22:32}
+\newcontextversion{2009.12.30 23:52}
 
 %D This file is loaded at runtime, thereby providing an
 %D excellent place for hacks, patches, extensions and new
diff --git a/tex/context/base/context.tex b/tex/context/base/context.tex
index 25374601e..11dc9a1c7 100644
--- a/tex/context/base/context.tex
+++ b/tex/context/base/context.tex
@@ -20,7 +20,7 @@
 %D your styles an modules.
 
 \edef\contextformat {\jobname}
-\edef\contextversion{2009.12.29 22:32}
+\edef\contextversion{2009.12.30 23:52}
 
 %D For those who want to use this:
diff --git a/tex/context/base/data-res.lua b/tex/context/base/data-res.lua
index 67bb7cf88..5cc74ad5b 100644
--- a/tex/context/base/data-res.lua
+++ b/tex/context/base/data-res.lua
@@ -795,6 +795,23 @@ end
 
 local weird = lpeg.P(".")^1 + lpeg.anywhere(lpeg.S("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t"))
 
+--~ local l_forbidden = lpeg.S("~`!#$%^&*()={}[]:;\"\'||\\/<>,?\n\r\t")
+--~ local l_confusing = lpeg.P(" ")
+--~ local l_character = lpeg.utf8
+--~ local l_dangerous = lpeg.P(".")
+
+--~ local l_normal = (l_character - l_forbidden - l_confusing - l_dangerous) * (l_character - l_forbidden - l_confusing^2)^0 * lpeg.P(-1)
+--~ ----- l_normal = l_normal * lpeg.Cc(true) + lpeg.Cc(false)
+
+--~ local function test(str)
+--~     print(str,lpeg.match(l_normal,str))
+--~ end
+
+--~ test("ヒラギノ明朝 Pro W3")
+--~ test("..ヒラギノ明朝 Pro W3")
+--~ test(":ヒラギノ明朝 Pro W3;")
+--~ test("ヒラギノ明朝 /Pro W3;")
+--~ test("ヒラギノ明朝 Pro W3")
+
 function resolvers.generators.tex(specification)
     local tag = specification
     if trace_locating then
@@ -815,6 +832,7 @@ function resolvers.generators.tex(specification)
         end
         for name in directory(full) do
             if not lpegmatch(weird,name) then
+            -- if lpegmatch(l_normal,name) then
                 local mode = attributes(full..name,'mode')
                 if mode == 'file' then
                     if path then
diff --git a/tex/context/base/font-ctx.lua b/tex/context/base/font-ctx.lua
index 1233f075d..166da7dec 100644
--- a/tex/context/base/font-ctx.lua
+++ b/tex/context/base/font-ctx.lua
@@ -414,6 +414,6 @@ function fonts.dimenfactor(unit,tfmdata)
     end
 end
 
-function fonts.cleanname(name) -- mapped onto macro, see fonts.names.cleanname
-    texsprint(ctxcatcodes,(gsub(lower(name),"[^%a%d]","")))
+function fonts.cleanname(name)
+    texsprint(ctxcatcodes,fonts.names.cleanname(name))
 end
diff --git a/tex/context/base/font-def.lua b/tex/context/base/font-def.lua
index 9a007b991..02369cc37 100644
--- a/tex/context/base/font-def.lua
+++ b/tex/context/base/font-def.lua
@@ -314,7 +314,7 @@ function tfm.read(specification)
         local reader = sequence[s]
         if readers[reader] then -- not really needed
             if trace_defining then
-                logs.report("define font","trying (sequence driven) type %s for %s with file %s",reader,specification.name,specification.filename or "unknown")
+                logs.report("define font","trying (reader sequence driven) type %s for %s with file %s",reader,specification.name,specification.filename or "unknown")
             end
             tfmtable = readers[reader](specification)
             if tfmtable then
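The font-ctx.lua change above delegates name cleaning to fonts.names.cleanname instead of an inline gsub; the removed line shows exactly what that normalisation amounts to. As a runnable illustration of the old inline version:

-- the old inline normalisation: lowercase, then strip everything
-- that is not a letter or digit
local function cleanname(name)
    return (name:lower():gsub("[^%a%d]",""))
end

print(cleanname("TeX Gyre Pagella-Bold")) -- texgyrepagellabold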
diff --git a/tex/context/base/font-otb.lua b/tex/context/base/font-otb.lua
index 675c124e1..a6845800d 100644
--- a/tex/context/base/font-otb.lua
+++ b/tex/context/base/font-otb.lua
@@ -279,6 +279,47 @@ function prepare_base_substitutions(tfmdata,kind,value) -- we can share some cod
     end
 end
 
+--~ local function prepare_base_kerns(tfmdata,kind,value) -- todo what kind of kerns, currently all
+--~     if value then
+--~         local otfdata = tfmdata.shared.otfdata
+--~         local validlookups, lookuplist = collect_lookups(otfdata,kind,tfmdata.script,tfmdata.language)
+--~         if validlookups then
+--~             local unicodes = tfmdata.unicodes -- names to unicodes
+--~             local indices = tfmdata.indices
+--~             local characters = tfmdata.characters
+--~             local descriptions = tfmdata.descriptions
+--~             for u, chr in next, characters do
+--~                 local d = descriptions[u]
+--~                 if d then
+--~                     local dk = d.mykerns
+--~                     if dk then
+--~                         local t, done = chr.kerns or { }, false
+--~                         for l=1,#lookuplist do
+--~                             local lookup = lookuplist[l]
+--~                             local kerns = dk[lookup]
+--~                             if kerns then
+--~                                 for k, v in next, kerns do
+--~                                     if v ~= 0 and not t[k] then -- maybe no 0 test here
+--~                                         t[k], done = v, true
+--~                                         if trace_baseinit and trace_kerns then
+--~                                             logs.report("define otf","%s: base kern %s + %s => %s",cref(kind,lookup),gref(descriptions,u),gref(descriptions,k),v)
+--~                                         end
+--~                                     end
+--~                                 end
+--~                             end
+--~                         end
+--~                         if done then
+--~                             chr.kerns = t -- no empty assignments
+--~                         end
+--~                     -- elseif d.kerns then
+--~                     --     logs.report("define otf","%s: invalid mykerns for %s",cref(kind),gref(descriptions,u))
+--~                     end
+--~                 end
+--~             end
+--~         end
+--~     end
+--~ end
+
 local function prepare_base_kerns(tfmdata,kind,value) -- todo what kind of kerns, currently all
     if value then
         local otfdata = tfmdata.shared.otfdata
@@ -288,31 +329,40 @@ local function prepare_base_kerns(tfmdata,kind,value) -- todo what kind of kerns
         local indices = tfmdata.indices
         local characters = tfmdata.characters
         local descriptions = tfmdata.descriptions
+        local sharedkerns = { }
         for u, chr in next, characters do
             local d = descriptions[u]
             if d then
-                local dk = d.mykerns
+                local dk = d.mykerns -- shared
                 if dk then
-                    local t, done = chr.kerns or { }, false
-                    for l=1,#lookuplist do
-                        local lookup = lookuplist[l]
-                        local kerns = dk[lookup]
-                        if kerns then
-                            for k, v in next, kerns do
-                                if v ~= 0 and not t[k] then -- maybe no 0 test here
-                                    t[k], done = v, true
-                                    if trace_baseinit and trace_kerns then
-                                        logs.report("define otf","%s: base kern %s + %s => %s",cref(kind,lookup),gref(descriptions,u),gref(descriptions,k),v)
+                    local s = sharedkerns[dk]
+                    if s == false then
+                        -- skip
+                    elseif s then
+                        chr.kerns = s
+                    else
+                        local t, done = chr.kerns or { }, false
+                        for l=1,#lookuplist do
+                            local lookup = lookuplist[l]
+                            local kerns = dk[lookup]
+                            if kerns then
+                                for k, v in next, kerns do
+                                    if v ~= 0 and not t[k] then -- maybe no 0 test here
+                                        t[k], done = v, true
+                                        if trace_baseinit and trace_kerns then
+                                            logs.report("define otf","%s: base kern %s + %s => %s",cref(kind,lookup),gref(descriptions,u),gref(descriptions,k),v)
+                                        end
                                     end
                                 end
                             end
                         end
+                        if done then
+                            sharedkerns[dk] = t
+                            chr.kerns = t -- no empty assignments
+                        else
+                            sharedkerns[dk] = false
+                        end
                     end
-                    if done then
-                        chr.kerns = t -- no empty assignments
-                    end
-                -- elseif d.kerns then
-                --     logs.report("define otf","%s: invalid mykerns for %s",cref(kind),gref(descriptions,u))
                 end
             end
         end
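The rewritten prepare_base_kerns above gains a sharedkerns cache: many glyphs in an OpenType font point at the same mykerns table, so the expanded per-character kern table is now computed once per distinct table (with false recording "nothing to expand") instead of once per glyph. The same memoisation idea in isolation, as a hypothetical reduction of the patch:

local sharedkerns = { }

local function expandedkerns(dk,expand)
    local s = sharedkerns[dk]
    if s ~= nil then
        return s or nil          -- cached table, or false meaning "empty"
    end
    local t = expand(dk)
    sharedkerns[dk] = t or false -- remember failures too
    return t
end

local shared = { A = -20, V = -20 } -- one kern table referenced by many glyphs
local calls = 0
local function expand(dk) calls = calls + 1 return dk end

for glyph = 1, 1000 do
    expandedkerns(shared,expand)
end
print(calls) -- 1: the expensive expansion ran once for a thousand glyphs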
+--~ glyph.mykerns = mykerns +--~ end +--~ local lookupkerns = mykerns[lookup] +--~ if not lookupkerns then +--~ lookupkerns = { } +--~ mykerns[lookup] = lookupkerns +--~ end +--~ for sk, sv in next, seconds do +--~ local offset = offsets[(fk-1) * maxseconds + sk] +--~ --~ local offset = offsets[sk] -- (fk-1) * maxseconds + sk] +--~ for second in gmatch(sv,"[^ ]+") do +--~ local second_unicode = unicodes[second] +--~ if type(second_unicode) == "number" then +--~ lookupkerns[second_unicode] = offset +--~ else +--~ for s=1,#second_unicode do +--~ lookupkerns[second_unicode[s]] = offset +--~ end +--~ end +--~ end +--~ end +--~ elseif trace_loading then +--~ logs.report("load otf", "no glyph data for U+%04X", first_unicode[f]) +--~ end +--~ end +--~ end +--~ end +--~ end +--~ end +--~ subtable.comment = "The kernclass table is merged into mykerns in the indexed glyph tables." +--~ subtable.kernclass = { } +--~ end +--~ end +--~ end +--~ end +--~ end +--~ end + otf.enhancers["reorganize kerns"] = function(data,filename) local glyphs, mapmap, unicodes = data.glyphs, data.luatex.indices, data.luatex.unicodes local mkdone = false @@ -1045,6 +1169,9 @@ otf.enhancers["reorganize kerns"] = function(data,filename) end local dgpos = data.gpos if dgpos then + local separator = lpeg.P(" ") + local other = ((1 - separator)^0) / unicodes + local splitter = lpeg.Ct(other * (separator * other)^0) for gp=1,#dgpos do local gpos = dgpos[gp] local subtables = gpos.subtables @@ -1052,54 +1179,71 @@ otf.enhancers["reorganize kerns"] = function(data,filename) for s=1,#subtables do local subtable = subtables[s] local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes - if kernclass then + if kernclass then -- the next one is quite slow for k=1,#kernclass do local kcl = kernclass[k] local firsts, seconds, offsets, lookups = kcl.firsts, kcl.seconds, kcl.offsets, kcl.lookup -- singular if type(lookups) ~= "table" then lookups = { lookups } end + local split = { } for l=1,#lookups do local lookup = lookups[l] + -- weird, as maxfirst and maxseconds can have holes, first seems to be indexed, seconds starts at 2 local maxfirsts, maxseconds = getn(firsts), getn(seconds) + for _, s in next, firsts do + split[s] = split[s] or lpegmatch(splitter,s) + end + for _, s in next, seconds do + split[s] = split[s] or lpegmatch(splitter,s) + end if trace_loading then logs.report("load otf", "adding kernclass %s with %s times %s pairs",lookup, maxfirsts, maxseconds) end - for fk, fv in next, firsts do - for first in gmatch(fv,"[^ ]+") do - local first_unicode = unicodes[first] - if type(first_unicode) == "number" then - first_unicode = { first_unicode } + local function do_it(fk,first_unicode) + local glyph = glyphs[mapmap[first_unicode]] + if glyph then + local mykerns = glyph.mykerns + if not mykerns then + mykerns = { } -- unicode indexed ! + glyph.mykerns = mykerns end - for f=1,#first_unicode do - local glyph = glyphs[mapmap[first_unicode[f]]] - if glyph then - local mykerns = glyph.mykerns - if not mykerns then - mykerns = { } -- unicode indexed ! 
- glyph.mykerns = mykerns - end - local lookupkerns = mykerns[lookup] - if not lookupkerns then - lookupkerns = { } - mykerns[lookup] = lookupkerns - end - for sk, sv in next, seconds do - local offset = offsets[(fk-1) * maxseconds + sk] - --~ local offset = offsets[sk] -- (fk-1) * maxseconds + sk] - for second in gmatch(sv,"[^ ]+") do - local second_unicode = unicodes[second] - if type(second_unicode) == "number" then - lookupkerns[second_unicode] = offset - else - for s=1,#second_unicode do - lookupkerns[second_unicode[s]] = offset - end - end + local lookupkerns = mykerns[lookup] + if not lookupkerns then + lookupkerns = { } + mykerns[lookup] = lookupkerns + end + local baseoffset = (fk-1) * maxseconds + for sk=2,maxseconds do + local sv = seconds[sk] + local offset = offsets[baseoffset + sk] + --~ local offset = offsets[sk] -- (fk-1) * maxseconds + sk] + local splt = split[sv] + for i=1,#splt do + local second_unicode = splt[i] + if tonumber(second_unicode) then + lookupkerns[second_unicode] = offset + else + for s=1,#second_unicode do + lookupkerns[second_unicode[s]] = offset end end - elseif trace_loading then - logs.report("load otf", "no glyph data for U+%04X", first_unicode[f]) + end + end + elseif trace_loading then + logs.report("load otf", "no glyph data for U+%04X", first_unicode) + end + end + for fk=1,#firsts do + local fv = firsts[fk] + local splt = split[fv] + for i=1,#splt do + local first_unicode = splt[i] + if tonumber(first_unicode) then + do_it(fk,first_unicode) + else + for f=1,#first_unicode do + do_it(fk,first_unicode[f]) end end end diff --git a/tex/context/base/font-otp.lua b/tex/context/base/font-otp.lua index 6c4ad0f8c..47962d806 100644 --- a/tex/context/base/font-otp.lua +++ b/tex/context/base/font-otp.lua @@ -8,7 +8,8 @@ if not modules then modules = { } end modules ['font-otp'] = { -- todo: pack math (but not that much to share) -local next = next +local next, type, tostring = next, type, tostring +local sort, concat = table.sort, table.concat local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end) @@ -19,6 +20,19 @@ fonts.otf.glists = fonts.otf.glists or { "gsub", "gpos" } local criterium, threshold, tabstr = 1, 0, table.serialize +local function tabstr(t) -- hashed from core-uti / experiment + local s = { } + for k, v in next, t do + if type(v) == "table" then + s[#s+1] = k.."={"..tabstr(v).."}" + else + s[#s+1] = k.."="..tostring(v) + end + end + sort(s) + return concat(s,",") +end + function fonts.otf.enhancers.pack(data) if data then local h, t, c = { }, { }, { } @@ -279,159 +293,157 @@ function fonts.otf.enhancers.pack(data) end end -function fonts.otf.enhancers.unpack(data) - if data then - local t = data.tables - if t then - for k, v in next, data.glyphs do - local tv = t[v.boundingbox] if tv then v.boundingbox = tv end - local l = v.slookups - if l then - for k,v in next, l do - local tv = t[v] if tv then l[k] = tv end - end - end - local l = v.mlookups - if l then - for k,v in next, l do - for i=1,#v do - local tv = t[v[i]] if tv then v[i] = tv end - local vi = v[i] - local what = vi[1] - if what == "pair" then - local tv = t[vi[3]] if tv then vi[3] = tv end - local tv = t[vi[4]] if tv then vi[4] = tv end - elseif what == "position" then - local tv = t[vi[2]] if tv then vi[2] = tv end - end - end - end - end - local m = v.mykerns - if m then - local tv = t[m] if tv then m = tv v.mykerns = m end -- secondary optimization - for k,v in next, m do - local tv = t[v] if tv then m[k] = tv end - end - end - local m = 
v.math - if m then - local mk = m.kerns - if mk then - local tv = t[mk] if tv then mk = tv m.kerns = mk end -- secondary optimization - for k,v in next, mk do - local tv = t[v] if tv then mk[k] = tv end - end - end - end - local a = v.anchors - if a then - local tv = t[a] if tv then a = tv v.anchors = a end -- secondary optimization - for k,v in next, a do - if k == "baselig" then - for kk, vv in next, v do - for kkk=1,#vv do - local tv = t[vv[kkk]] if tv then vv[kkk] = tv end - end - end - else - for kk, vv in next, v do - local tv = t[vv] if tv then v[kk] = tv end - end - end - end - end - end - if data.lookups then - for k, v in next, data.lookups do - local r = v.rules - if r then - for kk, vv in next, r do - local l = vv.lookups - if l then - local tv = t[l] if tv then vv.lookups = tv end - end - local c = vv.coverage - if c then - local cc = c.before if cc then local tv = t[cc] if tv then c.before = tv end end - cc = c.after if cc then local tv = t[cc] if tv then c.after = tv end end - cc = c.current if cc then local tv = t[cc] if tv then c.current = tv end end - end - local c = vv.reversecoverage - if c then - local cc = c.before if cc then local tv = t[cc] if tv then c.before = tv end end - cc = c.after if cc then local tv = t[cc] if tv then c.after = tv end end - cc = c.current if cc then local tv = t[cc] if tv then c.current = tv end end - end - end - end - end - end - local luatex = data.luatex - if luatex then - local la = luatex.anchor_to_lookup - if la then - for lookup, ldata in next, la do - local tv = t[ldata] if tv then la[lookup] = tv end - end - end - local la = luatex.lookup_to_anchor - if la then - for lookup, ldata in next, la do - local tv = t[ldata] if tv then la[lookup] = tv end - end - end - local ls = luatex.sequences - if ls then - for feature, fdata in next, ls do - local flags = fdata.flags - if flags then - local tv = t[flags] if tv then fdata.flags = tv end - end - local subtables = fdata.subtables - if subtables then - local tv = t[subtables] if tv then fdata.subtables = tv end - end - local features = fdata.features - if features then - local tv = t[features] if tv then fdata.features = tv features = tv end -- secondary pack - for script, sdata in next, features do - local tv = t[sdata] if tv then features[script] = tv end - end - end - end - end - local ls = luatex.lookups - if ls then - for lookups, fdata in next, ls do - local flags = fdata.flags - if flags then - local tv = t[flags] if tv then fdata.flags = tv end - end - local subtables = fdata.subtables - if subtables then - local tv = t[subtables] if tv then fdata.subtables = tv end - end - end - end - local lf = luatex.features - if lf then - for _, g in next, fonts.otf.glists do - local gl = lf[g] - if gl then - for feature, spec in next, gl do - local tv = t[spec] if tv then gl[feature] = tv end - end - end - end - end - end - data.tables = nil - end - end -end - - +--~ function fonts.otf.enhancers.unpack(data) +--~ if data then +--~ local t = data.tables +--~ if t then +--~ for k, v in next, data.glyphs do +--~ local tv = t[v.boundingbox] if tv then v.boundingbox = tv end +--~ local l = v.slookups +--~ if l then +--~ for k,v in next, l do +--~ local tv = t[v] if tv then l[k] = tv end +--~ end +--~ end +--~ local l = v.mlookups +--~ if l then +--~ for k,v in next, l do +--~ for i=1,#v do +--~ local tv = t[v[i]] if tv then v[i] = tv end +--~ local vi = v[i] +--~ local what = vi[1] +--~ if what == "pair" then +--~ local tv = t[vi[3]] if tv then vi[3] = tv end +--~ local tv = t[vi[4]] if tv then 
vi[4] = tv end +--~ elseif what == "position" then +--~ local tv = t[vi[2]] if tv then vi[2] = tv end +--~ end +--~ end +--~ end +--~ end +--~ local m = v.mykerns +--~ if m then +--~ local tv = t[m] if tv then m = tv v.mykerns = m end -- secondary optimization +--~ for k,v in next, m do +--~ local tv = t[v] if tv then m[k] = tv end +--~ end +--~ end +--~ local m = v.math +--~ if m then +--~ local mk = m.kerns +--~ if mk then +--~ local tv = t[mk] if tv then mk = tv m.kerns = mk end -- secondary optimization +--~ for k,v in next, mk do +--~ local tv = t[v] if tv then mk[k] = tv end +--~ end +--~ end +--~ end +--~ local a = v.anchors +--~ if a then +--~ local tv = t[a] if tv then a = tv v.anchors = a end -- secondary optimization +--~ for k,v in next, a do +--~ if k == "baselig" then +--~ for kk, vv in next, v do +--~ for kkk=1,#vv do +--~ local tv = t[vv[kkk]] if tv then vv[kkk] = tv end +--~ end +--~ end +--~ else +--~ for kk, vv in next, v do +--~ local tv = t[vv] if tv then v[kk] = tv end +--~ end +--~ end +--~ end +--~ end +--~ end +--~ if data.lookups then +--~ for k, v in next, data.lookups do +--~ local r = v.rules +--~ if r then +--~ for kk, vv in next, r do +--~ local l = vv.lookups +--~ if l then +--~ local tv = t[l] if tv then vv.lookups = tv end +--~ end +--~ local c = vv.coverage +--~ if c then +--~ local cc = c.before if cc then local tv = t[cc] if tv then c.before = tv end end +--~ cc = c.after if cc then local tv = t[cc] if tv then c.after = tv end end +--~ cc = c.current if cc then local tv = t[cc] if tv then c.current = tv end end +--~ end +--~ local c = vv.reversecoverage +--~ if c then +--~ local cc = c.before if cc then local tv = t[cc] if tv then c.before = tv end end +--~ cc = c.after if cc then local tv = t[cc] if tv then c.after = tv end end +--~ cc = c.current if cc then local tv = t[cc] if tv then c.current = tv end end +--~ end +--~ end +--~ end +--~ end +--~ end +--~ local luatex = data.luatex +--~ if luatex then +--~ local la = luatex.anchor_to_lookup +--~ if la then +--~ for lookup, ldata in next, la do +--~ local tv = t[ldata] if tv then la[lookup] = tv end +--~ end +--~ end +--~ local la = luatex.lookup_to_anchor +--~ if la then +--~ for lookup, ldata in next, la do +--~ local tv = t[ldata] if tv then la[lookup] = tv end +--~ end +--~ end +--~ local ls = luatex.sequences +--~ if ls then +--~ for feature, fdata in next, ls do +--~ local flags = fdata.flags +--~ if flags then +--~ local tv = t[flags] if tv then fdata.flags = tv end +--~ end +--~ local subtables = fdata.subtables +--~ if subtables then +--~ local tv = t[subtables] if tv then fdata.subtables = tv end +--~ end +--~ local features = fdata.features +--~ if features then +--~ local tv = t[features] if tv then fdata.features = tv features = tv end -- secondary pack +--~ for script, sdata in next, features do +--~ local tv = t[sdata] if tv then features[script] = tv end +--~ end +--~ end +--~ end +--~ end +--~ local ls = luatex.lookups +--~ if ls then +--~ for lookups, fdata in next, ls do +--~ local flags = fdata.flags +--~ if flags then +--~ local tv = t[flags] if tv then fdata.flags = tv end +--~ end +--~ local subtables = fdata.subtables +--~ if subtables then +--~ local tv = t[subtables] if tv then fdata.subtables = tv end +--~ end +--~ end +--~ end +--~ local lf = luatex.features +--~ if lf then +--~ for _, g in next, fonts.otf.glists do +--~ local gl = lf[g] +--~ if gl then +--~ for feature, spec in next, gl do +--~ local tv = t[spec] if tv then gl[feature] = tv end +--~ end +--~ end +--~ end 
+--~ end +--~ end +--~ data.tables = nil +--~ end +--~ end +--~ end function fonts.otf.enhancers.unpack(data) if data then diff --git a/tex/context/base/font-syn.lua b/tex/context/base/font-syn.lua index a3b61af31..e3bd85288 100644 --- a/tex/context/base/font-syn.lua +++ b/tex/context/base/font-syn.lua @@ -8,11 +8,13 @@ if not modules then modules = { } end modules ['font-syn'] = { -- todo: subs in lookups requests +local utf = unicode.utf8 local next, tonumber = next, tonumber local gsub, lower, match, find, lower, upper = string.gsub, string.lower, string.match, string.find, string.lower, string.upper local find, gmatch = string.find, string.gmatch local concat, sort, format = table.concat, table.sort, string.format local lpegmatch = lpeg.match +local utfgsub, utflower = utf.gsub, utf.lower local trace_names = false trackers.register("fonts.names", function(v) trace_names = v end) local trace_warnings = false trackers.register("fonts.warnings", function(v) trace_warnings = v end) @@ -270,6 +272,8 @@ end local function cleanname(name) return (gsub(lower(name),"[^%a%d]","")) + -- once we can load files with utf names, we can play with the following: + -- return (utfgsub(utfgsub(lower(str),"[^%a%A%d]",""),"%s","")) end names.cleanname = cleanname diff --git a/tex/context/base/font-tfm.lua b/tex/context/base/font-tfm.lua index 2ab28c737..4bb47cf04 100644 --- a/tex/context/base/font-tfm.lua +++ b/tex/context/base/font-tfm.lua @@ -186,36 +186,36 @@ fonts.trace_scaling = false -- basekerns are scaled and will be hashed by table id -- sharedkerns are unscaled and are be hashed by concatenated indexes -function tfm.check_base_kerns(tfmdata) - if tfm.share_base_kerns then - local sharedkerns = tfmdata.sharedkerns - if sharedkerns then - local basekerns = { } - tfmdata.basekerns = basekerns - return sharedkerns, basekerns - end - end - return nil, nil -end +--~ function tfm.check_base_kerns(tfmdata) +--~ if tfm.share_base_kerns then +--~ local sharedkerns = tfmdata.sharedkerns +--~ if sharedkerns then +--~ local basekerns = { } +--~ tfmdata.basekerns = basekerns +--~ return sharedkerns, basekerns +--~ end +--~ end +--~ return nil, nil +--~ end -function tfm.prepare_base_kerns(tfmdata) - if tfm.share_base_kerns and not tfmdata.sharedkerns then - local sharedkerns = { } - tfmdata.sharedkerns = sharedkerns - for u, chr in next, tfmdata.characters do - local kerns = chr.kerns - if kerns then - local hash = concat(sortedkeys(kerns), " ") - local base = sharedkerns[hash] - if not base then - sharedkerns[hash] = kerns - else - chr.kerns = base - end - end - end - end -end +--~ function tfm.prepare_base_kerns(tfmdata) +--~ if tfm.share_base_kerns and not tfmdata.sharedkerns then +--~ local sharedkerns = { } +--~ tfmdata.sharedkerns = sharedkerns +--~ for u, chr in next, tfmdata.characters do +--~ local kerns = chr.kerns +--~ if kerns then +--~ local hash = concat(sortedkeys(kerns), " ") +--~ local base = sharedkerns[hash] +--~ if not base then +--~ sharedkerns[hash] = kerns +--~ else +--~ chr.kerns = base +--~ end +--~ end +--~ end +--~ end +--~ end -- we can have cache scaled characters when we are in node mode and don't have -- protruding and expansion: hash == fullname @ size @ protruding @ expansion @@ -229,7 +229,7 @@ local charactercache = { } -- has_italic flag. Some more flags will be added in the future. 
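-- sidenote (editorial sketch, not part of this commit): in do_scale below the
-- old check_base_kerns/basekerns pair is replaced by a plain local sharedkerns
-- table that memoizes the scaled kern tables by the identity of the unscaled
-- table vk, so characters referencing one kern table also share one scaled
-- copy; the helper name here is hypothetical, and a cache miss must assign to
-- the outer local (a second "local s" would shadow it and leave chr.kerns nil):

local function scaledkerns(sharedkerns,vk,hdelta)
    local s = sharedkerns[vk] -- hashed by table id, not content
    if not s then
        s = { }
        for k, v in next, vk do s[k] = v*hdelta end
        sharedkerns[vk] = s
    end
    return s
end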
function tfm.do_scale(tfmtable, scaledpoints) - tfm.prepare_base_kerns(tfmtable) -- optimization + -- tfm.prepare_base_kerns(tfmtable) -- optimization if scaledpoints < 0 then scaledpoints = (- scaledpoints/1000) * tfmtable.designsize -- already in sp end @@ -303,7 +303,7 @@ t.colorscheme = tfmtable.colorscheme local defaultheight = luatex and luatex.defaultheight or 0 local defaultdepth = luatex and luatex.defaultdepth or 0 -- experimental, sharing kerns (unscaled and scaled) saves memory - local sharedkerns, basekerns = tfm.check_base_kerns(tfmtable) + -- local sharedkerns, basekerns = tfm.check_base_kerns(tfmtable) -- loop over descriptions (afm and otf have descriptions, tfm not) -- there is no need (yet) to assign a value to chr.tounicode local scaledwidth = defaultwidth * hdelta @@ -311,6 +311,7 @@ t.colorscheme = tfmtable.colorscheme local scaleddepth = defaultdepth * vdelta local stackmath = tfmtable.ignore_stack_math ~= true local private = fonts.private + local sharedkerns = { } for k,v in next, characters do local chr, description, index if ischanged then @@ -471,19 +472,26 @@ t.colorscheme = tfmtable.colorscheme if not nodemode then local vk = v.kerns if vk then - if sharedkerns then - local base = basekerns[vk] -- hashed by table id, not content - if not base then - base = {} - for k,v in next, vk do base[k] = v*hdelta end - basekerns[vk] = base - end - chr.kerns = base - else - local tt = {} - for k,v in next, vk do tt[k] = v*hdelta end - chr.kerns = tt + --~ if sharedkerns then + --~ local base = basekerns[vk] -- hashed by table id, not content + --~ if not base then + --~ base = {} + --~ for k,v in next, vk do base[k] = v*hdelta end + --~ basekerns[vk] = base + --~ end + --~ chr.kerns = base + --~ else + --~ local tt = {} + --~ for k,v in next, vk do tt[k] = v*hdelta end + --~ chr.kerns = tt + --~ end + local s = sharedkerns[vk] + if not s then + s = {} + for k,v in next, vk do s[k] = v*hdelta end + sharedkerns[vk] = s end + chr.kerns = s end local vl = v.ligatures if vl then @@ -600,21 +608,19 @@ local lastfont = nil -- -- flushing the kern and ligature tables from memory saves a lot (only -- base mode) but it complicates vf building where the new characters --- demand this data - ---~ for id, f in pairs(fonts.ids) do -- or font.fonts ---~ local ffi = font.fonts[id] ---~ f.characters = ffi.characters ---~ f.kerns = ffi.kerns ---~ f.ligatures = ffi.ligatures ---~ end +-- demand this data .. solution: functions that access them function tfm.cleanup_table(tfmdata) -- we need a cleanup callback, now we miss the last one if tfm.auto_cleanup then -- ok, we can hook this into everyshipout or so ... 
todo if tfmdata.type == 'virtual' or tfmdata.virtualized then for k, v in next, tfmdata.characters do - if v.commands then v.commands = nil end + if v.commands then v.commands = nil end + -- if v.kerns then v.kerns = nil end end + else + -- for k, v in next, tfmdata.characters do + -- if v.kerns then v.kerns = nil end + -- end end end end diff --git a/tex/context/base/lxml-lpt.lua b/tex/context/base/lxml-lpt.lua index 8f7d60d9e..ffa2edf3e 100644 --- a/tex/context/base/lxml-lpt.lua +++ b/tex/context/base/lxml-lpt.lua @@ -956,13 +956,17 @@ local function parse_apply(list,pattern) end local nofparsed = #parsed if nofparsed == 0 then - -- something is wrong + return -- something is wrong + end + local one = list[1] + if not one then + return -- something is wrong elseif not trace_lpath then - return normal_apply(list,parsed,nofparsed,list[1].mi) + return normal_apply(list,parsed,nofparsed,one.mi) elseif trace_lprofile then - return profiled_apply(list,parsed,nofparsed,list[1].mi) - else -- trace_lpath - return traced_apply(list,parsed,nofparsed,list[1].mi) + return profiled_apply(list,parsed,nofparsed,one.mi) + else + return traced_apply(list,parsed,nofparsed,one.mi) end end diff --git a/tex/context/base/strc-mat.mkiv b/tex/context/base/strc-mat.mkiv index 038c5cb34..7653544a7 100644 --- a/tex/context/base/strc-mat.mkiv +++ b/tex/context/base/strc-mat.mkiv @@ -616,6 +616,8 @@ \plusthree \fi\fi\fi} +\def\formulanumber{\doformulanumber} % for the moment + \def\doformulanumber {\dosingleempty\dodoformulanumber} diff --git a/tex/context/base/strc-pag.mkiv b/tex/context/base/strc-pag.mkiv index c7cc701af..63a8a1f76 100644 --- a/tex/context/base/strc-pag.mkiv +++ b/tex/context/base/strc-pag.mkiv @@ -465,6 +465,14 @@ \fi \to \everysetupuserpagenumber % todo: set state: none, start, stop, reset +\appendtoks + \edef\askedsubpagenumber{\structurecounterparameter\s!subpage\c!number}% + \ifx\askedsubpagenumber\empty \else + \normalexpanded{\noexpand\setupsubpagenumber[\c!start=\structurecounterparameter\s!subpage\c!number,\c!number=]}% + \subpageno\rawstructurecounter[\s!subpage]% + \fi +\to \everysetupsubpagenumber % todo: set state: none, start, stop, reset + % \setuplayout[width=300pt,backspace=4cm] % \setuppagenumbering [alternative=doublesided] % \setupuserpagenumber[start=2] diff --git a/tex/context/base/supp-fil.mkiv b/tex/context/base/supp-fil.mkiv index 7ba71f891..ce81ec7e4 100644 --- a/tex/context/base/supp-fil.mkiv +++ b/tex/context/base/supp-fil.mkiv @@ -281,7 +281,7 @@ %D Due to different needs, we decided to offer four alternative %D loading commands. With \type{\readjobfile} we load a local %D file and do no backtracking, while \type{\readlocfile} -%D backtracks~\number\readlevel\ directories, including the current +%D backtracks~\number\maxreadlevel\ directories, including the current %D one. %D %D System files can be anywhere and therefore diff --git a/tex/context/base/syst-ini.tex b/tex/context/base/syst-ini.tex index aea29df5d..6d88d6b75 100644 --- a/tex/context/base/syst-ini.tex +++ b/tex/context/base/syst-ini.tex @@ -580,7 +580,7 @@ \tracingstats\plusone -%D Here we also save \input, more will be saved later. +%D Here we also save \type {\input}, more will be saved later. 
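%D Editorial sidenote (illustrative only, not part of this commit): the next
%D line is the usual save-the-primitive idiom; once the primitive is stashed
%D in \type {\normalinput}, later redefinitions can still reach it, e.g.:
%D
%D   \ifdefined\normalinput \else \let\normalinput\input \fi
%D   \def\input{\message{input intercepted}\normalinput}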
\ifdefined\normalinput \else \let\normalinput\input \fi diff --git a/tex/context/base/x-asciimath.mkiv b/tex/context/base/x-asciimath.mkiv index d7a9d1b2e..c9252408d 100644 --- a/tex/context/base/x-asciimath.mkiv +++ b/tex/context/base/x-asciimath.mkiv @@ -16,7 +16,7 @@ \ctxloadluafile{x-asciimath}{} %D The following code is not officially supported and is only meant -%D for the \MATHFORALL\ project. +%D for the Math4All project. %D %D The following code kind of maps ascii math %D http://www1.chapman.edu/~jipsen/mathml/asciimath.html onto \TEX. The diff --git a/tex/context/interface/cont-cs.xml b/tex/context/interface/cont-cs.xml index 98e0111e0..0732acd8b 100644 --- a/tex/context/interface/cont-cs.xml +++ b/tex/context/interface/cont-cs.xml @@ -944,7 +944,7 @@ <cd:parameter name="volba"> <cd:constant type="sklonene"/> <cd:constant type="normalni"/> - <cd:constant type="prikazy"/> + <cd:constant type="prikazy" version="mkiv"/> <cd:constant type="barevne"/> <cd:constant type="zadny"/> </cd:parameter> @@ -989,7 +989,7 @@ <cd:constant type="radek"/> </cd:parameter> <cd:parameter name="escape"> - <cd:constant type="cd:character"/> + <cd:constant type="cd:text"/> </cd:parameter> <cd:parameter name="mezera"> <cd:constant type="zap"/> diff --git a/tex/context/interface/cont-de.xml b/tex/context/interface/cont-de.xml index 5b39c628a..3cf049694 100644 --- a/tex/context/interface/cont-de.xml +++ b/tex/context/interface/cont-de.xml @@ -944,7 +944,7 @@ <cd:parameter name="option"> <cd:constant type="geneigt"/> <cd:constant type="normal"/> - <cd:constant type="befehle"/> + <cd:constant type="befehle" version="mkiv"/> <cd:constant type="farbe"/> <cd:constant type="kein"/> </cd:parameter> @@ -989,7 +989,7 @@ <cd:constant type="zeile"/> </cd:parameter> <cd:parameter name="escape"> - <cd:constant type="cd:character"/> + <cd:constant type="cd:text"/> </cd:parameter> <cd:parameter name="spatium"> <cd:constant type="an"/> diff --git a/tex/context/interface/cont-en.xml b/tex/context/interface/cont-en.xml index cf1a59085..c9485e003 100644 --- a/tex/context/interface/cont-en.xml +++ b/tex/context/interface/cont-en.xml @@ -944,7 +944,7 @@ <cd:parameter name="option"> <cd:constant type="slanted"/> <cd:constant type="normal"/> - <cd:constant type="commands"/> + <cd:constant type="commands" version="mkiv"/> <cd:constant type="color"/> <cd:constant type="none"/> </cd:parameter> @@ -989,7 +989,7 @@ <cd:constant type="line"/> </cd:parameter> <cd:parameter name="escape"> - <cd:constant type="cd:character"/> + <cd:constant type="cd:text"/> </cd:parameter> <cd:parameter name="space"> <cd:constant type="on"/> diff --git a/tex/context/interface/cont-fr.xml b/tex/context/interface/cont-fr.xml index be4a19280..21bc89d14 100644 --- a/tex/context/interface/cont-fr.xml +++ b/tex/context/interface/cont-fr.xml @@ -944,7 +944,7 @@ <cd:parameter name="option"> <cd:constant type="incline"/> <cd:constant type="normal"/> - <cd:constant type="commandes"/> + <cd:constant type="commandes" version="mkiv"/> <cd:constant type="couleur"/> <cd:constant type="rien"/> </cd:parameter> @@ -989,7 +989,7 @@ <cd:constant type="ligne"/> </cd:parameter> <cd:parameter name="escape"> - <cd:constant type="cd:character"/> + <cd:constant type="cd:text"/> </cd:parameter> <cd:parameter name="espace"> <cd:constant type="actif"/> diff --git a/tex/context/interface/cont-it.xml b/tex/context/interface/cont-it.xml index e167f22f2..de714f58b 100644 --- a/tex/context/interface/cont-it.xml +++ b/tex/context/interface/cont-it.xml @@ -944,7 +944,7 @@ <cd:parameter 
name="opzione"> <cd:constant type="inclinato"/> <cd:constant type="normale"/> - <cd:constant type="comandi"/> + <cd:constant type="comandi" version="mkiv"/> <cd:constant type="colore"/> <cd:constant type="nessuno"/> </cd:parameter> @@ -989,7 +989,7 @@ <cd:constant type="riga"/> </cd:parameter> <cd:parameter name="escape"> - <cd:constant type="cd:character"/> + <cd:constant type="cd:text"/> </cd:parameter> <cd:parameter name="spazio"> <cd:constant type="attivo"/> diff --git a/tex/context/interface/cont-nl.xml b/tex/context/interface/cont-nl.xml index dff677cd2..264d6bcde 100644 --- a/tex/context/interface/cont-nl.xml +++ b/tex/context/interface/cont-nl.xml @@ -944,7 +944,7 @@ <cd:parameter name="optie"> <cd:constant type="schuin"/> <cd:constant type="normaal"/> - <cd:constant type="commandos"/> + <cd:constant type="commandos" version="mkiv"/> <cd:constant type="kleur"/> <cd:constant type="geen"/> </cd:parameter> @@ -989,7 +989,7 @@ <cd:constant type="regel"/> </cd:parameter> <cd:parameter name="escape"> - <cd:constant type="cd:character"/> + <cd:constant type="cd:text"/> </cd:parameter> <cd:parameter name="spatie"> <cd:constant type="aan"/> diff --git a/tex/context/interface/cont-pe.xml b/tex/context/interface/cont-pe.xml index 228cae2e8..b806de4ac 100644 --- a/tex/context/interface/cont-pe.xml +++ b/tex/context/interface/cont-pe.xml @@ -944,7 +944,7 @@ <cd:parameter name="گزینه"> <cd:constant type="خوابیده"/> <cd:constant type="نرمال"/> - <cd:constant type="فرمانها"/> + <cd:constant type="فرمانها" version="mkiv"/> <cd:constant type="رنگ"/> <cd:constant type="هیچکدام"/> </cd:parameter> @@ -989,7 +989,7 @@ <cd:constant type="خط"/> </cd:parameter> <cd:parameter name="فرار"> - <cd:constant type="cd:character"/> + <cd:constant type="cd:text"/> </cd:parameter> <cd:parameter name="فضا"> <cd:constant type="روی"/> diff --git a/tex/context/interface/cont-ro.xml b/tex/context/interface/cont-ro.xml index 74003dd5c..f3598eecf 100644 --- a/tex/context/interface/cont-ro.xml +++ b/tex/context/interface/cont-ro.xml @@ -944,7 +944,7 @@ <cd:parameter name="optiune"> <cd:constant type="inclinat"/> <cd:constant type="normal"/> - <cd:constant type="comenzi"/> + <cd:constant type="comenzi" version="mkiv"/> <cd:constant type="culoare"/> <cd:constant type="niciunul"/> </cd:parameter> @@ -989,7 +989,7 @@ <cd:constant type="linie"/> </cd:parameter> <cd:parameter name="escape"> - <cd:constant type="cd:character"/> + <cd:constant type="cd:text"/> </cd:parameter> <cd:parameter name="spatiu"> <cd:constant type="on"/> diff --git a/tex/generic/context/luatex-fonts-merged.lua b/tex/generic/context/luatex-fonts-merged.lua index e6cc18125..bb7efccb1 100644 --- a/tex/generic/context/luatex-fonts-merged.lua +++ b/tex/generic/context/luatex-fonts-merged.lua @@ -1,6 +1,6 @@ -- merged file : c:/data/develop/context/texmf/tex/generic/context/luatex-fonts-merged.lua -- parent file : c:/data/develop/context/texmf/tex/generic/context/luatex-fonts.lua --- merge date : 12/29/09 22:36:24 +-- merge date : 12/30/09 23:55:40 do -- begin closure to overcome local limits and interference @@ -3509,36 +3509,36 @@ fonts.trace_scaling = false -- basekerns are scaled and will be hashed by table id -- sharedkerns are unscaled and are be hashed by concatenated indexes -function tfm.check_base_kerns(tfmdata) - if tfm.share_base_kerns then - local sharedkerns = tfmdata.sharedkerns - if sharedkerns then - local basekerns = { } - tfmdata.basekerns = basekerns - return sharedkerns, basekerns - end - end - return nil, nil -end +--~ function 
tfm.check_base_kerns(tfmdata) +--~ if tfm.share_base_kerns then +--~ local sharedkerns = tfmdata.sharedkerns +--~ if sharedkerns then +--~ local basekerns = { } +--~ tfmdata.basekerns = basekerns +--~ return sharedkerns, basekerns +--~ end +--~ end +--~ return nil, nil +--~ end -function tfm.prepare_base_kerns(tfmdata) - if tfm.share_base_kerns and not tfmdata.sharedkerns then - local sharedkerns = { } - tfmdata.sharedkerns = sharedkerns - for u, chr in next, tfmdata.characters do - local kerns = chr.kerns - if kerns then - local hash = concat(sortedkeys(kerns), " ") - local base = sharedkerns[hash] - if not base then - sharedkerns[hash] = kerns - else - chr.kerns = base - end - end - end - end -end +--~ function tfm.prepare_base_kerns(tfmdata) +--~ if tfm.share_base_kerns and not tfmdata.sharedkerns then +--~ local sharedkerns = { } +--~ tfmdata.sharedkerns = sharedkerns +--~ for u, chr in next, tfmdata.characters do +--~ local kerns = chr.kerns +--~ if kerns then +--~ local hash = concat(sortedkeys(kerns), " ") +--~ local base = sharedkerns[hash] +--~ if not base then +--~ sharedkerns[hash] = kerns +--~ else +--~ chr.kerns = base +--~ end +--~ end +--~ end +--~ end +--~ end -- we can have cache scaled characters when we are in node mode and don't have -- protruding and expansion: hash == fullname @ size @ protruding @ expansion @@ -3552,7 +3552,7 @@ local charactercache = { } -- has_italic flag. Some more flags will be added in the future. function tfm.do_scale(tfmtable, scaledpoints) - tfm.prepare_base_kerns(tfmtable) -- optimization + -- tfm.prepare_base_kerns(tfmtable) -- optimization if scaledpoints < 0 then scaledpoints = (- scaledpoints/1000) * tfmtable.designsize -- already in sp end @@ -3626,7 +3626,7 @@ t.colorscheme = tfmtable.colorscheme local defaultheight = luatex and luatex.defaultheight or 0 local defaultdepth = luatex and luatex.defaultdepth or 0 -- experimental, sharing kerns (unscaled and scaled) saves memory - local sharedkerns, basekerns = tfm.check_base_kerns(tfmtable) + -- local sharedkerns, basekerns = tfm.check_base_kerns(tfmtable) -- loop over descriptions (afm and otf have descriptions, tfm not) -- there is no need (yet) to assign a value to chr.tounicode local scaledwidth = defaultwidth * hdelta @@ -3634,6 +3634,7 @@ t.colorscheme = tfmtable.colorscheme local scaleddepth = defaultdepth * vdelta local stackmath = tfmtable.ignore_stack_math ~= true local private = fonts.private + local sharedkerns = { } for k,v in next, characters do local chr, description, index if ischanged then @@ -3794,19 +3795,26 @@ t.colorscheme = tfmtable.colorscheme if not nodemode then local vk = v.kerns if vk then - if sharedkerns then - local base = basekerns[vk] -- hashed by table id, not content - if not base then - base = {} - for k,v in next, vk do base[k] = v*hdelta end - basekerns[vk] = base - end - chr.kerns = base - else - local tt = {} - for k,v in next, vk do tt[k] = v*hdelta end - chr.kerns = tt + --~ if sharedkerns then + --~ local base = basekerns[vk] -- hashed by table id, not content + --~ if not base then + --~ base = {} + --~ for k,v in next, vk do base[k] = v*hdelta end + --~ basekerns[vk] = base + --~ end + --~ chr.kerns = base + --~ else + --~ local tt = {} + --~ for k,v in next, vk do tt[k] = v*hdelta end + --~ chr.kerns = tt + --~ end + local s = sharedkerns[vk] + if not s then + s = {} + for k,v in next, vk do s[k] = v*hdelta end + sharedkerns[vk] = s end + chr.kerns = s end local vl = v.ligatures if vl then @@ -3923,21 +3931,19 @@ local lastfont = nil 
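-- sidenote (editorial sketch, not part of this commit): the comment below
-- proposes "functions that access them" as the cure for flushed tables; no
-- such accessor is defined in this commit, but the idea would be roughly:
--
--   function tfm.getcommands(tfmdata,u) -- hypothetical helper
--       local chr = tfmdata.characters[u]
--       return chr and chr.commands -- nil once cleanup_table flushed them
--   end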
-- -- flushing the kern and ligature tables from memory saves a lot (only -- base mode) but it complicates vf building where the new characters --- demand this data - ---~ for id, f in pairs(fonts.ids) do -- or font.fonts ---~ local ffi = font.fonts[id] ---~ f.characters = ffi.characters ---~ f.kerns = ffi.kerns ---~ f.ligatures = ffi.ligatures ---~ end +-- demand this data .. solution: functions that access them function tfm.cleanup_table(tfmdata) -- we need a cleanup callback, now we miss the last one if tfm.auto_cleanup then -- ok, we can hook this into everyshipout or so ... todo if tfmdata.type == 'virtual' or tfmdata.virtualized then for k, v in next, tfmdata.characters do - if v.commands then v.commands = nil end + if v.commands then v.commands = nil end + -- if v.kerns then v.kerns = nil end end + else + -- for k, v in next, tfmdata.characters do + -- if v.kerns then v.kerns = nil end + -- end end end end @@ -5483,7 +5489,7 @@ function otf.enhance(name,data,filename,verbose) local enhancer = otf.enhancers[name] if enhancer then if (verbose ~= nil and verbose) or trace_loading then - logs.report("load otf","enhance: %s",name) + logs.report("load otf","enhance: %s (%s)",name,filename) end enhancer(data,filename) end @@ -5548,6 +5554,7 @@ function otf.load(filename,format,sub,featurefile) logs.report("load otf","enhancing ...") for e=1,#enhancers do otf.enhance(enhancers[e],data,filename) + io.flush() -- we want instant messages end if otf.pack and not fonts.verbose then otf.enhance("pack",data,filename) @@ -6283,6 +6290,129 @@ end -- kern: ttf has a table with kerns +--~ otf.enhancers["reorganize kerns"] = function(data,filename) +--~ local glyphs, mapmap, unicodes = data.glyphs, data.luatex.indices, data.luatex.unicodes +--~ local mkdone = false +--~ for index, glyph in next, data.glyphs do +--~ if glyph.kerns then +--~ local mykerns = { } +--~ for k,v in next, glyph.kerns do +--~ local vc, vo, vl = v.char, v.off, v.lookup +--~ if vc and vo and vl then -- brrr, wrong! 
we miss the non unicode ones +--~ local uvc = unicodes[vc] +--~ if not uvc then +--~ if trace_loading then +--~ logs.report("load otf","problems with unicode %s of kern %s at glyph %s",vc,k,index) +--~ end +--~ else +--~ if type(vl) ~= "table" then +--~ vl = { vl } +--~ end +--~ for l=1,#vl do +--~ local vll = vl[l] +--~ local mkl = mykerns[vll] +--~ if not mkl then +--~ mkl = { } +--~ mykerns[vll] = mkl +--~ end +--~ if type(uvc) == "table" then +--~ for u=1,#uvc do +--~ mkl[uvc[u]] = vo +--~ end +--~ else +--~ mkl[uvc] = vo +--~ end +--~ end +--~ end +--~ end +--~ end +--~ glyph.mykerns = mykerns +--~ glyph.kerns = nil -- saves space and time +--~ mkdone = true +--~ end +--~ end +--~ if trace_loading and mkdone then +--~ logs.report("load otf", "replacing 'kerns' tables by 'mykerns' tables") +--~ end +--~ if data.kerns then +--~ if trace_loading then +--~ logs.report("load otf", "removing global 'kern' table") +--~ end +--~ data.kerns = nil +--~ end +--~ local dgpos = data.gpos +--~ if dgpos then +--~ for gp=1,#dgpos do +--~ local gpos = dgpos[gp] +--~ local subtables = gpos.subtables +--~ if subtables then +--~ for s=1,#subtables do +--~ local subtable = subtables[s] +--~ local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes +--~ if kernclass then -- the next one is quite slow +--~ for k=1,#kernclass do +--~ local kcl = kernclass[k] +--~ local firsts, seconds, offsets, lookups = kcl.firsts, kcl.seconds, kcl.offsets, kcl.lookup -- singular +--~ if type(lookups) ~= "table" then +--~ lookups = { lookups } +--~ end +--~ for l=1,#lookups do +--~ local lookup = lookups[l] +--~ -- weird, as maxfirst and maxseconds can have holes +--~ local maxfirsts, maxseconds = getn(firsts), getn(seconds) +--~ if trace_loading then +--~ logs.report("load otf", "adding kernclass %s with %s times %s pairs",lookup, maxfirsts, maxseconds) +--~ end +--~ for fk, fv in next, firsts do +--~ for first in gmatch(fv,"[^ ]+") do +--~ local first_unicode = unicodes[first] +--~ if type(first_unicode) == "number" then +--~ first_unicode = { first_unicode } +--~ end +--~ for f=1,#first_unicode do +--~ local glyph = glyphs[mapmap[first_unicode[f]]] +--~ if glyph then +--~ local mykerns = glyph.mykerns +--~ if not mykerns then +--~ mykerns = { } -- unicode indexed ! +--~ glyph.mykerns = mykerns +--~ end +--~ local lookupkerns = mykerns[lookup] +--~ if not lookupkerns then +--~ lookupkerns = { } +--~ mykerns[lookup] = lookupkerns +--~ end +--~ for sk, sv in next, seconds do +--~ local offset = offsets[(fk-1) * maxseconds + sk] +--~ --~ local offset = offsets[sk] -- (fk-1) * maxseconds + sk] +--~ for second in gmatch(sv,"[^ ]+") do +--~ local second_unicode = unicodes[second] +--~ if type(second_unicode) == "number" then +--~ lookupkerns[second_unicode] = offset +--~ else +--~ for s=1,#second_unicode do +--~ lookupkerns[second_unicode[s]] = offset +--~ end +--~ end +--~ end +--~ end +--~ elseif trace_loading then +--~ logs.report("load otf", "no glyph data for U+%04X", first_unicode[f]) +--~ end +--~ end +--~ end +--~ end +--~ end +--~ end +--~ subtable.comment = "The kernclass table is merged into mykerns in the indexed glyph tables." 
+--~ subtable.kernclass = { } +--~ end +--~ end +--~ end +--~ end +--~ end +--~ end + otf.enhancers["reorganize kerns"] = function(data,filename) local glyphs, mapmap, unicodes = data.glyphs, data.luatex.indices, data.luatex.unicodes local mkdone = false @@ -6335,6 +6465,9 @@ otf.enhancers["reorganize kerns"] = function(data,filename) end local dgpos = data.gpos if dgpos then + local separator = lpeg.P(" ") + local other = ((1 - separator)^0) / unicodes + local splitter = lpeg.Ct(other * (separator * other)^0) for gp=1,#dgpos do local gpos = dgpos[gp] local subtables = gpos.subtables @@ -6342,54 +6475,71 @@ otf.enhancers["reorganize kerns"] = function(data,filename) for s=1,#subtables do local subtable = subtables[s] local kernclass = subtable.kernclass -- name is inconsistent with anchor_classes - if kernclass then + if kernclass then -- the next one is quite slow for k=1,#kernclass do local kcl = kernclass[k] local firsts, seconds, offsets, lookups = kcl.firsts, kcl.seconds, kcl.offsets, kcl.lookup -- singular if type(lookups) ~= "table" then lookups = { lookups } end + local split = { } for l=1,#lookups do local lookup = lookups[l] + -- weird, as maxfirst and maxseconds can have holes, first seems to be indexed, seconds starts at 2 local maxfirsts, maxseconds = getn(firsts), getn(seconds) + for _, s in next, firsts do + split[s] = split[s] or lpegmatch(splitter,s) + end + for _, s in next, seconds do + split[s] = split[s] or lpegmatch(splitter,s) + end if trace_loading then logs.report("load otf", "adding kernclass %s with %s times %s pairs",lookup, maxfirsts, maxseconds) end - for fk, fv in next, firsts do - for first in gmatch(fv,"[^ ]+") do - local first_unicode = unicodes[first] - if type(first_unicode) == "number" then - first_unicode = { first_unicode } + local function do_it(fk,first_unicode) + local glyph = glyphs[mapmap[first_unicode]] + if glyph then + local mykerns = glyph.mykerns + if not mykerns then + mykerns = { } -- unicode indexed ! + glyph.mykerns = mykerns end - for f=1,#first_unicode do - local glyph = glyphs[mapmap[first_unicode[f]]] - if glyph then - local mykerns = glyph.mykerns - if not mykerns then - mykerns = { } -- unicode indexed ! 
- glyph.mykerns = mykerns - end - local lookupkerns = mykerns[lookup] - if not lookupkerns then - lookupkerns = { } - mykerns[lookup] = lookupkerns - end - for sk, sv in next, seconds do - local offset = offsets[(fk-1) * maxseconds + sk] - --~ local offset = offsets[sk] -- (fk-1) * maxseconds + sk] - for second in gmatch(sv,"[^ ]+") do - local second_unicode = unicodes[second] - if type(second_unicode) == "number" then - lookupkerns[second_unicode] = offset - else - for s=1,#second_unicode do - lookupkerns[second_unicode[s]] = offset - end - end + local lookupkerns = mykerns[lookup] + if not lookupkerns then + lookupkerns = { } + mykerns[lookup] = lookupkerns + end + local baseoffset = (fk-1) * maxseconds + for sk=2,maxseconds do + local sv = seconds[sk] + local offset = offsets[baseoffset + sk] + --~ local offset = offsets[sk] -- (fk-1) * maxseconds + sk] + local splt = split[sv] + for i=1,#splt do + local second_unicode = splt[i] + if tonumber(second_unicode) then + lookupkerns[second_unicode] = offset + else + for s=1,#second_unicode do + lookupkerns[second_unicode[s]] = offset end end - elseif trace_loading then - logs.report("load otf", "no glyph data for U+%04X", first_unicode[f]) + end + end + elseif trace_loading then + logs.report("load otf", "no glyph data for U+%04X", first_unicode) + end + end + for fk=1,#firsts do + local fv = firsts[fk] + local splt = split[fv] + for i=1,#splt do + local first_unicode = splt[i] + if tonumber(first_unicode) then + do_it(fk,first_unicode) + else + for f=1,#first_unicode do + do_it(fk,first_unicode[f]) end end end @@ -7413,6 +7563,47 @@ function prepare_base_substitutions(tfmdata,kind,value) -- we can share some cod end end +--~ local function prepare_base_kerns(tfmdata,kind,value) -- todo what kind of kerns, currently all +--~ if value then +--~ local otfdata = tfmdata.shared.otfdata +--~ local validlookups, lookuplist = collect_lookups(otfdata,kind,tfmdata.script,tfmdata.language) +--~ if validlookups then +--~ local unicodes = tfmdata.unicodes -- names to unicodes +--~ local indices = tfmdata.indices +--~ local characters = tfmdata.characters +--~ local descriptions = tfmdata.descriptions +--~ for u, chr in next, characters do +--~ local d = descriptions[u] +--~ if d then +--~ local dk = d.mykerns +--~ if dk then +--~ local t, done = chr.kerns or { }, false +--~ for l=1,#lookuplist do +--~ local lookup = lookuplist[l] +--~ local kerns = dk[lookup] +--~ if kerns then +--~ for k, v in next, kerns do +--~ if v ~= 0 and not t[k] then -- maybe no 0 test here +--~ t[k], done = v, true +--~ if trace_baseinit and trace_kerns then +--~ logs.report("define otf","%s: base kern %s + %s => %s",cref(kind,lookup),gref(descriptions,u),gref(descriptions,k),v) +--~ end +--~ end +--~ end +--~ end +--~ end +--~ if done then +--~ chr.kerns = t -- no empty assignments +--~ end +--~ -- elseif d.kerns then +--~ -- logs.report("define otf","%s: invalid mykerns for %s",cref(kind),gref(descriptions,u)) +--~ end +--~ end +--~ end +--~ end +--~ end +--~ end + local function prepare_base_kerns(tfmdata,kind,value) -- todo what kind of kerns, currently all if value then local otfdata = tfmdata.shared.otfdata @@ -7422,31 +7613,40 @@ local function prepare_base_kerns(tfmdata,kind,value) -- todo what kind of kerns local indices = tfmdata.indices local characters = tfmdata.characters local descriptions = tfmdata.descriptions + local sharedkerns = { } for u, chr in next, characters do local d = descriptions[u] if d then - local dk = d.mykerns + local dk = d.mykerns -- shared if 
dk then - local t, done = chr.kerns or { }, false - for l=1,#lookuplist do - local lookup = lookuplist[l] - local kerns = dk[lookup] - if kerns then - for k, v in next, kerns do - if v ~= 0 and not t[k] then -- maybe no 0 test here - t[k], done = v, true - if trace_baseinit and trace_kerns then - logs.report("define otf","%s: base kern %s + %s => %s",cref(kind,lookup),gref(descriptions,u),gref(descriptions,k),v) + local s = sharedkerns[dk] + if s == false then + -- skip + elseif s then + chr.kerns = s + else + local t, done = chr.kerns or { }, false + for l=1,#lookuplist do + local lookup = lookuplist[l] + local kerns = dk[lookup] + if kerns then + for k, v in next, kerns do + if v ~= 0 and not t[k] then -- maybe no 0 test here + t[k], done = v, true + if trace_baseinit and trace_kerns then + logs.report("define otf","%s: base kern %s + %s => %s",cref(kind,lookup),gref(descriptions,u),gref(descriptions,k),v) + end end end end end + if done then + sharedkerns[dk] = t + chr.kerns = t -- no empty assignments + else + sharedkerns[dk] = false + end end - if done then - chr.kerns = t -- no empty assignments - end - -- elseif d.kerns then - -- logs.report("define otf","%s: invalid mykerns for %s",cref(kind),gref(descriptions,u)) end end end @@ -11039,7 +11239,7 @@ function tfm.read(specification) local reader = sequence[s] if readers[reader] then -- not really needed if trace_defining then - logs.report("define font","trying (sequence driven) type %s for %s with file %s",reader,specification.name,specification.filename or "unknown") + logs.report("define font","trying (reader sequence driven) type %s for %s with file %s",reader,specification.name,specification.filename or "unknown") end tfmtable = readers[reader](specification) if tfmtable then
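-- closing editorial sidenote (a self-contained sketch, not part of this
-- commit; the sample names and values are made up): the "reorganize kerns"
-- enhancer above splits space separated glyph name strings with an lpeg that
-- maps every field through the unicodes table in a single pass, since
-- "patt / tbl" turns each match into a table lookup:

local lpeg = require("lpeg")

local unicodes  = { A = 65, B = 66, ["a.sc"] = { 97, 0xE161 } }
local separator = lpeg.P(" ")
local other     = ((1 - separator)^0) / unicodes
local splitter  = lpeg.Ct(other * (separator * other)^0)

local t = lpeg.match(splitter, "A B a.sc")
-- t[1] == 65, t[2] == 66, t[3] == { 97, 0xE161 } (several unicodes, one name)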