From c727ed7331960718681fa4222bec81fb577b56fb Mon Sep 17 00:00:00 2001 From: Context Git Mirror Bot Date: Fri, 12 Jun 2015 10:15:04 +0200 Subject: 2015-06-12 10:08:00 --- tex/context/base/char-ini.lua | 11 - tex/context/base/cont-new.mkiv | 2 +- tex/context/base/context-version.pdf | Bin 4179 -> 4205 bytes tex/context/base/context.mkiv | 2 +- tex/context/base/data-exp.lua | 36 +- tex/context/base/enco-ini.mkiv | 32 +- tex/context/base/font-col.lua | 43 +- tex/context/base/font-mis.lua | 2 +- tex/context/base/font-otc.lua | 22 +- tex/context/base/font-otf.lua | 134 ++- tex/context/base/font-otn.lua | 121 ++- tex/context/base/font-ott.lua | 1020 ++++++++++++-------- tex/context/base/font-syn.lua | 133 ++- tex/context/base/l-lpeg.lua | 2 +- tex/context/base/l-lua.lua | 34 +- tex/context/base/l-string.lua | 7 +- tex/context/base/l-table.lua | 2 +- tex/context/base/luat-lib.mkiv | 1 + tex/context/base/lxml-tex.lua | 20 +- tex/context/base/math-fbk.lua | 48 + tex/context/base/meta-tex.mkiv | 8 + tex/context/base/mlib-lua.lua | 21 +- tex/context/base/mlib-pdf.lua | 192 ++-- tex/context/base/mlib-pps.lua | 59 +- tex/context/base/mlib-run.lua | 4 +- tex/context/base/mult-fun.lua | 6 +- tex/context/base/node-ser.lua | 2 +- tex/context/base/pack-box.mkiv | 4 +- tex/context/base/spac-chr.lua | 2 + tex/context/base/spac-ver.lua | 1 + tex/context/base/status-files.pdf | Bin 24518 -> 24422 bytes tex/context/base/status-lua.pdf | Bin 251247 -> 251556 bytes tex/context/base/toks-ini.lua | 2 +- tex/context/base/trac-vis.lua | 47 +- tex/context/base/util-fil.lua | 109 +++ tex/context/base/util-str.lua | 13 +- tex/context/base/util-tab.lua | 63 +- tex/context/base/x-asciimath.lua | 66 +- tex/context/base/x-asciimath.mkiv | 34 + tex/context/sample/cow-black.mps | 154 +++ tex/context/sample/cow-black.pdf | Bin 0 -> 5465 bytes tex/context/sample/cow-brown.mps | 154 +++ tex/context/sample/cow-brown.pdf | Bin 0 -> 5556 bytes tex/generic/context/luatex/luatex-fonts-merged.lua | 157 ++- tex/generic/context/luatex/luatex-fonts-otn.lua | 28 +- tex/generic/context/luatex/luatex-fonts.lua | 20 +- tex/generic/context/luatex/luatex-mplib.lua | 114 ++- tex/generic/context/luatex/luatex-mplib.tex | 21 +- tex/generic/context/luatex/luatex-plain.tex | 23 +- tex/generic/context/luatex/luatex-test.tex | 20 + 50 files changed, 2192 insertions(+), 804 deletions(-) create mode 100644 tex/context/base/util-fil.lua create mode 100644 tex/context/sample/cow-black.mps create mode 100644 tex/context/sample/cow-black.pdf create mode 100644 tex/context/sample/cow-brown.mps create mode 100644 tex/context/sample/cow-brown.pdf (limited to 'tex') diff --git a/tex/context/base/char-ini.lua b/tex/context/base/char-ini.lua index 4559fa28c..136cbf705 100644 --- a/tex/context/base/char-ini.lua +++ b/tex/context/base/char-ini.lua @@ -1291,17 +1291,6 @@ if not characters.superscripts then end --- for the moment only a few - -local tracedchars = utilities.strings.tracers - -tracedchars[0x00] = "[signal]" -tracedchars[0x0A] = "[linefeed]" -tracedchars[0x0B] = "[tab]" -tracedchars[0x0C] = "[formfeed]" -tracedchars[0x0D] = "[return]" -tracedchars[0x20] = "[space]" - function characters.showstring(str) local list = utotable(str) for i=1,#list do diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv index 053498fe3..45216352b 100644 --- a/tex/context/base/cont-new.mkiv +++ b/tex/context/base/cont-new.mkiv @@ -11,7 +11,7 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. 
-\newcontextversion{2015.05.24 12:42} +\newcontextversion{2015.06.12 10:06} %D This file is loaded at runtime, thereby providing an excellent place for %D hacks, patches, extensions and new features. diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf index 9395648bd..97d3058a2 100644 Binary files a/tex/context/base/context-version.pdf and b/tex/context/base/context-version.pdf differ diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv index 4fe6a2ae2..df01cab5a 100644 --- a/tex/context/base/context.mkiv +++ b/tex/context/base/context.mkiv @@ -39,7 +39,7 @@ %D up and the dependencies are more consistent. \edef\contextformat {\jobname} -\edef\contextversion{2015.05.24 12:42} +\edef\contextversion{2015.06.12 10:06} \edef\contextkind {beta} %D For those who want to use this: diff --git a/tex/context/base/data-exp.lua b/tex/context/base/data-exp.lua index 0a7396171..19ceb90c3 100644 --- a/tex/context/base/data-exp.lua +++ b/tex/context/base/data-exp.lua @@ -8,6 +8,7 @@ if not modules then modules = { } end modules ['data-exp'] = { local format, find, gmatch, lower, char, sub = string.format, string.find, string.gmatch, string.lower, string.char, string.sub local concat, sort = table.concat, table.sort +local sortedkeys = table.sortedkeys local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns local Ct, Cs, Cc, Carg, P, C, S = lpeg.Ct, lpeg.Cs, lpeg.Cc, lpeg.Carg, lpeg.P, lpeg.C, lpeg.S local type, next = type, next @@ -501,17 +502,41 @@ end local nothing = function() end +-- function resolvers.filtered_from_content(content,pattern) +-- if content and type(pattern) == "string" then +-- local pattern = lower(pattern) +-- local files = content.files +-- local remap = content.remap +-- if files and remap then +-- local n = next(files) +-- local function iterator() +-- while n do +-- local k = n +-- n = next(files,k) +-- if find(k,pattern) then +-- return files[k], remap and remap[k] or k +-- end +-- end +-- end +-- return iterator +-- end +-- end +-- return nothing +-- end + function resolvers.filtered_from_content(content,pattern) if content and type(pattern) == "string" then local pattern = lower(pattern) - local files = content.files + local files = content.files -- we could store the sorted list local remap = content.remap if files and remap then - local n = next(files) + local f = sortedkeys(files) + local n = #f + local i = 0 local function iterator() - while n do - local k = n - n = next(files,k) + while i < n do + i = i + 1 + local k = f[i] if find(k,pattern) then return files[k], remap and remap[k] or k end @@ -523,6 +548,5 @@ function resolvers.filtered_from_content(content,pattern) return nothing end - -- inspect(resolvers.simplescanfiles("e:/temporary/mb-mp")) -- inspect(resolvers.scanfiles("e:/temporary/mb-mp")) diff --git a/tex/context/base/enco-ini.mkiv b/tex/context/base/enco-ini.mkiv index ef0ebef4e..2f94c8fc8 100644 --- a/tex/context/base/enco-ini.mkiv +++ b/tex/context/base/enco-ini.mkiv @@ -111,10 +111,36 @@ % \relax#2% % \endgroup} +% \unexpanded\def\buildtextaccent#1#2% we could do all at the lua end +% {\begingroup % but that's no fun (yet) +% \setbox\b_enco_accent\hbox{#1}% +% \clf_buildtextaccent\b_enco_accent#2% +% \endgroup} +% +% This one can handle font collections too. The accent command is a scanner +% and the same font and otherwise discards the character (imo it could +% better drop the accent). 
+ \unexpanded\def\buildtextaccent#1#2% we could do all at the lua end - {\begingroup % but that's no fun (yet) - \setbox\b_enco_accent\hbox{#1}% - \clf_buildtextaccent\b_enco_accent#2% + {\dontleavehmode\begingroup % but that's no fun (yet) + \setbox\scratchboxone\hbox{#1}% accent + \setbox\scratchboxtwo\hbox{#2}% character + \scratchheight\dimexpr\ht\scratchboxtwo-\ht\scratchboxone\relax + \scratchdepth \dimexpr\dp\scratchboxtwo-\dp\scratchboxone\relax + \scratchwidth \wd\scratchboxtwo + \hbox to \wd\ifdim\wd\scratchboxone>\wd\scratchboxtwo\scratchboxone\else\scratchboxtwo\fi\bgroup + \hss\box\scratchboxtwo\hss + \hskip-\scratchwidth + \hss + \ifdim\ht\scratchboxone>\exheight + % top accent + \raise\dimexpr\scratchheight+\exheight/3\relax + \else + \lower-\dimexpr\scratchdepth+\exheight/3\relax + \fi + \box\scratchboxone + \hss + \egroup \endgroup} \unexpanded\def\bottomaccent#1#2#3#4#5% down right slantcorrection accent char diff --git a/tex/context/base/font-col.lua b/tex/context/base/font-col.lua index cbc1953f4..dfad5c821 100644 --- a/tex/context/base/font-col.lua +++ b/tex/context/base/font-col.lua @@ -26,7 +26,7 @@ local traverse_id = nuts.traverse_id local settings_to_hash = utilities.parsers.settings_to_hash -local trace_collecting = false trackers.register("fonts.collecting", function(v) trace_collecting = v end) +local trace_collecting = false trackers.register("fonts.collecting", function(v) trace_collecting = v end) local report_fonts = logs.reporter("fonts","collections") @@ -253,23 +253,32 @@ end function collections.process(head) -- this way we keep feature processing local done = false for n in traverse_id(glyph_code,tonut(head)) do - local v = vectors[getfont(n)] - if v then - local id = v[getchar(n)] - if id then - if type(id) == "table" then - local newid, newchar = id[1], id[2] - if trace_collecting then - report_fonts("remapping character %C in font %a to character %C in font %a",getchar(n),getfont(n),newchar,newid) - end - setfield(n,"font",newid) - setfield(n,"char",newchar) - else - if trace_collecting then - report_fonts("remapping font %a to %a for character %C",getfont(n),id,getchar(n)) - end - setfield(n,"font",id) + local font = getfont(n) + local vector = vectors[font] + if vector then + local char = getchar(n) + local vect = vector[char] + if not vect then + -- keep it + elseif type(vect) == "table" then + local newfont = vect[1] + local newchar = vect[2] + if trace_collecting then + report_fonts("remapping character %C in font %a to character %C in font %a%s", + char,font,newchar,newfont,not chardata[newfont][newchar] and " (missing)" or "" + ) + end + setfield(n,"font",newfont) + setfield(n,"char",newchar) + done = true + else + if trace_collecting then + report_fonts("remapping font %a to %a for character %C%s", + font,vect,char,not chardata[vect][char] and " (missing)" or "" + ) end + setfield(n,"font",vect) + done = true end end end diff --git a/tex/context/base/font-mis.lua b/tex/context/base/font-mis.lua index 30b0303e1..5169dd4e1 100644 --- a/tex/context/base/font-mis.lua +++ b/tex/context/base/font-mis.lua @@ -22,7 +22,7 @@ local handlers = fonts.handlers handlers.otf = handlers.otf or { } local otf = handlers.otf -otf.version = otf.version or 2.812 +otf.version = otf.version or 2.814 otf.cache = otf.cache or containers.define("fonts", "otf", otf.version, true) local fontloader = fontloader diff --git a/tex/context/base/font-otc.lua b/tex/context/base/font-otc.lua index f7f8e9ce2..a3afcd9e1 100644 --- a/tex/context/base/font-otc.lua +++ 
b/tex/context/base/font-otc.lua @@ -84,7 +84,6 @@ local function addfeature(data,feature,specifications) local unicode = tonumber(code) or unicodes[code] local description = descriptions[unicode] if description then - local slookups = description.slookups if type(ligature) == "string" then ligature = { lpegmatch(splitter,ligature) } end @@ -96,6 +95,7 @@ local function addfeature(data,feature,specifications) end end if present then + local slookups = description.slookups if slookups then slookups[full] = ligature else @@ -113,9 +113,9 @@ local function addfeature(data,feature,specifications) local unicode = tonumber(code) or unicodes[code] local description = descriptions[unicode] if description then - local slookups = description.slookups replacement = tonumber(replacement) or unicodes[replacement] if descriptions[replacement] then + local slookups = description.slookups if slookups then slookups[full] = replacement else @@ -347,3 +347,21 @@ registerotffeature { name = 'anum', description = 'arabic digits', } + +-- maybe: + +-- fonts.handlers.otf.addfeature("hangulfix",{ +-- type = "substitution", +-- features = { ["hang"] = { ["*"] = true } }, +-- data = { +-- [0x1160] = 0x119E, +-- }, +-- order = { "hangulfix" }, +-- flags = { }, +-- prepend = true, +-- }) + +-- fonts.handlers.otf.features.register { +-- name = 'hangulfix', +-- description = 'fixes for hangul', +-- } diff --git a/tex/context/base/font-otf.lua b/tex/context/base/font-otf.lua index e7a97c66d..db0118d84 100644 --- a/tex/context/base/font-otf.lua +++ b/tex/context/base/font-otf.lua @@ -12,6 +12,12 @@ if not modules then modules = { } end modules ['font-otf'] = { -- to_table -> totable -- ascent descent +-- to be checked: combinations like: +-- +-- current="ABCD" with [A]=nothing, [BC]=ligature, [D]=single (applied to result of BC so funny index) +-- +-- unlikely but possible + -- more checking against low level calls of functions local utfbyte = utf.byte @@ -54,7 +60,7 @@ local otf = fonts.handlers.otf otf.glists = { "gsub", "gpos" } -otf.version = 2.812 -- beware: also sync font-mis.lua +otf.version = 2.814 -- beware: also sync font-mis.lua otf.cache = containers.define("fonts", "otf", otf.version, true) local hashes = fonts.hashes @@ -283,13 +289,17 @@ local ordered_enhancers = { "check glyphs", "check metadata", - "check extra features", -- after metadata +-- "check extra features", -- after metadata "prepare tounicode", "check encoding", -- moved "add duplicates", + "expand lookups", -- a temp hack awaiting the lua loader + + "check extra features", -- after metadata and duplicates + "cleanup tables", "compact lookups", @@ -386,6 +396,7 @@ function otf.load(filename,sub,featurefile) -- second argument (format) is gone if featurefile then name = name .. "@" .. file.removesuffix(file.basename(featurefile)) end + -- or: sub = tonumber(sub) if sub == "" then sub = false end @@ -476,6 +487,7 @@ function otf.load(filename,sub,featurefile) -- second argument (format) is gone data = { size = size, time = time, + subfont = sub, format = otf_format(filename), featuredata = featurefiles, resources = { @@ -819,35 +831,35 @@ actions["prepare glyphs"] = function(data,filename,raw) glyph = glyph, } descriptions[unicode] = description -local altuni = glyph.altuni -if altuni then - -- local d - for i=1,#altuni do - local a = altuni[i] - local u = a.unicode - if u ~= unicode then - local v = a.variant - if v then - -- tricky: no addition to d? 
needs checking but in practice such dups are either very simple - -- shapes or e.g cjk with not that many features - local vv = variants[v] - if vv then - vv[u] = unicode - else -- xits-math has some: - vv = { [u] = unicode } - variants[v] = vv - end - -- elseif d then - -- d[#d+1] = u - -- else - -- d = { u } - end - end - end - -- if d then - -- duplicates[unicode] = d -- is this needed ? - -- end -end + local altuni = glyph.altuni + if altuni then + -- local d + for i=1,#altuni do + local a = altuni[i] + local u = a.unicode + if u ~= unicode then + local v = a.variant + if v then + -- tricky: no addition to d? needs checking but in practice such dups are either very simple + -- shapes or e.g cjk with not that many features + local vv = variants[v] + if vv then + vv[u] = unicode + else -- xits-math has some: + vv = { [u] = unicode } + variants[v] = vv + end + -- elseif d then + -- d[#d+1] = u + -- else + -- d = { u } + end + end + end + -- if d then + -- duplicates[unicode] = d -- is this needed ? + -- end + end end end else @@ -1494,12 +1506,16 @@ end actions["reorganize lookups"] = function(data,filename,raw) -- we could check for "" and n == 0 -- we prefer the before lookups in a normal order if data.lookups then - local splitter = data.helpers.tounicodetable - local t_u_cache = { } - local s_u_cache = t_u_cache -- string keys - local t_h_cache = { } - local s_h_cache = t_h_cache -- table keys (so we could use one cache) - local r_u_cache = { } -- maybe shared + local helpers = data.helpers + local duplicates = data.resources.duplicates + local splitter = helpers.tounicodetable + local t_u_cache = { } + local s_u_cache = t_u_cache -- string keys + local t_h_cache = { } + local s_h_cache = t_h_cache -- table keys (so we could use one cache) + local r_u_cache = { } -- maybe shared + helpers.matchcache = t_h_cache -- so that we can add duplicates + -- for _, lookup in next, data.lookups do local rules = lookup.rules if rules then @@ -1653,6 +1669,50 @@ actions["reorganize lookups"] = function(data,filename,raw) -- we could check fo end end +actions["expand lookups"] = function(data,filename,raw) -- we could check for "" and n == 0 + if data.lookups then + local cache = data.helpers.matchcache + if cache then + local duplicates = data.resources.duplicates + for key, hash in next, cache do + local done = nil + for key in next, hash do + local unicode = duplicates[key] + if not unicode then + -- no duplicate + elseif type(unicode) == "table" then + -- multiple duplicates + for i=1,#unicode do + local u = unicode[i] + if hash[u] then + -- already in set + elseif done then + done[u] = key + else + done = { [u] = key } + end + end + else + -- one duplicate + if hash[unicode] then + -- already in set + elseif done then + done[unicode] = key + else + done = { [unicode] = key } + end + end + end + if done then + for u in next, done do + hash[u] = true + end + end + end + end + end +end + local function check_variants(unicode,the_variants,splitter,unicodes) local variants = the_variants.variants if variants then -- use splitter diff --git a/tex/context/base/font-otn.lua b/tex/context/base/font-otn.lua index b265dbbeb..ffe5618b3 100644 --- a/tex/context/base/font-otn.lua +++ b/tex/context/base/font-otn.lua @@ -12,6 +12,13 @@ if not modules then modules = { } end modules ['font-otn'] = { -- this is a context version which can contain experimental code, but when we -- have serious patches we also need to change the other two font-otn files +-- at some point i might decide to convert the whole list 
into a table and then +-- run over that instead (but it has some drawbacks as we also need to deal with +-- attributes and such so we need to keep a lot of track - which is why i rejected +-- that method - although it has become a bit easier in the meantime so it might +-- become an alternative (by that time i probably have gone completely lua) .. the +-- usual chicken-egg issues ... maybe mkix as it's no real tex any more then + -- preprocessors = { "nodes" } -- anchor class : mark, mkmk, curs, mklg (todo) @@ -1573,7 +1580,7 @@ function chainprocs.gsub_ligature(head,start,stop,kind,chainname,currentcontext, local s = getnext(start) local discfound = false local last = stop - local nofreplacements = 0 + local nofreplacements = 1 local skipmark = currentlookup.flags[1] while s do local id = getid(s) @@ -2005,6 +2012,8 @@ local function show_skip(kind,chainname,char,ck,class) end end +--hm, do i need to deal with disc here ? + local function normal_handle_contextchain(head,start,kind,chainname,contexts,sequence,lookuphash) -- local rule, lookuptype, sequence, f, l, lookups = ck[1], ck[2] ,ck[3], ck[4], ck[5], ck[6] local flags = sequence.flags @@ -2319,9 +2328,9 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq end else local i = 1 - while true do + while start and true do if skipped then - while true do + while true do -- todo: use properties local char = getchar(start) local ccd = descriptions[char] if ccd then @@ -2336,10 +2345,11 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq end end end + -- see remark in ms standard under : LookupType 5: Contextual Substitution Subtable local chainlookupname = chainlookups[i] local chainlookup = lookuptable[chainlookupname] if not chainlookup then - -- okay, n matches, < n replacements + -- we just advance i = i + 1 else local cp = chainmores[chainlookup.type] @@ -2353,19 +2363,29 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq -- messy since last can be changed ! 
if ok then done = true - -- skip next one(s) if ligature - i = i + (n or 1) - else - i = i + 1 + if n and n > 1 then + -- we have a ligature (cf the spec we advance one but we really need to test it + -- as there are fonts out there that are fuzzy and have too many lookups: + -- + -- U+1105 U+119E U+1105 U+119E : sourcehansansklight: script=hang ccmp=yes + -- + if i + n > nofchainlookups then + -- if trace_contexts then + -- logprocess("%s: quitting lookups",cref(kind,chainname)) + -- end + break + else + -- we need to carry one + end + end end + i = i + 1 end end - if i > nofchainlookups then + if i > nofchainlookups or not start then break elseif start then start = getnext(start) - else - -- weird end end end @@ -3363,47 +3383,47 @@ local function generic(lookupdata,lookupname,unicode,lookuphash) end end -local action = { +local function ligature(lookupdata,lookupname,unicode,lookuphash) + local target = lookuphash[lookupname] + if not target then + target = { } + lookuphash[lookupname] = target + end + for i=1,#lookupdata do + local li = lookupdata[i] + local tu = target[li] + if not tu then + tu = { } + target[li] = tu + end + target = tu + end + target.ligature = unicode +end + +local function pair(lookupdata,lookupname,unicode,lookuphash) + local target = lookuphash[lookupname] + if not target then + target = { } + lookuphash[lookupname] = target + end + local others = target[unicode] + local paired = lookupdata[1] + if others then + others[paired] = lookupdata + else + others = { [paired] = lookupdata } + target[unicode] = others + end +end +local action = { substitution = generic, multiple = generic, alternate = generic, position = generic, - - ligature = function(lookupdata,lookupname,unicode,lookuphash) - local target = lookuphash[lookupname] - if not target then - target = { } - lookuphash[lookupname] = target - end - for i=1,#lookupdata do - local li = lookupdata[i] - local tu = target[li] - if not tu then - tu = { } - target[li] = tu - end - target = tu - end - target.ligature = unicode - end, - - pair = function(lookupdata,lookupname,unicode,lookuphash) - local target = lookuphash[lookupname] - if not target then - target = { } - lookuphash[lookupname] = target - end - local others = target[unicode] - local paired = lookupdata[1] - if others then - others[paired] = lookupdata - else - others = { [paired] = lookupdata } - target[unicode] = others - end - end, - + ligature = ligature, + pair = pair, } local function prepare_lookups(tfmdata) @@ -3416,6 +3436,7 @@ local function prepare_lookups(tfmdata) local lookuptypes = resources.lookuptypes local characters = tfmdata.characters local descriptions = tfmdata.descriptions + local duplicates = resources.duplicates -- we cannot free the entries in the descriptions as sometimes we access -- then directly (for instance anchors) ... 
selectively freeing does save @@ -3435,7 +3456,7 @@ local function prepare_lookups(tfmdata) local lookups = description.slookups if lookups then for lookupname, lookupdata in next, lookups do - action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash) + action[lookuptypes[lookupname]](lookupdata,lookupname,unicode,lookuphash,duplicates) end end @@ -3445,7 +3466,7 @@ local function prepare_lookups(tfmdata) local lookuptype = lookuptypes[lookupname] for l=1,#lookuplist do local lookupdata = lookuplist[l] - action[lookuptype](lookupdata,lookupname,unicode,lookuphash) + action[lookuptype](lookupdata,lookupname,unicode,lookuphash,duplicates) end end end @@ -3564,7 +3585,7 @@ local function prepare_contextchains(tfmdata) -- use sequence[start] instead but it's somewhat ugly. nt = nt + 1 t[nt] = { nofrules, lookuptype, sequence, start, stop, rule.lookups, replacements } - for unic, _ in next, sequence[start] do + for unic in next, sequence[start] do local cu = contexts[unic] if not cu then contexts[unic] = t diff --git a/tex/context/base/font-ott.lua b/tex/context/base/font-ott.lua index 1f9a1ac04..ed6519f38 100644 --- a/tex/context/base/font-ott.lua +++ b/tex/context/base/font-ott.lua @@ -28,15 +28,24 @@ otf.statistics = statistics local scripts = allocate { ['arab'] = 'arabic', + ['armi'] = 'imperial aramaic', ['armn'] = 'armenian', + ['avst'] = 'avestan', ['bali'] = 'balinese', + ['bamu'] = 'bamum', + ['batk'] = 'batak', ['beng'] = 'bengali', + ['bng2'] = 'bengali variant 2', ['bopo'] = 'bopomofo', + ['brah'] = 'brahmi', ['brai'] = 'braille', ['bugi'] = 'buginese', ['buhd'] = 'buhid', ['byzm'] = 'byzantine music', + ['cakm'] = 'chakma', ['cans'] = 'canadian syllabics', + ['cari'] = 'carian', + ['cham'] = 'cham', ['cher'] = 'cherokee', ['copt'] = 'coptic', ['cprt'] = 'cypriot syllabary', @@ -44,13 +53,16 @@ local scripts = allocate { ['deva'] = 'devanagari', ['dev2'] = 'devanagari variant 2', ['dsrt'] = 'deseret', + ['egyp'] = 'egyptian heiroglyphs', ['ethi'] = 'ethiopic', ['geor'] = 'georgian', ['glag'] = 'glagolitic', ['goth'] = 'gothic', ['grek'] = 'greek', ['gujr'] = 'gujarati', + ['gjr2'] = 'gujarati variant 2', ['guru'] = 'gurmukhi', + ['gur2'] = 'gurmukhi variant 2', ['hang'] = 'hangul', ['hani'] = 'cjk ideographic', ['hano'] = 'hanunoo', @@ -58,437 +70,660 @@ local scripts = allocate { ['ital'] = 'old italic', ['jamo'] = 'hangul jamo', ['java'] = 'javanese', + ['kali'] = 'kayah li', ['kana'] = 'hiragana and katakana', ['khar'] = 'kharosthi', ['khmr'] = 'khmer', ['knda'] = 'kannada', + ['knd2'] = 'kannada variant 2', + ['kthi'] = 'kaithi', + ['lana'] = 'tai tham', ['lao' ] = 'lao', ['latn'] = 'latin', + ['lepc'] = 'lepcha', ['limb'] = 'limbu', ['linb'] = 'linear b', + ['lisu'] = 'lisu', + ['lyci'] = 'lycian', + ['lydi'] = 'lydian', + ['mand'] = 'mandaic and mandaean', ['math'] = 'mathematical alphanumeric symbols', + ['merc'] = 'meroitic cursive', + ['mero'] = 'meroitic hieroglyphs', ['mlym'] = 'malayalam', ['mlm2'] = 'malayalam variant 2', ['mong'] = 'mongolian', + ['mtei'] = 'meitei Mayek', ['musc'] = 'musical symbols', + ['mym2'] = 'myanmar variant 2', ['mymr'] = 'myanmar', ['nko' ] = "n'ko", ['ogam'] = 'ogham', + ['olck'] = 'ol chiki', + ['orkh'] = 'old turkic and orkhon runic', ['orya'] = 'oriya', + ['ory2'] = 'odia variant 2', ['osma'] = 'osmanya', ['phag'] = 'phags-pa', + ['phli'] = 'inscriptional pahlavi', ['phnx'] = 'phoenician', + ['prti'] = 'inscriptional parthian', + ['rjng'] = 'rejang', ['runr'] = 'runic', + ['samr'] = 'samaritan', + ['sarb'] = 'old south 
arabian', + ['saur'] = 'saurashtra', ['shaw'] = 'shavian', + ['shrd'] = 'sharada', ['sinh'] = 'sinhala', + ['sora'] = 'sora sompeng', + ['sund'] = 'sundanese', ['sylo'] = 'syloti nagri', ['syrc'] = 'syriac', ['tagb'] = 'tagbanwa', + ['takr'] = 'takri', ['tale'] = 'tai le', ['talu'] = 'tai lu', ['taml'] = 'tamil', + ['tavt'] = 'tai viet', ['telu'] = 'telugu', + ['tel2'] = 'telugu variant 2', ['tfng'] = 'tifinagh', ['tglg'] = 'tagalog', ['thaa'] = 'thaana', ['thai'] = 'thai', ['tibt'] = 'tibetan', + ['tml2'] = 'tamil variant 2', ['ugar'] = 'ugaritic cuneiform', + ['vai' ] = 'vai', ['xpeo'] = 'old persian cuneiform', ['xsux'] = 'sumero-akkadian cuneiform', ['yi' ] = 'yi', } local languages = allocate { - ['aba'] = 'abaza', - ['abk'] = 'abkhazian', - ['ady'] = 'adyghe', - ['afk'] = 'afrikaans', - ['afr'] = 'afar', - ['agw'] = 'agaw', - ['als'] = 'alsatian', - ['alt'] = 'altai', - ['amh'] = 'amharic', - ['ara'] = 'arabic', - ['ari'] = 'aari', - ['ark'] = 'arakanese', - ['asm'] = 'assamese', - ['ath'] = 'athapaskan', - ['avr'] = 'avar', - ['awa'] = 'awadhi', - ['aym'] = 'aymara', - ['aze'] = 'azeri', - ['bad'] = 'badaga', - ['bag'] = 'baghelkhandi', - ['bal'] = 'balkar', - ['bau'] = 'baule', - ['bbr'] = 'berber', - ['bch'] = 'bench', - ['bcr'] = 'bible cree', - ['bel'] = 'belarussian', - ['bem'] = 'bemba', - ['ben'] = 'bengali', - ['bgr'] = 'bulgarian', - ['bhi'] = 'bhili', - ['bho'] = 'bhojpuri', - ['bik'] = 'bikol', - ['bil'] = 'bilen', - ['bkf'] = 'blackfoot', - ['bli'] = 'balochi', - ['bln'] = 'balante', - ['blt'] = 'balti', - ['bmb'] = 'bambara', - ['bml'] = 'bamileke', - ['bos'] = 'bosnian', - ['bre'] = 'breton', - ['brh'] = 'brahui', - ['bri'] = 'braj bhasha', - ['brm'] = 'burmese', - ['bsh'] = 'bashkir', - ['bti'] = 'beti', - ['cat'] = 'catalan', - ['ceb'] = 'cebuano', - ['che'] = 'chechen', - ['chg'] = 'chaha gurage', - ['chh'] = 'chattisgarhi', - ['chi'] = 'chichewa', - ['chk'] = 'chukchi', - ['chp'] = 'chipewyan', - ['chr'] = 'cherokee', - ['chu'] = 'chuvash', - ['cmr'] = 'comorian', - ['cop'] = 'coptic', - ['cos'] = 'corsican', - ['cre'] = 'cree', - ['crr'] = 'carrier', - ['crt'] = 'crimean tatar', - ['csl'] = 'church slavonic', - ['csy'] = 'czech', - ['dan'] = 'danish', - ['dar'] = 'dargwa', - ['dcr'] = 'woods cree', - ['deu'] = 'german', - ['dgr'] = 'dogri', - ['div'] = 'divehi', - ['djr'] = 'djerma', - ['dng'] = 'dangme', - ['dnk'] = 'dinka', - ['dri'] = 'dari', - ['dun'] = 'dungan', - ['dzn'] = 'dzongkha', - ['ebi'] = 'ebira', - ['ecr'] = 'eastern cree', - ['edo'] = 'edo', - ['efi'] = 'efik', - ['ell'] = 'greek', - ['eng'] = 'english', - ['erz'] = 'erzya', - ['esp'] = 'spanish', - ['eti'] = 'estonian', - ['euq'] = 'basque', - ['evk'] = 'evenki', - ['evn'] = 'even', - ['ewe'] = 'ewe', - ['fan'] = 'french antillean', - ['far'] = 'farsi', - ['fin'] = 'finnish', - ['fji'] = 'fijian', - ['fle'] = 'flemish', - ['fne'] = 'forest nenets', - ['fon'] = 'fon', - ['fos'] = 'faroese', - ['fra'] = 'french', - ['fri'] = 'frisian', - ['frl'] = 'friulian', - ['fta'] = 'futa', - ['ful'] = 'fulani', - ['gad'] = 'ga', - ['gae'] = 'gaelic', - ['gag'] = 'gagauz', - ['gal'] = 'galician', - ['gar'] = 'garshuni', - ['gaw'] = 'garhwali', - ['gez'] = "ge'ez", - ['gil'] = 'gilyak', - ['gmz'] = 'gumuz', - ['gon'] = 'gondi', - ['grn'] = 'greenlandic', - ['gro'] = 'garo', - ['gua'] = 'guarani', - ['guj'] = 'gujarati', - ['hai'] = 'haitian', - ['hal'] = 'halam', - ['har'] = 'harauti', - ['hau'] = 'hausa', - ['haw'] = 'hawaiin', - ['hbn'] = 'hammer-banna', - ['hil'] = 'hiligaynon', - ['hin'] = 'hindi', - ['hma'] 
= 'high mari', - ['hnd'] = 'hindko', - ['ho'] = 'ho', - ['hri'] = 'harari', - ['hrv'] = 'croatian', - ['hun'] = 'hungarian', - ['hye'] = 'armenian', - ['ibo'] = 'igbo', - ['ijo'] = 'ijo', - ['ilo'] = 'ilokano', - ['ind'] = 'indonesian', - ['ing'] = 'ingush', - ['inu'] = 'inuktitut', - ['iri'] = 'irish', - ['irt'] = 'irish traditional', - ['isl'] = 'icelandic', - ['ism'] = 'inari sami', - ['ita'] = 'italian', - ['iwr'] = 'hebrew', - ['jan'] = 'japanese', - ['jav'] = 'javanese', - ['jii'] = 'yiddish', - ['jud'] = 'judezmo', - ['jul'] = 'jula', - ['kab'] = 'kabardian', - ['kac'] = 'kachchi', - ['kal'] = 'kalenjin', - ['kan'] = 'kannada', - ['kar'] = 'karachay', - ['kat'] = 'georgian', - ['kaz'] = 'kazakh', - ['keb'] = 'kebena', - ['kge'] = 'khutsuri georgian', - ['kha'] = 'khakass', - ['khk'] = 'khanty-kazim', - ['khm'] = 'khmer', - ['khs'] = 'khanty-shurishkar', - ['khv'] = 'khanty-vakhi', - ['khw'] = 'khowar', - ['kik'] = 'kikuyu', - ['kir'] = 'kirghiz', - ['kis'] = 'kisii', - ['kkn'] = 'kokni', - ['klm'] = 'kalmyk', - ['kmb'] = 'kamba', - ['kmn'] = 'kumaoni', - ['kmo'] = 'komo', - ['kms'] = 'komso', - ['knr'] = 'kanuri', - ['kod'] = 'kodagu', - ['koh'] = 'korean old hangul', - ['kok'] = 'konkani', - ['kon'] = 'kikongo', - ['kop'] = 'komi-permyak', - ['kor'] = 'korean', - ['koz'] = 'komi-zyrian', - ['kpl'] = 'kpelle', - ['kri'] = 'krio', - ['krk'] = 'karakalpak', - ['krl'] = 'karelian', - ['krm'] = 'karaim', - ['krn'] = 'karen', - ['krt'] = 'koorete', - ['ksh'] = 'kashmiri', - ['ksi'] = 'khasi', - ['ksm'] = 'kildin sami', - ['kui'] = 'kui', - ['kul'] = 'kulvi', - ['kum'] = 'kumyk', - ['kur'] = 'kurdish', - ['kuu'] = 'kurukh', - ['kuy'] = 'kuy', - ['kyk'] = 'koryak', - ['lad'] = 'ladin', - ['lah'] = 'lahuli', - ['lak'] = 'lak', - ['lam'] = 'lambani', - ['lao'] = 'lao', - ['lat'] = 'latin', - ['laz'] = 'laz', - ['lcr'] = 'l-cree', - ['ldk'] = 'ladakhi', - ['lez'] = 'lezgi', - ['lin'] = 'lingala', - ['lma'] = 'low mari', - ['lmb'] = 'limbu', - ['lmw'] = 'lomwe', - ['lsb'] = 'lower sorbian', - ['lsm'] = 'lule sami', - ['lth'] = 'lithuanian', - ['ltz'] = 'luxembourgish', - ['lub'] = 'luba', - ['lug'] = 'luganda', - ['luh'] = 'luhya', - ['luo'] = 'luo', - ['lvi'] = 'latvian', - ['maj'] = 'majang', - ['mak'] = 'makua', - ['mal'] = 'malayalam traditional', - ['man'] = 'mansi', - ['map'] = 'mapudungun', - ['mar'] = 'marathi', - ['maw'] = 'marwari', - ['mbn'] = 'mbundu', - ['mch'] = 'manchu', - ['mcr'] = 'moose cree', - ['mde'] = 'mende', - ['men'] = "me'en", - ['miz'] = 'mizo', - ['mkd'] = 'macedonian', - ['mle'] = 'male', - ['mlg'] = 'malagasy', - ['mln'] = 'malinke', - ['mlr'] = 'malayalam reformed', - ['mly'] = 'malay', - ['mnd'] = 'mandinka', - ['mng'] = 'mongolian', - ['mni'] = 'manipuri', - ['mnk'] = 'maninka', - ['mnx'] = 'manx gaelic', - ['moh'] = 'mohawk', - ['mok'] = 'moksha', - ['mol'] = 'moldavian', - ['mon'] = 'mon', - ['mor'] = 'moroccan', - ['mri'] = 'maori', - ['mth'] = 'maithili', - ['mts'] = 'maltese', - ['mun'] = 'mundari', - ['nag'] = 'naga-assamese', - ['nan'] = 'nanai', - ['nas'] = 'naskapi', - ['ncr'] = 'n-cree', - ['ndb'] = 'ndebele', - ['ndg'] = 'ndonga', - ['nep'] = 'nepali', - ['new'] = 'newari', - ['ngr'] = 'nagari', - ['nhc'] = 'norway house cree', - ['nis'] = 'nisi', - ['niu'] = 'niuean', - ['nkl'] = 'nkole', - ['nko'] = "n'ko", - ['nld'] = 'dutch', - ['nog'] = 'nogai', - ['nor'] = 'norwegian', - ['nsm'] = 'northern sami', - ['nta'] = 'northern tai', - ['nto'] = 'esperanto', - ['nyn'] = 'nynorsk', - ['oci'] = 'occitan', - ['ocr'] = 'oji-cree', - ['ojb'] = 'ojibway', - 
['ori'] = 'oriya', - ['oro'] = 'oromo', - ['oss'] = 'ossetian', - ['paa'] = 'palestinian aramaic', - ['pal'] = 'pali', - ['pan'] = 'punjabi', - ['pap'] = 'palpa', - ['pas'] = 'pashto', - ['pgr'] = 'polytonic greek', - ['pil'] = 'pilipino', - ['plg'] = 'palaung', - ['plk'] = 'polish', - ['pro'] = 'provencal', - ['ptg'] = 'portuguese', - ['qin'] = 'chin', - ['raj'] = 'rajasthani', - ['rbu'] = 'russian buriat', - ['rcr'] = 'r-cree', - ['ria'] = 'riang', - ['rms'] = 'rhaeto-romanic', - ['rom'] = 'romanian', - ['roy'] = 'romany', - ['rsy'] = 'rusyn', - ['rua'] = 'ruanda', - ['rus'] = 'russian', - ['sad'] = 'sadri', - ['san'] = 'sanskrit', - ['sat'] = 'santali', - ['say'] = 'sayisi', - ['sek'] = 'sekota', - ['sel'] = 'selkup', - ['sgo'] = 'sango', - ['shn'] = 'shan', - ['sib'] = 'sibe', - ['sid'] = 'sidamo', - ['sig'] = 'silte gurage', - ['sks'] = 'skolt sami', - ['sky'] = 'slovak', - ['sla'] = 'slavey', - ['slv'] = 'slovenian', - ['sml'] = 'somali', - ['smo'] = 'samoan', - ['sna'] = 'sena', - ['snd'] = 'sindhi', - ['snh'] = 'sinhalese', - ['snk'] = 'soninke', - ['sog'] = 'sodo gurage', - ['sot'] = 'sotho', - ['sqi'] = 'albanian', - ['srb'] = 'serbian', - ['srk'] = 'saraiki', - ['srr'] = 'serer', - ['ssl'] = 'south slavey', - ['ssm'] = 'southern sami', - ['sur'] = 'suri', - ['sva'] = 'svan', - ['sve'] = 'swedish', - ['swa'] = 'swadaya aramaic', - ['swk'] = 'swahili', - ['swz'] = 'swazi', - ['sxt'] = 'sutu', - ['syr'] = 'syriac', - ['tab'] = 'tabasaran', - ['taj'] = 'tajiki', - ['tam'] = 'tamil', - ['tat'] = 'tatar', - ['tcr'] = 'th-cree', - ['tel'] = 'telugu', - ['tgn'] = 'tongan', - ['tgr'] = 'tigre', - ['tgy'] = 'tigrinya', - ['tha'] = 'thai', - ['tht'] = 'tahitian', - ['tib'] = 'tibetan', - ['tkm'] = 'turkmen', - ['tmn'] = 'temne', - ['tna'] = 'tswana', - ['tne'] = 'tundra nenets', - ['tng'] = 'tonga', - ['tod'] = 'todo', - ['trk'] = 'turkish', - ['tsg'] = 'tsonga', - ['tua'] = 'turoyo aramaic', - ['tul'] = 'tulu', - ['tuv'] = 'tuvin', - ['twi'] = 'twi', - ['udm'] = 'udmurt', - ['ukr'] = 'ukrainian', - ['urd'] = 'urdu', - ['usb'] = 'upper sorbian', - ['uyg'] = 'uyghur', - ['uzb'] = 'uzbek', - ['ven'] = 'venda', - ['vit'] = 'vietnamese', - ['wa' ] = 'wa', - ['wag'] = 'wagdi', - ['wcr'] = 'west-cree', - ['wel'] = 'welsh', - ['wlf'] = 'wolof', - ['xbd'] = 'tai lue', - ['xhs'] = 'xhosa', - ['yak'] = 'yakut', - ['yba'] = 'yoruba', - ['ycr'] = 'y-cree', - ['yic'] = 'yi classic', - ['yim'] = 'yi modern', - ['zhh'] = 'chinese hong kong', - ['zhp'] = 'chinese phonetic', - ['zhs'] = 'chinese simplified', - ['zht'] = 'chinese traditional', - ['znd'] = 'zande', - ['zul'] = 'zulu' + ["aba" ] = "abaza", + ["abk" ] = "abkhazian", + ["ach" ] = "acholi", + ["acr" ] = "achi", + ["ady" ] = "adyghe", + ["afk" ] = "afrikaans", + ["afr" ] = "afar", + ["agw" ] = "agaw", + ["aio" ] = "aiton", + ["aka" ] = "akan", + ["als" ] = "alsatian", + ["alt" ] = "altai", + ["amh" ] = "amharic", + ["ang" ] = "anglo-saxon", + ["apph"] = "phonetic transcription—americanist conventions", + ["ara" ] = "arabic", + ["arg" ] = "aragonese", + ["ari" ] = "aari", + ["ark" ] = "rakhine", + ["asm" ] = "assamese", + ["ast" ] = "asturian", + ["ath" ] = "athapaskan", + ["avr" ] = "avar", + ["awa" ] = "awadhi", + ["aym" ] = "aymara", + ["azb" ] = "torki", + ["aze" ] = "azerbaijani", + ["bad" ] = "badaga", + ["bad0"] = "banda", + ["bag" ] = "baghelkhandi", + ["bal" ] = "balkar", + ["ban" ] = "balinese", + ["bar" ] = "bavarian", + ["bau" ] = "baulé", + ["bbc" ] = "batak toba", + ["bbr" ] = "berber", + ["bch" ] = "bench", + ["bcr" ] = "bible 
cree", + ["bdy" ] = "bandjalang", + ["bel" ] = "belarussian", + ["bem" ] = "bemba", + ["ben" ] = "bengali", + ["bgc" ] = "haryanvi", + ["bgq" ] = "bagri", + ["bgr" ] = "bulgarian", + ["bhi" ] = "bhili", + ["bho" ] = "bhojpuri", + ["bik" ] = "bikol", + ["bil" ] = "bilen", + ["bis" ] = "bislama", + ["bjj" ] = "kanauji", + ["bkf" ] = "blackfoot", + ["bli" ] = "baluchi", + ["blk" ] = "pa'o karen", + ["bln" ] = "balante", + ["blt" ] = "balti", + ["bmb" ] = "bambara (bamanankan)", + ["bml" ] = "bamileke", + ["bos" ] = "bosnian", + ["bpy" ] = "bishnupriya manipuri", + ["bre" ] = "breton", + ["brh" ] = "brahui", + ["bri" ] = "braj bhasha", + ["brm" ] = "burmese", + ["brx" ] = "bodo", + ["bsh" ] = "bashkir", + ["bti" ] = "beti", + ["bts" ] = "batak simalungun", + ["bug" ] = "bugis", + ["cak" ] = "kaqchikel", + ["cat" ] = "catalan", + ["cbk" ] = "zamboanga chavacano", + ["ceb" ] = "cebuano", + ["cgg" ] = "chiga", + ["cha" ] = "chamorro", + ["che" ] = "chechen", + ["chg" ] = "chaha gurage", + ["chh" ] = "chattisgarhi", + ["chi" ] = "chichewa (chewa, nyanja)", + ["chk" ] = "chukchi", + ["chk0"] = "chuukese", + ["cho" ] = "choctaw", + ["chp" ] = "chipewyan", + ["chr" ] = "cherokee", + ["chu" ] = "chuvash", + ["chy" ] = "cheyenne", + ["cmr" ] = "comorian", + ["cop" ] = "coptic", + ["cor" ] = "cornish", + ["cos" ] = "corsican", + ["cpp" ] = "creoles", + ["cre" ] = "cree", + ["crr" ] = "carrier", + ["crt" ] = "crimean tatar", + ["csb" ] = "kashubian", + ["csl" ] = "church slavonic", + ["csy" ] = "czech", + ["ctg" ] = "chittagonian", + ["cuk" ] = "san blas kuna", + ["dan" ] = "danish", + ["dar" ] = "dargwa", + ["dax" ] = "dayi", + ["dcr" ] = "woods cree", + ["deu" ] = "german", + ["dgo" ] = "dogri", + ["dgr" ] = "dogri", + ["dhg" ] = "dhangu", + ["dhv" ] = "divehi (dhivehi, maldivian)", + ["diq" ] = "dimli", + ["div" ] = "divehi (dhivehi, maldivian)", + ["djr" ] = "zarma", + ["djr0"] = "djambarrpuyngu", + ["dng" ] = "dangme", + ["dnj" ] = "dan", + ["dnk" ] = "dinka", + ["dri" ] = "dari", + ["duj" ] = "dhuwal", + ["dun" ] = "dungan", + ["dzn" ] = "dzongkha", + ["ebi" ] = "ebira", + ["ecr" ] = "eastern cree", + ["edo" ] = "edo", + ["efi" ] = "efik", + ["ell" ] = "greek", + ["emk" ] = "eastern maninkakan", + ["eng" ] = "english", + ["erz" ] = "erzya", + ["esp" ] = "spanish", + ["esu" ] = "central yupik", + ["eti" ] = "estonian", + ["euq" ] = "basque", + ["evk" ] = "evenki", + ["evn" ] = "even", + ["ewe" ] = "ewe", + ["fan" ] = "french antillean", + ["fan0"] = " fang", + ["far" ] = "persian", + ["fat" ] = "fanti", + ["fin" ] = "finnish", + ["fji" ] = "fijian", + ["fle" ] = "dutch (flemish)", + ["fne" ] = "forest nenets", + ["fon" ] = "fon", + ["fos" ] = "faroese", + ["fra" ] = "french", + ["frc" ] = "cajun french", + ["fri" ] = "frisian", + ["frl" ] = "friulian", + ["frp" ] = "arpitan", + ["fta" ] = "futa", + ["ful" ] = "fulah", + ["fuv" ] = "nigerian fulfulde", + ["gad" ] = "ga", + ["gae" ] = "scottish gaelic (gaelic)", + ["gag" ] = "gagauz", + ["gal" ] = "galician", + ["gar" ] = "garshuni", + ["gaw" ] = "garhwali", + ["gez" ] = "ge'ez", + ["gih" ] = "githabul", + ["gil" ] = "gilyak", + ["gil0"] = " kiribati (gilbertese)", + ["gkp" ] = "kpelle (guinea)", + ["glk" ] = "gilaki", + ["gmz" ] = "gumuz", + ["gnn" ] = "gumatj", + ["gog" ] = "gogo", + ["gon" ] = "gondi", + ["grn" ] = "greenlandic", + ["gro" ] = "garo", + ["gua" ] = "guarani", + ["guc" ] = "wayuu", + ["guf" ] = "gupapuyngu", + ["guj" ] = "gujarati", + ["guz" ] = "gusii", + ["hai" ] = "haitian (haitian creole)", + ["hal" ] = "halam", + ["har" ] = 
"harauti", + ["hau" ] = "hausa", + ["haw" ] = "hawaiian", + ["hay" ] = "haya", + ["haz" ] = "hazaragi", + ["hbn" ] = "hammer-banna", + ["her" ] = "herero", + ["hil" ] = "hiligaynon", + ["hin" ] = "hindi", + ["hma" ] = "high mari", + ["hmn" ] = "hmong", + ["hmo" ] = "hiri motu", + ["hnd" ] = "hindko", + ["ho" ] = "ho", + ["hri" ] = "harari", + ["hrv" ] = "croatian", + ["hun" ] = "hungarian", + ["hye" ] = "armenian", + ["hye0"] = "armenian east", + ["iba" ] = "iban", + ["ibb" ] = "ibibio", + ["ibo" ] = "igbo", + ["ido" ] = "ido", + ["ijo" ] = "ijo languages", + ["ile" ] = "interlingue", + ["ilo" ] = "ilokano", + ["ina" ] = "interlingua", + ["ind" ] = "indonesian", + ["ing" ] = "ingush", + ["inu" ] = "inuktitut", + ["ipk" ] = "inupiat", + ["ipph"] = "phonetic transcription—ipa conventions", + ["iri" ] = "irish", + ["irt" ] = "irish traditional", + ["isl" ] = "icelandic", + ["ism" ] = "inari sami", + ["ita" ] = "italian", + ["iwr" ] = "hebrew", + ["jam" ] = "jamaican creole", + ["jan" ] = "japanese", + ["jav" ] = "javanese", + ["jbo" ] = "lojban", + ["jii" ] = "yiddish", + ["jud" ] = "ladino", + ["jul" ] = "jula", + ["kab" ] = "kabardian", + ["kab0"] = "kabyle", + ["kac" ] = "kachchi", + ["kal" ] = "kalenjin", + ["kan" ] = "kannada", + ["kar" ] = "karachay", + ["kat" ] = "georgian", + ["kaz" ] = "kazakh", + ["kde" ] = "makonde", + ["kea" ] = "kabuverdianu (crioulo)", + ["keb" ] = "kebena", + ["kek" ] = "kekchi", + ["kge" ] = "khutsuri georgian", + ["kha" ] = "khakass", + ["khk" ] = "khanty-kazim", + ["khm" ] = "khmer", + ["khs" ] = "khanty-shurishkar", + ["kht" ] = "khamti shan", + ["khv" ] = "khanty-vakhi", + ["khw" ] = "khowar", + ["kik" ] = "kikuyu (gikuyu)", + ["kir" ] = "kirghiz (kyrgyz)", + ["kis" ] = "kisii", + ["kiu" ] = "kirmanjki", + ["kjd" ] = "southern kiwai", + ["kjp" ] = "eastern pwo karen", + ["kkn" ] = "kokni", + ["klm" ] = "kalmyk", + ["kmb" ] = "kamba", + ["kmn" ] = "kumaoni", + ["kmo" ] = "komo", + ["kms" ] = "komso", + ["knr" ] = "kanuri", + ["kod" ] = "kodagu", + ["koh" ] = "korean old hangul", + ["kok" ] = "konkani", + ["kom" ] = "komi", + ["kon" ] = "kikongo", + ["kon0"] = "kongo", + ["kop" ] = "komi-permyak", + ["kor" ] = "korean", + ["kos" ] = "kosraean", + ["koz" ] = "komi-zyrian", + ["kpl" ] = "kpelle", + ["kri" ] = "krio", + ["krk" ] = "karakalpak", + ["krl" ] = "karelian", + ["krm" ] = "karaim", + ["krn" ] = "karen", + ["krt" ] = "koorete", + ["ksh" ] = "kashmiri", + ["ksh0"] = "ripuarian", + ["ksi" ] = "khasi", + ["ksm" ] = "kildin sami", + ["ksw" ] = "s’gaw karen", + ["kua" ] = "kuanyama", + ["kui" ] = "kui", + ["kul" ] = "kulvi", + ["kum" ] = "kumyk", + ["kur" ] = "kurdish", + ["kuu" ] = "kurukh", + ["kuy" ] = "kuy", + ["kyk" ] = "koryak", + ["kyu" ] = "western kayah", + ["lad" ] = "ladin", + ["lah" ] = "lahuli", + ["lak" ] = "lak", + ["lam" ] = "lambani", + ["lao" ] = "lao", + ["lat" ] = "latin", + ["laz" ] = "laz", + ["lcr" ] = "l-cree", + ["ldk" ] = "ladakhi", + ["lez" ] = "lezgi", + ["lij" ] = "ligurian", + ["lim" ] = "limburgish", + ["lin" ] = "lingala", + ["lis" ] = "lisu", + ["ljp" ] = "lampung", + ["lki" ] = "laki", + ["lma" ] = "low mari", + ["lmb" ] = "limbu", + ["lmo" ] = "lombard", + ["lmw" ] = "lomwe", + ["lom" ] = "loma", + ["lrc" ] = "luri", + ["lsb" ] = "lower sorbian", + ["lsm" ] = "lule sami", + ["lth" ] = "lithuanian", + ["ltz" ] = "luxembourgish", + ["lua" ] = "luba-lulua", + ["lub" ] = "luba-katanga", + ["lug" ] = "ganda", + ["luh" ] = "luyia", + ["luo" ] = "luo", + ["lvi" ] = "latvian", + ["mad" ] = "madura", + ["mag" ] = "magahi", + ["mah" 
] = "marshallese", + ["maj" ] = "majang", + ["mak" ] = "makhuwa", + ["mal" ] = "malayalam reformed", + ["mam" ] = "mam", + ["man" ] = "mansi", + ["map" ] = "mapudungun", + ["mar" ] = "marathi", + ["maw" ] = "marwari", + ["mbn" ] = "mbundu", + ["mch" ] = "manchu", + ["mcr" ] = "moose cree", + ["mde" ] = "mende", + ["mdr" ] = "mandar", + ["men" ] = "me'en", + ["mer" ] = "meru", + ["mfe" ] = "morisyen", + ["min" ] = "minangkabau", + ["miz" ] = "mizo", + ["mkd" ] = "macedonian", + ["mkr" ] = "makasar", + ["mkw" ] = "kituba", + ["mle" ] = "male", + ["mlg" ] = "malagasy", + ["mln" ] = "malinke", + ["mly" ] = "malay", + ["mnd" ] = "mandinka", + ["mng" ] = "mongolian", + ["mni" ] = "manipuri", + ["mnk" ] = "maninka", + ["mnx" ] = "manx", + ["moh" ] = "mohawk", + ["mok" ] = "moksha", + ["mol" ] = "moldavian", + ["mon" ] = "mon", + ["mor" ] = "moroccan", + ["mos" ] = "mossi", + ["mri" ] = "maori", + ["mth" ] = "maithili", + ["mts" ] = "maltese", + ["mun" ] = "mundari", + ["mus" ] = "muscogee", + ["mwl" ] = "mirandese", + ["mww" ] = "hmong daw", + ["myn" ] = "mayan", + ["mzn" ] = "mazanderani", + ["nag" ] = "naga-assamese", + ["nah" ] = "nahuatl", + ["nan" ] = "nanai", + ["nap" ] = "neapolitan", + ["nas" ] = "naskapi", + ["nau" ] = "nauruan", + ["nav" ] = "navajo", + ["ncr" ] = "n-cree", + ["ndb" ] = "ndebele", + ["ndc" ] = "ndau", + ["ndg" ] = "ndonga", + ["nds" ] = "low saxon", + ["nep" ] = "nepali", + ["new" ] = "newari", + ["nga" ] = "ngbaka", + ["ngr" ] = "nagari", + ["nhc" ] = "norway house cree", + ["nis" ] = "nisi", + ["niu" ] = "niuean", + ["nkl" ] = "nyankole", + ["nko" ] = "n'ko", + ["nld" ] = "dutch", + ["noe" ] = "nimadi", + ["nog" ] = "nogai", + ["nor" ] = "norwegian", + ["nov" ] = "novial", + ["nsm" ] = "northern sami", + ["nso" ] = "sotho, northern", + ["nta" ] = "northern tai", + ["nto" ] = "esperanto", + ["nym" ] = "nyamwezi", + ["nyn" ] = "norwegian nynorsk", + ["oci" ] = "occitan", + ["ocr" ] = "oji-cree", + ["ojb" ] = "ojibway", + ["ori" ] = "odia", + ["oro" ] = "oromo", + ["oss" ] = "ossetian", + ["paa" ] = "palestinian aramaic", + ["pag" ] = "pangasinan", + ["pal" ] = "pali", + ["pam" ] = "pampangan", + ["pan" ] = "punjabi", + ["pap" ] = "palpa", + ["pap0"] = "papiamentu", + ["pas" ] = "pashto", + ["pau" ] = "palauan", + ["pcc" ] = "bouyei", + ["pcd" ] = "picard", + ["pdc" ] = "pennsylvania german", + ["pgr" ] = "polytonic greek", + ["phk" ] = "phake", + ["pih" ] = "norfolk", + ["pil" ] = "filipino", + ["plg" ] = "palaung", + ["plk" ] = "polish", + ["pms" ] = "piemontese", + ["pnb" ] = "western panjabi", + ["poh" ] = "pocomchi", + ["pon" ] = "pohnpeian", + ["pro" ] = "provencal", + ["ptg" ] = "portuguese", + ["pwo" ] = "western pwo karen", + ["qin" ] = "chin", + ["quc" ] = "k’iche’", + ["quh" ] = "quechua (bolivia)", + ["quz" ] = "quechua", + ["qvi" ] = "quechua (ecuador)", + ["qwh" ] = "quechua (peru)", + ["raj" ] = "rajasthani", + ["rar" ] = "rarotongan", + ["rbu" ] = "russian buriat", + ["rcr" ] = "r-cree", + ["rej" ] = "rejang", + ["ria" ] = "riang", + ["rif" ] = "tarifit", + ["rit" ] = "ritarungo", + ["rkw" ] = "arakwal", + ["rms" ] = "romansh", + ["rmy" ] = "vlax romani", + ["rom" ] = "romanian", + ["roy" ] = "romany", + ["rsy" ] = "rusyn", + ["rtm" ] = "rotuman", + ["rua" ] = "kinyarwanda", + ["run" ] = "rundi", + ["rup" ] = "aromanian", + ["rus" ] = "russian", + ["sad" ] = "sadri", + ["san" ] = "sanskrit", + ["sas" ] = "sasak", + ["sat" ] = "santali", + ["say" ] = "sayisi", + ["scn" ] = "sicilian", + ["sco" ] = "scots", + ["sek" ] = "sekota", + ["sel" ] = "selkup", + 
["sga" ] = "old irish", + ["sgo" ] = "sango", + ["sgs" ] = "samogitian", + ["shi" ] = "tachelhit", + ["shn" ] = "shan", + ["sib" ] = "sibe", + ["sid" ] = "sidamo", + ["sig" ] = "silte gurage", + ["sks" ] = "skolt sami", + ["sky" ] = "slovak", + ["sla" ] = "slavey", + ["slv" ] = "slovenian", + ["sml" ] = "somali", + ["smo" ] = "samoan", + ["sna" ] = "sena", + ["sna0"] = "shona", + ["snd" ] = "sindhi", + ["snh" ] = "sinhala (sinhalese)", + ["snk" ] = "soninke", + ["sog" ] = "sodo gurage", + ["sop" ] = "songe", + ["sot" ] = "sotho, southern", + ["sqi" ] = "albanian", + ["srb" ] = "serbian", + ["srd" ] = "sardinian", + ["srk" ] = "saraiki", + ["srr" ] = "serer", + ["ssl" ] = "south slavey", + ["ssm" ] = "southern sami", + ["stq" ] = "saterland frisian", + ["suk" ] = "sukuma", + ["sun" ] = "sundanese", + ["sur" ] = "suri", + ["sva" ] = "svan", + ["sve" ] = "swedish", + ["swa" ] = "swadaya aramaic", + ["swk" ] = "swahili", + ["swz" ] = "swati", + ["sxt" ] = "sutu", + ["sxu" ] = "upper saxon", + ["syl" ] = "sylheti", + ["syr" ] = "syriac", + ["szl" ] = "silesian", + ["tab" ] = "tabasaran", + ["taj" ] = "tajiki", + ["tam" ] = "tamil", + ["tat" ] = "tatar", + ["tcr" ] = "th-cree", + ["tdd" ] = "dehong dai", + ["tel" ] = "telugu", + ["tet" ] = "tetum", + ["tgl" ] = "tagalog", + ["tgn" ] = "tongan", + ["tgr" ] = "tigre", + ["tgy" ] = "tigrinya", + ["tha" ] = "thai", + ["tht" ] = "tahitian", + ["tib" ] = "tibetan", + ["tiv" ] = "tiv", + ["tkm" ] = "turkmen", + ["tmh" ] = "tamashek", + ["tmn" ] = "temne", + ["tna" ] = "tswana", + ["tne" ] = "tundra nenets", + ["tng" ] = "tonga", + ["tod" ] = "todo", + ["tod0"] = "toma", + ["tpi" ] = "tok pisin", + ["trk" ] = "turkish", + ["tsg" ] = "tsonga", + ["tua" ] = "turoyo aramaic", + ["tul" ] = "tulu", + ["tuv" ] = "tuvin", + ["tvl" ] = "tuvalu", + ["twi" ] = "twi", + ["tyz" ] = "tày", + ["tzm" ] = "tamazight", + ["tzo" ] = "tzotzil", + ["udm" ] = "udmurt", + ["ukr" ] = "ukrainian", + ["umb" ] = "umbundu", + ["urd" ] = "urdu", + ["usb" ] = "upper sorbian", + ["uyg" ] = "uyghur", + ["uzb" ] = "uzbek", + ["vec" ] = "venetian", + ["ven" ] = "venda", + ["vit" ] = "vietnamese", + ["vol" ] = "volapük", + ["vro" ] = "võro", + ["wa" ] = "wa", + ["wag" ] = "wagdi", + ["war" ] = "waray-waray", + ["wcr" ] = "west-cree", + ["wel" ] = "welsh", + ["wlf" ] = "wolof", + ["wln" ] = "walloon", + ["xbd" ] = "lü", + ["xhs" ] = "xhosa", + ["xjb" ] = "minjangbal", + ["xog" ] = "soga", + ["xpe" ] = "kpelle (liberia)", + ["yak" ] = "sakha", + ["yao" ] = "yao", + ["yap" ] = "yapese", + ["yba" ] = "yoruba", + ["ycr" ] = "y-cree", + ["yic" ] = "yi classic", + ["yim" ] = "yi modern", + ["zea" ] = "zealandic", + ["zgh" ] = "standard morrocan tamazigh", + ["zha" ] = "zhuang", + ["zhh" ] = "chinese, hong kong sar", + ["zhp" ] = "chinese phonetic", + ["zhs" ] = "chinese simplified", + ["zht" ] = "chinese traditional", + ["znd" ] = "zande", + ["zul" ] = "zulu", + ["zza" ] = "zazaki", } local features = allocate { @@ -506,8 +741,10 @@ local features = allocate { ['calt'] = 'contextual alternates', ['case'] = 'case-sensitive forms', ['ccmp'] = 'glyph composition/decomposition', + ['cfar'] = 'conjunct form after ro', ['cjct'] = 'conjunct forms', ['clig'] = 'contextual ligatures', + ['cpct'] = 'centered cjk punctuation', ['cpsp'] = 'capital spacing', ['cswh'] = 'contextual swash', ['curs'] = 'cursive positioning', @@ -547,6 +784,8 @@ local features = allocate { ['ljmo'] = 'leading jamo forms', ['lnum'] = 'lining figures', ['locl'] = 'localized forms', + ['ltra'] = 'left-to-right alternates', + 
['ltrm'] = 'left-to-right mirrored forms', ['mark'] = 'mark positioning', ['med2'] = 'medial forms #2', ['medi'] = 'medial forms', @@ -563,6 +802,7 @@ local features = allocate { ['ornm'] = 'ornaments', ['palt'] = 'proportional alternate width', ['pcap'] = 'petite capitals', + ['pkna'] = 'proportional kana', ['pnum'] = 'proportional figures', ['pref'] = 'pre-base forms', ['pres'] = 'pre-base substitutions', @@ -571,6 +811,7 @@ local features = allocate { ['pwid'] = 'proportional widths', ['qwid'] = 'quarter widths', ['rand'] = 'randomize', + ['rclt'] = 'required contextual alternates', ['rkrf'] = 'rakar forms', ['rlig'] = 'required ligatures', ['rphf'] = 'reph form', @@ -604,6 +845,7 @@ local features = allocate { -- ['ss19'] = 'stylistic set 19', -- ['ss20'] = 'stylistic set 20', ['ssty'] = 'script style', -- math + ['stch'] = 'stretching glyph decomposition', ['subs'] = 'subscript', ['sups'] = 'superscript', ['swsh'] = 'swash', diff --git a/tex/context/base/font-syn.lua b/tex/context/base/font-syn.lua index e625419ee..da9c19967 100644 --- a/tex/context/base/font-syn.lua +++ b/tex/context/base/font-syn.lua @@ -9,6 +9,11 @@ if not modules then modules = { } end modules ['font-syn'] = { -- todo: subs in lookups requests -- todo: see if the (experimental) lua reader (on my machine) be used (it's a bit slower so maybe wait till lua 5.3) +-- identifying ttf/otf/ttc/afm : 2200 fonts: +-- +-- old ff loader: 140 sec +-- new lua loader: 5 sec + local next, tonumber, type, tostring = next, tonumber, type, tostring local sub, gsub, lower, match, find, lower, upper = string.sub, string.gsub, string.lower, string.match, string.find, string.lower, string.upper local find, gmatch = string.find, string.gmatch @@ -17,6 +22,7 @@ local serialize, sortedhash = table.serialize, table.sortedhash local lpegmatch = lpeg.match local unpack = unpack or table.unpack local formatters, topattern = string.formatters, string.topattern +local round = math.round local allocate = utilities.storage.allocate local sparse = utilities.storage.sparse @@ -48,6 +54,7 @@ local settings_to_hash = utilities.parsers.settings_to_hash_tolerant local trace_names = false trackers.register("fonts.names", function(v) trace_names = v end) local trace_warnings = false trackers.register("fonts.warnings", function(v) trace_warnings = v end) local trace_specifications = false trackers.register("fonts.specifications", function(v) trace_specifications = v end) +local trace_rejections = false trackers.register("fonts.rejections", function(v) trace_rejections = v end) local report_names = logs.reporter("fonts","names") @@ -354,9 +361,9 @@ filters.dfont = get_font_info -- pfminfo = fields.pfminfo and ff.pfminfo, -- top_side_bearing = fields.top_side_bearing and ff.top_side_bearing, -- } --- setmetatableindex(d,function(t,k) --- report_names("warning, trying to access field %a in font table of %a",k,name) --- end) +-- -- setmetatableindex(d,function(t,k) +-- -- report_names("warning, trying to access field %a in font table of %a",k,name) +-- -- end) -- close_font(ff) -- return d -- else @@ -390,9 +397,12 @@ local function get_full_info(name) pfminfo = fields.pfminfo and ff.pfminfo, top_side_bearing = fields.top_side_bearing and ff.top_side_bearing, -- not there } - setmetatableindex(d,function(t,k) - report_names("warning, trying to access field %a in font table of %a",k,name) - end) + if d.italicangle then + d.italicangle = round(1000*d.italicangle)/1000 + end + -- setmetatableindex(d,function(t,k) + -- report_names("warning, trying to access field 
%a in font table of %a",k,name) + -- end) close_font(ff) return d else @@ -462,6 +472,8 @@ filters.list = { -- "ttc", "otf", "ttf", "dfont", "afm", } +-- to be considered: loop over paths per list entry (so first all otf ttf etc) + names.fontconfigfile = "fonts.conf" -- a bit weird format, bonus feature names.osfontdirvariable = "OSFONTDIR" -- the official way, in minimals etc @@ -555,6 +567,10 @@ local function cleanfilename(fullname,defaultsuffix) end end +local sorter = function(a,b) + return a > b -- to be checked +end + names.cleanname = cleanname names.cleanfilename = cleanfilename @@ -568,6 +584,7 @@ local function check_names(result) end end end + return result end local function walk_tree(pathlist,suffix,identify) @@ -588,21 +605,33 @@ local function walk_tree(pathlist,suffix,identify) end end +-- "typographicfamily", -- preffamilyname +-- "typographicsubfamily", -- prefmodifiers + local function check_name(data,result,filename,modification,suffix,subfont) -- shortcuts local specifications = data.specifications -- prepare local names = check_names(result) -- fetch - local familyname = names and names.preffamilyname or result.familyname - local fullname = names and names.fullname or result.fullname +-- if string.find(string.lower(filename),"ebgaramond") then +-- inspect(result) +-- inspect(names) +-- end + +if string.find(filename,"avkv") then + inspect(result) +end + + local familyname = names and names.preffamilyname or result.familyname + local fullname = names and names.fullname or result.fullname local fontname = result.fontname - local subfamily = names and names.subfamily - local modifiers = names and names.prefmodifiers - local weight = names and names.weight or result.weight + local subfamily = names and names.subfamily or result.subfamily + local modifiers = names and names.prefmodifiers or result.modifiers + local weight = names and names.weight or result.weight local italicangle = tonumber(result.italicangle) - local subfont = subfont or nil - local rawname = fullname or fontname or familyname + local subfont = subfont or nil + local rawname = fullname or fontname or familyname local filebase = removesuffix(basename(filename)) local cleanfilename = cleanname(filebase) -- for WS -- normalize @@ -637,15 +666,15 @@ local function check_name(data,result,filename,modification,suffix,subfont) fontname = fontname or fullname or familyname or filebase -- maybe cleanfilename fullname = fullname or fontname familyname = familyname or fontname - -- we do these sparse - local units = result.units_per_em or 1000 -- can be zero too - local minsize = result.design_range_bottom or 0 - local maxsize = result.design_range_top or 0 - local designsize = result.design_size or 0 - local angle = result.italicangle or 0 + -- we do these sparse -- todo: check table type or change names in ff loader + local units = result.units_per_em or result.emunits or 1000 -- can be zero too + local minsize = result.design_range_bottom or result.mindesignsize or 0 + local maxsize = result.design_range_top or result.maxdesignsize or 0 + local designsize = result.design_size or result.designsize or 0 + local angle = result.italicangle or 0 local pfminfo = result.pfminfo - local pfmwidth = pfminfo and pfminfo.width or 0 - local pfmweight = pfminfo and pfminfo.weight or 0 + local pfmwidth = (pfminfo and pfminfo.width ) or result.pfmwidth or 0 + local pfmweight = (pfminfo and pfminfo.weight) or result.pfmweight or 0 -- specifications[#specifications + 1] = { filename = filename, -- unresolved @@ -908,10 +937,6 @@ 
local function checkduplicates() checkduplicate("fallbacks") end -local sorter = function(a,b) - return a > b -- to be checked -end - local function sorthashes() local data = names.data local list = filters.list @@ -936,23 +961,28 @@ local function unpackreferences() local data = names.data local specifications = data.specifications if specifications then - for k, v in next, data.families do +-- for k, v in next, data.families do + for k, v in sortedhash(data.families) do for i=1,#v do v[i] = specifications[v[i]] end end local mappings = data.mappings if mappings then - for _, m in next, mappings do - for k, v in next, m do +-- for _, m in next, mappings do + for _, m in sortedhash(mappings) do +-- for k, v in next, m do + for k, v in sortedhash(m) do m[k] = specifications[v] end end end local fallbacks = data.fallbacks if fallbacks then - for _, f in next, fallbacks do - for k, v in next, f do +-- for _, f in next, fallbacks do + for _, f in sortedhash(fallbacks) do +-- for k, v in next, f do + for k, v in sortedhash(f) do f[k] = specifications[v] end end @@ -979,31 +1009,34 @@ local function analyzefiles(olddata) local oldspecifications = olddata and olddata.specifications or { } local oldrejected = olddata and olddata.rejected or { } local treatmentdata = treatments.data or { } -- when used outside context + local function identify(completename,name,suffix,storedname) local pathpart, basepart = splitbase(completename) nofread = nofread + 1 local treatment = treatmentdata[completename] or treatmentdata[basepart] if treatment and treatment.ignored then - if trace_names then + if trace_names or trace_rejections then report_names("%s font %a is ignored, reason %a",suffix,completename,treatment.comment or "unknown") end nofskipped = nofskipped + 1 elseif done[name] then - -- already done (avoid otf afm clash) - if trace_names then - report_names("%s font %a already done",suffix,completename) + if lower(completename) ~= lower(done[name]) then + -- already done (avoid otf afm clash) + if trace_names or trace_rejections then + report_names("%s font %a already done as %a",suffix,completename,done[name]) + end + nofduplicates = nofduplicates + 1 + nofskipped = nofskipped + 1 end - nofduplicates = nofduplicates + 1 - nofskipped = nofskipped + 1 elseif not exists(completename) then -- weird error - if trace_names then + if trace_names or trace_rejections then report_names("%s font %a does not really exist",suffix,completename) end nofskipped = nofskipped + 1 elseif not is_qualified_path(completename) and findfile(completename,suffix) == "" then -- not locatable by backend anyway - if trace_names then + if trace_names or trace_rejections then report_names("%s font %a cannot be found by backend",suffix,completename) end nofskipped = nofskipped + 1 @@ -1011,7 +1044,7 @@ local function analyzefiles(olddata) if #skip_paths > 0 then for i=1,#skip_paths do if find(pathpart,skip_paths[i]) then - if trace_names then + if trace_names or trace_rejections then report_names("rejecting path of %s font %a",suffix,completename) end nofskipped = nofskipped + 1 @@ -1023,7 +1056,7 @@ local function analyzefiles(olddata) for i=1,#skip_paths do if find(basepart,skip_names[i]) then done[name] = true - if trace_names then + if trace_names or trace_rejections then report_names("rejecting name of %s font %a",suffix,completename) end nofskipped = nofskipped + 1 @@ -1078,7 +1111,7 @@ local function analyzefiles(olddata) report_names("error when identifying %s font %a, %s",suffix,completename,message or "unknown") end end - 
done[name] = true + done[name] = completename end logs.flush() -- a bit overkill for each font, maybe not needed here end @@ -1881,7 +1914,7 @@ local lastlookups, lastpattern = { }, "" -- end local function look_them_up(lookups,specification) - for key, value in next, specification do + for key, value in sortedhash(specification) do local t, n = { }, 0 if find(value,"*",1,true) then value = topattern(value) @@ -2003,7 +2036,7 @@ function names.register(files) local list, commonname = files.list, files.name if list then local n, m = 0, 0 - for filename, filespec in next, list do + for filename, filespec in sortedhash(list) do local name = lower(filespec.name or commonname) if name and name ~= "" then local style = normalized_styles [lower(filespec.style or "normal")] @@ -2115,3 +2148,17 @@ end -- end -- -- inspect(newhash) + +-- example made for luatex list (unlikely to be used): +-- +-- local command = [[reg QUERY "HKLM\SOFTWARE\Microsoft\Windows NT\CurrentVersion\Fonts"]] +-- local pattern = ".-[\n\r]+%s+(.-)%s%(([^%)]+)%)%s+REG_SZ%s+(%S+)%s+" +-- +-- local function getnamesfromregistry() +-- local data = os.resultof(command) +-- local list = { } +-- for name, format, filename in string.gmatch(data,pattern) do +-- list[name] = filename +-- end +-- return list +-- end diff --git a/tex/context/base/l-lpeg.lua b/tex/context/base/l-lpeg.lua index 55a0d8929..5be12468b 100644 --- a/tex/context/base/l-lpeg.lua +++ b/tex/context/base/l-lpeg.lua @@ -82,7 +82,7 @@ local lpegtype, lpegmatch, lpegprint = lpeg.type, lpeg.match, lpeg.print -- let's start with an inspector: if setinspector then - setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end) + setinspector("lpeg",function(v) if lpegtype(v) then lpegprint(v) return true end end) end -- Beware, we predefine a bunch of patterns here and one reason for doing so diff --git a/tex/context/base/l-lua.lua b/tex/context/base/l-lua.lua index 1a2a98723..cb6182907 100644 --- a/tex/context/base/l-lua.lua +++ b/tex/context/base/l-lua.lua @@ -129,22 +129,36 @@ local print, select, tostring = print, select, tostring local inspectors = { } -function setinspector(inspector) -- global function - inspectors[#inspectors+1] = inspector +function setinspector(kind,inspector) -- global function + inspectors[kind] = inspector end function inspect(...) -- global function for s=1,select("#",...) do local value = select(s,...) - local done = false - for i=1,#inspectors do - done = inspectors[i](value) - if done then - break + if value == nil then + print("nil") + else + local done = false + -- type driven (table) + local kind = type(value) + local inspector = inspectors[kind] + if inspector then + done = inspector(value) + if done then + break + end + end + -- whatever driven (token, node, ...) + for kind, inspector in next, inspectors do + done = inspector(value) + if done then + break + end + end + if not done then + print(tostring(value)) end - end - if not done then - print(tostring(value)) end end end diff --git a/tex/context/base/l-string.lua b/tex/context/base/l-string.lua index 70c66f661..e9dc2bbbc 100644 --- a/tex/context/base/l-string.lua +++ b/tex/context/base/l-string.lua @@ -192,10 +192,11 @@ string.itself = function(s) return s end -- also handy (see utf variant) -local pattern = Ct(C(1)^0) -- string and not utf ! +local pattern_c = Ct( C(1) ^0) -- string and not utf ! 
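-- A minimal usage sketch of the keyed setinspector/inspect pair from l-lua.lua
-- above: inspectors are now registered per kind ("table", "lpeg", "node",
-- "token", ...) and inspect() first tries the type-driven entry before falling
-- back to the others; "mykind" below is a made-up name used only to illustrate
-- the call:
--
-- setinspector("mykind",function(v)
--     if type(v) == "userdata" then
--         print("<some userdata>")
--         return true -- handled, so inspect stops looking further
--     end
-- end)
--
-- inspect({ 1, 2, 3 }) -- handled by the "table" inspector from l-table.lua
-- inspect(nil)         -- now simply prints "nil"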
+local pattern_b = Ct((C(1)/byte)^0) -function string.totable(str) - return lpegmatch(pattern,str) +function string.totable(str,bytes) + return lpegmatch(bytes and pattern_b or pattern_c,str) end -- handy from within tex: diff --git a/tex/context/base/l-table.lua b/tex/context/base/l-table.lua index b02f210cb..552097e1c 100644 --- a/tex/context/base/l-table.lua +++ b/tex/context/base/l-table.lua @@ -1144,7 +1144,7 @@ function table.print(t,...) end if setinspector then - setinspector(function(v) if type(v) == "table" then serialize(print,v,"table") return true end end) + setinspector("table",function(v) if type(v) == "table" then serialize(print,v,"table") return true end end) end -- -- -- obsolete but we keep them for a while and might comment them later -- -- -- diff --git a/tex/context/base/luat-lib.mkiv b/tex/context/base/luat-lib.mkiv index 24f9da415..62a53c5dc 100644 --- a/tex/context/base/luat-lib.mkiv +++ b/tex/context/base/luat-lib.mkiv @@ -15,6 +15,7 @@ \registerctxluafile{util-str}{1.001} \registerctxluafile{util-tab}{1.001} +\registerctxluafile{util-fil}{1.001} \registerctxluafile{util-sto}{1.001} % could also be done in trac-deb.mkiv \registerctxluafile{util-pck}{1.001} \registerctxluafile{util-seq}{1.001} diff --git a/tex/context/base/lxml-tex.lua b/tex/context/base/lxml-tex.lua index 550a06a18..4c995acdb 100644 --- a/tex/context/base/lxml-tex.lua +++ b/tex/context/base/lxml-tex.lua @@ -1940,10 +1940,12 @@ function lxml.applyselectors(id) local categories = options_to_hash(rest) if categories["begin"] then local okay = false - for k, v in next, permitted do - if categories[k] then - okay = k - break + if permitted then + for k, v in next, permitted do + if categories[k] then + okay = k + break + end end end if not trace_selectors then @@ -1982,10 +1984,12 @@ function lxml.applyselectors(id) local categories = options_to_hash(rest) if categories["begin"] then local okay = false - for k, v in next, permitted do - if categories[k] then - okay = k - break + if permitted then + for k, v in next, permitted do + if categories[k] then + okay = k + break + end end end if not trace_selectors then diff --git a/tex/context/base/math-fbk.lua b/tex/context/base/math-fbk.lua index 76dd1ad9b..ab3a726d1 100644 --- a/tex/context/base/math-fbk.lua +++ b/tex/context/base/math-fbk.lua @@ -12,6 +12,7 @@ local report_fallbacks = logs.reporter("math","fallbacks") local formatters = string.formatters local fastcopy = table.fastcopy +local byte = string.byte local fallbacks = { } mathematics.fallbacks = fallbacks @@ -421,6 +422,53 @@ end virtualcharacters[0xFE33E] = virtualcharacters[0x203E] -- convenient virtualcharacters[0xFE33F] = virtualcharacters[0x203E] -- convenient +-- spacing + +local c_zero = byte('0') +local c_period = byte('.') + +local function spacefraction(data,fraction) + local width = fraction * data.target.parameters.space + return { + width = width, + commands = { right = width } + } +end + +local function charfraction(data,char) + local width = data.target.characters[char].width + return { + width = width, + commands = { right = width } + } +end + +local function quadfraction(data,fraction) + local width = fraction * data.target.parameters.quad + return { + width = width, + commands = { right = width } + } +end + +virtualcharacters[0x00A0] = function(data) return spacefraction(data,1) end -- nbsp +virtualcharacters[0x2000] = function(data) return quadfraction (data,1/2) end -- enquad +virtualcharacters[0x2001] = function(data) return quadfraction (data,1) end -- emquad 
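-- A small sketch of the extended string.totable from l-string.lua above: the
-- optional second argument switches the result from characters to byte values,
-- matching the two lpeg patterns pattern_c and pattern_b:
--
-- string.totable("abc")      -- { "a", "b", "c" }
-- string.totable("abc",true) -- { 97, 98, 99 }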
+virtualcharacters[0x2002] = function(data) return quadfraction (data,1/2) end -- enspace +virtualcharacters[0x2003] = function(data) return quadfraction (data,1) end -- emspace +virtualcharacters[0x2004] = function(data) return quadfraction (data,1/3) end -- threeperemspace +virtualcharacters[0x2005] = function(data) return quadfraction (data,1/4) end -- fourperemspace +virtualcharacters[0x2006] = function(data) return quadfraction (data,1/6) end -- sixperemspace +virtualcharacters[0x2007] = function(data) return charfraction (data,c_zero) end -- figurespace +virtualcharacters[0x2008] = function(data) return charfraction (data,c_period) end -- punctuationspace +virtualcharacters[0x2009] = function(data) return quadfraction (data,1/8) end -- breakablethinspace +virtualcharacters[0x200A] = function(data) return quadfraction (data,1/8) end -- hairspace +virtualcharacters[0x200B] = function(data) return quadfraction (data,0) end -- zerowidthspace +virtualcharacters[0x202F] = function(data) return quadfraction (data,1/8) end -- narrownobreakspace +virtualcharacters[0x205F] = function(data) return spacefraction(data,1/2) end -- math thinspace + +-- + local function smashed(data,unicode,swap,private) local target = data.target local original = data.original diff --git a/tex/context/base/meta-tex.mkiv b/tex/context/base/meta-tex.mkiv index 0f5a27ff8..9548afea1 100644 --- a/tex/context/base/meta-tex.mkiv +++ b/tex/context/base/meta-tex.mkiv @@ -142,4 +142,12 @@ \unexpanded\def\MPformatted #1#2{\clf_metapostformatted{#1}{#2}} \unexpanded\def\MPgraphformat#1#2{\clf_metapostgraphformat{#1}{#2}} +%D Some new fun (but exprimental for a while): + +\unexpanded\def\MPLIBoutlinetext#1#2#3% index kind text + {\begingroup + \setbox\scratchbox\hbox{#3}% + \clf_MPLIBconvertoutlinetext\numexpr#1\relax{#2}\scratchbox + \endgroup} + \protect \endinput diff --git a/tex/context/base/mlib-lua.lua b/tex/context/base/mlib-lua.lua index 7e0568463..e7f8f9cc5 100644 --- a/tex/context/base/mlib-lua.lua +++ b/tex/context/base/mlib-lua.lua @@ -19,12 +19,13 @@ local lpegmatch = lpeg.match local P, S, Ct = lpeg.P, lpeg.S, lpeg.Ct -local report_luarun = logs.reporter("metapost","lua") +local report_luarun = logs.reporter("metapost","lua") +local report_message = logs.reporter("metapost") -local trace_luarun = false trackers.register("metapost.lua",function(v) trace_luarun = v end) -local trace_enabled = true +local trace_luarun = false trackers.register("metapost.lua",function(v) trace_luarun = v end) +local trace_enabled = true -local be_tolerant = true directives.register("metapost.lua.tolerant",function(v) be_tolerant = v end) +local be_tolerant = true directives.register("metapost.lua.tolerant",function(v) be_tolerant = v end) mp = mp or { } -- system namespace MP = MP or { } -- user namespace @@ -187,8 +188,10 @@ function mp.quoted(fmt,s,...) fmt = lpegmatch(replacer,fmt) end buffer[n] = '"' .. formatters[fmt](s,...) .. '"' - else + elseif fmt then buffer[n] = '"' .. fmt .. 
'"' + else + -- something is wrong end end @@ -381,3 +384,11 @@ function mp.tt_dimensions(n) mptriplet(0,0,0) end end + +function mp.report(a,b) + if b then + report_message("%s : %s",a,b) + elseif a then + report_message("%s : %s","message",a) + end +end diff --git a/tex/context/base/mlib-pdf.lua b/tex/context/base/mlib-pdf.lua index 8de09f42a..fb76a52e3 100644 --- a/tex/context/base/mlib-pdf.lua +++ b/tex/context/base/mlib-pdf.lua @@ -428,6 +428,8 @@ function metapost.flush(result,flusher,askedfig) result[#result+1] = "q" if objects then resetplugins(result) -- we should move the colorinitializer here +local savedpath = nil +local savedhtap = nil for o=1,#objects do local object = objects[o] local objecttype = object.type @@ -466,95 +468,129 @@ function metapost.flush(result,flusher,askedfig) }) -- first we analyze local before, after = processplugins(object) - local objecttype = object.type -- can have changed - if before then - result = pluginactions(before,result,flushfigure) - end - local ml = object.miterlimit - if ml and ml ~= miterlimit then - miterlimit = ml - result[#result+1] = f_M(ml) - end - local lj = object.linejoin - if lj and lj ~= linejoin then - linejoin = lj - result[#result+1] = f_j(lj) - end - local lc = object.linecap - if lc and lc ~= linecap then - linecap = lc - result[#result+1] = f_J(lc) - end - local dl = object.dash - if dl then - local d = f_d(concat(dl.dashes or {}," "),dl.offset) - if d ~= dashed then - dashed = d - result[#result+1] = d - end - elseif dashed ~= false then -- was just dashed test - result[#result+1] = "[] 0 d" - dashed = false - end - local path = object.path -- newpath - local transformed, penwidth = false, 1 - local open = path and path[1].left_type and path[#path].right_type -- at this moment only "end_point" - local pen = object.pen - if pen then - if pen.type == 'elliptical' then - transformed, penwidth = pen_characteristics(original) -- boolean, value - result[#result+1] = f_w(penwidth) -- todo: only if changed - if objecttype == 'fill' then - objecttype = 'both' - end - else -- calculated by mplib itself - objecttype = 'fill' - end - end - if transformed then - result[#result+1] = "q" - end local evenodd = not object.istext and object.postscript == "evenodd" - if path then - if transformed then - flushconcatpath(path,result,open) + local collect = not object.istext and object.postscript == "collect" + if collect then + if not savedpath then + savedpath = { object.path or false } + savedhtap = { object.htap or false } else - flushnormalpath(path,result,open) + savedpath[#savedpath+1] = object.path or false + savedhtap[#savedhtap+1] = object.htap or false end - if objecttype == "fill" then - result[#result+1] = evenodd and "h f*" or "h f" -- f* = eo - elseif objecttype == "outline" then - result[#result+1] = open and "S" or "h S" - elseif objecttype == "both" then - result[#result+1] = evenodd and "h B*" or "h B"-- B* = eo -- b includes closepath + else + local objecttype = object.type -- can have changed + if before then + result = pluginactions(before,result,flushfigure) end - end - if transformed then - result[#result+1] = "Q" - end - local path = object.htap - if path then - if transformed then - result[#result+1] = "q" + local ml = object.miterlimit + if ml and ml ~= miterlimit then + miterlimit = ml + result[#result+1] = f_M(ml) + end + local lj = object.linejoin + if lj and lj ~= linejoin then + linejoin = lj + result[#result+1] = f_j(lj) + end + local lc = object.linecap + if lc and lc ~= linecap then + linecap = lc + 
result[#result+1] = f_J(lc) + end + local dl = object.dash + if dl then + local d = f_d(concat(dl.dashes or {}," "),dl.offset) + if d ~= dashed then + dashed = d + result[#result+1] = d + end + elseif dashed ~= false then -- was just dashed test + result[#result+1] = "[] 0 d" + dashed = false + end + local path = object.path -- newpath + local transformed, penwidth = false, 1 + local open = path and path[1].left_type and path[#path].right_type -- at this moment only "end_point" + local pen = object.pen + if pen then + if pen.type == 'elliptical' then + transformed, penwidth = pen_characteristics(original) -- boolean, value + result[#result+1] = f_w(penwidth) -- todo: only if changed + if objecttype == 'fill' then + objecttype = 'both' + end + else -- calculated by mplib itself + objecttype = 'fill' + end end if transformed then - flushconcatpath(path,result,open) - else - flushnormalpath(path,result,open) + result[#result+1] = "q" end - if objecttype == "fill" then - result[#result+1] = evenodd and "h f*" or "h f" -- f* = eo - elseif objecttype == "outline" then - result[#result+1] = open and "S" or "h S" - elseif objecttype == "both" then - result[#result+1] = evenodd and "h B*" or "h B"-- B* = eo -- b includes closepath + if path then + if savedpath then + for i=1,#savedpath do + local path = savedpath[i] + if transformed then + flushconcatpath(path,result,open) + else + flushnormalpath(path,result,open) + end + end + savedpath = nil + end + if transformed then + flushconcatpath(path,result,open) + else + flushnormalpath(path,result,open) + end + if objecttype == "fill" then + result[#result+1] = evenodd and "h f*" or "h f" -- f* = eo + elseif objecttype == "outline" then + result[#result+1] = open and "S" or "h S" + elseif objecttype == "both" then + result[#result+1] = evenodd and "h B*" or "h B"-- B* = eo -- b includes closepath + end end if transformed then result[#result+1] = "Q" end - end - if after then - result = pluginactions(after,result,flushfigure) + local path = object.htap + if path then + if transformed then + result[#result+1] = "q" + end + if savedhtap then + for i=1,#savedhtap do + local path = savedhtap[i] + if transformed then + flushconcatpath(path,result,open) + else + flushnormalpath(path,result,open) + end + end + savedhtap = nil + evenodd = true + end + if transformed then + flushconcatpath(path,result,open) + else + flushnormalpath(path,result,open) + end + if objecttype == "fill" then + result[#result+1] = evenodd and "h f*" or "h f" -- f* = eo + elseif objecttype == "outline" then + result[#result+1] = open and "S" or "h S" + elseif objecttype == "both" then + result[#result+1] = evenodd and "h B*" or "h B"-- B* = eo -- b includes closepath + end + if transformed then + result[#result+1] = "Q" + end + end + if after then + result = pluginactions(after,result,flushfigure) + end end if object.grouped then -- can be qQ'd so changes can end up in groups diff --git a/tex/context/base/mlib-pps.lua b/tex/context/base/mlib-pps.lua index ab56699b9..a3a3bd9f6 100644 --- a/tex/context/base/mlib-pps.lua +++ b/tex/context/base/mlib-pps.lua @@ -882,7 +882,7 @@ function metapost.analyzeplugins(object) -- each object (first pass) if top.plugmode then local prescript = object.prescript -- specifications if prescript and #prescript > 0 then - analyzer(object,splitprescript(prescript)) + analyzer(object,splitprescript(prescript) or {}) return top.multipass end end @@ -895,7 +895,7 @@ function metapost.processplugins(object) -- each object (second pass) if prescript and 
#prescript > 0 then local before = { } local after = { } - processor(object,splitprescript(prescript),before,after) + processor(object,splitprescript(prescript) or {},before,after) return #before > 0 and before, #after > 0 and after else local c = object.color @@ -1386,13 +1386,56 @@ local function gr_process(object,prescript,before,after) end end +-- outlines + +local outlinetexts = { } + +local function ot_reset() + outlinetexts = { } +end + +local function ot_analyze(object,prescript) + local ot_stage = prescript.ot_stage + local ot_index = tonumber(prescript.ot_index) + if ot_index and ot_stage == "trial" and not outlinetexts[ot_index] then + local ot_kind = prescript.ot_kind or "" + top.intermediate = true + top.multipass = true + context.MPLIBoutlinetext(ot_index,ot_kind,object.postscript) + end +end + +local function ot_process(object,prescript,before,after) +end + +implement { + name = "MPLIBconvertoutlinetext", + arguments = { "integer", "string", "integer" }, + actions = function(index,kind,box) + local boxtomp = fonts.metapost.boxtomp + if boxtomp then + outlinetexts[index] = boxtomp(box,kind) + else + outlinetexts[index] = "" + end + end +} + +function mp.get_outline_text(index) -- maybe we need a more private namespace + mp.print(outlinetexts[index] or "draw origin;") +end + + -- definitions +appendaction(resetteractions, "system",ot_reset) appendaction(resetteractions, "system",cl_reset) appendaction(resetteractions, "system",tx_reset) +appendaction(processoractions,"system",ot_process) appendaction(processoractions,"system",gr_process) +appendaction(analyzeractions, "system",ot_analyze) appendaction(analyzeractions, "system",tx_analyze) appendaction(analyzeractions, "system",gt_analyze) @@ -1406,6 +1449,18 @@ appendaction(processoractions,"system",tr_process) -- last, as color can be rese appendaction(processoractions,"system",la_process) +-- function metapost.installplugin(reset,analyze,process) +-- if reset then +-- appendaction(resetteractions,"system",reset) +-- end +-- if analyze then +-- appendaction(analyzeractions,"system",analyze) +-- end +-- if process then +-- appendaction(processoractions,"system",process) +-- end +-- end + -- we're nice and set them already resetter = resetteractions .runner diff --git a/tex/context/base/mlib-run.lua b/tex/context/base/mlib-run.lua index 121c32ae9..5ec1f9c6b 100644 --- a/tex/context/base/mlib-run.lua +++ b/tex/context/base/mlib-run.lua @@ -128,7 +128,9 @@ do local function i_finder(name,mode,ftype) -- fake message for mpost.map and metafun.mpvi local specification = url.hashed(name) local finder = finders[specification.scheme] or finders.file - return finder(specification,name,mode,validftype(ftype)) + local found = finder(specification,name,mode,validftype(ftype)) + -- print(found) + return found end local function o_finder(name,mode,ftype) diff --git a/tex/context/base/mult-fun.lua b/tex/context/base/mult-fun.lua index 27aa32055..9af3f05ec 100644 --- a/tex/context/base/mult-fun.lua +++ b/tex/context/base/mult-fun.lua @@ -49,7 +49,7 @@ return { "inverted", "uncolored", "softened", "grayed", "greyed", "onlayer", "along", - "graphictext", "loadfigure", "externalfigure", "figure", "register", + "graphictext", "loadfigure", "externalfigure", "figure", "register", "outlinetext", -- "lua", "withmask", "bitmapimage", "colordecimals", "ddecimal", "dddecimal", "ddddecimal", "textext", "thetextext", "rawtextext", "textextoffset", @@ -106,7 +106,7 @@ return { -- -- "swappointlabels", "normalfill", "normaldraw", "visualizepaths", 
"naturalizepaths", - "drawboundary", "drawwholepath", + "drawboundary", "drawwholepath", "drawpathonly", "visualizeddraw", "visualizedfill", "draworigin", "drawboundingbox", "drawpath", @@ -122,7 +122,7 @@ return { "passvariable", "passarrayvariable", "tostring", "format", "formatted", "startpassingvariable", "stoppassingvariable", -- - "eofill", "eoclip", + "eofill", "eoclip", "nofill", "area", }, } diff --git a/tex/context/base/node-ser.lua b/tex/context/base/node-ser.lua index d7593cec7..99d26b63b 100644 --- a/tex/context/base/node-ser.lua +++ b/tex/context/base/node-ser.lua @@ -128,7 +128,7 @@ end nodes.astable = astable -setinspector(function(v) if is_node(v) then printtable(astable(v),tostring(v)) return true end end) +setinspector("node",function(v) if is_node(v) then printtable(astable(v),tostring(v)) return true end end) -- under construction: diff --git a/tex/context/base/pack-box.mkiv b/tex/context/base/pack-box.mkiv index 6e3bab6f4..690c5a663 100644 --- a/tex/context/base/pack-box.mkiv +++ b/tex/context/base/pack-box.mkiv @@ -657,11 +657,11 @@ \def\pack_layers_set_framed_s[#1][#2][#3]% {\setlayer[#1][\c!width=\wd\nextbox,\c!height=\ht\nextbox,\c!offset=\zeropoint]% - \normalframedwithsettings[\c!location=\v!normal]} % diffrent kind of location + \normalframedwithsettings[\c!location=\v!normal]} % different kind of location \def\pack_layers_set_framed_d[#1][#2][#3]% {\setlayer[#1][\c!width=\wd\nextbox,\c!height=\ht\nextbox,#2,\c!offset=\zeropoint]% - \normalframedwithsettings[\c!location=\v!normal,#2]} % diffrent kind of location + \normalframedwithsettings[\c!location=\v!normal,#2]} % different kind of location \def\pack_layers_set_framed_t[#1][#2][#3]% {\setlayer[#1][#2]% diff --git a/tex/context/base/spac-chr.lua b/tex/context/base/spac-chr.lua index e3fa6d099..84f513242 100644 --- a/tex/context/base/spac-chr.lua +++ b/tex/context/base/spac-chr.lua @@ -272,6 +272,8 @@ local methods = { } +characters.methods = methods + function characters.handler(head) -- todo: use traverse_id head = tonut(head) local current = head diff --git a/tex/context/base/spac-ver.lua b/tex/context/base/spac-ver.lua index d1cf09e17..c48c79c42 100644 --- a/tex/context/base/spac-ver.lua +++ b/tex/context/base/spac-ver.lua @@ -75,6 +75,7 @@ local a_skiporder = attributes.private('skiporder') ----- snap_category = attributes.private('snapcategory') local a_snapmethod = attributes.private('snapmethod') local a_snapvbox = attributes.private('snapvbox') +local a_profilemethod = attributes.private("profilemethod") local nuts = nodes.nuts local tonode = nuts.tonode diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf index c0f05bcaa..0349ed89b 100644 Binary files a/tex/context/base/status-files.pdf and b/tex/context/base/status-files.pdf differ diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf index 77e4c6167..72620604e 100644 Binary files a/tex/context/base/status-lua.pdf and b/tex/context/base/status-lua.pdf differ diff --git a/tex/context/base/toks-ini.lua b/tex/context/base/toks-ini.lua index f8b945890..642f85d94 100644 --- a/tex/context/base/toks-ini.lua +++ b/tex/context/base/toks-ini.lua @@ -49,7 +49,7 @@ if newtoken then tokens.istoken = istoken tokens.astable = astable - setinspector(function(v) if istoken(v) then printtable(astable(v),tostring(v)) return true end end) + setinspector("token",function(v) if istoken(v) then printtable(astable(v),tostring(v)) return true end end) end diff --git a/tex/context/base/trac-vis.lua 
b/tex/context/base/trac-vis.lua index b54182798..1b93ebb2d 100644 --- a/tex/context/base/trac-vis.lua +++ b/tex/context/base/trac-vis.lua @@ -654,7 +654,8 @@ end end end -local g_cache = { } +local g_cache_v = { } +local g_cache_h = { } local tags = { -- userskip = "US", @@ -691,7 +692,7 @@ local function ruledglue(head,current,vertical) local width = getfield(spec,"width") local subtype = getsubtype(current) local amount = formatters["%s:%0.3f"](tags[subtype] or (vertical and "VS") or "HS",width*pt_factor) - local info = g_cache[amount] + local info = (vertical and g_cache_v or g_cache_h)[amount] if info then -- print("glue hit") else @@ -710,7 +711,7 @@ local function ruledglue(head,current,vertical) else info = sometext(amount,l_glue,c_skip_b) end - g_cache[amount] = info + (vertical and g_cache_v or g_cache_h)[amount] = info end info = copy_list(info) if vertical then @@ -720,11 +721,12 @@ local function ruledglue(head,current,vertical) return head, getnext(current) end -local k_cache = { } +local k_cache_v = { } +local k_cache_h = { } local function ruledkern(head,current,vertical) local kern = getfield(current,"kern") - local info = k_cache[kern] + local info = (vertical and k_cache_v or k_cache_h)[kern] if info then -- print("kern hit") else @@ -736,7 +738,7 @@ local function ruledkern(head,current,vertical) else info = sometext(amount,l_kern,c_zero) end - k_cache[kern] = info + (vertical and k_cache_v or k_cache_h)[kern] = info end info = copy_list(info) if vertical then @@ -746,11 +748,12 @@ local function ruledkern(head,current,vertical) return head, getnext(current) end -local p_cache = { } +local p_cache_v = { } +local p_cache_h = { } local function ruledpenalty(head,current,vertical) local penalty = getfield(current,"penalty") - local info = p_cache[penalty] + local info = (vertical and p_cache_v or p_cache_h)[penalty] if info then -- print("penalty hit") else @@ -762,7 +765,7 @@ local function ruledpenalty(head,current,vertical) else info = sometext(amount,l_penalty,c_zero) end - p_cache[penalty] = info + (vertical and p_cache_v or p_cache_h)[penalty] = info end info = copy_list(info) if vertical then @@ -919,14 +922,18 @@ local function freed(cache) end local function cleanup() - local hf, ng, np, nk, nw - nf, f_cache = freed(f_cache) - ng, g_cache = freed(g_cache) - np, p_cache = freed(p_cache) - nk, k_cache = freed(k_cache) - nw, w_cache = freed(w_cache) - nb, b_cache = freed(b_cache) - -- report_visualize("cache cleanup: %s fontkerns, %s skips, %s penalties, %s kerns, %s whatsits, %s boxes",nf,ng,np,nk,nw,nb) + local hf, nw, nb, ng_v, ng_h, np_v, np_h, nk_v, nk_h + nf, f_cache = freed(f_cache) + nw, w_cache = freed(w_cache) + nb, b_cache = freed(b_cache) + ng_v, g_cache_v = freed(g_cache_v) + ng_h, g_cache_h = freed(g_cache_h) + np_v, p_cache_v = freed(p_cache_v) + np_h, p_cache_h = freed(p_cache_h) + nk_v, k_cache_v = freed(k_cache_v) + nk_h, k_cache_h = freed(k_cache_h) + -- report_visualize("cache cleanup: %s fontkerns, %s skips, %s penalties, %s kerns, %s whatsits, %s boxes", + -- nf,ng_v+ng_h,np_v+np_h,nk_v+nk_h,nw,nb) end local function handler(head) @@ -936,7 +943,7 @@ local function handler(head) -- local v = texgetattribute(a_visual) -- texsetattribute(a_layer,unsetvalue) -- texsetattribute(a_visual,unsetvalue) - head = visualize(tonut(head)) + head = visualize(tonut(head),true) -- texsetattribute(a_layer,l) -- texsetattribute(a_visual,v) -- -- cleanup() @@ -953,7 +960,9 @@ function visualizers.box(n) if usedfont then starttiming(visualizers) local box = 
getbox(n) - setfield(box,"list",visualize(getlist(box))) + if box then + setfield(box,"list",visualize(getlist(box),getid(box) == vlist_code)) + end stoptiming(visualizers) return head, true else diff --git a/tex/context/base/util-fil.lua b/tex/context/base/util-fil.lua new file mode 100644 index 000000000..fe6a117fa --- /dev/null +++ b/tex/context/base/util-fil.lua @@ -0,0 +1,109 @@ +if not modules then modules = { } end modules ['util-fil'] = { + version = 1.001, + comment = "companion to luat-lib.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local byte = string.byte +local extract = bit32.extract + +-- Here are a few helpers (the starting point were old ones I used for parsing +-- flac files). In Lua 5.3 we can probably do this better. Some code will move +-- here. + +utilities = utilities or { } +local files = { } +utilities.files = files + +function files.readbyte(f) + return byte(f:read(1)) +end + +function files.readchar(f) + return f:read(1) +end + +function files.readbytes(f,n) + return byte(f:read(n),1,n) +end + +function files.skipbytes(f,n) + f:read(n or 1) -- or a seek +end + +function files.readinteger1(f) -- one byte + local n = byte(f:read(1)) + if n >= 0x80 then + return n - 0xFF - 1 + else + return n + end +end + +files.readcardinal1 = files.readbyte -- one byte +files.readcardinal = files.readcardinal1 +files.readinteger = files.readinteger1 + +function files.readcardinal2(f) + local a, b = byte(f:read(2),1,2) + return 0x100 * a + b +end + +function files.readinteger2(f) + local a, b = byte(f:read(2),1,2) + local n = 0x100 * a + b + if n >= 0x8000 then + return n - 0xFFFF - 1 + else + return n + end +end + +function files.readcardinal3(f) + local a, b, c = byte(f:read(3),1,3) + return 0x10000 * a + 0x100 * b + c +end + +function files.readcardinal4(f) + local a, b, c, d = byte(f:read(4),1,4) + return 0x1000000 * a + 0x10000 * b + 0x100 * c + d +end + +function files.readinteger4(f) + local a, b, c, d = byte(f:read(4),1,4) + local n = 0x1000000 * a + 0x10000 * b + 0x100 * c + d + if n >= 0x8000000 then + return n - 0xFFFFFFFF - 1 + else + return n + end +end + +function files.readfixed4(f) + local a, b, c, d = byte(f:read(4),1,4) + local n = 0x100 * a + b + if n >= 0x8000 then + return n - 0xFFFF - 1 + (0x100 * c + d)/0xFFFF + else + return n + (0x100 * c + d)/0xFFFF + end +end + +function files.readstring(f,n) + return f:read(n or 1) +end + +function files.read2dot14(f) + local a, b = byte(f:read(2),1,2) + local n = 0x100 * a + b + local m = extract(n,0,30) + if n > 0x7FFF then + n = extract(n,30,2) + return m/0x4000 - 4 + else + n = extract(n,30,2) + return n + m/0x4000 + end +end diff --git a/tex/context/base/util-str.lua b/tex/context/base/util-str.lua index c2139b155..95534c8d8 100644 --- a/tex/context/base/util-str.lua +++ b/tex/context/base/util-str.lua @@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['util-str'] = { license = "see context related readme files" } -utilities = utilities or {} +utilities = utilities or { } utilities.strings = utilities.strings or { } local strings = utilities.strings @@ -354,7 +354,16 @@ function string.autosingle(s,sep) return ("'" .. tostring(s) .. 
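-- A minimal usage sketch for the new util-fil.lua helpers above (the file name
-- and the sfnt layout serve only as an illustration): the readers take an open
-- file handle and consume big-endian fields from the current position:
--
-- local f = io.open("somefont.ttf","rb")
-- if f then
--     local version = files.readcardinal4(f) -- 4 byte version/tag field
--     local ntables = files.readcardinal2(f) -- 2 byte big-endian cardinal
--     files.skipbytes(f,6)                   -- searchrange, entryselector, rangeshift
--     f:close()
-- end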
"'") end -local tracedchars = { } +local tracedchars = { [0] = + -- the regular bunch + "[null]", "[soh]", "[stx]", "[etx]", "[eot]", "[enq]", "[ack]", "[bel]", + "[bs]", "[ht]", "[lf]", "[vt]", "[ff]", "[cr]", "[so]", "[si]", + "[dle]", "[dc1]", "[dc2]", "[dc3]", "[dc4]", "[nak]", "[syn]", "[etb]", + "[can]", "[em]", "[sub]", "[esc]", "[fs]", "[gs]", "[rs]", "[us]", + -- plus space + "[space]", -- 0x20 +} + string.tracedchars = tracedchars strings.tracers = tracedchars diff --git a/tex/context/base/util-tab.lua b/tex/context/base/util-tab.lua index 618f34cee..0ab388826 100644 --- a/tex/context/base/util-tab.lua +++ b/tex/context/base/util-tab.lua @@ -545,6 +545,7 @@ local f_val_str = formatters["%w%q,"] local f_val_boo = formatters["%w%l,"] local f_val_not = formatters["%w{},"] local f_val_seq = formatters["%w{ %, t },"] +local f_fin_seq = formatters[" %, t }"] local f_table_return = formatters["return {"] local f_table_name = formatters["%s={"] @@ -562,14 +563,15 @@ local serialize = table.serialize -- the extensive one, the one we started with -- latest lua for the value of #n (with holes) .. anyway for tracing purposes we want -- indices / keys being sorted, so it will never be real fast -function table.serialize(root,name,specification) +local function serialize(root,name,specification) if type(specification) == "table" then return serialize(root,name,specification) -- the original one end - local t -- = { } - local n = 1 + local t -- = { } + local n = 1 + local unknown = false -- local function simple_table(t) -- local ts = #t @@ -614,6 +616,7 @@ function table.serialize(root,name,specification) return nil end end + local haszero = t[0] if n == nt then local tt = { } for i=1,nt do @@ -630,6 +633,23 @@ function table.serialize(root,name,specification) end end return tt + elseif haszero and (n == nt + 1) then + local tt = { } + for i=0,nt do + local v = t[i] + local tv = type(v) + if tv == "number" then + tt[i+1] = v -- not needed tostring(v) + elseif tv == "string" then + tt[i+1] = format("%q",v) -- f_string(v) + elseif tv == "boolean" then + tt[i+1] = v and "true" or "false" + else + return nil + end + end + tt[1] = "[0] = " .. 
tt[1] + return tt end end return nil @@ -680,7 +700,7 @@ function table.serialize(root,name,specification) elseif tv == "string" then n = n + 1 t[n] = f_val_str(depth,v) elseif tv == "table" then - if next(v) == nil then + if next(v) == nil then -- tricky as next is unpredictable in a hash n = n + 1 t[n] = f_val_not(depth) else local st = simple_table(v) @@ -692,6 +712,8 @@ function table.serialize(root,name,specification) end elseif tv == "boolean" then n = n + 1 t[n] = f_val_boo(depth,v) + elseif unknown then + n = n + 1 t[n] = f_val_str(depth,tostring(v)) end elseif tv == "number" then if tk == "number" then @@ -700,6 +722,8 @@ function table.serialize(root,name,specification) n = n + 1 t[n] = f_key_str_value_num(depth,k,v) elseif tk == "boolean" then n = n + 1 t[n] = f_key_boo_value_num(depth,k,v) + elseif unknown then + n = n + 1 t[n] = f_key_str_value_num(depth,tostring(k),v) end elseif tv == "string" then if tk == "number" then @@ -708,6 +732,8 @@ function table.serialize(root,name,specification) n = n + 1 t[n] = f_key_str_value_str(depth,k,v) elseif tk == "boolean" then n = n + 1 t[n] = f_key_boo_value_str(depth,k,v) + elseif unknown then + n = n + 1 t[n] = f_key_str_value_str(depth,tostring(k),v) end elseif tv == "table" then if next(v) == nil then @@ -717,6 +743,8 @@ function table.serialize(root,name,specification) n = n + 1 t[n] = f_key_str_value_not(depth,k) elseif tk == "boolean" then n = n + 1 t[n] = f_key_boo_value_not(depth,k) + elseif unknown then + n = n + 1 t[n] = f_key_str_value_not(depth,tostring(k)) end else local st = simple_table(v) @@ -728,6 +756,8 @@ function table.serialize(root,name,specification) n = n + 1 t[n] = f_key_str_value_seq(depth,k,st) elseif tk == "boolean" then n = n + 1 t[n] = f_key_boo_value_seq(depth,k,st) + elseif unknown then + n = n + 1 t[n] = f_key_str_value_seq(depth,tostring(k),st) end end elseif tv == "boolean" then @@ -737,6 +767,18 @@ function table.serialize(root,name,specification) n = n + 1 t[n] = f_key_str_value_boo(depth,k,v) elseif tk == "boolean" then n = n + 1 t[n] = f_key_boo_value_boo(depth,k,v) + elseif unknown then + n = n + 1 t[n] = f_key_str_value_boo(depth,tostring(k),v) + end + else + if tk == "number" then + n = n + 1 t[n] = f_key_num_value_str(depth,k,tostring(v)) + elseif tk == "string" then + n = n + 1 t[n] = f_key_str_value_str(depth,k,tostring(v)) + elseif tk == "boolean" then + n = n + 1 t[n] = f_key_boo_value_str(depth,k,tostring(v)) + elseif unknown then + n = n + 1 t[n] = f_key_str_value_str(depth,tostring(k),tostring(v)) end end end @@ -775,10 +817,21 @@ function table.serialize(root,name,specification) end -- Let's forget about empty tables. if next(root) ~= nil then - do_serialize(root,name,1,0) + local st = simple_table(root) + if st then + return t[1] .. 
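-- A rough sketch of what the two serializer extensions above change (output
-- shown only approximately): a plain sequence root is now flushed in one go via
-- f_fin_seq, and an extra zero slot is kept instead of being dropped:
--
-- print(table.serialize({ "one", "two" },"t"))        -- t={ "one", "two" }
-- print(table.serialize({ [0] = "zero", "one" },"t")) -- keeps a leading [0] = "zero" entry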
f_fin_seq(st) -- todo: move up and in one go + else + do_serialize(root,name,1,0) + end end end n = n + 1 t[n] = f_table_finish() return concat(t,"\n") end + +table.serialize = serialize + +if setinspector then + setinspector("table",function(v) if type(v) == "table" then print(serialize(v,"table")) return true end end) +end diff --git a/tex/context/base/x-asciimath.lua b/tex/context/base/x-asciimath.lua index 52664f0d3..451cd5925 100644 --- a/tex/context/base/x-asciimath.lua +++ b/tex/context/base/x-asciimath.lua @@ -794,7 +794,7 @@ local d_one = R("09") local d_two = d_one * d_one local d_three = d_two * d_one local d_four = d_three * d_one -local d_split = P(-1) + P(",") +local d_split = P(-1) + Carg(2) * (S(".") /"") local d_spaced = (Carg(1) * d_three)^1 @@ -808,9 +808,9 @@ local digitized_1 = Cs ( ( local p_fourbefore = d_four * d_split local p_fourafter = d_four * P(-1) -local p_beforecomma = d_three * d_spaced * d_split - + d_two * d_spaced * d_split - + d_one * d_spaced * d_split +local p_beforecomma = d_three * d_spaced^0 * d_split + + d_two * d_spaced^0 * d_split + + d_one * d_spaced^0 * d_split + d_one * d_split local p_aftercomma = p_fourafter @@ -843,11 +843,15 @@ local splitmethods = { digitized_3, } -local splitmethod = nil +local splitmethod = nil +local symbolmethod = nil +local digitseparator = utfchar(0x2008) +local digitsymbol = "." function asciimath.setup(settings) splitmethod = splitmethods[tonumber(settings.splitmethod) or 0] if splitmethod then + digitsymbol = settings.symbol or "." local separator = settings.separator if separator == true or not interfaces or interfaces.variables.yes then digitseparator = utfchar(0x2008) @@ -856,6 +860,11 @@ function asciimath.setup(settings) else splitmethod = nil end + if digitsymbol ~= "." 
then + symbolmethod = lpeg.replacer(".",digitsymbol) + else + symbolmethod = nil + end end end @@ -864,7 +873,10 @@ local collected_filename = "asciimath-digits.lua" function numbermess(s) if splitmethod then - local d = lpegmatch(splitmethod,s,1,digitseparator) + local d = lpegmatch(splitmethod,s,1,digitseparator,digitsymbol) + if not d and symbolmethod then + d = lpegmatch(symbolmethod,s) + end if d then if trace_digits and s ~= d then collected_digits[s] = d @@ -875,25 +887,31 @@ function numbermess(s) return s end --- asciimath.setup { splitmethod = 3 } +-- asciimath.setup { splitmethod = 3, symbol = "," } -- local t = { -- "1", "12", "123", "1234", "12345", "123456", "1234567", "12345678", "123456789", --- "1,1", --- "12,12", --- "123,123", --- "1234,123", --- "1234,1234", --- "12345,1234", --- "1234,12345", --- "12345,12345", --- "123456,123456", --- "1234567,1234567", --- "12345678,12345678", --- "123456789,123456789", --- "0,1234", --- "1234,0", --- "1234,00", --- "0,123456789", +-- "1.1", +-- "12.12", +-- "123.123", +-- "1234.123", +-- "1234.1234", +-- "12345.1234", +-- "1234.12345", +-- "12345.12345", +-- "123456.123456", +-- "1234567.1234567", +-- "12345678.12345678", +-- "123456789.123456789", +-- "0.1234", +-- "1234.0", +-- "1234.00", +-- "0.123456789", +-- "100.00005", +-- "0.80018", +-- "10.80018", +-- "100.80018", +-- "1000.80018", +-- "10000.80018", -- } -- for i=1,#t do print(formatters["%-20s : [%s]"](t[i],numbermess(t[i]))) end @@ -1860,7 +1878,7 @@ if not context then -- report_asciimath(cleanedup([[a "α" b]])) -- report_asciimath(cleanedup([[//4]])) -convert("leq\\leq") +-- convert("leq\\leq") -- convert([[\^{1/5}log]]) -- convert("sqrt") -- convert("^") diff --git a/tex/context/base/x-asciimath.mkiv b/tex/context/base/x-asciimath.mkiv index 1d62fb93d..0e1230bb0 100644 --- a/tex/context/base/x-asciimath.mkiv +++ b/tex/context/base/x-asciimath.mkiv @@ -153,6 +153,7 @@ \ctxlua{moduledata.asciimath.setup { splitmethod = "\asciimathparameter\c!splitmethod", separator = "\asciimathparameter\c!separator", + symbol = "\asciimathparameter\c!symbol", }}% \to \everysetupasciimath @@ -390,4 +391,37 @@ % \MyAsciiMath{x^2(10 -x)>2 x^2} % \MyAsciiMath{x^4>x} \stoplines + +\setupasciimath[splitmethod=3,symbol={{,}}] + +\startlines +\asciimath{sqrt 1} +\asciimath{sqrt 1.2} +\asciimath{sqrt 1.2} +\asciimath{1} +\asciimath{12} +\asciimath{123} +\asciimath{1234} +\asciimath{12345} +\asciimath{123456} +\asciimath{1234567} +\asciimath{12345678} +\asciimath{123456789} +\asciimath{1.1} +\asciimath{12.12} +\asciimath{1234.123} +\asciimath{1234.1234} +\asciimath{12345.1234} +\asciimath{1234.12345} +\asciimath{12345.12345} +\asciimath{123456.123456} +\asciimath{1234567.1234567} +\asciimath{12345678.12345678} +\asciimath{123456789.123456789} +\asciimath{0.1234} +\asciimath{1234.0} +\asciimath{1234.00} +\asciimath{0.123456789} +\stoplines + \stoptext diff --git a/tex/context/sample/cow-black.mps b/tex/context/sample/cow-black.mps new file mode 100644 index 000000000..b34e2bab8 --- /dev/null +++ b/tex/context/sample/cow-black.mps @@ -0,0 +1,154 @@ +%!PS-Adobe-2.0 EPSF-2.0 +%%BoundingBox: 52 425 327 625 +%%HiResBoundingBox: 52.50 425.75 326.25 623.25 +%%Comment: originally a CorelDraw cow +%%Creator: MetaPost +%%Pages: 1 +%%EndProlog +%%Page: 1 1 +0 2 dtransform truncate idtransform setlinewidth pop +0 setgray +newpath 245.45 600.34 moveto +242.78 599.40 239.62 596.02 237.67 594.07 curveto +236.74 584.42 244.58 583.63 250.20 577.44 curveto +258.77 573.70 251.21 567.72 256.18 557.42 curveto +257.04 
550.94 257.90 543.89 255.31 539.78 curveto +249.48 538.92 247.97 540.22 246.89 531.43 curveto +246.31 526.97 231.77 529.06 229.03 538.27 curveto +227.09 544.97 221.33 546.70 217.80 543.17 curveto +213.77 538.06 215.78 531.22 217.80 527.47 curveto +224.93 517.32 212.04 511.42 205.13 516.74 curveto +199.73 508.68 211.39 500.04 207.43 494.50 curveto +205.78 493.99 204.77 489.17 185.47 500.54 curveto +180.36 504.14 167.83 500.76 168.77 520.63 curveto +168.77 525.82 165.60 543.53 162.14 555.91 curveto +159.41 561.24 156.74 559.08 156.89 553.90 curveto +157.18 547.85 162.94 531.22 155.52 540.22 curveto +153.58 539.21 156.89 523.58 156.89 521.64 curveto +162.00 517.03 157.39 513.58 154.73 512.28 curveto +151.27 518.33 149.62 518.04 147.17 514.44 curveto +141.70 514.08 144.58 528.19 140.26 528.62 curveto +137.02 527.76 139.18 520.06 138.24 518.76 curveto +132.98 524.74 130.90 529.27 127.01 521.64 curveto +126.14 521.64 122.11 519.19 120.96 526.54 curveto +117.65 552.74 107.06 558.36 93.82 565.13 curveto +92.02 565.63 84.24 566.71 79.34 568.15 curveto +73.51 560.88 58.32 565.63 56.23 570.31 curveto +54.79 572.69 54.65 575.21 54.79 576.50 curveto +52.34 580.10 55.87 582.70 59.62 583.06 curveto +62.86 587.16 68.54 594.94 71.28 601.56 curveto +72.29 603.07 74.95 609.34 78.19 609.55 curveto +74.95 612.94 74.30 622.51 82.66 617.33 curveto +87.12 624.02 92.09 624.31 95.76 615.82 curveto +102.89 615.38 102.31 608.69 115.78 605.52 curveto +122.76 602.86 132.77 604.58 140.26 603.72 curveto +136.22 596.88 127.44 566.86 132.98 559.80 curveto +140.76 564.70 141.84 605.38 157.03 595.66 curveto +160.56 593.93 159.91 590.04 164.09 590.18 curveto +170.42 587.45 169.13 600.77 172.51 600.77 curveto +176.47 599.76 183.02 599.04 186.98 599.54 curveto +197.71 600.77 206.93 604.08 223.92 602.50 curveto +231.12 601.78 238.25 601.06 245.45 600.34 curveto +closepath fill +newpath 305.28 560.95 moveto +304.63 560.95 299.95 561.24 299.38 561.24 curveto +302.40 550.44 303.98 536.47 304.20 525.31 curveto +303.70 521.35 299.81 517.46 299.38 525.67 curveto +295.85 530.86 296.42 540.07 293.40 540.29 curveto +287.35 539.64 285.34 513.22 280.01 509.33 curveto +276.26 512.28 280.73 524.02 275.54 524.74 curveto +270.50 524.02 264.31 526.68 266.69 534.46 curveto +270.29 543.02 268.34 554.76 266.54 561.60 curveto +262.37 578.59 264.02 587.09 271.58 596.09 curveto +267.48 604.51 lineto +275.40 608.26 285.62 604.58 290.02 602.21 curveto +294.62 600.26 300.24 595.94 301.10 587.38 curveto +303.34 578.88 304.42 569.74 305.28 560.95 curveto +closepath fill +newpath 84.38 618.55 moveto +88.34 624.38 92.59 622.94 96.34 615.67 curveto +101.23 615.60 102.46 612.43 104.98 610.78 curveto +122.62 598.39 147.46 607.18 167.90 601.92 curveto +180.94 598.54 190.87 599.76 200.09 602.06 curveto +220.32 607.25 246.10 596.16 263.74 603.86 curveto +274.75 608.62 284.76 605.66 292.97 600.91 curveto +297.58 597.96 299.59 596.09 300.96 591.26 curveto +306.29 572.54 306.29 551.02 309.53 530.57 curveto +309.53 528.84 312.19 526.10 312.48 522.07 curveto +315.79 511.34 316.08 510.12 317.16 502.20 curveto +317.16 501.34 326.52 488.45 325.01 479.02 curveto +323.93 481.25 323.86 482.83 321.62 481.68 curveto +320.33 479.30 320.90 473.90 322.56 471.74 curveto +320.83 470.81 318.46 473.47 317.52 475.20 curveto +318.17 473.04 317.81 470.81 316.73 469.30 curveto +315.86 472.25 316.58 473.18 315.36 473.90 curveto +313.99 472.90 314.21 469.30 314.28 466.20 curveto +313.49 468.07 311.47 472.46 312.55 476.42 curveto +312.48 484.20 308.81 489.10 310.32 499.10 curveto +310.10 
504.43 307.30 521.06 304.56 524.30 curveto +303.12 526.25 306.36 510.77 306.36 506.16 curveto +306.65 500.90 307.08 468.72 306.43 463.10 curveto +306.43 459.22 306.22 453.96 307.08 452.16 curveto +308.74 450.79 309.38 450.50 309.60 447.98 curveto +309.24 446.62 308.74 446.04 307.73 445.54 curveto +306.07 444.60 307.37 441.79 306.07 439.85 curveto +304.49 438.77 304.13 441.86 303.34 441.86 curveto +302.69 441.00 303.05 437.98 302.47 436.18 curveto +299.66 433.80 292.18 432.50 289.15 434.66 curveto +289.73 440.64 291.74 441.58 295.63 446.62 curveto +298.66 452.59 297.00 460.94 296.93 468.14 curveto +295.49 480.38 289.22 487.30 289.44 496.44 curveto +287.86 495.72 286.42 494.57 284.26 494.86 curveto +283.39 489.46 286.42 484.56 284.83 480.82 curveto +281.95 471.96 277.06 446.62 279.00 437.76 curveto +280.01 434.74 278.21 433.15 277.06 433.94 curveto +276.77 433.94 276.55 433.94 276.41 433.94 curveto +276.41 433.94 276.55 431.42 275.69 430.92 curveto +274.10 430.34 273.67 431.71 272.66 432.14 curveto +271.22 430.85 272.52 429.48 271.15 428.04 curveto +267.19 428.04 261.36 425.38 257.98 428.26 curveto +257.33 434.16 263.30 436.68 266.47 440.71 curveto +268.63 446.62 271.08 462.89 267.77 474.62 curveto +267.77 475.56 264.38 485.28 261.43 488.66 curveto +258.70 487.66 257.33 485.50 253.22 486.29 curveto +252.58 484.34 253.30 482.33 252.22 480.10 curveto +251.86 479.52 249.34 478.58 249.19 481.39 curveto +248.98 483.05 248.90 486.36 248.26 486.72 curveto +243.65 486.72 233.71 487.08 231.77 493.92 curveto +219.89 492.34 215.93 491.26 206.57 493.42 curveto +196.63 489.67 183.24 506.16 174.53 502.20 curveto +172.51 496.15 173.09 485.64 171.65 481.39 curveto +169.34 474.77 171.14 467.14 171.14 456.41 curveto +170.57 455.40 169.85 454.46 168.48 454.46 curveto +168.48 453.10 169.34 450.86 168.62 449.42 curveto +167.18 447.62 165.89 451.80 165.02 444.60 curveto +163.15 443.74 157.75 442.22 155.59 445.18 curveto +155.88 448.99 158.33 451.30 160.13 453.38 curveto +161.42 456.91 160.99 458.28 160.70 461.81 curveto +160.99 464.98 161.71 468.58 161.86 470.09 curveto +161.86 473.04 162.50 479.30 161.14 481.18 curveto +159.41 482.69 lineto +157.18 487.22 158.33 494.64 157.61 500.26 curveto +155.81 500.69 155.81 500.98 154.01 498.31 curveto +154.01 494.42 153.50 486.36 152.35 483.84 curveto +149.69 479.81 150.84 459.65 151.42 448.56 curveto +151.78 446.47 149.69 447.70 149.76 444.74 curveto +150.05 442.80 147.89 443.59 146.09 444.60 curveto +145.15 445.18 146.59 439.78 145.37 439.56 curveto +142.34 438.84 136.87 438.19 135.22 440.71 curveto +134.57 444.60 137.88 448.06 140.62 451.01 curveto +143.14 455.83 140.90 465.70 140.47 476.28 curveto +138.89 478.22 lineto +134.86 483.19 139.61 496.94 136.51 506.23 curveto +120.02 514.87 122.11 519.19 118.73 537.62 curveto +115.13 557.64 93.38 567.65 79.06 567.65 curveto +73.44 563.04 66.24 563.62 58.54 567.65 curveto +55.66 569.23 54.43 573.19 54.50 576.50 curveto +52.63 580.75 55.22 582.19 59.62 583.49 curveto +62.71 587.81 68.62 594.65 69.19 597.74 curveto +70.34 601.92 75.53 608.11 77.76 609.77 curveto +75.82 613.01 74.81 615.17 77.11 618.55 curveto +79.56 620.14 81.79 616.61 84.38 618.55 curveto +closepath stroke +showpage +%%EOF diff --git a/tex/context/sample/cow-black.pdf b/tex/context/sample/cow-black.pdf new file mode 100644 index 000000000..cfd70839d Binary files /dev/null and b/tex/context/sample/cow-black.pdf differ diff --git a/tex/context/sample/cow-brown.mps b/tex/context/sample/cow-brown.mps new file mode 100644 index 000000000..530a45e9a --- /dev/null +++ 
b/tex/context/sample/cow-brown.mps @@ -0,0 +1,154 @@ +%!PS-Adobe-2.0 EPSF-2.0 +%%BoundingBox: 52 425 327 625 +%%HiResBoundingBox: 52.50 425.75 326.25 623.25 +%%Comment: originally a CorelDraw cow +%%Creator: MetaPost +%%Pages: 1 +%%EndProlog +%%Page: 1 1 +0 2 dtransform truncate idtransform setlinewidth pop +0.55 0.25 0.10 setrgbcolor +newpath 245.45 600.34 moveto +242.78 599.40 239.62 596.02 237.67 594.07 curveto +236.74 584.42 244.58 583.63 250.20 577.44 curveto +258.77 573.70 251.21 567.72 256.18 557.42 curveto +257.04 550.94 257.90 543.89 255.31 539.78 curveto +249.48 538.92 247.97 540.22 246.89 531.43 curveto +246.31 526.97 231.77 529.06 229.03 538.27 curveto +227.09 544.97 221.33 546.70 217.80 543.17 curveto +213.77 538.06 215.78 531.22 217.80 527.47 curveto +224.93 517.32 212.04 511.42 205.13 516.74 curveto +199.73 508.68 211.39 500.04 207.43 494.50 curveto +205.78 493.99 204.77 489.17 185.47 500.54 curveto +180.36 504.14 167.83 500.76 168.77 520.63 curveto +168.77 525.82 165.60 543.53 162.14 555.91 curveto +159.41 561.24 156.74 559.08 156.89 553.90 curveto +157.18 547.85 162.94 531.22 155.52 540.22 curveto +153.58 539.21 156.89 523.58 156.89 521.64 curveto +162.00 517.03 157.39 513.58 154.73 512.28 curveto +151.27 518.33 149.62 518.04 147.17 514.44 curveto +141.70 514.08 144.58 528.19 140.26 528.62 curveto +137.02 527.76 139.18 520.06 138.24 518.76 curveto +132.98 524.74 130.90 529.27 127.01 521.64 curveto +126.14 521.64 122.11 519.19 120.96 526.54 curveto +117.65 552.74 107.06 558.36 93.82 565.13 curveto +92.02 565.63 84.24 566.71 79.34 568.15 curveto +73.51 560.88 58.32 565.63 56.23 570.31 curveto +54.79 572.69 54.65 575.21 54.79 576.50 curveto +52.34 580.10 55.87 582.70 59.62 583.06 curveto +62.86 587.16 68.54 594.94 71.28 601.56 curveto +72.29 603.07 74.95 609.34 78.19 609.55 curveto +74.95 612.94 74.30 622.51 82.66 617.33 curveto +87.12 624.02 92.09 624.31 95.76 615.82 curveto +102.89 615.38 102.31 608.69 115.78 605.52 curveto +122.76 602.86 132.77 604.58 140.26 603.72 curveto +136.22 596.88 127.44 566.86 132.98 559.80 curveto +140.76 564.70 141.84 605.38 157.03 595.66 curveto +160.56 593.93 159.91 590.04 164.09 590.18 curveto +170.42 587.45 169.13 600.77 172.51 600.77 curveto +176.47 599.76 183.02 599.04 186.98 599.54 curveto +197.71 600.77 206.93 604.08 223.92 602.50 curveto +231.12 601.78 238.25 601.06 245.45 600.34 curveto +closepath fill +newpath 305.28 560.95 moveto +304.63 560.95 299.95 561.24 299.38 561.24 curveto +302.40 550.44 303.98 536.47 304.20 525.31 curveto +303.70 521.35 299.81 517.46 299.38 525.67 curveto +295.85 530.86 296.42 540.07 293.40 540.29 curveto +287.35 539.64 285.34 513.22 280.01 509.33 curveto +276.26 512.28 280.73 524.02 275.54 524.74 curveto +270.50 524.02 264.31 526.68 266.69 534.46 curveto +270.29 543.02 268.34 554.76 266.54 561.60 curveto +262.37 578.59 264.02 587.09 271.58 596.09 curveto +267.48 604.51 lineto +275.40 608.26 285.62 604.58 290.02 602.21 curveto +294.62 600.26 300.24 595.94 301.10 587.38 curveto +303.34 578.88 304.42 569.74 305.28 560.95 curveto +closepath fill +newpath 84.38 618.55 moveto +88.34 624.38 92.59 622.94 96.34 615.67 curveto +101.23 615.60 102.46 612.43 104.98 610.78 curveto +122.62 598.39 147.46 607.18 167.90 601.92 curveto +180.94 598.54 190.87 599.76 200.09 602.06 curveto +220.32 607.25 246.10 596.16 263.74 603.86 curveto +274.75 608.62 284.76 605.66 292.97 600.91 curveto +297.58 597.96 299.59 596.09 300.96 591.26 curveto +306.29 572.54 306.29 551.02 309.53 530.57 curveto +309.53 528.84 312.19 526.10 312.48 522.07 
curveto +315.79 511.34 316.08 510.12 317.16 502.20 curveto +317.16 501.34 326.52 488.45 325.01 479.02 curveto +323.93 481.25 323.86 482.83 321.62 481.68 curveto +320.33 479.30 320.90 473.90 322.56 471.74 curveto +320.83 470.81 318.46 473.47 317.52 475.20 curveto +318.17 473.04 317.81 470.81 316.73 469.30 curveto +315.86 472.25 316.58 473.18 315.36 473.90 curveto +313.99 472.90 314.21 469.30 314.28 466.20 curveto +313.49 468.07 311.47 472.46 312.55 476.42 curveto +312.48 484.20 308.81 489.10 310.32 499.10 curveto +310.10 504.43 307.30 521.06 304.56 524.30 curveto +303.12 526.25 306.36 510.77 306.36 506.16 curveto +306.65 500.90 307.08 468.72 306.43 463.10 curveto +306.43 459.22 306.22 453.96 307.08 452.16 curveto +308.74 450.79 309.38 450.50 309.60 447.98 curveto +309.24 446.62 308.74 446.04 307.73 445.54 curveto +306.07 444.60 307.37 441.79 306.07 439.85 curveto +304.49 438.77 304.13 441.86 303.34 441.86 curveto +302.69 441.00 303.05 437.98 302.47 436.18 curveto +299.66 433.80 292.18 432.50 289.15 434.66 curveto +289.73 440.64 291.74 441.58 295.63 446.62 curveto +298.66 452.59 297.00 460.94 296.93 468.14 curveto +295.49 480.38 289.22 487.30 289.44 496.44 curveto +287.86 495.72 286.42 494.57 284.26 494.86 curveto +283.39 489.46 286.42 484.56 284.83 480.82 curveto +281.95 471.96 277.06 446.62 279.00 437.76 curveto +280.01 434.74 278.21 433.15 277.06 433.94 curveto +276.77 433.94 276.55 433.94 276.41 433.94 curveto +276.41 433.94 276.55 431.42 275.69 430.92 curveto +274.10 430.34 273.67 431.71 272.66 432.14 curveto +271.22 430.85 272.52 429.48 271.15 428.04 curveto +267.19 428.04 261.36 425.38 257.98 428.26 curveto +257.33 434.16 263.30 436.68 266.47 440.71 curveto +268.63 446.62 271.08 462.89 267.77 474.62 curveto +267.77 475.56 264.38 485.28 261.43 488.66 curveto +258.70 487.66 257.33 485.50 253.22 486.29 curveto +252.58 484.34 253.30 482.33 252.22 480.10 curveto +251.86 479.52 249.34 478.58 249.19 481.39 curveto +248.98 483.05 248.90 486.36 248.26 486.72 curveto +243.65 486.72 233.71 487.08 231.77 493.92 curveto +219.89 492.34 215.93 491.26 206.57 493.42 curveto +196.63 489.67 183.24 506.16 174.53 502.20 curveto +172.51 496.15 173.09 485.64 171.65 481.39 curveto +169.34 474.77 171.14 467.14 171.14 456.41 curveto +170.57 455.40 169.85 454.46 168.48 454.46 curveto +168.48 453.10 169.34 450.86 168.62 449.42 curveto +167.18 447.62 165.89 451.80 165.02 444.60 curveto +163.15 443.74 157.75 442.22 155.59 445.18 curveto +155.88 448.99 158.33 451.30 160.13 453.38 curveto +161.42 456.91 160.99 458.28 160.70 461.81 curveto +160.99 464.98 161.71 468.58 161.86 470.09 curveto +161.86 473.04 162.50 479.30 161.14 481.18 curveto +159.41 482.69 lineto +157.18 487.22 158.33 494.64 157.61 500.26 curveto +155.81 500.69 155.81 500.98 154.01 498.31 curveto +154.01 494.42 153.50 486.36 152.35 483.84 curveto +149.69 479.81 150.84 459.65 151.42 448.56 curveto +151.78 446.47 149.69 447.70 149.76 444.74 curveto +150.05 442.80 147.89 443.59 146.09 444.60 curveto +145.15 445.18 146.59 439.78 145.37 439.56 curveto +142.34 438.84 136.87 438.19 135.22 440.71 curveto +134.57 444.60 137.88 448.06 140.62 451.01 curveto +143.14 455.83 140.90 465.70 140.47 476.28 curveto +138.89 478.22 lineto +134.86 483.19 139.61 496.94 136.51 506.23 curveto +120.02 514.87 122.11 519.19 118.73 537.62 curveto +115.13 557.64 93.38 567.65 79.06 567.65 curveto +73.44 563.04 66.24 563.62 58.54 567.65 curveto +55.66 569.23 54.43 573.19 54.50 576.50 curveto +52.63 580.75 55.22 582.19 59.62 583.49 curveto +62.71 587.81 68.62 594.65 69.19 597.74 
curveto +70.34 601.92 75.53 608.11 77.76 609.77 curveto +75.82 613.01 74.81 615.17 77.11 618.55 curveto +79.56 620.14 81.79 616.61 84.38 618.55 curveto +closepath stroke +showpage +%%EOF diff --git a/tex/context/sample/cow-brown.pdf b/tex/context/sample/cow-brown.pdf new file mode 100644 index 000000000..5e7fa4f86 Binary files /dev/null and b/tex/context/sample/cow-brown.pdf differ diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua index d8095a285..dee3ebec7 100644 --- a/tex/generic/context/luatex/luatex-fonts-merged.lua +++ b/tex/generic/context/luatex/luatex-fonts-merged.lua @@ -1,6 +1,6 @@ -- merged file : luatex-fonts-merged.lua -- parent file : luatex-fonts.lua --- merge date : 05/24/15 12:42:55 +-- merge date : 06/12/15 10:06:12 do -- begin closure to overcome local limits and interference @@ -57,21 +57,33 @@ if not package.loaders then end local print,select,tostring=print,select,tostring local inspectors={} -function setinspector(inspector) - inspectors[#inspectors+1]=inspector +function setinspector(kind,inspector) + inspectors[kind]=inspector end function inspect(...) for s=1,select("#",...) do local value=select(s,...) - local done=false - for i=1,#inspectors do - done=inspectors[i](value) - if done then - break + if value==nil then + print("nil") + else + local done=false + local kind=type(value) + local inspector=inspectors[kind] + if inspector then + done=inspector(value) + if done then + break + end + end + for kind,inspector in next,inspectors do + done=inspector(value) + if done then + break + end + end + if not done then + print(tostring(value)) end - end - if not done then - print(tostring(value)) end end end @@ -112,7 +124,7 @@ local floor=math.floor local P,R,S,V,Ct,C,Cs,Cc,Cp,Cmt=lpeg.P,lpeg.R,lpeg.S,lpeg.V,lpeg.Ct,lpeg.C,lpeg.Cs,lpeg.Cc,lpeg.Cp,lpeg.Cmt local lpegtype,lpegmatch,lpegprint=lpeg.type,lpeg.match,lpeg.print if setinspector then - setinspector(function(v) if lpegtype(v) then lpegprint(v) return true end end) + setinspector("lpeg",function(v) if lpegtype(v) then lpegprint(v) return true end end) end lpeg.patterns=lpeg.patterns or {} local patterns=lpeg.patterns @@ -995,9 +1007,10 @@ function string.valid(str,default) return (type(str)=="string" and str~="" and str) or default or nil end string.itself=function(s) return s end -local pattern=Ct(C(1)^0) -function string.totable(str) - return lpegmatch(pattern,str) +local pattern_c=Ct(C(1)^0) +local pattern_b=Ct((C(1)/byte)^0) +function string.totable(str,bytes) + return lpegmatch(bytes and pattern_b or pattern_c,str) end local replacer=lpeg.replacer("@","%%") function string.tformat(fmt,...) @@ -1884,7 +1897,7 @@ function table.print(t,...) 
end end if setinspector then - setinspector(function(v) if type(v)=="table" then serialize(print,v,"table") return true end end) + setinspector("table",function(v) if type(v)=="table" then serialize(print,v,"table") return true end end) end function table.sub(t,i,j) return { unpack(t,i,j) } @@ -2937,7 +2950,13 @@ function string.autosingle(s,sep) end return ("'"..tostring(s).."'") end -local tracedchars={} +local tracedchars={ [0]= + "[null]","[soh]","[stx]","[etx]","[eot]","[enq]","[ack]","[bel]", + "[bs]","[ht]","[lf]","[vt]","[ff]","[cr]","[so]","[si]", + "[dle]","[dc1]","[dc2]","[dc3]","[dc4]","[nak]","[syn]","[etb]", + "[can]","[em]","[sub]","[esc]","[fs]","[gs]","[rs]","[us]", + "[space]", +} string.tracedchars=tracedchars strings.tracers=tracedchars function string.tracedchar(b) @@ -7176,7 +7195,7 @@ local report_otf=logs.reporter("fonts","otf loading") local fonts=fonts local otf=fonts.handlers.otf otf.glists={ "gsub","gpos" } -otf.version=2.812 +otf.version=2.814 otf.cache=containers.define("fonts","otf",otf.version,true) local hashes=fonts.hashes local definers=fonts.definers @@ -7353,10 +7372,11 @@ local ordered_enhancers={ "reorganize subtables", "check glyphs", "check metadata", - "check extra features", "prepare tounicode", "check encoding", "add duplicates", + "expand lookups", + "check extra features", "cleanup tables", "compact lookups", "purge names", @@ -7526,6 +7546,7 @@ function otf.load(filename,sub,featurefile) data={ size=size, time=time, + subfont=sub, format=otf_format(filename), featuredata=featurefiles, resources={ @@ -7810,25 +7831,25 @@ actions["prepare glyphs"]=function(data,filename,raw) glyph=glyph, } descriptions[unicode]=description -local altuni=glyph.altuni -if altuni then - for i=1,#altuni do - local a=altuni[i] - local u=a.unicode - if u~=unicode then - local v=a.variant - if v then - local vv=variants[v] - if vv then - vv[u]=unicode - else - vv={ [u]=unicode } - variants[v]=vv - end - end - end - end -end + local altuni=glyph.altuni + if altuni then + for i=1,#altuni do + local a=altuni[i] + local u=a.unicode + if u~=unicode then + local v=a.variant + if v then + local vv=variants[v] + if vv then + vv[u]=unicode + else + vv={ [u]=unicode } + variants[v]=vv + end + end + end + end + end end end else @@ -8353,12 +8374,15 @@ local function r_uncover(splitter,cache,cover,replacements) end actions["reorganize lookups"]=function(data,filename,raw) if data.lookups then - local splitter=data.helpers.tounicodetable + local helpers=data.helpers + local duplicates=data.resources.duplicates + local splitter=helpers.tounicodetable local t_u_cache={} local s_u_cache=t_u_cache local t_h_cache={} local s_h_cache=t_h_cache local r_u_cache={} + helpers.matchcache=t_h_cache for _,lookup in next,data.lookups do local rules=lookup.rules if rules then @@ -8504,6 +8528,44 @@ actions["reorganize lookups"]=function(data,filename,raw) end end end +actions["expand lookups"]=function(data,filename,raw) + if data.lookups then + local cache=data.helpers.matchcache + if cache then + local duplicates=data.resources.duplicates + for key,hash in next,cache do + local done=nil + for key in next,hash do + local unicode=duplicates[key] + if not unicode then + elseif type(unicode)=="table" then + for i=1,#unicode do + local u=unicode[i] + if hash[u] then + elseif done then + done[u]=key + else + done={ [u]=key } + end + end + else + if hash[unicode] then + elseif done then + done[unicode]=key + else + done={ [unicode]=key } + end + end + end + if done then + for u in next,done do + 
hash[u]=true + end + end + end + end + end +end local function check_variants(unicode,the_variants,splitter,unicodes) local variants=the_variants.variants if variants then @@ -12871,9 +12933,9 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq end else local i=1 - while true do + while start and true do if skipped then - while true do + while true do local char=getchar(start) local ccd=descriptions[char] if ccd then @@ -12902,17 +12964,20 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq head,start,ok,n=cp(head,start,last,kind,chainname,ck,lookuphash,chainlookup,chainlookupname,i,sequence) if ok then done=true - i=i+(n or 1) - else - i=i+1 + if n and n>1 then + if i+n>nofchainlookups then + break + else + end + end end + i=i+1 end end - if i>nofchainlookups then + if i>nofchainlookups or not start then break elseif start then start=getnext(start) - else end end end diff --git a/tex/generic/context/luatex/luatex-fonts-otn.lua b/tex/generic/context/luatex/luatex-fonts-otn.lua index dd3aa6153..d2ad07689 100644 --- a/tex/generic/context/luatex/luatex-fonts-otn.lua +++ b/tex/generic/context/luatex/luatex-fonts-otn.lua @@ -1929,9 +1929,9 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq end else local i = 1 - while true do + while start and true do if skipped then - while true do + while true do -- todo: use properties local char = getchar(start) local ccd = descriptions[char] if ccd then @@ -1946,10 +1946,11 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq end end end + -- see remark in ms standard under : LookupType 5: Contextual Substitution Subtable local chainlookupname = chainlookups[i] local chainlookup = lookuptable[chainlookupname] if not chainlookup then - -- okay, n matches, < n replacements + -- we just advance i = i + 1 else local cp = chainmores[chainlookup.type] @@ -1963,19 +1964,26 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq -- messy since last can be changed ! if ok then done = true - -- skip next one(s) if ligature - i = i + (n or 1) - else - i = i + 1 + if n and n > 1 then + -- we have a ligature (cf the spec we advance one but we really need to test it + -- as there are fonts out there that are fuzzy and have too many lookups + if i + n > nofchainlookups then + -- if trace_contexts then + -- logprocess("%s: quitting lookups",cref(kind,chainname)) + -- end + break + else + -- we need to carry one + end + end end + i = i + 1 end end - if i > nofchainlookups then + if i > nofchainlookups or not start then break elseif start then start = getnext(start) - else - -- weird end end end diff --git a/tex/generic/context/luatex/luatex-fonts.lua b/tex/generic/context/luatex/luatex-fonts.lua index c81e8cd1a..fe0e9de77 100644 --- a/tex/generic/context/luatex/luatex-fonts.lua +++ b/tex/generic/context/luatex/luatex-fonts.lua @@ -27,16 +27,16 @@ if not modules then modules = { } end modules ['luatex-fonts'] = { -- also add more helper code here, but that depends to what extend metatex (sidetrack of context) -- evolves into a low level layer (depends on time, as usual). -texio.write_nl("") -texio.write_nl("--------------------------------------------------------------------------------") -texio.write_nl("The font code has been brought in sync with the context version of 2014.12.21 so") -texio.write_nl("if things don't work out as expected the interfacing needs to be checked. 
When") -texio.write_nl("this works as expected a second upgrade will happen that gives a more complete") -texio.write_nl("support and another sync with the context code (that new code is currently being") -texio.write_nl("tested. The base pass is now integrated in the main pass. The results can differ") -texio.write_nl("from those in context because there we integrate some mechanisms differently.") -texio.write_nl("--------------------------------------------------------------------------------") -texio.write_nl("") +-- texio.write_nl("") +-- texio.write_nl("--------------------------------------------------------------------------------") +-- texio.write_nl("The font code has been brought in sync with the context version of 2014.12.21 so") +-- texio.write_nl("if things don't work out as expected the interfacing needs to be checked. When") +-- texio.write_nl("this works as expected a second upgrade will happen that gives a more complete") +-- texio.write_nl("support and another sync with the context code (that new code is currently being") +-- texio.write_nl("tested. The base pass is now integrated in the main pass. The results can differ") +-- texio.write_nl("from those in context because there we integrate some mechanisms differently.") +-- texio.write_nl("--------------------------------------------------------------------------------") +-- texio.write_nl("") utf = utf or unicode.utf8 diff --git a/tex/generic/context/luatex/luatex-mplib.lua b/tex/generic/context/luatex/luatex-mplib.lua index c6628acb3..c093b8333 100644 --- a/tex/generic/context/luatex/luatex-mplib.lua +++ b/tex/generic/context/luatex/luatex-mplib.lua @@ -22,7 +22,9 @@ if metapost and metapost.version then else - local format, concat, abs, match = string.format, table.concat, math.abs, string.match + local format, match, gsub = string.format, string.match, string.gsub + local concat = table.concat + local abs = math.abs local mplib = require ('mplib') local kpse = require ('kpse') @@ -144,10 +146,101 @@ else metapost.make = metapost.make or function() end + local template = [[ + \pdfoutput=1 + \pdfpkresolution600 + \pdfcompresslevel=9 + %s\relax + \hsize=100in + \vsize=\hsize + \hoffset=-1in + \voffset=\hoffset + \topskip=0pt + \setbox0=\hbox{%s}\relax + \pdfpageheight=\ht0 + \pdfpagewidth=\wd0 + \box0 + \bye + ]] + + metapost.texrunner = "mtxrun --script plain" + + local texruns = 0 -- per document + local texhash = { } -- per document + + function metapost.maketext(mpd,str,what) + -- inefficient but one can always use metafun .. it's more a test + -- feature + local verbatimtex = mpd.verbatimtex + if not verbatimtex then + verbatimtex = { } + mpd.verbatimtex = verbatimtex + end + if what == 1 then + table.insert(verbatimtex,str) + else + local texcode = format(template,concat(verbatimtex,"\n"),str) + local texdone = texhash[texcode] + local jobname = tex.jobname + if not texdone then + texruns = texruns + 1 + texdone = texruns + texhash[texcode] = texdone + local texname = format("%s-mplib-%s.tmp",jobname,texdone) + local logname = format("%s-mplib-%s.log",jobname,texdone) + local pdfname = format("%s-mplib-%s.pdf",jobname,texdone) + io.savedata(texname,texcode) + os.execute(format("%s %s",metapost.texrunner,texname)) + os.remove(texname) + os.remove(logname) + end + return format('"image::%s-mplib-%s.pdf" infont defaultfont',jobname,texdone) + end + end + + local function mpprint(buffer,...) + for i=1,select("#",...) do + local value = select(i,...) 
+ if value ~= nil then + local t = type(value) + if t == "number" then + buffer[#buffer+1] = format("%.16f",value) + elseif t == "string" then + buffer[#buffer+1] = value + elseif t == "table" then + buffer[#buffer+1] = "(" .. concat(value,",") .. ")" + else -- boolean or whatever + buffer[#buffer+1] = tostring(value) + end + end + end + end + + function metapost.runscript(mpd,code) + local code = loadstring(code) + if type(code) == "function" then + local buffer = { } + function metapost.print(...) + mpprint(buffer,...) + end + code() + -- mpd.buffer = buffer -- for tracing + return concat(buffer,"") + end + return "" + end + function metapost.load(name) + local mpd = { + buffer = { }, + verbatim = { } + } local mpx = mplib.new { ini_version = true, - find_file = metapost.finder, + find_file = metapost.finder, + make_text = function(...) return metapost.maketext (mpd,...) end, + run_script = function(...) return metapost.runscript(mpd,...) end, + extensions = 1, } local result if not mpx then @@ -217,8 +310,8 @@ else return figure:objects() end - function metapost.convert(result, flusher) - metapost.flush(result, flusher) + function metapost.convert(result,flusher) + metapost.flush(result,flusher) return true -- done end @@ -239,8 +332,13 @@ else end function pdf_textfigure(font,size,text,width,height,depth) - text = text:gsub(".","\\hbox{%1}") -- kerning happens in metapost - tex.sprint(format("\\MPLIBtextext{%s}{%s}{%s}{%s}{%s}",font,size,text,0,-( 7200/ 7227)/65536*depth)) + local how, what = match(text,"^(.-)::(.+)$") + if how == "image" then + tex.sprint(format("\\MPLIBpdftext{%s}{%s}",what,depth)) + else + text = gsub(text,".","\\hbox{%1}") -- kerning happens in metapost + tex.sprint(format("\\MPLIBtextext{%s}{%s}{%s}{%s}",font,size,text,depth)) + end end local bend_tolerance = 131/65536 @@ -375,8 +473,10 @@ else pdf_literalcode("Q") else local cs = object.color + local cr = false if cs and #cs > 0 then - pdf_literalcode(metapost.colorconverter(cs)) + cs, cr = metapost.colorconverter(cs) + pdf_literalcode(cs) end local ml = object.miterlimit if ml and ml ~= miterlimit then diff --git a/tex/generic/context/luatex/luatex-mplib.tex b/tex/generic/context/luatex/luatex-mplib.tex index 09dd179f3..f9de4b223 100644 --- a/tex/generic/context/luatex/luatex-mplib.tex +++ b/tex/generic/context/luatex/luatex-mplib.tex @@ -106,15 +106,14 @@ %D Text items have a special handler: -\def\MPLIBtextext#1#2#3#4#5% +\def\MPLIBtextext#1#2#3#4% {\begingroup \setbox\mplibscratchbox\hbox {\font\temp=#1 at #2bp% \temp #3}% \setbox\mplibscratchbox\hbox - {\hskip#4 bp% - \raise#5 bp% + {\raise#4sp% \box\mplibscratchbox}% \wd\mplibscratchbox0pt% \ht\mplibscratchbox0pt% @@ -122,4 +121,20 @@ \box\mplibscratchbox \endgroup} +\def\MPLIBpdftext#1#2% + {\ifcsname mplib::#1\endcsname + % already done, forgotten outside convert group + \message{}% + \else + \message{}% + \immediate\pdfximage{#1}% we cannot remove the file as it is included last + \expandafter\edef\csname mplib::#1\endcsname{\the\pdflastximage}% + \fi + \setbox\mplibscratchbox\hbox + {\raise#2sp\hbox{\pdfrefximage\csname mplib::#1\endcsname}}% + \wd\mplibscratchbox0pt% + \ht\mplibscratchbox0pt% + \dp\mplibscratchbox0pt% + \box\mplibscratchbox} + \endinput diff --git a/tex/generic/context/luatex/luatex-plain.tex b/tex/generic/context/luatex/luatex-plain.tex index c9a9e36cf..f209f4792 100644 --- a/tex/generic/context/luatex/luatex-plain.tex +++ b/tex/generic/context/luatex/luatex-plain.tex @@ -11,7 +11,26 @@ \directlua {tex.enableprimitives('', 
tex.extraprimitives())} -\pdfoutput=1 +% We assume that pdf is used. + +\pdfoutput 1 + +% We set the page dimensions because otherwise the backend does weird things +% when we have for instance this on a line of its own: +% +% \hbox to 100cm {\hss wide indeed\hss} +% +% The page dimension calculation is a fuzzy one as there are some compensations +% for the \hoffset and \voffset and such. I remember long discussions and much +% trial and error in figuring this out during pdftex development times. Where +% a dvi driver will project on a papersize (and thereby clip) the pdf backend +% has to deal with the lack of a page concept on tex by some guessing. Normally +% a macro package will set the dimensions to something reasonable anyway. + +\pdfpagewidth 8.5in +\pdfpageheight 11.0in + +% We load some code at runtime: \everyjob \expandafter {% \the\everyjob @@ -23,6 +42,8 @@ % \input {luatex-gadgets}% } +% We also patch the version number: + \edef\fmtversion{\fmtversion+luatex} \dump diff --git a/tex/generic/context/luatex/luatex-test.tex b/tex/generic/context/luatex/luatex-test.tex index 6f48e0ced..a1398ef48 100644 --- a/tex/generic/context/luatex/luatex-test.tex +++ b/tex/generic/context/luatex/luatex-test.tex @@ -1,3 +1,5 @@ +% texformat=luatex-plain + %D \module %D [ file=luatex-test, %D version=2009.12.01, @@ -48,12 +50,30 @@ \setmplibformat{plain} +\directlua { + function MpTest() + metapost.print("fullcircle scaled 3cm") + end +} + \mplibcode beginfig(1) ; draw fullcircle scaled 10cm withcolor red withpen pencircle xscaled 4mm yscaled 2mm rotated 30 ; + draw "test" infont defaultfont scaled 4 ; + verbatimtex \sl etex; + draw btex some more test etex scaled 2 ; + currentpicture := currentpicture shifted (0,1cm) ; + verbatimtex \bf etex; + draw btex another test etex scaled 2 ; + currentpicture := currentpicture shifted (0,1cm) ; + draw btex another test etex scaled 2 ; + draw + runscript("MpTest()") + withcolor green + withpen pencircle xscaled 2mm yscaled 1mm rotated 20 ; endfig ; \endmplibcode -- cgit v1.2.3
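
Not part of the patch above — a minimal usage sketch of two of the helper changes carried in luatex-fonts-merged.lua (the kind-keyed `setinspector` registration and the optional byte mode of `string.totable`), assuming that merged file (or the corresponding l-lua/l-string helpers) has been loaded first; the "boolean" inspector below is a hypothetical example, not one the patch defines:

```lua
-- illustration only, not part of the patch: requires luatex-fonts-merged.lua
-- (or l-lua.lua / l-string.lua) to have been loaded already

-- inspectors are now registered per Lua type instead of probed in order
setinspector("boolean", function(v)
    if type(v) == "boolean" then
        print(v and "<true>" or "<false>")
        return true -- handled, so inspect() stops looking further
    end
end)

inspect(true)        -- <true>
inspect({ 1, 2, 3 }) -- serialized by the built-in "table" inspector

-- string.totable gained an optional second argument returning byte values
print(table.concat(("abc"):totable(), ","))     -- a,b,c
print(table.concat(("abc"):totable(true), ",")) -- 97,98,99
```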