author     Hans Hagen <pragma@wxs.nl>  2010-02-22 19:38:00 +0100
committer  Hans Hagen <pragma@wxs.nl>  2010-02-22 19:38:00 +0100
commit     a81728fa7c7e1b92d436835ac4019b2e46f80853 (patch)
tree       4add4047ed2e858f96a22f5f9350983b0b1a47c1 /tex/generic
parent     8b61273b8c492e094d65764f3151d779d2f7c6cc (diff)
download   context-a81728fa7c7e1b92d436835ac4019b2e46f80853.tar.gz
beta 2010.02.22 19:38
Diffstat (limited to 'tex/generic')
-rw-r--r--  tex/generic/context/luatex-fonts-merged.lua | 795
-rw-r--r--  tex/generic/context/luatex-fonts.lua        |   2
2 files changed, 409 insertions, 388 deletions
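The bulk of this commit extracts the name-to-Unicode mapping code from font-otf.lua into the font-map closure: font-map.lua now has to be loaded before font-otf.lua, and the "analyse unicodes" enhancer becomes a plain alias for fonts.map.add_to_unicode. The resolver's suffix remapper also gains entries for cid and fea files. A minimal sketch of that wiring, assuming the loadmodule helper from luatex-fonts.lua; the file names passed to resolvers.find_file are made up for illustration and not part of the patch:

-- load order after this commit: font-map.lua comes before font-otf.lua,
-- because font-otf now aliases its enhancer to a function defined there
loadmodule('font-map.lua')   -- defines fonts.map.add_to_unicode
loadmodule('font-otf.lua')   -- does: otf.enhancers["analyse unicodes"] = fonts.map.add_to_unicode

-- the suffix remapper used by resolvers.find_file gains two entries, so
-- lookups like these (hypothetical names) resolve against the proper types:
local cidmap  = resolvers.find_file("UniJIS-UCS2-H", "cid")  -- searched as "cid maps"
local feafile = resolvers.find_file("myfeatures.fea", "fea") -- searched as "font feature files"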
diff --git a/tex/generic/context/luatex-fonts-merged.lua b/tex/generic/context/luatex-fonts-merged.lua index 808fb069d..3704975a2 100644 --- a/tex/generic/context/luatex-fonts-merged.lua +++ b/tex/generic/context/luatex-fonts-merged.lua @@ -1,6 +1,6 @@ -- merged file : c:/data/develop/context/texmf/tex/generic/context/luatex-fonts-merged.lua -- parent file : c:/data/develop/context/texmf/tex/generic/context/luatex-fonts.lua --- merge date : 02/21/10 19:28:43 +-- merge date : 02/22/10 19:49:18 do -- begin closure to overcome local limits and interference @@ -1990,7 +1990,8 @@ local remapper = { ttf = "truetype fonts", ttc = "truetype fonts", dfont = "truetype dictionary", - cid = "other text files", -- will become "cid files" + cid = "cid maps", + fea = "font feature files", } function resolvers.find_file(name,kind) @@ -3420,6 +3421,7 @@ supplied by <l n='luatex'/>.</p> tfm.resolve_vf = true -- false tfm.share_base_kerns = false -- true (.5 sec slower on mk but brings down mem from 410M to 310M, beware: then script/lang share too) tfm.mathactions = { } +tfm.fontname_mode = "fullpath" function tfm.enhance(tfmdata,specification) local name, size = specification.name, specification.size @@ -4247,9 +4249,7 @@ fonts.initializers.node.tfm.remap = tfm.remap -- status info statistics.register("fonts load time", function() - if statistics.elapsedindeed(fonts) then - return format("%s seconds",statistics.elapsedtime(fonts)) - end + return statistics.elapsedseconds(fonts) end) end -- closure @@ -5359,6 +5359,384 @@ end -- closure do -- begin closure to overcome local limits and interference +if not modules then modules = { } end modules ['font-map'] = { + version = 1.001, + comment = "companion to font-ini.mkiv", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + +local utf = unicode.utf8 +local match, format, find, concat, gsub, lower = string.match, string.format, string.find, table.concat, string.gsub, string.lower +local lpegmatch = lpeg.match +local utfbyte = utf.byte + +local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end) +local trace_unimapping = false trackers.register("otf.unimapping", function(v) trace_unimapping = v end) + +local ctxcatcodes = tex and tex.ctxcatcodes + +--[[ldx-- +<p>Eventually this code will disappear because map files are kind +of obsolete. Some code may move to runtime or auxiliary modules.</p> +<p>The name to unciode related code will stay of course.</p> +--ldx]]-- + +fonts = fonts or { } +fonts.map = fonts.map or { } +fonts.map.data = fonts.map.data or { } +fonts.map.encodings = fonts.map.encodings or { } +fonts.map.done = fonts.map.done or { } +fonts.map.loaded = fonts.map.loaded or { } +fonts.map.direct = fonts.map.direct or { } +fonts.map.line = fonts.map.line or { } + +function fonts.map.line.pdfmapline(tag,str) + return "\\loadmapline[" .. tag .. "][" .. str .. 
"]" +end + +function fonts.map.line.pdftex(e) -- so far no combination of slant and extend + if e.name and e.fontfile then + local fullname = e.fullname or "" + if e.slant and e.slant ~= 0 then + if e.encoding then + return fonts.map.line.pdfmapline("=",format('%s %s "%g SlantFont" <%s <%s',e.name,fullname,e.slant,e.encoding,e.fontfile)) + else + return fonts.map.line.pdfmapline("=",format('%s %s "%g SlantFont" <%s',e.name,fullname,e.slant,e.fontfile)) + end + elseif e.extend and e.extend ~= 1 and e.extend ~= 0 then + if e.encoding then + return fonts.map.line.pdfmapline("=",format('%s %s "%g ExtendFont" <%s <%s',e.name,fullname,e.extend,e.encoding,e.fontfile)) + else + return fonts.map.line.pdfmapline("=",format('%s %s "%g ExtendFont" <%s',e.name,fullname,e.extend,e.fontfile)) + end + else + if e.encoding then + return fonts.map.line.pdfmapline("=",format('%s %s <%s <%s',e.name,fullname,e.encoding,e.fontfile)) + else + return fonts.map.line.pdfmapline("=",format('%s %s <%s',e.name,fullname,e.fontfile)) + end + end + else + return nil + end +end + +function fonts.map.flush(backend) -- will also erase the accumulated data + local flushline = fonts.map.line[backend or "pdftex"] or fonts.map.line.pdftex + for _, e in pairs(fonts.map.data) do + tex.sprint(ctxcatcodes,flushline(e)) + end + fonts.map.data = { } +end + +fonts.map.line.dvips = fonts.map.line.pdftex +fonts.map.line.dvipdfmx = function() end + +function fonts.map.convert_entries(filename) + if not fonts.map.loaded[filename] then + fonts.map.data, fonts.map.encodings = fonts.map.load_file(filename,fonts.map.data, fonts.map.encodings) + fonts.map.loaded[filename] = true + end +end + +function fonts.map.load_file(filename, entries, encodings) + entries = entries or { } + encodings = encodings or { } + local f = io.open(filename) + if f then + local data = f:read("*a") + if data then + for line in gmatch(data,"(.-)[\n\t]") do + if find(line,"^[%#%%%s]") then + -- print(line) + else + local extend, slant, name, fullname, fontfile, encoding + line = gsub(line,'"(.+)"', function(s) + extend = find(s,'"([^"]+) ExtendFont"') + slant = find(s,'"([^"]+) SlantFont"') + return "" + end) + if not name then + -- name fullname encoding fontfile + name, fullname, encoding, fontfile = match(line,"^(%S+)%s+(%S*)[%s<]+(%S*)[%s<]+(%S*)%s*$") + end + if not name then + -- name fullname (flag) fontfile encoding + name, fullname, fontfile, encoding = match(line,"^(%S+)%s+(%S*)[%d%s<]+(%S*)[%s<]+(%S*)%s*$") + end + if not name then + -- name fontfile + name, fontfile = match(line,"^(%S+)%s+[%d%s<]+(%S*)%s*$") + end + if name then + if encoding == "" then encoding = nil end + entries[name] = { + name = name, -- handy + fullname = fullname, + encoding = encoding, + fontfile = fontfile, + slant = tonumber(slant), + extend = tonumber(extend) + } + encodings[name] = encoding + elseif line ~= "" then + -- print(line) + end + end + end + end + f:close() + end + return entries, encodings +end + +local function load_lum_table(filename) + local lumname = file.replacesuffix(file.basename(filename),"lum") + local lumfile = resolvers.find_file(lumname,"map") or "" + if lumfile ~= "" and lfs.isfile(lumfile) then + if trace_loading or trace_unimapping then + logs.report("load otf","enhance: loading %s ",lumfile) + end + lumunic = dofile(lumfile) + return lumunic, lumfile + end +end + +local hex = lpeg.R("AF","09") +local hexfour = (hex*hex*hex*hex) / function(s) return tonumber(s,16) end +local hexsix = (hex^1) / function(s) return tonumber(s,16) end +local dec = 
(lpeg.R("09")^1) / tonumber +local period = lpeg.P(".") + +local unicode = lpeg.P("uni") * (hexfour * (period + lpeg.P(-1)) * lpeg.Cc(false) + lpeg.Ct(hexfour^1) * lpeg.Cc(true)) +local ucode = lpeg.P("u") * (hexsix * (period + lpeg.P(-1)) * lpeg.Cc(false) + lpeg.Ct(hexsix ^1) * lpeg.Cc(true)) +local index = lpeg.P("index") * dec * lpeg.Cc(false) + +local parser = unicode + ucode + index + +local parsers = { } + +local function make_name_parser(str) + if not str or str == "" then + return parser + else + local p = parsers[str] + if not p then + p = lpeg.P(str) * period * dec * lpeg.Cc(false) + parsers[str] = p + end + return p + end +end + +--~ local parser = fonts.map.make_name_parser("Japan1") +--~ local parser = fonts.map.make_name_parser() +--~ local function test(str) +--~ local b, a = lpegmatch(parser,str) +--~ print((a and table.serialize(b)) or b) +--~ end +--~ test("a.sc") +--~ test("a") +--~ test("uni1234") +--~ test("uni1234.xx") +--~ test("uni12349876") +--~ test("index1234") +--~ test("Japan1.123") + +local function tounicode16(unicode) + if unicode < 0x10000 then + return format("%04X",unicode) + else + return format("%04X%04X",unicode/1024+0xD800,unicode%1024+0xDC00) + end +end + +local function tounicode16sequence(unicodes) + local t = { } + for l=1,#unicodes do + local unicode = unicodes[l] + if unicode < 0x10000 then + t[l] = format("%04X",unicode) + else + t[l] = format("%04X%04X",unicode/1024+0xD800,unicode%1024+0xDC00) + end + end + return concat(t) +end + +--~ This is quite a bit faster but at the cost of some memory but if we +--~ do this we will also use it elsewhere so let's not follow this route +--~ now. I might use this method in the plain variant (no caching there) +--~ but then I need a flag that distinguishes between code branches. 
+--~ +--~ local cache = { } +--~ +--~ function fonts.map.tounicode16(unicode) +--~ local s = cache[unicode] +--~ if not s then +--~ if unicode < 0x10000 then +--~ s = format("%04X",unicode) +--~ else +--~ s = format("%04X%04X",unicode/1024+0xD800,unicode%1024+0xDC00) +--~ end +--~ cache[unicode] = s +--~ end +--~ return s +--~ end + +fonts.map.load_lum_table = load_lum_table +fonts.map.make_name_parser = make_name_parser +fonts.map.tounicode16 = tounicode16 +fonts.map.tounicode16sequence = tounicode16sequence + +local separator = lpeg.S("_.") +local other = lpeg.C((1 - separator)^1) +local ligsplitter = lpeg.Ct(other * (separator * other)^0) + +--~ print(table.serialize(lpegmatch(ligsplitter,"this"))) +--~ print(table.serialize(lpegmatch(ligsplitter,"this.that"))) +--~ print(table.serialize(lpegmatch(ligsplitter,"japan1.123"))) +--~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more"))) +--~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more.that"))) + +fonts.map.add_to_unicode = function(data,filename) + local unicodes = data.luatex and data.luatex.unicodes + if not unicodes then + return + end + -- we need to move this code + unicodes['space'] = unicodes['space'] or 32 + unicodes['hyphen'] = unicodes['hyphen'] or 45 + unicodes['zwj'] = unicodes['zwj'] or 0x200D + unicodes['zwnj'] = unicodes['zwnj'] or 0x200C + -- the tounicode mapping is sparse and only needed for alternatives + local tounicode, originals, ns, nl, private, unknown = { }, { }, 0, 0, fonts.private, format("%04X",utfbyte("?")) + data.luatex.tounicode, data.luatex.originals = tounicode, originals + local lumunic, uparser, oparser + if false then -- will become an option + lumunic = load_lum_table(filename) + lumunic = lumunic and lumunic.tounicode + end + local cidinfo, cidnames, cidcodes = data.cidinfo + local usedmap = cidinfo and cidinfo.usedname + usedmap = usedmap and lower(usedmap) + usedmap = usedmap and fonts.cid.map[usedmap] + if usedmap then + oparser = usedmap and make_name_parser(cidinfo.ordering) + cidnames = usedmap.names + cidcodes = usedmap.unicodes + end + uparser = make_name_parser() + local aglmap = fonts.map and fonts.map.agl_to_unicode + for index, glyph in next, data.glyphs do + local name, unic = glyph.name, glyph.unicode or -1 -- play safe + if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then + local unicode = (lumunic and lumunic[name]) or (aglmap and aglmap[name]) + if unicode then + originals[index], tounicode[index], ns = unicode, tounicode16(unicode), ns + 1 + end + -- cidmap heuristics, beware, there is no guarantee for a match unless + -- the chain resolves + if (not unicode) and usedmap then + local foundindex = lpegmatch(oparser,name) + if foundindex then + unicode = cidcodes[foundindex] -- name to number + if unicode then + originals[index], tounicode[index], ns = unicode, tounicode16(unicode), ns + 1 + else + local reference = cidnames[foundindex] -- number to name + if reference then + local foundindex = lpegmatch(oparser,reference) + if foundindex then + unicode = cidcodes[foundindex] + if unicode then + originals[index], tounicode[index], ns = unicode, tounicode16(unicode), ns + 1 + end + end + if not unicode then + local foundcodes, multiple = lpegmatch(uparser,reference) + if foundcodes then + if multiple then + originals[index], tounicode[index], nl, unicode = foundcodes, tounicode16sequence(foundcodes), nl + 1, true + else + originals[index], tounicode[index], ns, unicode = foundcodes, tounicode16(foundcodes), 
ns + 1, foundcodes + end + end + end + end + end + end + end + -- a.whatever or a_b_c.whatever or a_b_c (no numbers) + if not unicode then + local split = lpegmatch(ligsplitter,name) + local nplit = (split and #split) or 0 + if nplit == 0 then + -- skip + elseif nplit == 1 then + local base = split[1] + unicode = unicodes[base] or (aglmap and aglmap[base]) + if unicode then + if type(unicode) == "table" then + unicode = unicode[1] + end + originals[index], tounicode[index], ns = unicode, tounicode16(unicode), ns + 1 + end + else + local t = { } + for l=1,nplit do + local base = split[l] + local u = unicodes[base] or (aglmap and aglmap[base]) + if not u then + break + elseif type(u) == "table" then + t[#t+1] = u[1] + else + t[#t+1] = u + end + end + if #t > 0 then -- done then + originals[index], tounicode[index], nl, unicode = t, tounicode16sequence(t), nl + 1, true + end + end + end + -- last resort + if not unicode then + local foundcodes, multiple = lpegmatch(uparser,name) + if foundcodes then + if multiple then + originals[index], tounicode[index], nl, unicode = foundcodes, tounicode16sequence(foundcodes), nl + 1, true + else + originals[index], tounicode[index], ns, unicode = foundcodes, tounicode16(foundcodes), ns + 1, foundcodes + end + end + end + if not unicode then + originals[index], tounicode[index] = 0xFFFD, "FFFD" + end + end + end + if trace_unimapping then + for index, glyph in table.sortedpairs(data.glyphs) do + local toun, name, unic = tounicode[index], glyph.name, glyph.unicode or -1 -- play safe + if toun then + logs.report("load otf","internal: 0x%05X, name: %s, unicode: 0x%05X, tounicode: %s",index,name,unic,toun) + else + logs.report("load otf","internal: 0x%05X, name: %s, unicode: 0x%05X",index,name,unic) + end + end + end + if trace_loading and (ns > 0 or nl > 0) then + logs.report("load otf","enhance: %s tounicode entries added (%s ligatures)",nl+ns, ns) + end +end + +end -- closure + +do -- begin closure to overcome local limits and interference + if not modules then modules = { } end modules ['font-otf'] = { version = 1.001, comment = "companion to font-ini.mkiv", @@ -5380,14 +5758,10 @@ local trace_features = false trackers.register("otf.features", function(v local trace_dynamics = false trackers.register("otf.dynamics", function(v) trace_dynamics = v end) local trace_sequences = false trackers.register("otf.sequences", function(v) trace_sequences = v end) local trace_math = false trackers.register("otf.math", function(v) trace_math = v end) -local trace_unimapping = false trackers.register("otf.unimapping", function(v) trace_unimapping = v end) local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end) --~ trackers.enable("otf.loading") -local zwnj = 0x200C -local zwj = 0x200D - --[[ldx-- <p>The fontforge table has organized lookups in a certain way. 
A first implementation of this code was organized featurewise: information related to features was @@ -5445,7 +5819,7 @@ otf.features.default = otf.features.default or { } otf.enhancers = otf.enhancers or { } otf.glists = { "gsub", "gpos" } -otf.version = 2.644 -- beware: also sync font-mis.lua +otf.version = 2.645 -- beware: also sync font-mis.lua otf.pack = true -- beware: also sync font-mis.lua otf.syncspace = true otf.notdef = false @@ -5863,145 +6237,7 @@ otf.enhancers["analyse marks"] = function(data,filename) end end -local separator = lpeg.S("_.") -local other = lpeg.C((1 - separator)^1) -local ligsplitter = lpeg.Ct(other * (separator * other)^0) - ---~ print(table.serialize(lpegmatch(ligsplitter,"this"))) ---~ print(table.serialize(lpegmatch(ligsplitter,"this.that"))) ---~ print(table.serialize(lpegmatch(ligsplitter,"japan1.123"))) ---~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more"))) ---~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more.that"))) - -otf.enhancers["analyse unicodes"] = function(data,filename) - local tounicode16, tounicode16sequence = fonts.map.tounicode16, fonts.map.tounicode16sequence - local unicodes = data.luatex.unicodes - -- we need to move this code - unicodes['space'] = unicodes['space'] or 32 -- handly later on - unicodes['hyphen'] = unicodes['hyphen'] or 45 -- handly later on - unicodes['zwj'] = unicodes['zwj'] or zwj -- handly later on - unicodes['zwnj'] = unicodes['zwnj'] or zwnj -- handly later on - -- the tounicode mapping is sparse and only needed for alternatives - local tounicode, originals, ns, nl, private, unknown = { }, { }, 0, 0, fonts.private, format("%04X",utfbyte("?")) - data.luatex.tounicode, data.luatex.originals = tounicode, originals - local lumunic, uparser, oparser - if false then -- will become an option - lumunic = fonts.map.load_lum_table(filename) - lumunic = lumunic and lumunic.tounicode - end - local cidinfo, cidnames, cidcodes = data.cidinfo - local usedmap = cidinfo and cidinfo.usedname - usedmap = usedmap and lower(usedmap) - usedmap = usedmap and fonts.cid.map[usedmap] - if usedmap then - oparser = usedmap and fonts.map.make_name_parser(cidinfo.ordering) - cidnames = usedmap.names - cidcodes = usedmap.unicodes - end - uparser = fonts.map.make_name_parser() - local aglmap = fonts.map and fonts.map.agl_to_unicode - for index, glyph in next, data.glyphs do - local name, unic = glyph.name, glyph.unicode or -1 -- play safe - if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then - local unicode = (aglmap and aglmap[name]) or (lumunic and lumunic[name]) - if unicode then - originals[index], tounicode[index], ns = unicode, tounicode16(unicode), ns + 1 - end - -- cidmap heuristics, beware, there is no guarantee for a match unless - -- the chain resolves - if (not unicode) and usedmap then - local foundindex = lpegmatch(oparser,name) - if foundindex then - unicode = cidcodes[foundindex] -- name to number - if unicode then - originals[index], tounicode[index], ns = unicode, tounicode16(unicode), ns + 1 - else - local reference = cidnames[foundindex] -- number to name - if reference then - local foundindex = lpegmatch(oparser,reference) - if foundindex then - unicode = cidcodes[foundindex] - if unicode then - originals[index], tounicode[index], ns = unicode, tounicode16(unicode), ns + 1 - end - end - if not unicode then - local foundcodes, multiple = lpegmatch(uparser,reference) - if foundcodes then - if multiple then - originals[index], tounicode[index], 
nl, unicode = foundcodes, tounicode16sequence(foundcodes), nl + 1, true - else - originals[index], tounicode[index], ns, unicode = foundcodes, tounicode16(foundcodes), ns + 1, foundcodes - end - end - end - end - end - end - end - -- a.whatever or a_b_c.whatever or a_b_c (no numbers) - if not unicode then - local split = lpegmatch(ligsplitter,name) - local nplit = (split and #split) or 0 - if nplit == 0 then - -- skip - elseif nplit == 1 then - local base = split[1] - unicode = unicodes[base] or (agl and agl[base]) - if unicode then - if type(unicode) == "table" then - unicode = unicode[1] - end - originals[index], tounicode[index], ns = unicode, tounicode16(unicode), ns + 1 - end - else - local t = { } - for l=1,nplit do - local base = split[l] - local u = unicodes[base] or (agl and agl[base]) - if not u then - break - elseif type(u) == "table" then - t[#t+1] = u[1] - else - t[#t+1] = u - end - end - if #t > 0 then -- done then - originals[index], tounicode[index], nl, unicode = t, tounicode16sequence(t), nl + 1, true - end - end - end - -- last resort - if not unicode then - local foundcodes, multiple = lpegmatch(uparser,name) - if foundcodes then - if multiple then - originals[index], tounicode[index], nl, unicode = foundcodes, tounicode16sequence(foundcodes), nl + 1, true - else - originals[index], tounicode[index], ns, unicode = foundcodes, tounicode16(foundcodes), ns + 1, foundcodes - end - end - end - if not unicode then - originals[index], tounicode[index] = 0xFFFD, "FFFD" - end - end - end - if trace_unimapping then - for index, glyph in table.sortedpairs(data.glyphs) do - local toun, name, unic = tounicode[index], glyph.name, glyph.unicode or -1 -- play safe - if toun then - logs.report("load otf","internal: 0x%05X, name: %s, unicode: 0x%05X, tounicode: %s",index,name,unic,toun) - else - logs.report("load otf","internal: 0x%05X, name: %s, unicode: 0x%05X",index,name,unic) - end - end - end - if trace_loading and (ns > 0 or nl > 0) then - logs.report("load otf","enhance: %s tounicode entries added (%s ligatures)",nl+ns, ns) - end -end +otf.enhancers["analyse unicodes"] = fonts.map.add_to_unicode otf.enhancers["analyse subtables"] = function(data,filename) data.luatex = data.luatex or { } @@ -7198,6 +7434,16 @@ function tfm.read_from_open_type(specification) tfmtable.format = specification.format end tfmtable.name = tfmtable.filename or tfmtable.fullname or tfmtable.fontname + if tfm.fontname_mode == "specification" then + -- not to be used in context ! 
+ local specname = specification.specification + if specname then + tfmtable.name = specname + if trace_defining then + logs.report("define font","overloaded fontname: '%s'",specname) + end + end + end end fonts.logger.save(tfmtable,file.extname(specification.filename),specification) end @@ -11724,12 +11970,15 @@ local function istrue (s) list[s] = 'yes' end local function isfalse(s) list[s] = 'no' end local function iskey (k,v) list[k] = v end +local function istrue (s) list[s] = true end +local function isfalse(s) list[s] = false end + local spaces = lpeg.P(" ")^0 local namespec = (1-lpeg.S("/:("))^0 -- was: (1-lpeg.S("/: ("))^0 local crapspec = spaces * lpeg.P("/") * (((1-lpeg.P(":"))^0)/iscrap) * spaces local filename = (lpeg.P("file:")/isfile * (namespec/thename)) + (lpeg.P("[") * lpeg.P(true)/isname * (((1-lpeg.P("]"))^0)/thename) * lpeg.P("]")) local fontname = (lpeg.P("name:")/isname * (namespec/thename)) + lpeg.P(true)/issome * (namespec/thename) -local sometext = (lpeg.R("az") + lpeg.R("AZ") + lpeg.R("09") + lpeg.P("."))^1 +local sometext = (lpeg.R("az","AZ","09") + lpeg.S("+-."))^1 local truevalue = lpeg.P("+") * spaces * (sometext/istrue) local falsevalue = lpeg.P("-") * spaces * (sometext/isfalse) local keyvalue = (lpeg.C(sometext) * spaces * lpeg.P("=") * spaces * lpeg.C(sometext))/iskey @@ -11742,12 +11991,12 @@ local pattern = (filename + fontname) * subvalue^0 * crapspec^0 * options^0 function fonts.define.specify.colonized(specification) -- xetex mode list = { } lpegmatch(pattern,specification.specification) - for k, v in next, list do - list[k] = v:is_boolean() - if type(list[a]) == "nil" then - list[k] = v - end - end +--~ for k, v in next, list do +--~ list[k] = v:is_boolean() +--~ if type(list[a]) == "nil" then +--~ list[k] = v +--~ end +--~ end list.crap = nil -- style not supported, maybe some day if list.name then specification.name = list.name @@ -11771,235 +12020,6 @@ end -- closure do -- begin closure to overcome local limits and interference -if not modules then modules = { } end modules ['font-map'] = { - version = 1.001, - comment = "companion to font-ini.mkiv", - author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright = "PRAGMA ADE / ConTeXt Development Team", - license = "see context related readme files" -} - -local match, format, find, concat, gsub = string.match, string.format, string.find, table.concat, string.gsub -local lpegmatch = lpeg.match - -local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end) - -local ctxcatcodes = tex and tex.ctxcatcodes - ---[[ldx-- -<p>Eventually this code will disappear because map files are kind -of obsolete. Some code may move to runtime or auxiliary modules.</p> -<p>The name to unciode related code will stay of course.</p> ---ldx]]-- - -fonts = fonts or { } -fonts.map = fonts.map or { } -fonts.map.data = fonts.map.data or { } -fonts.map.encodings = fonts.map.encodings or { } -fonts.map.done = fonts.map.done or { } -fonts.map.loaded = fonts.map.loaded or { } -fonts.map.direct = fonts.map.direct or { } -fonts.map.line = fonts.map.line or { } - -function fonts.map.line.pdfmapline(tag,str) - return "\\loadmapline[" .. tag .. "][" .. str .. 
"]" -end - -function fonts.map.line.pdftex(e) -- so far no combination of slant and extend - if e.name and e.fontfile then - local fullname = e.fullname or "" - if e.slant and e.slant ~= 0 then - if e.encoding then - return fonts.map.line.pdfmapline("=",format('%s %s "%g SlantFont" <%s <%s',e.name,fullname,e.slant,e.encoding,e.fontfile)) - else - return fonts.map.line.pdfmapline("=",format('%s %s "%g SlantFont" <%s',e.name,fullname,e.slant,e.fontfile)) - end - elseif e.extend and e.extend ~= 1 and e.extend ~= 0 then - if e.encoding then - return fonts.map.line.pdfmapline("=",format('%s %s "%g ExtendFont" <%s <%s',e.name,fullname,e.extend,e.encoding,e.fontfile)) - else - return fonts.map.line.pdfmapline("=",format('%s %s "%g ExtendFont" <%s',e.name,fullname,e.extend,e.fontfile)) - end - else - if e.encoding then - return fonts.map.line.pdfmapline("=",format('%s %s <%s <%s',e.name,fullname,e.encoding,e.fontfile)) - else - return fonts.map.line.pdfmapline("=",format('%s %s <%s',e.name,fullname,e.fontfile)) - end - end - else - return nil - end -end - -function fonts.map.flush(backend) -- will also erase the accumulated data - local flushline = fonts.map.line[backend or "pdftex"] or fonts.map.line.pdftex - for _, e in pairs(fonts.map.data) do - tex.sprint(ctxcatcodes,flushline(e)) - end - fonts.map.data = { } -end - -fonts.map.line.dvips = fonts.map.line.pdftex -fonts.map.line.dvipdfmx = function() end - -function fonts.map.convert_entries(filename) - if not fonts.map.loaded[filename] then - fonts.map.data, fonts.map.encodings = fonts.map.load_file(filename,fonts.map.data, fonts.map.encodings) - fonts.map.loaded[filename] = true - end -end - -function fonts.map.load_file(filename, entries, encodings) - entries = entries or { } - encodings = encodings or { } - local f = io.open(filename) - if f then - local data = f:read("*a") - if data then - for line in gmatch(data,"(.-)[\n\t]") do - if find(line,"^[%#%%%s]") then - -- print(line) - else - local extend, slant, name, fullname, fontfile, encoding - line = gsub(line,'"(.+)"', function(s) - extend = find(s,'"([^"]+) ExtendFont"') - slant = find(s,'"([^"]+) SlantFont"') - return "" - end) - if not name then - -- name fullname encoding fontfile - name, fullname, encoding, fontfile = match(line,"^(%S+)%s+(%S*)[%s<]+(%S*)[%s<]+(%S*)%s*$") - end - if not name then - -- name fullname (flag) fontfile encoding - name, fullname, fontfile, encoding = match(line,"^(%S+)%s+(%S*)[%d%s<]+(%S*)[%s<]+(%S*)%s*$") - end - if not name then - -- name fontfile - name, fontfile = match(line,"^(%S+)%s+[%d%s<]+(%S*)%s*$") - end - if name then - if encoding == "" then encoding = nil end - entries[name] = { - name = name, -- handy - fullname = fullname, - encoding = encoding, - fontfile = fontfile, - slant = tonumber(slant), - extend = tonumber(extend) - } - encodings[name] = encoding - elseif line ~= "" then - -- print(line) - end - end - end - end - f:close() - end - return entries, encodings -end - -function fonts.map.load_lum_table(filename) - local lumname = file.replacesuffix(file.basename(filename),"lum") - local lumfile = resolvers.find_file(lumname,"map") or "" - if lumfile ~= "" and lfs.isfile(lumfile) then - if trace_loading or trace_unimapping then - logs.report("load otf","enhance: loading %s ",lumfile) - end - lumunic = dofile(lumfile) - return lumunic, lumfile - end -end - -local hex = lpeg.R("AF","09") -local hexfour = (hex*hex*hex*hex) / function(s) return tonumber(s,16) end -local hexsix = (hex^1) / function(s) return tonumber(s,16) end -local dec = 
(lpeg.R("09")^1) / tonumber -local period = lpeg.P(".") - -local unicode = lpeg.P("uni") * (hexfour * (period + lpeg.P(-1)) * lpeg.Cc(false) + lpeg.Ct(hexfour^1) * lpeg.Cc(true)) -local ucode = lpeg.P("u") * (hexsix * (period + lpeg.P(-1)) * lpeg.Cc(false) + lpeg.Ct(hexsix ^1) * lpeg.Cc(true)) -local index = lpeg.P("index") * dec * lpeg.Cc(false) - -local parser = unicode + ucode + index - -local parsers = { } - -function fonts.map.make_name_parser(str) - if not str or str == "" then - return parser - else - local p = parsers[str] - if not p then - p = lpeg.P(str) * period * dec * lpeg.Cc(false) - parsers[str] = p - end - return p - end -end - ---~ local parser = fonts.map.make_name_parser("Japan1") ---~ local parser = fonts.map.make_name_parser() ---~ local function test(str) ---~ local b, a = lpegmatch(parser,str) ---~ print((a and table.serialize(b)) or b) ---~ end ---~ test("a.sc") ---~ test("a") ---~ test("uni1234") ---~ test("uni1234.xx") ---~ test("uni12349876") ---~ test("index1234") ---~ test("Japan1.123") - -function fonts.map.tounicode16(unicode) - if unicode < 0x10000 then - return format("%04X",unicode) - else - return format("%04X%04X",unicode/1024+0xD800,unicode%1024+0xDC00) - end -end - -function fonts.map.tounicode16sequence(unicodes) - local t = { } - for l=1,#unicodes do - local unicode = unicodes[l] - if unicode < 0x10000 then - t[l] = format("%04X",unicode) - else - t[l] = format("%04X%04X",unicode/1024+0xD800,unicode%1024+0xDC00) - end - end - return concat(t) -end - ---~ This is quite a bit faster but at the cost of some memory but if we ---~ do this we will also use it elsewhere so let's not follow this route ---~ now. I might use this method in the plain variant (no caching there) ---~ but then I need a flag that distinguishes between code branches. ---~ ---~ local cache = { } ---~ ---~ function fonts.map.tounicode16(unicode) ---~ local s = cache[unicode] ---~ if not s then ---~ if unicode < 0x10000 then ---~ s = format("%04X",unicode) ---~ else ---~ s = format("%04X%04X",unicode/1024+0xD800,unicode%1024+0xDC00) ---~ end ---~ cache[unicode] = s ---~ end ---~ return s ---~ end - - -end -- closure - -do -- begin closure to overcome local limits and interference - if not modules then modules = { } end modules ['font-dum'] = { version = 1.001, comment = "companion to luatex-*.tex", @@ -12012,8 +12032,9 @@ fonts = fonts or { } -- general -fonts.otf.pack = false -fonts.tfm.resolve_vf = false -- no sure about this +fonts.otf.pack = false +fonts.tfm.resolve_vf = false -- no sure about this +fonts.tfm.fontname_mode = "specification" -- somehow latex needs this -- readers diff --git a/tex/generic/context/luatex-fonts.lua b/tex/generic/context/luatex-fonts.lua index 56768138b..84acb2b18 100644 --- a/tex/generic/context/luatex-fonts.lua +++ b/tex/generic/context/luatex-fonts.lua @@ -111,6 +111,7 @@ else loadmodule('font-tfm.lua') -- will be split (we may need font-log) loadmodule('font-cid.lua') loadmodule('font-ott.lua') -- might be split + loadmodule('font-map.lua') -- for loading lum file (will be stripped) loadmodule('font-otf.lua') loadmodule('font-otd.lua') loadmodule('font-oti.lua') @@ -120,7 +121,6 @@ else loadmodule('font-otc.lua') loadmodule('font-def.lua') loadmodule('font-xtx.lua') - loadmodule('font-map.lua') -- for loading lum file (will be stripped) loadmodule('font-dum.lua') end |