From 8c9101fcc313f47232c8378a1cde37874efd845a Mon Sep 17 00:00:00 2001 From: Philipp Gesang Date: Tue, 5 Jul 2016 08:17:49 +0200 Subject: [fontloader] sync with Context as of 2016-07-05 --- src/fontloader/misc/fontloader-font-con.lua | 141 ++--- src/fontloader/misc/fontloader-font-def.lua | 5 +- src/fontloader/misc/fontloader-font-dsp.lua | 66 ++- src/fontloader/misc/fontloader-font-map.lua | 12 +- src/fontloader/misc/fontloader-font-ocl.lua | 113 ++-- src/fontloader/misc/fontloader-font-one.lua | 20 +- src/fontloader/misc/fontloader-font-onr.lua | 59 ++- src/fontloader/misc/fontloader-font-oti.lua | 2 + src/fontloader/misc/fontloader-font-otl.lua | 15 +- src/fontloader/misc/fontloader-font-oto.lua | 10 +- src/fontloader/misc/fontloader-font-oup.lua | 2 + src/fontloader/misc/fontloader-font-tfm.lua | 572 ++++++++++++++++++++- src/fontloader/misc/fontloader-fonts-demo-vf-1.lua | 8 + src/fontloader/misc/fontloader-fonts-enc.lua | 59 ++- src/fontloader/misc/fontloader-fonts.lua | 5 +- src/fontloader/misc/fontloader-l-table.lua | 16 +- src/fontloader/misc/fontloader-plain.tex | 8 +- src/fontloader/misc/fontloader-test.tex | 12 + src/fontloader/misc/fontloader-util-fil.lua | 23 +- src/fontloader/misc/fontloader-util-str.lua | 6 +- 20 files changed, 890 insertions(+), 264 deletions(-) (limited to 'src/fontloader/misc') diff --git a/src/fontloader/misc/fontloader-font-con.lua b/src/fontloader/misc/fontloader-font-con.lua index 1a0daff..9a6f3f8 100644 --- a/src/fontloader/misc/fontloader-font-con.lua +++ b/src/fontloader/misc/fontloader-font-con.lua @@ -9,7 +9,7 @@ if not modules then modules = { } end modules ['font-con'] = { -- some names of table entries will be changed (no _) local next, tostring, rawget = next, tostring, rawget -local format, match, lower, gsub = string.format, string.match, string.lower, string.gsub +local format, match, lower, gsub, find = string.format, string.match, string.lower, string.gsub, string.find local sort, insert, concat, sortedkeys, serialize, fastcopy = table.sort, table.insert, table.concat, table.sortedkeys, table.serialize, table.fastcopy local derivetable = table.derive @@ -46,102 +46,6 @@ constructors.privateoffset = 0xF0000 -- 0x10FFFF constructors.cacheintex = true -- so we see the original table in fonts.font --- Some experimental helpers (handy for tracing): --- --- todo: extra: --- --- extra_space => space.extra --- space => space.width --- space_stretch => space.stretch --- space_shrink => space.shrink - --- We do keep the x-height, extra_space, space_shrink and space_stretch --- around as these are low level official names. 
- -constructors.keys = { - properties = { - encodingbytes = "number", - embedding = "number", - cidinfo = { }, - format = "string", - fontname = "string", - fullname = "string", - filename = "filename", - psname = "string", - name = "string", - virtualized = "boolean", - hasitalics = "boolean", - autoitalicamount = "basepoints", - nostackmath = "boolean", - noglyphnames = "boolean", - mode = "string", - hasmath = "boolean", - mathitalics = "boolean", - textitalics = "boolean", - finalized = "boolean", - }, - parameters = { - mathsize = "number", - scriptpercentage = "float", - scriptscriptpercentage = "float", - units = "cardinal", - designsize = "scaledpoints", - expansion = { - stretch = "integerscale", -- might become float - shrink = "integerscale", -- might become float - step = "integerscale", -- might become float - auto = "boolean", - }, - protrusion = { - auto = "boolean", - }, - slantfactor = "float", - extendfactor = "float", - factor = "float", - hfactor = "float", - vfactor = "float", - size = "scaledpoints", - units = "scaledpoints", - scaledpoints = "scaledpoints", - slantperpoint = "scaledpoints", - spacing = { - width = "scaledpoints", - stretch = "scaledpoints", - shrink = "scaledpoints", - extra = "scaledpoints", - }, - xheight = "scaledpoints", - quad = "scaledpoints", - ascender = "scaledpoints", - descender = "scaledpoints", - synonyms = { - space = "spacing.width", - spacestretch = "spacing.stretch", - spaceshrink = "spacing.shrink", - extraspace = "spacing.extra", - x_height = "xheight", - space_stretch = "spacing.stretch", - space_shrink = "spacing.shrink", - extra_space = "spacing.extra", - em = "quad", - ex = "xheight", - slant = "slantperpoint", - }, - }, - description = { - width = "basepoints", - height = "basepoints", - depth = "basepoints", - boundingbox = { }, - }, - character = { - width = "scaledpoints", - height = "scaledpoints", - depth = "scaledpoints", - italic = "scaledpoints", - }, -} - -- This might become an interface: local designsizes = allocate() @@ -351,6 +255,27 @@ local function mathkerns(v,vdelta) return k end +local psfake = 0 + +local function fixedpsname(psname,fallback) + local usedname = psname + if psname and psname ~= "" then + if find(psname," ") then + usedname = gsub(psname,"[%s]+","-") + else + -- we assume that the name is sane enough (we might sanitize completely some day) + end + elseif not fallback or fallback == "" then + psfake = psfake + 1 + psname = "fakename-" .. 
psfake + else + -- filenames can be a mess so we do a drastic cleanup + psname = fallback + usedname = gsub(psname,"[^a-zA-Z0-9]+","-") + end + return usedname, psname ~= usedname +end + function constructors.scale(tfmdata,specification) local target = { } -- the new table -- @@ -453,23 +378,22 @@ function constructors.scale(tfmdata,specification) target.format = properties.format target.cache = constructors.cacheintex and "yes" or "renew" -- - local fontname = properties.fontname or tfmdata.fontname -- for the moment we fall back on - local fullname = properties.fullname or tfmdata.fullname -- names in the tfmdata although - local filename = properties.filename or tfmdata.filename -- that is not the right place to - local psname = properties.psname or tfmdata.psname -- pass them + local fontname = properties.fontname or tfmdata.fontname + local fullname = properties.fullname or tfmdata.fullname + local filename = properties.filename or tfmdata.filename + local psname = properties.psname or tfmdata.psname local name = properties.name or tfmdata.name -- - if not psname or psname == "" then - -- name used in pdf file as well as for selecting subfont in ttc/dfont - psname = fontname or (fullname and fonts.names.cleanname(fullname)) - end + -- the psname used in pdf file as well as for selecting subfont in ttc + -- + local psname, psfixed = fixedpsname(psname,fontname or fullname or file.nameonly(filename)) + -- target.fontname = fontname target.fullname = fullname target.filename = filename target.psname = psname target.name = name -- - -- properties.fontname = fontname properties.fullname = fullname properties.filename = filename @@ -602,8 +526,9 @@ function constructors.scale(tfmdata,specification) -- end of context specific trickery -- if trace_defining then - report_defining("defining tfm, name %a, fullname %a, filename %a, hscale %a, vscale %a, math %a, italics %a", - name,fullname,filename,hdelta,vdelta,hasmath and "enabled" or "disabled",hasitalics and "enabled" or "disabled") + report_defining("defining tfm, name %a, fullname %a, filename %a, %spsname %a, hscale %a, vscale %a, math %a, italics %a", + name,fullname,filename,psfixed and "(fixed) " or "",psname,hdelta,vdelta, + hasmath and "enabled" or "disabled",hasitalics and "enabled" or "disabled") end -- constructors.beforecopyingcharacters(target,tfmdata) diff --git a/src/fontloader/misc/fontloader-font-def.lua b/src/fontloader/misc/fontloader-font-def.lua index add42ee..88d6145 100644 --- a/src/fontloader/misc/fontloader-font-def.lua +++ b/src/fontloader/misc/fontloader-font-def.lua @@ -8,10 +8,11 @@ if not modules then modules = { } end modules ['font-def'] = { -- We can overload some of the definers.functions so we don't local them. -local format, gmatch, match, find, lower, gsub = string.format, string.gmatch, string.match, string.find, string.lower, string.gsub +local lower, gsub = string.lower, string.gsub local tostring, next = tostring, next local lpegmatch = lpeg.match local suffixonly, removesuffix = file.suffix, file.removesuffix +local formatters = string.formatters local allocate = utilities.storage.allocate @@ -264,7 +265,7 @@ function definers.applypostprocessors(tfmdata) if type(extrahash) == "string" and extrahash ~= "" then -- e.g. 
a reencoding needs this extrahash = gsub(lower(extrahash),"[^a-z]","-") - properties.fullname = format("%s-%s",properties.fullname,extrahash) + properties.fullname = formatters["%s-%s"](properties.fullname,extrahash) end end end diff --git a/src/fontloader/misc/fontloader-font-dsp.lua b/src/fontloader/misc/fontloader-font-dsp.lua index 1e8b3bd..49d5929 100644 --- a/src/fontloader/misc/fontloader-font-dsp.lua +++ b/src/fontloader/misc/fontloader-font-dsp.lua @@ -224,10 +224,15 @@ local function readcoverage(f,offset,simple) return coverage end -local function readclassdef(f,offset) +local function readclassdef(f,offset,preset) setposition(f,offset) local classdefformat = readushort(f) local classdef = { } + if type(preset) == "number" then + for k=0,preset-1 do + classdef[k] = 1 + end + end if classdefformat == 1 then local index = readushort(f) local nofclassdef = readushort(f) @@ -249,6 +254,13 @@ local function readclassdef(f,offset) else report("unknown classdef format %a ",classdefformat) end + if type(preset) == "table" then + for k in next, preset do + if not classdef[k] then + classdef[k] = 1 + end + end + end return classdef end @@ -365,7 +377,9 @@ end -- We generalize the chained lookups so that we can do with only one handler -- when processing them. -local function readlookuparray(f,noflookups) +-- pruned + +local function readlookuparray(f,noflookups,nofcurrent) local lookups = { } if noflookups > 0 then local length = 0 @@ -381,10 +395,34 @@ local function readlookuparray(f,noflookups) lookups[index] = false end end + -- if length > nofcurrent then + -- report_issue("more lookups than currently matched characters") + -- end end return lookups end +-- not pruned +-- +-- local function readlookuparray(f,noflookups,nofcurrent) +-- local lookups = { } +-- for i=1,nofcurrent do +-- lookups[i] = false +-- end +-- for i=1,noflookups do +-- local index = readushort(f) + 1 +-- if index > nofcurrent then +-- report_issue("more lookups than currently matched characters") +-- for i=nofcurrent+1,index-1 do +-- lookups[i] = false +-- end +-- nofcurrent = index +-- end +-- lookups[index] = readushort(f) + 1 +-- end +-- return lookups +-- end + local function unchainedcontext(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofglyphs,what) local tableoffset = lookupoffset + offset setposition(f,tableoffset) @@ -409,7 +447,7 @@ local function unchainedcontext(f,fontdata,lookupid,lookupoffset,offset,glyphs,n for i=2,nofcurrent do current[i] = { readushort(f) } end - local lookups = readlookuparray(f,noflookups) + local lookups = readlookuparray(f,noflookups,nofcurrent) rules[#rules+1] = { current = current, lookups = lookups @@ -433,7 +471,7 @@ local function unchainedcontext(f,fontdata,lookupid,lookupoffset,offset,glyphs,n local rules = { } if subclasssets then coverage = readcoverage(f,tableoffset + coverage) - currentclassdef = readclassdef(f,tableoffset + currentclassdef) + currentclassdef = readclassdef(f,tableoffset + currentclassdef,coverage) local currentclasses = classtocoverage(currentclassdef,fontdata.glyphs) for class=1,#subclasssets do local offset = subclasssets[class] @@ -452,7 +490,7 @@ local function unchainedcontext(f,fontdata,lookupid,lookupoffset,offset,glyphs,n for i=2,nofcurrent do current[i] = currentclasses[readushort(f) + 1] end - local lookups = readlookuparray(f,noflookups) + local lookups = readlookuparray(f,noflookups,nofcurrent) rules[#rules+1] = { current = current, lookups = lookups @@ -476,7 +514,7 @@ local function 
unchainedcontext(f,fontdata,lookupid,lookupoffset,offset,glyphs,n elseif subtype == 3 then local current = readarray(f) local noflookups = readushort(f) - local lookups = readlookuparray(f,noflookups) + local lookups = readlookuparray(f,noflookups,#current) current = readcoveragearray(f,tableoffset,current,true) return { format = "coverage", @@ -536,7 +574,7 @@ local function chainedcontext(f,fontdata,lookupid,lookupoffset,offset,glyphs,nof end end local noflookups = readushort(f) - local lookups = readlookuparray(f,noflookups) + local lookups = readlookuparray(f,noflookups,nofcurrent) rules[#rules+1] = { before = before, current = current, @@ -562,9 +600,9 @@ local function chainedcontext(f,fontdata,lookupid,lookupoffset,offset,glyphs,nof local rules = { } if subclasssets then local coverage = readcoverage(f,tableoffset + coverage) - local beforeclassdef = readclassdef(f,tableoffset + beforeclassdef) - local currentclassdef = readclassdef(f,tableoffset + currentclassdef) - local afterclassdef = readclassdef(f,tableoffset + afterclassdef) + local beforeclassdef = readclassdef(f,tableoffset + beforeclassdef,nofglyphs) + local currentclassdef = readclassdef(f,tableoffset + currentclassdef,coverage) + local afterclassdef = readclassdef(f,tableoffset + afterclassdef,nofglyphs) local beforeclasses = classtocoverage(beforeclassdef,fontdata.glyphs) local currentclasses = classtocoverage(currentclassdef,fontdata.glyphs) local afterclasses = classtocoverage(afterclassdef,fontdata.glyphs) @@ -604,7 +642,7 @@ local function chainedcontext(f,fontdata,lookupid,lookupoffset,offset,glyphs,nof end -- no sequence index here (so why in context as it saves nothing) local noflookups = readushort(f) - local lookups = readlookuparray(f,noflookups) + local lookups = readlookuparray(f,noflookups,nofcurrent) rules[#rules+1] = { before = before, current = current, @@ -632,7 +670,7 @@ local function chainedcontext(f,fontdata,lookupid,lookupoffset,offset,glyphs,nof local current = readarray(f) local after = readarray(f) local noflookups = readushort(f) - local lookups = readlookuparray(f,noflookups) + local lookups = readlookuparray(f,noflookups,#current) before = readcoveragearray(f,tableoffset,before,true) current = readcoveragearray(f,tableoffset,current,true) after = readcoveragearray(f,tableoffset,after,true) @@ -988,8 +1026,8 @@ function gposhandlers.pair(f,fontdata,lookupid,lookupoffset,offset,glyphs,nofgly local nofclasses2 = readushort(f) -- incl class 0 local classlist = readpairclasssets(f,nofclasses1,nofclasses2,format1,format2) coverage = readcoverage(f,tableoffset+coverage) - classdef1 = readclassdef(f,tableoffset+classdef1) - classdef2 = readclassdef(f,tableoffset+classdef2) + classdef1 = readclassdef(f,tableoffset+classdef1,coverage) + classdef2 = readclassdef(f,tableoffset+classdef2,nofglyphs) local usedcoverage = { } for g1, c1 in next, classdef1 do if coverage[g1] then diff --git a/src/fontloader/misc/fontloader-font-map.lua b/src/fontloader/misc/fontloader-font-map.lua index 6151b37..7f3b0f9 100644 --- a/src/fontloader/misc/fontloader-font-map.lua +++ b/src/fontloader/misc/fontloader-font-map.lua @@ -13,8 +13,8 @@ local P, R, S, C, Ct, Cc, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Ct, l local floor = math.floor local formatters = string.formatters -local trace_loading = false trackers.register("fonts.loading", function(v) trace_loading = v end) -local trace_mapping = false trackers.register("fonts.mapping", function(v) trace_unimapping = v end) +local trace_loading = false 
trackers.register("fonts.loading", function(v) trace_loading = v end) +local trace_mapping = false trackers.register("fonts.mapping", function(v) trace_mapping = v end) local report_fonts = logs.reporter("fonts","loading") -- not otf only @@ -265,6 +265,9 @@ function mappings.addtounicode(data,filename,checklookups) local resources = data.resources local unicodes = resources.unicodes if not unicodes then + if trace_mapping then + report_fonts("no unicode list, quitting tounicode for %a",filename) + end return end local properties = data.properties @@ -474,11 +477,10 @@ function mappings.addtounicode(data,filename,checklookups) if trace_mapping and unicoded > 0 then report_fonts("%n ligature tounicode mappings deduced from gsub ligature features",unicoded) end - if trace_mapping then for unic, glyph in table.sortedhash(descriptions) do - local name = glyph.name - local index = glyph.index + local name = glyph.name or "-" + local index = glyph.index or 0 local unicode = glyph.unicode if unicode then if type(unicode) == "table" then diff --git a/src/fontloader/misc/fontloader-font-ocl.lua b/src/fontloader/misc/fontloader-font-ocl.lua index b2aba7a..ed1be95 100644 --- a/src/fontloader/misc/fontloader-font-ocl.lua +++ b/src/fontloader/misc/fontloader-font-ocl.lua @@ -8,7 +8,7 @@ if not modules then modules = { } end modules ['font-ocl'] = { -- todo : user list of colors -local tostring, next = tostring, next +local tostring, next, format = tostring, next, string.format local formatters = string.formatters @@ -166,34 +166,56 @@ do end -if context and xml.convert then + +do local report_svg = logs.reporter("fonts","svg conversion") - local xmlconvert = xml.convert - local xmlfirst = xml.first local loaddata = io.loaddata local savedata = io.savedata local remove = os.remove + if context and xml.convert then + + local xmlconvert = xml.convert + local xmlfirst = xml.first + + function otfsvg.filterglyph(entry,index) + local svg = xmlconvert(entry.data) + local root = svg and xmlfirst(svg,"/svg[@id='glyph"..index.."']") + local data = root and tostring(root) + -- report_svg("data for glyph %04X: %s",index,data) + return data + end + + else + + function otfsvg.filterglyph(entry,index) -- can be overloaded + return entry.data + end + + end + -- function otfsvg.topdf(svgshapes) - -- local svgfile = "temp-otf-svg-shape.svg" - -- local pdffile = "temp-otf-svg-shape.pdf" - -- local command = "inkscape " .. svgfile .. " --export-pdf=" .. pdffile - -- local testrun = false - -- local pdfshapes = { } - -- local nofshapes = #svgshapes + -- local svgfile = "temp-otf-svg-shape.svg" + -- local pdffile = "temp-otf-svg-shape.pdf" + -- local command = "inkscape " .. svgfile .. " --export-pdf=" .. 
pdffile + -- local testrun = false + -- local pdfshapes = { } + -- local nofshapes = #svgshapes + -- local filterglyph = otfsvg.filterglyph -- report_svg("processing %i svg containers",nofshapes) -- statistics.starttiming() -- for i=1,nofshapes do -- local entry = svgshapes[i] - -- for j=entry.first,entry.last do - -- local svg = xmlconvert(entry.data) - -- local data = xmlfirst(svg,"/svg[@id='glyph"..j.."']") + -- for index=entry.first,entry.last do + -- local data = filterglyph(entry,index) -- savedata(svgfile,tostring(data)) - -- report_svg("processing svg shape of glyph %i in container %i",j,i) - -- os.execute(command) - -- pdfshapes[j] = loaddata(pdffile) + -- if data and data ~= "" then + -- report_svg("processing svg shape of glyph %i in container %i",index,i) + -- os.execute(command) + -- pdfshapes[index] = loaddata(pdffile) + -- end -- end -- if testrun and i > testrun then -- report_svg("quiting test run") @@ -207,26 +229,25 @@ if context and xml.convert then -- end function otfsvg.topdf(svgshapes) - local inkscape = io.popen("inkscape --shell 2>&1","w") - local pdfshapes = { } - local nofshapes = #svgshapes - local f_svgfile = formatters["temp-otf-svg-shape-%i.svg"] - local f_pdffile = formatters["temp-otf-svg-shape-%i.pdf"] - local f_convert = formatters["%s --export-pdf=%s\n"] + local inkscape = io.popen("inkscape --shell > temp-otf-svg-shape.log","w") + local pdfshapes = { } + local nofshapes = #svgshapes + local f_svgfile = formatters["temp-otf-svg-shape-%i.svg"] + local f_pdffile = formatters["temp-otf-svg-shape-%i.pdf"] + local f_convert = formatters["%s --export-pdf=%s\n"] + local filterglyph = otfsvg.filterglyph report_svg("processing %i svg containers",nofshapes) statistics.starttiming() for i=1,nofshapes do local entry = svgshapes[i] - for j=entry.first,entry.last do - local svg = xmlconvert(entry.data) - local root = svg and xmlfirst(svg,"/svg[@id='glyph"..j.."']") - local data = root and tostring(root) + for index=entry.first,entry.last do + local data = filterglyph(entry,index) if data and data ~= "" then - local svgfile = f_svgfile(j) - local pdffile = f_pdffile(j) + local svgfile = f_svgfile(index) + local pdffile = f_pdffile(index) savedata(svgfile,data) inkscape:write(f_convert(svgfile,pdffile)) - pdfshapes[j] = true + pdfshapes[index] = true end end end @@ -236,39 +257,17 @@ if context and xml.convert then -- end inkscape:close() report_svg("processing %i pdf results",nofshapes) - for i in next, pdfshapes do - local svgfile = f_svgfile(i) - local pdffile = f_pdffile(i) - pdfshapes[i] = loaddata(pdffile) + for index in next, pdfshapes do + local svgfile = f_svgfile(index) + local pdffile = f_pdffile(index) + pdfshapes[index] = loaddata(pdffile) remove(svgfile) remove(pdffile) end statistics.stoptiming() - report_svg("conversion time: %0.3f",statistics.elapsedtime()) - return pdfshapes - end - -else - - function otfsvg.topdf(svgshapes) - local svgfile = "temp-otf-svg-shape.svg" - local pdffile = "temp-otf-svg-shape.pdf" - local command = "inkscape " .. svgfile .. " --export-pdf=" .. pdffile - local pdfshapes = { } - local nofshapes = #svgshapes - texio.write(formatters["[converting %i svg glyphs to pdf using command %q : "](nofshapes,command)) - for i=1,nofshapes do - local entry = svgshapes[i] - for j=entry.first,entry.last do - -- cross our fingers .. 
some, day i will filter - texio.write(formatters["%i "](j)) - io.savedata(svgfile,tostring(entry.data)) - os.execute(command) - pdfshapes[j] = io.loaddata(pdffile) - end + if statistics.elapsedseconds then + report_svg("svg conversion time %s",statistics.elapsedseconds()) end - os.remove(svgfile) - texio.write("done]") return pdfshapes end diff --git a/src/fontloader/misc/fontloader-font-one.lua b/src/fontloader/misc/fontloader-font-one.lua index a6f47e8..8629850 100644 --- a/src/fontloader/misc/fontloader-font-one.lua +++ b/src/fontloader/misc/fontloader-font-one.lua @@ -86,7 +86,8 @@ local steps = { "add ligatures", "add extra kerns", "normalize features", - "fix names", + "check extra features", + "fix names", -- what a hack ... -- "add tounicode data", } @@ -318,6 +319,8 @@ enhancers["normalize features"] = function(data) data.resources.sequences = sequences end +enhancers["check extra features"] = otf.enhancers.enhance + enhancers["fix names"] = function(data) for k, v in next, data.descriptions do local n = v.name @@ -752,18 +755,12 @@ end

We have the usual two modes and the related feature initializers and processors.

--ldx]]-- -local function setmode(tfmdata,value) - if value then - tfmdata.properties.mode = lower(value) - end -end - registerafmfeature { name = "mode", description = "mode", initializers = { - base = setmode, - node = setmode, + base = otf.modeinitializer, + node = otf.modeinitializer, } } @@ -782,8 +779,6 @@ registerafmfeature { -- readers -local check_tfm = readers.check_tfm - fonts.formats.afm = "type1" fonts.formats.pfb = "type1" @@ -820,7 +815,8 @@ function readers.afm(specification,method) tfmdata = check_afm(specification,specification.name .. "." .. forced) end if not tfmdata then - method = method or definers.method or "afm or tfm" + local check_tfm = readers.check_tfm + method = (check_tfm and (method or definers.method or "afm or tfm")) or "afm" if method == "tfm" then tfmdata = check_tfm(specification,specification.name) elseif method == "afm" then diff --git a/src/fontloader/misc/fontloader-font-onr.lua b/src/fontloader/misc/fontloader-font-onr.lua index a4969ad..dcf7445 100644 --- a/src/fontloader/misc/fontloader-font-onr.lua +++ b/src/fontloader/misc/fontloader-font-onr.lua @@ -21,23 +21,21 @@ add features.

local fonts, logs, trackers, resolvers = fonts, logs, trackers, resolvers -local next, type, tonumber, rawget = next, type, tonumber, rawget +local next, type, tonumber, rawget, rawset = next, type, tonumber, rawget, rawset local match, lower, gsub, strip, find = string.match, string.lower, string.gsub, string.strip, string.find local char, byte, sub = string.char, string.byte, string.sub local abs = math.abs local bxor, rshift = bit32.bxor, bit32.rshift -local P, S, R, Cmt, C, Ct, Cs, Carg = lpeg.P, lpeg.S, lpeg.R, lpeg.Cmt, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg +local P, S, R, Cmt, C, Ct, Cs, Carg, Cf, Cg = lpeg.P, lpeg.S, lpeg.R, lpeg.Cmt, lpeg.C, lpeg.Ct, lpeg.Cs, lpeg.Carg, lpeg.Cf, lpeg.Cg local lpegmatch, patterns = lpeg.match, lpeg.patterns local trace_indexing = false trackers.register("afm.indexing", function(v) trace_indexing = v end) local trace_loading = false trackers.register("afm.loading", function(v) trace_loading = v end) local report_afm = logs.reporter("fonts","afm loading") -local report_afm = logs.reporter("fonts","pfb loading") +local report_pfb = logs.reporter("fonts","pfb loading") -fonts = fonts or { } -local handlers = fonts.handlers or { } -fonts.handlers = handlers +local handlers = fonts.handlers local afm = handlers.afm or { } handlers.afm = afm local readers = afm.readers or { } @@ -72,20 +70,36 @@ do local initialize = function(str,position,size) n = 0 - m = tonumber(size) + m = size -- % tonumber(size) return position + 1 end - local charstrings = P("/CharStrings") - local name = P("/") * C((R("az")+R("AZ")+R("09")+S("-_."))^1) - local size = C(R("09")^1) - local spaces = P(" ")^1 + local charstrings = P("/CharStrings") + local encoding = P("/Encoding") + local dup = P("dup") + local put = P("put") + local array = P("array") + local name = P("/") * C((R("az")+R("AZ")+R("09")+S("-_."))^1) + local digits = R("09")^1 + local cardinal = digits / tonumber + local spaces = P(" ")^1 + local spacing = patterns.whitespace^0 local p_filternames = Ct ( - (1-charstrings)^0 * charstrings * spaces * Cmt(size,initialize) - * (Cmt(name * P(" ")^1 * C(R("09")^1), progress) + P(1))^1 + (1-charstrings)^0 * charstrings * spaces * Cmt(cardinal,initialize) + * (Cmt(name * spaces * cardinal, progress) + P(1))^1 ) + -- /Encoding 256 array + -- 0 1 255 {1 index exch /.notdef put} for + -- dup 0 /Foo put + + local p_filterencoding = + (1-encoding)^0 * encoding * spaces * digits * spaces * array * (1-dup)^0 + * Cf( + Ct("") * Cg(spacing * dup * spaces * cardinal * spaces * name * spaces * put)^1 + ,rawset) + -- if one of first 4 not 0-9A-F then binary else hex local decrypt @@ -143,20 +157,31 @@ do local vector = lpegmatch(p_filternames,binary) - if vector[1] == ".notdef" then - -- tricky - vector[0] = table.remove(vector,1) +-- if vector[1] == ".notdef" then +-- -- tricky +-- vector[0] = table.remove(vector,1) +-- end + + for i=1,#vector do + vector[i-1] = vector[i] end + vector[#vector] = nil if not vector then report_pfb("no vector in %a",filename) return end - return vector + local encoding = lpegmatch(p_filterencoding,ascii) + + return vector, encoding end + local pfb = handlers.pfb or { } + handlers.pfb = pfb + pfb.loadvector = loadpfbvector + get_indexes = function(data,pfbname) local vector = loadpfbvector(pfbname) if vector then diff --git a/src/fontloader/misc/fontloader-font-oti.lua b/src/fontloader/misc/fontloader-font-oti.lua index d74d2d5..5e812bb 100644 --- a/src/fontloader/misc/fontloader-font-oti.lua +++ b/src/fontloader/misc/fontloader-font-oti.lua @@ -34,6 +34,8 @@ 
local function setmode(tfmdata,value) end end +otf.modeinitializer = setmode + local function setlanguage(tfmdata,value) if value then local cleanvalue = lower(value) diff --git a/src/fontloader/misc/fontloader-font-otl.lua b/src/fontloader/misc/fontloader-font-otl.lua index 59d868b..a35db5b 100644 --- a/src/fontloader/misc/fontloader-font-otl.lua +++ b/src/fontloader/misc/fontloader-font-otl.lua @@ -53,7 +53,7 @@ local report_otf = logs.reporter("fonts","otf loading") local fonts = fonts local otf = fonts.handlers.otf -otf.version = 3.023 -- beware: also sync font-mis.lua and in mtx-fonts +otf.version = 3.025 -- beware: also sync font-mis.lua and in mtx-fonts otf.cache = containers.define("fonts", "otl", otf.version, true) otf.svgcache = containers.define("fonts", "svg", otf.version, true) otf.pdfcache = containers.define("fonts", "pdf", otf.version, true) @@ -305,7 +305,7 @@ function otf.load(filename,sub,featurefile) -- second argument (format) is gone collectgarbage("collect") end stoptiming(otfreaders) - if elapsedtime then -- not in generic + if elapsedtime then report_otf("loading, optimizing, packing and caching time %s", elapsedtime(otfreaders)) end if cleanup > 3 then @@ -340,7 +340,6 @@ function otf.load(filename,sub,featurefile) -- second argument (format) is gone data.metadata.math = data.resources.mathconstants end - return data end @@ -507,14 +506,14 @@ local function copytotfm(data,cache_id) spaceunits, spacer = charwidth, "charwidth" end end - spaceunits = tonumber(spaceunits) or 500 -- brrr + spaceunits = tonumber(spaceunits) or units/2 -- parameters.slant = 0 - parameters.space = spaceunits -- 3.333 (cmr10) + parameters.space = spaceunits -- 3.333 (cmr10) parameters.space_stretch = 1*units/2 -- 500 -- 1.666 (cmr10) - parameters.space_shrink = 1*units/3 -- 333 -- 1.111 (cmr10) - parameters.x_height = 2*units/5 -- 400 - parameters.quad = units -- 1000 + parameters.space_shrink = 1*units/3 -- 333 -- 1.111 (cmr10) + parameters.x_height = 2*units/5 -- 400 + parameters.quad = units -- 1000 if spaceunits < 2*units/5 then -- todo: warning end diff --git a/src/fontloader/misc/fontloader-font-oto.lua b/src/fontloader/misc/fontloader-font-oto.lua index 1199778..177382f 100644 --- a/src/fontloader/misc/fontloader-font-oto.lua +++ b/src/fontloader/misc/fontloader-font-oto.lua @@ -120,7 +120,7 @@ local function registerbasehash(tfmdata) basehash[hash] = base end properties.basehash = base - properties.fullname = properties.fullname .. "-" .. base + properties.fullname = (properties.fullname or properties.name) .. "-" .. 
base -- report_prepare("fullname base hash '%a, featureset %a",tfmdata.properties.fullname,hash) applied = { } end @@ -225,6 +225,11 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis local trace_alternatives = trace_baseinit and trace_alternatives local trace_ligatures = trace_baseinit and trace_ligatures + if not changed then + changed = { } + tfmdata.changed = changed + end + for i=1,#lookuplist do local sequence = lookuplist[i] local steps = sequence.steps @@ -392,7 +397,8 @@ local function featuresinitializer(tfmdata,value) local properties = tfmdata.properties local script = properties.script local language = properties.language - local rawfeatures = rawdata.resources.features + local rawresources = rawdata.resources + local rawfeatures = rawresources and rawresources.features local basesubstitutions = rawfeatures and rawfeatures.gsub local basepositionings = rawfeatures and rawfeatures.gpos -- diff --git a/src/fontloader/misc/fontloader-font-oup.lua b/src/fontloader/misc/fontloader-font-oup.lua index 571c69f..c494573 100644 --- a/src/fontloader/misc/fontloader-font-oup.lua +++ b/src/fontloader/misc/fontloader-font-oup.lua @@ -848,6 +848,8 @@ function readers.getcomponents(fontdata) -- handy for resolving ligatures when n end end +readers.unifymissing = unifymissing + function readers.rehash(fontdata,hashmethod) -- TODO: combine loops in one if not (fontdata and fontdata.glyphs) then return diff --git a/src/fontloader/misc/fontloader-font-tfm.lua b/src/fontloader/misc/fontloader-font-tfm.lua index ab6d795..d9b0523 100644 --- a/src/fontloader/misc/fontloader-font-tfm.lua +++ b/src/fontloader/misc/fontloader-font-tfm.lua @@ -6,8 +6,9 @@ if not modules then modules = { } end modules ['font-tfm'] = { license = "see context related readme files" } -local next = next -local match = string.match +local next, type = next, type +local match, format = string.match, string.format +local concat, sortedhash = table.concat, table.sortedhash local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end) local trace_features = false trackers.register("tfm.features", function(v) trace_features = v end) @@ -16,6 +17,7 @@ local report_defining = logs.reporter("fonts","defining") local report_tfm = logs.reporter("fonts","tfm loading") local findbinfile = resolvers.findbinfile +local setmetatableindex = table.setmetatableindex local fonts = fonts local handlers = fonts.handlers @@ -28,8 +30,10 @@ tfm.version = 1.000 tfm.maxnestingdepth = 5 tfm.maxnestingsize = 65536*1024 +local otf = fonts.handlers.otf + local tfmfeatures = constructors.features.tfm ------ registertfmfeature = tfmfeatures.register +local registertfmfeature = tfmfeatures.register constructors.resolvevirtualtoo = false -- wil be set in font-ctx.lua @@ -69,7 +73,68 @@ function tfm.setfeatures(tfmdata,features) end end -local depth = { } -- table.setmetatableindex("number") +local depth = { } -- table.setmetatableindex("number") +local enhancers = { } + +local steps = { + "normalize features", + "check extra features" +} + +-- otf.enhancers.register("check extra features",enhance) + +enhancers["check extra features"] = otf.enhancers.enhance + +local function applyenhancers(data,filename) + for i=1,#steps do + local step = steps[i] + local enhancer = enhancers[step] + if enhancer then + if trace_loading then + report_tfm("applying enhancer %a",step) + end + enhancer(data,filename) + else + report_tfm("invalid enhancer %a",step) + end + end +end + +-- Normally we just load 
the tfm data and go on. However there was some demand for +-- loading good old tfm /pfb files where afm files were lacking and even enc files +-- of dubious quality so we now support loading such (often messy) setups too. +-- +-- Because such fonts also use (ugly) tweaks achieve some purpose (like swapping +-- accents) we need to delay the unicoding actions till after the features have been +-- applied. +-- +-- It must be noted that in ConTeXt we don't expect this to be used at all. Here is +-- example: +-- +-- tfm metrics + pfb vector for index + pfb file for shapes +-- +-- \font\foo=file:csr10.tfm:reencode=auto;mode=node;liga=yes;kern=yes +-- +-- tfm metrics + pfb vector for index + enc file for tfm mapping + pfb file for shapes +-- +-- \font\foo=file:csr10.tfm:reencode=csr.enc;mode=node;liga=yes;kern=yes +-- +-- tfm metrics + enc file for mapping to tfm + bitmaps shapes +-- +-- \font\foo=file:csr10.tfm:reencode=csr.enc;bitmap=yes;mode=node;liga=yes;kern=yes +-- +-- One can add features: +-- +-- fonts.handlers.otf.addfeature { +-- name = "czechdqcheat", +-- type = "substitution", +-- data = { +-- quotedblright = "csquotedblright", +-- }, +-- } +-- +-- So "czechdqcheat=yes" is then a valid feature. And yes, it's a cheat. + local function read_from_tfm(specification) local filename = specification.filename @@ -80,26 +145,116 @@ local function read_from_tfm(specification) end local tfmdata = font.read_tfm(filename,size) -- not cached, fast enough if tfmdata then - local features = specification.features and specification.features.normal or { } + + local features = specification.features and specification.features.normal or { } + local features = constructors.checkedfeatures("tfm",features) + specification.features.normal = features + + -- If reencode returns a new table, we assume that we're doing something + -- special. An 'auto' reencode pickt up its vector from the pfb file. + + local newtfmdata = (depth[filename] == 1) and tfm.reencode(tfmdata,specification) + if newtfmdata then + tfmdata = newtfmdata + end + local resources = tfmdata.resources or { } local properties = tfmdata.properties or { } local parameters = tfmdata.parameters or { } local shared = tfmdata.shared or { } - properties.name = tfmdata.name - properties.fontname = tfmdata.fontname - properties.psname = tfmdata.psname - properties.filename = specification.filename - properties.format = fonts.formats.tfm -- better than nothing - parameters.size = size + -- + shared.features = features + shared.resources = resources + -- + properties.name = tfmdata.name -- todo: fallback + properties.fontname = tfmdata.fontname -- todo: fallback + properties.psname = tfmdata.psname -- todo: fallback + properties.fullname = tfmdata.fullname -- todo: fallback + properties.filename = specification.filename -- todo: fallback + properties.format = fonts.formats.tfm -- better than nothing -- tfmdata.properties = properties tfmdata.resources = resources tfmdata.parameters = parameters tfmdata.shared = shared -- - shared.rawdata = { } + shared.rawdata = { resources = resources } shared.features = features + -- + -- The next branch is only entered when we have a proper encoded file i.e. + -- unicodes and such. It really nakes no sense to do feature juggling when + -- we have no names and unicodes. 
+ -- + if newtfmdata then + -- + -- Some opentype processing assumes these to be present: + -- + if not resources.marks then + resources.marks = { } + end + if not resources.sequences then + resources.sequences = { } + end + if not resources.features then + resources.features = { + gsub = { }, + gpos = { }, + } + end + if not tfmdata.changed then + tfmdata.changed = { } + end + if not tfmdata.descriptions then + tfmdata.descriptions = tfmdata.characters + end + -- + -- It might be handy to have this: + -- + otf.readers.addunicodetable(tfmdata) + -- + -- We make a pseudo opentype font, e.g. kerns and ligatures etc: + -- + applyenhancers(tfmdata,filename) + -- + -- Now user stuff can kick in. + -- + constructors.applymanipulators("tfm",tfmdata,features,trace_features,report_tfm) + -- + -- As that can also mess with names and such, we are now ready for finalizing + -- the unicode information. This is a different order that for instance type one + -- (afm) files. First we try to deduce unicodes from already present information. + -- + otf.readers.unifymissing(tfmdata) + -- + -- Next we fill in the gaps, based on names from teh agl. Probably not much will + -- happen here. + -- + fonts.mappings.addtounicode(tfmdata,filename) + -- + -- The tounicode data is passed to the backend that constructs the vectors for us. + -- + tfmdata.tounicode = 1 + local tounicode = fonts.mappings.tounicode + for unicode, v in next, tfmdata.characters do + local u = v.unicode + if u then + v.tounicode = tounicode(u) + end + end + -- + -- However, when we use a bitmap font those vectors can't be constructed because + -- that information is not carried with those fonts (there is no name info, nor + -- proper index info, nor unicodes at that end). So, we provide it ourselves. + -- + if tfmdata.usedbitmap then + tfm.addtounicode(tfmdata) + end + end + -- shared.processes = next(features) and tfm.setfeatures(tfmdata,features) or nil + -- + parameters.factor = 1 -- already scaled + parameters.size = size parameters.slant = parameters.slant or parameters[1] or 0 parameters.space = parameters.space or parameters[2] or 0 parameters.space_stretch = parameters.space_stretch or parameters[3] or 0 @@ -110,7 +265,12 @@ local function read_from_tfm(specification) -- constructors.enhanceparameters(parameters) -- official copies for us -- - if constructors.resolvevirtualtoo then + if newtfmdata then + -- + -- We do nothing as we assume flat tfm files. It would become real messy + -- otherwise and I don't have something for testing on my system anyway. + -- + elseif constructors.resolvevirtualtoo then fonts.loggers.register(tfmdata,file.suffix(filename),specification) -- strange, why here local vfname = findbinfile(specification.name, 'ovf') if vfname and vfname ~= "" then @@ -145,21 +305,26 @@ local function read_from_tfm(specification) end end -- - local allfeatures = tfmdata.shared.features or specification.features.normal - constructors.applymanipulators("tfm",tfmdata,allfeatures.normal,trace_features,report_tfm) - if not features.encoding then - local encoding, filename = match(properties.filename,"^(.-)%-(.*)$") -- context: encoding-name.* - if filename and encoding and encodings.known and encodings.known[encoding] then - features.encoding = encoding - end - end - -- let's play safe: + -- This is for old times sake (and context specific) so we comment it. 
It has + -- to do with encoding prefixes (a context naming that was later adopted by + -- the lm/gyre project) + -- + -- if not features.encoding then + -- local encoding, filename = match(properties.filename,"^(.-)%-(.*)$") + -- if filename and encoding and encodings.known and encodings.known[encoding] then + -- features.encoding = encoding + -- end + -- end + -- + -- Some afterthoughts: + -- properties.haskerns = true properties.hasligatures = true resources.unicodes = { } resources.lookuptags = { } -- depth[filename] = depth[filename] - 1 + -- return tfmdata else depth[filename] = depth[filename] - 1 @@ -199,3 +364,366 @@ function readers.tfm(specification) end readers.ofm = readers.tfm + +-- The reencoding acts upon the 'reencode' feature which can have values 'auto' or +-- an enc file. You can also specify a 'pfbfile' feature (but it defaults to the +-- tfm filename) and a 'bitmap' feature. When no enc file is givven (auto) we will +-- get the vectors from the pfb file. + +do + + local outfiles = { } + + local tfmcache = table.setmetatableindex(function(t,tfmdata) + local id = font.define(tfmdata) + t[tfmdata] = id + return id + end) + + local encdone = table.setmetatableindex("table") + + function tfm.reencode(tfmdata,specification) + + local features = specification.features + + if not features then + return + end + + local features = features.normal + + if not features then + return + end + + local tfmfile = file.basename(tfmdata.name) + local encfile = features.reencode -- or features.enc + local pfbfile = features.pfbfile -- or features.pfb + local bitmap = features.bitmap -- or features.pk + + if not encfile then + return + end + + local pfbfile = outfiles[tfmfile] + + if pfbfile == nil then + if bitmap then + pfbfile = false + elseif type(pfbfile) ~= "string" then + pfbfile = tfmfile + end + if type(pfbfile) == "string" then + pfbfile = file.addsuffix(pfbfile,"pfb") + -- pdf.mapline(tfmfile .. "<" .. 
pfbfile) + report_tfm("using type1 shapes from %a for %a",pfbfile,tfmfile) + else + report_tfm("using bitmap shapes for %a",tfmfile) + pfbfile = false -- use bitmap + end + outfiles[tfmfile] = pfbfile + end + + local encoding = false + local vector = false + + if type(pfbfile) == "string" then + local pfb = fonts.constructors.handlers.pfb + if pfb and pfb.loadvector then + local v, e = pfb.loadvector(pfbfile) + if v then + vector = v + end + if e then + encoding = e + end + end + end + if type(encfile) == "string" and encfile ~= "auto" then + encoding = fonts.encodings.load(file.addsuffix(encfile,"enc")) + if encoding then + encoding = encoding.vector + end + end + if not encoding then + report_tfm("bad encoding for %a, quitting",tfmfile) + return + end + + local unicoding = fonts.encodings.agl and fonts.encodings.agl.unicodes + local virtualid = tfmcache[tfmdata] + local tfmdata = table.copy(tfmdata) -- good enough for small fonts + local characters = { } + local originals = tfmdata.characters + local indices = { } + local parentfont = { "font", 1 } + local private = fonts.constructors.privateoffset + local reported = encdone[tfmfile][encfile] + + -- create characters table + + local backmap = vector and table.swapped(vector) + local done = { } -- prevent duplicate + + for index, name in sortedhash(encoding) do -- predictable order + local unicode = unicoding[name] + local original = originals[index] + if original then + if unicode then + original.unicode = unicode + else + unicode = private + private = private + 1 + if not reported then + report_tfm("glyph %a in font %a with encoding %a gets unicode %U",name,tfmfile,encfile,unicode) + end + end + characters[unicode] = original + indices[index] = unicode + original.name = name -- so one can lookup weird names + if backmap then + original.index = backmap[name] + else -- probably bitmap + original.commands = { parentfont, { "char", index } } + original.oindex = index + end + done[name] = true + elseif not done[name] then + report_tfm("bad index %a in font %a with name %a",index,tfmfile,name) + end + end + + encdone[tfmfile][encfile] = true + + -- redo kerns and ligatures + + for k, v in next, characters do + local kerns = v.kerns + if kerns then + local t = { } + for k, v in next, kerns do + local i = indices[k] + if i then + t[i] = v + end + end + v.kerns = next(t) and t or nil + end + local ligatures = v.ligatures + if ligatures then + local t = { } + for k, v in next, ligatures do + local i = indices[k] + if i then + t[i] = v + v.char = indices[v.char] + end + end + v.ligatures = next(t) and t or nil + end + end + + -- wrap up + + tfmdata.fonts = { { id = virtualid } } + tfmdata.characters = characters + tfmdata.fullname = tfmdata.fullname or tfmdata.name + tfmdata.psname = file.nameonly(pfbfile or tfmdata.name) + tfmdata.filename = pfbfile + tfmdata.encodingbytes = 2 + tfmdata.format = "type1" + tfmdata.tounicode = 1 + tfmdata.embedding = "subset" + tfmdata.usedbitmap = bitmap and virtualid + + return tfmdata + end + +end + +-- This code adds a ToUnicode vector for bitmap fonts. We don't bother about +-- ranges because we have small fonts. it works ok with acrobat but fails with +-- the other viewers (they get confused by the bitmaps I guess). 
+ +do + + local template = [[ +/CIDInit /ProcSet findresource begin + 12 dict begin + begincmap + /CIDSystemInfo << /Registry (TeX) /Ordering (bitmap-%s) /Supplement 0 >> def + /CMapName /TeX-bitmap-%s def + /CMapType 2 def + 1 begincodespacerange + <00> + endcodespacerange + %s beginbfchar +%s + endbfchar + endcmap +CMapName currentdict /CMap defineresource pop end +end +end +]] + + local flushstreamobject = lpdf and lpdf.flushstreamobject + local setfontattributes = pdf.setfontattributes + + if not flushstreamobject then + flushstreamobject = function(data) + return pdf.obj { + immediate = true, + type = "stream", + string = data, + } + end + end + + if not setfontattributes then + setfontattributes = function(id,data) + print(format("your luatex is too old so no tounicode bitmap font%i",id)) + end + end + + function tfm.addtounicode(tfmdata) + local id = tfmdata.usedbitmap + local map = { } + local char = { } -- no need for range, hardly used + for k, v in next, tfmdata.characters do + local index = v.oindex + local tounicode = v.tounicode + if index and tounicode then + map[index] = tounicode + end + end + for k, v in sortedhash(map) do + char[#char+1] = format("<%02X> <%s>",k,v) + end + char = concat(char,"\n") + local stream = format(template,id,id,#char,char) + local reference = flushstreamobject(stream,nil,true) + setfontattributes(id,format("/ToUnicode %i 0 R",reference)) + end + +end + +-- Now we implement the regular features handlers. We need to convert the +-- tfm specific structures to opentype structures. In basemode they are +-- converted back so that is a bti of a waste but it's fast enough. + +do + + local everywhere = { ["*"] = { ["*"] = true } } -- or: { ["*"] = { "*" } } + local noflags = { false, false, false, false } + + enhancers["normalize features"] = function(data) + local ligatures = setmetatableindex("table") + local kerns = setmetatableindex("table") + local characters = data.characters + for u, c in next, characters do + local l = c.ligatures + local k = c.kerns + if l then + ligatures[u] = l + for u, v in next, l do + l[u] = { ligature = v.char } + end + c.ligatures = nil + end + if k then + kerns[u] = k + for u, v in next, k do + k[u] = v -- { v, 0 } + end + c.kerns = nil + end + end + + for u, l in next, ligatures do + for k, v in next, l do + local vl = v.ligature + local dl = ligatures[vl] + if dl then + for kk, vv in next, dl do + v[kk] = vv -- table.copy(vv) + end + end + end + end + + local features = { + gpos = { }, + gsub = { }, + } + local sequences = { + -- only filled ones + } + if next(ligatures) then + features.gsub.liga = everywhere + data.properties.hasligatures = true + sequences[#sequences+1] = { + features = { + liga = everywhere, + }, + flags = noflags, + name = "s_s_0", + nofsteps = 1, + order = { "liga" }, + type = "gsub_ligature", + steps = { + { + coverage = ligatures, + }, + }, + } + end + if next(kerns) then + features.gpos.kern = everywhere + data.properties.haskerns = true + sequences[#sequences+1] = { + features = { + kern = everywhere, + }, + flags = noflags, + name = "p_s_0", + nofsteps = 1, + order = { "kern" }, + type = "gpos_pair", + steps = { + { + format = "kern", + coverage = kerns, + }, + }, + } + end + data.resources.features = features + data.resources.sequences = sequences + data.shared.resources = data.shared.resources or resources + end + +end + +-- As with type one (afm) loading, we just use the opentype ones: + +registertfmfeature { + name = "mode", + description = "mode", + initializers = { + base = 
otf.modeinitializer, + node = otf.modeinitializer, + } +} + +registertfmfeature { + name = "features", + description = "features", + default = true, + initializers = { + base = otf.basemodeinitializer, + node = otf.nodemodeinitializer, + }, + processors = { + node = otf.featuresprocessor, + } +} diff --git a/src/fontloader/misc/fontloader-fonts-demo-vf-1.lua b/src/fontloader/misc/fontloader-fonts-demo-vf-1.lua index 13acd16..793526f 100644 --- a/src/fontloader/misc/fontloader-fonts-demo-vf-1.lua +++ b/src/fontloader/misc/fontloader-fonts-demo-vf-1.lua @@ -1,3 +1,11 @@ +if not modules then modules = { } end modules ['luatex-fonts-demo-vf-1'] = { + version = 1.001, + comment = "companion to luatex-*.tex", + author = "Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright = "PRAGMA ADE / ConTeXt Development Team", + license = "see context related readme files" +} + local identifiers = fonts.hashes.identifiers return function(specification) diff --git a/src/fontloader/misc/fontloader-fonts-enc.lua b/src/fontloader/misc/fontloader-fonts-enc.lua index 2e1c6a4..c076d59 100644 --- a/src/fontloader/misc/fontloader-fonts-enc.lua +++ b/src/fontloader/misc/fontloader-fonts-enc.lua @@ -11,19 +11,66 @@ if context then os.exit() end -local fonts = fonts -fonts.encodings = { } -fonts.encodings.agl = { } -fonts.encodings.known = { } +local fonts = fonts +local encodings = { } +fonts.encodings = encodings +encodings.agl = { } +encodings.known = { } -setmetatable(fonts.encodings.agl, { __index = function(t,k) +setmetatable(encodings.agl, { __index = function(t,k) if k == "unicodes" then texio.write(" ") local unicodes = dofile(resolvers.findfile("font-age.lua")) - fonts.encodings.agl = { unicodes = unicodes } + encodings.agl = { unicodes = unicodes } return unicodes else return nil end end }) +-- adapted for generic + +encodings.cache = containers.define("fonts", "enc", encodings.version, true) + +function encodings.load(filename) + local name = file.removesuffix(filename) + local data = containers.read(encodings.cache,name) + if data then + return data + end + local vector, tag, hash, unicodes = { }, "", { }, { } + local foundname = resolvers.findfile(filename,'enc') + if foundname and foundname ~= "" then + local ok, encoding, size = resolvers.loadbinfile(foundname) + if ok and encoding then + encoding = string.gsub(encoding,"%%(.-)\n","") + local unicoding = encodings.agl.unicodes + local tag, vec = string.match(encoding,"/(%w+)%s*%[(.*)%]%s*def") + local i = 0 + for ch in string.gmatch(vec,"/([%a%d%.]+)") do + if ch ~= ".notdef" then + vector[i] = ch + if not hash[ch] then + hash[ch] = i + else + -- duplicate, play safe for tex ligs and take first + end + local u = unicoding[ch] + if u then + unicodes[u] = i + end + end + i = i + 1 + end + end + end + local data = { + name = name, + tag = tag, + vector = vector, + hash = hash, + unicodes = unicodes + } + return containers.write(encodings.cache, name, data) +end + diff --git a/src/fontloader/misc/fontloader-fonts.lua b/src/fontloader/misc/fontloader-fonts.lua index 83d52d9..41b95d9 100644 --- a/src/fontloader/misc/fontloader-fonts.lua +++ b/src/fontloader/misc/fontloader-fonts.lua @@ -230,7 +230,6 @@ if non_generic_context.luatex_fonts.skip_loading ~= true then loadmodule('luatex-fonts-syn.lua') - loadmodule('font-tfm.lua') loadmodule('font-oti.lua') -- These are the old loader and processing modules. 
These use the built-in font loader and @@ -266,6 +265,10 @@ if non_generic_context.luatex_fonts.skip_loading ~= true then loadmodule('font-one.lua') -- was font-afm.lua loadmodule('font-afk.lua') + -- traditional code + + loadmodule('font-tfm.lua') + -- common code loadmodule('font-lua.lua') diff --git a/src/fontloader/misc/fontloader-l-table.lua b/src/fontloader/misc/fontloader-l-table.lua index 552097e..d1e0592 100644 --- a/src/fontloader/misc/fontloader-l-table.lua +++ b/src/fontloader/misc/fontloader-l-table.lua @@ -673,6 +673,8 @@ local function do_serialize(root,name,depth,level,indexed) else handle(format("%s [%s]=%s,",depth,k and "true" or "false",v)) -- %.99g end + elseif tk ~= "string" then + -- ignore elseif noquotes and not reserved[k] and lpegmatch(propername,k) then if hexify then handle(format("%s %s=0x%X,",depth,k,v)) @@ -695,6 +697,8 @@ local function do_serialize(root,name,depth,level,indexed) end elseif tk == "boolean" then handle(format("%s [%s]=%q,",depth,k and "true" or "false",v)) + elseif tk ~= "string" then + -- ignore elseif noquotes and not reserved[k] and lpegmatch(propername,k) then handle(format("%s %s=%q,",depth,k,v)) else @@ -710,6 +714,8 @@ local function do_serialize(root,name,depth,level,indexed) end elseif tk == "boolean" then handle(format("%s [%s]={},",depth,k and "true" or "false")) + elseif tk ~= "string" then + -- ignore elseif noquotes and not reserved[k] and lpegmatch(propername,k) then handle(format("%s %s={},",depth,k)) else @@ -726,6 +732,8 @@ local function do_serialize(root,name,depth,level,indexed) end elseif tk == "boolean" then handle(format("%s [%s]={ %s },",depth,k and "true" or "false",concat(st,", "))) + elseif tk ~= "string" then + -- ignore elseif noquotes and not reserved[k] and lpegmatch(propername,k) then handle(format("%s %s={ %s },",depth,k,concat(st,", "))) else @@ -746,6 +754,8 @@ local function do_serialize(root,name,depth,level,indexed) end elseif tk == "boolean" then handle(format("%s [%s]=%s,",depth,tostring(k),v and "true" or "false")) + elseif tk ~= "string" then + -- ignore elseif noquotes and not reserved[k] and lpegmatch(propername,k) then handle(format("%s %s=%s,",depth,k,v and "true" or "false")) else @@ -763,6 +773,8 @@ local function do_serialize(root,name,depth,level,indexed) end elseif tk == "boolean" then handle(format("%s [%s]=load(%q),",depth,k and "true" or "false",f)) + elseif tk ~= "string" then + -- ignore elseif noquotes and not reserved[k] and lpegmatch(propername,k) then handle(format("%s %s=load(%q),",depth,k,f)) else @@ -778,6 +790,8 @@ local function do_serialize(root,name,depth,level,indexed) end elseif tk == "boolean" then handle(format("%s [%s]=%q,",depth,k and "true" or "false",tostring(v))) + elseif tk ~= "string" then + -- ignore elseif noquotes and not reserved[k] and lpegmatch(propername,k) then handle(format("%s %s=%q,",depth,k,tostring(v))) else @@ -1165,7 +1179,7 @@ function table.has_one_entry(t) return t and next(t,next(t)) == nil end --- new +-- new (rather basic, not indexed and nested) function table.loweredkeys(t) -- maybe utf local l = { } diff --git a/src/fontloader/misc/fontloader-plain.tex b/src/fontloader/misc/fontloader-plain.tex index 99347ed..0a806c7 100644 --- a/src/fontloader/misc/fontloader-plain.tex +++ b/src/fontloader/misc/fontloader-plain.tex @@ -17,7 +17,9 @@ \input luatex-pdf \relax \fi -\pdfoutput 1 +\outputmode 1 + +% \outputmode 0 \magnification\magstep5 % We set the page dimensions because otherwise the backend does weird things % when we have for instance this 
on a line of its own: @@ -31,8 +33,8 @@ % has to deal with the lack of a page concept on tex by some guessing. Normally % a macro package will set the dimensions to something reasonable anyway. -\pagewidth 8.5in -\pageheight 11.0in +\pagewidth 8.5truein +\pageheight 11.0truein % We load some code at runtime: diff --git a/src/fontloader/misc/fontloader-test.tex b/src/fontloader/misc/fontloader-test.tex index 0bb752b..2aa4f22 100644 --- a/src/fontloader/misc/fontloader-test.tex +++ b/src/fontloader/misc/fontloader-test.tex @@ -158,4 +158,16 @@ $\sin{x}$ % \textdir TRT\amiri بِسْمِ اللَّـهِ الرَّ‌حْمَـٰنِ الرَّ‌حِيمِ % \egroup +% assumes csr10.tfm csr10.pfb csr.enc to be present + +% \font\foo=file:luatex-plain-tfm.lua:tfm=csr10;enc=csr;pfb=csr10 at 12pt +% +% \foo áäčďěíĺľňóôŕřšťúýž ff ffi + +% \font\foo=file:csr10.tfm:reencode=csr +% \font\foo=file:csr10.tfm:reencode=csr;bitmap=yes % use map file +% \font\foo=file:csr10.tfm:reencode=auto +% +% \foo áäčďěíĺľňóôŕřšťúýž ff ffi \input tufte\par + \end diff --git a/src/fontloader/misc/fontloader-util-fil.lua b/src/fontloader/misc/fontloader-util-fil.lua index 28c92c7..47d9d03 100644 --- a/src/fontloader/misc/fontloader-util-fil.lua +++ b/src/fontloader/misc/fontloader-util-fil.lua @@ -90,7 +90,8 @@ end function files.readinteger1(f) -- one byte local n = byte(f:read(1)) if n >= 0x80 then - return n - 0xFF - 1 + -- return n - 0xFF - 1 + return n - 0x100 else return n end @@ -109,7 +110,8 @@ function files.readinteger2(f) local a, b = byte(f:read(2),1,2) local n = 0x100 * a + b if n >= 0x8000 then - return n - 0xFFFF - 1 + -- return n - 0xFFFF - 1 + return n - 0x10000 else return n end @@ -120,6 +122,17 @@ function files.readcardinal3(f) return 0x10000 * a + 0x100 * b + c end +function files.readinteger3(f) + local a, b, c = byte(f:read(3),1,3) + local n = 0x10000 * a + 0x100 * b + c + if n >= 0x80000 then + -- return n - 0xFFFFFF - 1 + return n - 0x1000000 + else + return n + end +end + function files.readcardinal4(f) local a, b, c, d = byte(f:read(4),1,4) return 0x1000000 * a + 0x10000 * b + 0x100 * c + d @@ -129,7 +142,8 @@ function files.readinteger4(f) local a, b, c, d = byte(f:read(4),1,4) local n = 0x1000000 * a + 0x10000 * b + 0x100 * c + d if n >= 0x8000000 then - return n - 0xFFFFFFFF - 1 + -- return n - 0xFFFFFFFF - 1 + return n - 0x100000000 else return n end @@ -139,7 +153,8 @@ function files.readfixed4(f) local a, b, c, d = byte(f:read(4),1,4) local n = 0x100 * a + b if n >= 0x8000 then - return n - 0xFFFF - 1 + (0x100 * c + d)/0xFFFF + -- return n - 0xFFFF - 1 + (0x100 * c + d)/0xFFFF + return n - 0x10000 + (0x100 * c + d)/0xFFFF else return n + (0x100 * c + d)/0xFFFF end diff --git a/src/fontloader/misc/fontloader-util-str.lua b/src/fontloader/misc/fontloader-util-str.lua index 28b75db..a54a4aa 100644 --- a/src/fontloader/misc/fontloader-util-str.lua +++ b/src/fontloader/misc/fontloader-util-str.lua @@ -824,6 +824,8 @@ end -- extensions : %!tag! +-- can be made faster but not called that often + local builder = Cs { "start", start = ( ( @@ -852,10 +854,10 @@ local builder = Cs { "start", + V("a") -- new + V("A") -- new + V("j") + V("J") -- stripped e E - + V("m") + V("M") -- new + + V("m") + V("M") -- new (formatted number) + V("z") -- new -- - -- + V("?") -- ignores probably messed up % + -- + V("?") -- ignored, probably messed up % ) + V("*") ) -- cgit v1.2.3