author      Marius <mariausol@gmail.com>    2011-04-19 17:40:15 +0300
committer   Marius <mariausol@gmail.com>    2011-04-19 17:40:15 +0300
commit      865b5da32dfab508db87744c7542a59c192dd459 (patch)
tree        59d82bdb555d95cd5734743d9259662fd5c16c62
parent      930f95164ea82514ff24bf71c6baddd40a569766 (diff)
download    context-865b5da32dfab508db87744c7542a59c192dd459.tar.gz
beta 2011.04.19 16:38
35 files changed, 666 insertions, 368 deletions
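Among the smaller changes below, lxml-xml.lua (and the copies embedded in the mtxrun stubs) gains an `xml.textonly` helper that flattens an element to its text content only, alongside a fix of the `collection`/`collected` typo in the `texts` finalizer. The following is a standalone sketch of what the new helper does, written against plain Lua tables in the `dt` (data) shape the xml loader builds; the sample tree is invented, and note that the committed code aliases `string.concat`, which stock Lua does not define, where `table.concat` appears to be intended (in ConTeXt itself one would simply call `xml.textonly(root)` on a tree obtained from `xml.convert`):

```lua
-- standalone sketch of the new xml.textonly behaviour (not the ConTeXt code itself)
local concat = table.concat   -- the commit aliases string.concat; table.concat is presumably meant

local function textonly(e,t)
    if e then
        local edt = e.dt                    -- child list: a mix of strings and element tables
        if edt then
            for i=1,#edt do
                local c = edt[i]
                if type(c) == "table" then
                    textonly(c,t)           -- recurse into child elements
                else
                    t[#t+1] = c             -- keep plain text nodes
                end
            end
        end
    end
    return t
end

-- invented element tree, roughly in the dt shape produced by the xml loader
local e = { dt = { "Hello ", { tg = "b", dt = { "brave" } }, " world" } }
print(concat(textonly(e,{})))               -- Hello brave world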
diff --git a/scripts/context/lua/mtxrun.lua b/scripts/context/lua/mtxrun.lua index 9ce73a168..72f105d07 100644 --- a/scripts/context/lua/mtxrun.lua +++ b/scripts/context/lua/mtxrun.lua @@ -1304,9 +1304,11 @@ local function serialize(_handle,root,name,specification) -- handle wins end if root then -- The dummy access will initialize a table that has a delayed initialization - -- using a metatable. - local dummy = root._w_h_a_t_e_v_e_r_ - root._w_h_a_t_e_v_e_r_ = nil + -- using a metatable. (maybe explicitly test for metatable) + if getmetatable(root) then -- todo: make this an option, maybe even per subtable + local dummy = root._w_h_a_t_e_v_e_r_ + root._w_h_a_t_e_v_e_r_ = nil + end -- Let's forget about empty tables. if next(root) then do_serialize(root,name,"",0) @@ -9285,6 +9287,8 @@ if not modules then modules = { } end modules ['lxml-xml'] = { license = "see context related readme files" } +local concat = string.concat + local xml = xml local finalizers = xml.finalizers.xml @@ -9399,7 +9403,7 @@ local function texts(collected) if collected then local t, n = { }, 0 for c=1,#collected do - local e = collection[c] + local e = collected[c] if e and e.dt then n = n + 1 t[n] = e.dt @@ -9577,6 +9581,27 @@ end xml.table = totable finalizers.table = totable +local function textonly(e,t) + if e then + local edt = e.dt + if edt then + for i=1,#edt do + local e = edt[i] + if type(e) == "table" then + textonly(e,t) + else + t[#t+1] = e + end + end + end + end + return t +end + +function xml.textonly(e) -- no pattern + return concat(textonly(e,{})) +end + end -- of closure diff --git a/scripts/context/stubs/mswin/mtxrun.lua b/scripts/context/stubs/mswin/mtxrun.lua index 9ce73a168..72f105d07 100644 --- a/scripts/context/stubs/mswin/mtxrun.lua +++ b/scripts/context/stubs/mswin/mtxrun.lua @@ -1304,9 +1304,11 @@ local function serialize(_handle,root,name,specification) -- handle wins end if root then -- The dummy access will initialize a table that has a delayed initialization - -- using a metatable. - local dummy = root._w_h_a_t_e_v_e_r_ - root._w_h_a_t_e_v_e_r_ = nil + -- using a metatable. (maybe explicitly test for metatable) + if getmetatable(root) then -- todo: make this an option, maybe even per subtable + local dummy = root._w_h_a_t_e_v_e_r_ + root._w_h_a_t_e_v_e_r_ = nil + end -- Let's forget about empty tables. 
if next(root) then do_serialize(root,name,"",0) @@ -9285,6 +9287,8 @@ if not modules then modules = { } end modules ['lxml-xml'] = { license = "see context related readme files" } +local concat = string.concat + local xml = xml local finalizers = xml.finalizers.xml @@ -9399,7 +9403,7 @@ local function texts(collected) if collected then local t, n = { }, 0 for c=1,#collected do - local e = collection[c] + local e = collected[c] if e and e.dt then n = n + 1 t[n] = e.dt @@ -9577,6 +9581,27 @@ end xml.table = totable finalizers.table = totable +local function textonly(e,t) + if e then + local edt = e.dt + if edt then + for i=1,#edt do + local e = edt[i] + if type(e) == "table" then + textonly(e,t) + else + t[#t+1] = e + end + end + end + end + return t +end + +function xml.textonly(e) -- no pattern + return concat(textonly(e,{})) +end + end -- of closure diff --git a/scripts/context/stubs/unix/mtxrun b/scripts/context/stubs/unix/mtxrun index 9ce73a168..72f105d07 100644 --- a/scripts/context/stubs/unix/mtxrun +++ b/scripts/context/stubs/unix/mtxrun @@ -1304,9 +1304,11 @@ local function serialize(_handle,root,name,specification) -- handle wins end if root then -- The dummy access will initialize a table that has a delayed initialization - -- using a metatable. - local dummy = root._w_h_a_t_e_v_e_r_ - root._w_h_a_t_e_v_e_r_ = nil + -- using a metatable. (maybe explicitly test for metatable) + if getmetatable(root) then -- todo: make this an option, maybe even per subtable + local dummy = root._w_h_a_t_e_v_e_r_ + root._w_h_a_t_e_v_e_r_ = nil + end -- Let's forget about empty tables. if next(root) then do_serialize(root,name,"",0) @@ -9285,6 +9287,8 @@ if not modules then modules = { } end modules ['lxml-xml'] = { license = "see context related readme files" } +local concat = string.concat + local xml = xml local finalizers = xml.finalizers.xml @@ -9399,7 +9403,7 @@ local function texts(collected) if collected then local t, n = { }, 0 for c=1,#collected do - local e = collection[c] + local e = collected[c] if e and e.dt then n = n + 1 t[n] = e.dt @@ -9577,6 +9581,27 @@ end xml.table = totable finalizers.table = totable +local function textonly(e,t) + if e then + local edt = e.dt + if edt then + for i=1,#edt do + local e = edt[i] + if type(e) == "table" then + textonly(e,t) + else + t[#t+1] = e + end + end + end + end + return t +end + +function xml.textonly(e) -- no pattern + return concat(textonly(e,{})) +end + end -- of closure diff --git a/tex/context/base/attr-lay.lua b/tex/context/base/attr-lay.lua index ce76bb899..e12938fa4 100644 --- a/tex/context/base/attr-lay.lua +++ b/tex/context/base/attr-lay.lua @@ -178,7 +178,7 @@ function viewerlayers.define(settings) local tag = settings.tag if not tag or tag == "" then -- error - else + elseif not scopes[tag] then -- prevent duplicates local title = settings.title if not title or title == "" then settings.title = tag diff --git a/tex/context/base/back-exp.lua b/tex/context/base/back-exp.lua index 0d5b7cede..5ceb360dc 100644 --- a/tex/context/base/back-exp.lua +++ b/tex/context/base/back-exp.lua @@ -1018,12 +1018,12 @@ local displaymapping = { local e_template = [[ %s { - display: %s + display: %s ; }]] local d_template = [[ %s[detail=%s] { - display: %s + display: %s ; }]] -- encoding='utf-8' diff --git a/tex/context/base/back-exp.mkiv b/tex/context/base/back-exp.mkiv index 09eaf0109..4682e8047 100644 --- a/tex/context/base/back-exp.mkiv +++ b/tex/context/base/back-exp.mkiv @@ -105,7 +105,7 @@ % The action: \setupbackend[export=yes] % or filename 
-\def\c!export {export} % maybe: options={css,xhtml} +\def\c!export {export} % maybe: option={css,xhtml} \def\c!css {css} \def\c!xhtml {xhtml} diff --git a/tex/context/base/bibl-tra.lua b/tex/context/base/bibl-tra.lua index e0eaf64b3..4ae81188d 100644 --- a/tex/context/base/bibl-tra.lua +++ b/tex/context/base/bibl-tra.lua @@ -154,7 +154,7 @@ function hacks.resolve(prefix,block,reference) -- maybe already feed it split found = subset[tag] end if found then - local current = found.entries and found.entries.text + local current = tonumber(found.entries and found.entries.text) -- tonumber needed if current and not done[current] then nofresult = nofresult + 1 result[nofresult] = { blk, rest, current } @@ -188,7 +188,7 @@ function hacks.resolve(prefix,block,reference) -- maybe already feed it split first, last, firsti, lasti, firstr, lastr = current, current, i, i, r, r end end - if first then + if first and last then if last > first + 1 then nofcollected = nofcollected + 1 collected[nofcollected] = { firstr[1], firstr[2], lastr[1], lastr[2] } diff --git a/tex/context/base/buff-ini.lua b/tex/context/base/buff-ini.lua index a0bf29b80..70328f0f1 100644 --- a/tex/context/base/buff-ini.lua +++ b/tex/context/base/buff-ini.lua @@ -70,7 +70,7 @@ local function collectcontent(names,separator) -- no print t[n] = c end end - return concat(t,separator or "\r") + return concat(t,separator or "\n") -- was \r end end diff --git a/tex/context/base/cont-new.mkii b/tex/context/base/cont-new.mkii index 02109ffe8..5ba08f7fc 100644 --- a/tex/context/base/cont-new.mkii +++ b/tex/context/base/cont-new.mkii @@ -11,7 +11,7 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. -\newcontextversion{2011.04.13 09:23} +\newcontextversion{2011.04.19 16:38} %D This file is loaded at runtime, thereby providing an %D excellent place for hacks, patches, extensions and new diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv index 0953026d0..444c14b98 100644 --- a/tex/context/base/cont-new.mkiv +++ b/tex/context/base/cont-new.mkiv @@ -11,7 +11,7 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. -\newcontextversion{2011.04.13 09:23} +\newcontextversion{2011.04.19 16:38} %D This file is loaded at runtime, thereby providing an %D excellent place for hacks, patches, extensions and new diff --git a/tex/context/base/context.mkii b/tex/context/base/context.mkii index 5101aaa80..bcb773bcf 100644 --- a/tex/context/base/context.mkii +++ b/tex/context/base/context.mkii @@ -20,7 +20,7 @@ %D your styles an modules. \edef\contextformat {\jobname} -\edef\contextversion{2011.04.13 09:23} +\edef\contextversion{2011.04.19 16:38} %D For those who want to use this: diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv index 070b2c3e8..a4e32ae3a 100644 --- a/tex/context/base/context.mkiv +++ b/tex/context/base/context.mkiv @@ -20,7 +20,7 @@ %D your styles an modules. 
\edef\contextformat {\jobname} -\edef\contextversion{2011.04.13 09:23} +\edef\contextversion{2011.04.19 16:38} %D For those who want to use this: diff --git a/tex/context/base/context.todo b/tex/context/base/context.todo index 111243f96..27d914a7b 100644 --- a/tex/context/base/context.todo +++ b/tex/context/base/context.todo @@ -2,3 +2,5 @@ % consistently use label/name/tag % consistently use type/kind + +% option vs options diff --git a/tex/context/base/core-con.lua b/tex/context/base/core-con.lua index 3b2d396b0..adc2595ef 100644 --- a/tex/context/base/core-con.lua +++ b/tex/context/base/core-con.lua @@ -754,6 +754,7 @@ local v_year = variables.year local v_month = variables.month local v_weekday = variables.weekday local v_referral = variables.referral +local v_space = variables.space local convert = converters.convert diff --git a/tex/context/base/font-mis.lua b/tex/context/base/font-mis.lua index 6d67e70f6..553640313 100644 --- a/tex/context/base/font-mis.lua +++ b/tex/context/base/font-mis.lua @@ -22,7 +22,7 @@ local handlers = fonts.handlers handlers.otf = handlers.otf or { } local otf = handlers.otf -otf.version = otf.version or 2.727 +otf.version = otf.version or 2.728 otf.cache = otf.cache or containers.define("fonts", "otf", otf.version, true) function otf.loadcached(filename,format,sub) diff --git a/tex/context/base/font-otf.lua b/tex/context/base/font-otf.lua index 8faa88b64..2838c1570 100644 --- a/tex/context/base/font-otf.lua +++ b/tex/context/base/font-otf.lua @@ -47,7 +47,7 @@ local otf = fonts.handlers.otf otf.glists = { "gsub", "gpos" } -otf.version = 2.727 -- beware: also sync font-mis.lua +otf.version = 2.728 -- beware: also sync font-mis.lua otf.cache = containers.define("fonts", "otf", otf.version, true) local fontdata = fonts.hashes.identifiers @@ -201,7 +201,6 @@ local valid_fields = table.tohash { local ordered_enhancers = { "prepare tables", "prepare glyphs", - "prepare unicodes", "prepare lookups", "analyze glyphs", @@ -226,6 +225,9 @@ local ordered_enhancers = { "check metadata", "check extra features", -- after metadata + "add duplicates", + "check encoding", + "cleanup tables", } @@ -413,7 +415,10 @@ function otf.load(filename,format,sub,featurefile) creator = "context mkiv", unicodes = unicodes, indices = { - -- unicodes to names + -- index to unicodes + }, + duplicates = { + -- alternative unicodes }, lookuptypes = { }, @@ -594,6 +599,7 @@ actions["prepare glyphs"] = function(data,filename,raw) local descriptions = data.descriptions local unicodes = resources.unicodes -- name to unicode local indices = resources.indices -- index to unicode + local duplicates = resources.duplicates if rawsubfonts then @@ -636,7 +642,7 @@ actions["prepare glyphs"] = function(data,filename,raw) unicodes[name] = unicode nofunicodes = nofunicodes + 1 end - indices[unicode] = index -- each index in unique (at least now) + indices[index] = unicode -- each index is unique (at least now) local description = { -- width = glyph.width, @@ -678,13 +684,12 @@ actions["prepare glyphs"] = function(data,filename,raw) end private = private + 1 else - unicodes[name] = unicode + unicodes[name] = unicode end - indices[unicode] = index + indices[index] = unicode if not name then name = format("u%06X",unicode) end - descriptions[unicode] = { -- width = glyph.width, boundingbox = glyph.boundingbox, @@ -692,6 +697,14 @@ actions["prepare glyphs"] = function(data,filename,raw) index = index, glyph = glyph, } + local altuni = glyph.altuni + if altuni then + local d = { } + for i=1,#altuni do + d[#d+1] 
= altuni[i].unicode + end + duplicates[unicode] = d + end else report_otf("potential problem: glyph 0x%04X is used but empty",index) end @@ -706,12 +719,13 @@ end -- the next one is still messy but will get better when we have -- flattened map/enc tables in the font loader -actions["prepare unicodes"] = function(data,filename,raw) +actions["check encoding"] = function(data,filename,raw) local descriptions = data.descriptions local resources = data.resources local properties = data.properties local unicodes = resources.unicodes -- name to unicode local indices = resources.indices -- index to unicodes + local duplicates = resources.duplicates -- begin of messy (not needed whwn cidmap) @@ -725,36 +739,18 @@ actions["prepare unicodes"] = function(data,filename,raw) if find(encname,"unicode") then -- unicodebmp, unicodefull, ... if trace_loading then - report_otf("using embedded unicode map '%s'",encname) + report_otf("checking embedded unicode map '%s'",encname) end - local multiples, nofmultiples = { }, 0 - for unicode, index in next, unicodetoindex do + for unicode, index in next, unicodetoindex do -- altuni already covers this if unicode <= criterium and not descriptions[unicode] then - local parent = indices[index] - local description = descriptions[parent] - if description then - local c = fastcopy(description) - c.comment = format("copy of U+%05X", parent) - descriptions[unicode] = c - local name = c.name - if not unicodes[name] then - unicodes[name] = unicode - end - nofmultiples = nofmultiples + 1 - multiples[nofmultiples] = name -- we can save duplicates if needed + local parent = indices[index] -- why nil? + if parent then + report_otf("weird, unicode U+%05X points to U+%05X with index 0x%04X",unicode,parent,index) else - -- make it a notdef - report_otf("weird unicode U+%05X at index 0x%04X",unicode,index) + report_otf("weird, unicode U+%05X points to nowhere with index 0x%04X",unicode,index) end end end - if trace_loading then - if nofmultiples > 0 then - report_otf("%s glyphs are reused: %s",nofmultiples,concat(multiples," ")) - else - report_otf("no glyphs are reused") - end - end elseif properties.cidinfo then report_otf("warning: no unicode map, used cidmap '%s'",properties.cidinfo.usedname or "?") else @@ -766,6 +762,48 @@ actions["prepare unicodes"] = function(data,filename,raw) end end +-- for the moment we assume that a fotn with lookups will not use +-- altuni so we stick to kerns only + +actions["add duplicates"] = function(data,filename,raw) + local descriptions = data.descriptions + local resources = data.resources + local properties = data.properties + local unicodes = resources.unicodes -- name to unicode + local indices = resources.indices -- index to unicodes + local duplicates = resources.duplicates + + for unicode, d in next, duplicates do + for i=1,#d do + local u = d[i] + if not descriptions[u] then + local description = descriptions[unicode] + local duplicate = table.copy(description) -- else packing problem + duplicate.comment = format("copy of U+%05X", unicode) + descriptions[u] = duplicate + local n = 0 + for _, description in next, descriptions do + if kerns then + local kerns = description.kerns + for _, k in next, kerns do + local ku = k[unicode] + if ku then + k[u] = ku + n = n + 1 + end + end + end + -- todo: lookups etc + end + if trace_loading then + report_otf("duplicating U+%05X to U+%05X with index 0x%04X (%s kerns)",unicode,u,description.index,n) + end + end + end + end + +end + -- class : nil base mark ligature component (maybe we don't need it 
in description) -- boundingbox: split into ht/dp takes more memory (larger tables and less sharing) diff --git a/tex/context/base/grph-inc.lua b/tex/context/base/grph-inc.lua index 48cc2e4b0..04d335862 100644 --- a/tex/context/base/grph-inc.lua +++ b/tex/context/base/grph-inc.lua @@ -836,7 +836,11 @@ function checkers.generic(data) .. "->" .. mask local figure = figures.loaded[hash] if figure == nil then - figure = img.new { filename = name, page = page, pagebox = dr.size } + figure = img.new { + filename = name, + page = page, + pagebox = dr.size, + } codeinjections.setfigurecolorspace(data,figure) codeinjections.setfiguremask(data,figure) figure = (figure and img.scan(figure)) or false diff --git a/tex/context/base/l-table.lua b/tex/context/base/l-table.lua index b1a5cc4cc..7b91e06ba 100644 --- a/tex/context/base/l-table.lua +++ b/tex/context/base/l-table.lua @@ -642,9 +642,11 @@ local function serialize(_handle,root,name,specification) -- handle wins end if root then -- The dummy access will initialize a table that has a delayed initialization - -- using a metatable. - local dummy = root._w_h_a_t_e_v_e_r_ - root._w_h_a_t_e_v_e_r_ = nil + -- using a metatable. (maybe explicitly test for metatable) + if getmetatable(root) then -- todo: make this an option, maybe even per subtable + local dummy = root._w_h_a_t_e_v_e_r_ + root._w_h_a_t_e_v_e_r_ = nil + end -- Let's forget about empty tables. if next(root) then do_serialize(root,name,"",0) diff --git a/tex/context/base/lpdf-epa.lua b/tex/context/base/lpdf-epa.lua index 6afdb23d3..17c55eb0c 100644 --- a/tex/context/base/lpdf-epa.lua +++ b/tex/context/base/lpdf-epa.lua @@ -25,6 +25,10 @@ local layerspec = { -- predefining saves time "epdflinks" } +local function makenamespace(filename) + return format("lpdf-epa-%s-",file.removesuffix(file.basename(filename))) +end + local function add_link(x,y,w,h,destination,what) if trace_links then report_link("dx: % 4i, dy: % 4i, wd: % 4i, ht: % 4i, destination: %s, type: %s",x,y,w,h,destination,what) @@ -48,25 +52,21 @@ local function add_link(x,y,w,h,destination,what) ) end -local function link_goto(x,y,w,h,document,annotation,pagesdata,pagedata,namespace) - -- print("border",table.unpack(annotation.Border.all)) - -- print("flags",annotation.F) - -- print("pagenumbers",pagedata.reference.num,destination[1].num) - -- print("pagerefs",pagedata.number,pagesdata.references[destination[1].num]) +local function link_goto(x,y,w,h,document,annotation,pagedata,namespace) local destination = annotation.A.D -- [ 18 0 R /Fit ] local what = "page" if type(destination) == "string" then - local destinations = document.Catalog.Destinations + local destinations = document.destinations local wanted = destinations[destination] destination = wanted and wanted.D if destination then what = "named" end end - local whereto = destination and destination[1] -- array - if whereto and whereto.num then - local currentpage = pagedata.number - local destinationpage = pagesdata.references[whereto.num] - add_link(x,y,w,h,namespace .. destinationpage,what) - return + local pagedata = destination and destination[1] + if pagedata then + local destinationpage = pagedata.number + if destinationpage then + add_link(x,y,w,h,namespace .. 
destinationpage,what) + end end end @@ -103,31 +103,32 @@ function codeinjections.mergereferences(specification) end if specification then local fullname = specification.fullname - local document = lpdf.load(fullname) + local document = lpdf.epdf.load(fullname) if document then local pagenumber = specification.page or 1 local xscale = specification.yscale or 1 local yscale = specification.yscale or 1 local size = specification.size or "crop" -- todo - local pagesdata = document.Catalog.Pages - local pagedata = pagesdata[pagenumber] + local pagedata = document.pages[pagenumber] local annotations = pagedata.Annots local namespace = format("lpdf-epa-%s-",file.removesuffix(file.basename(fullname))) local reference = namespace .. pagenumber - if annotations.size > 0 then - local llx, lly, urx, ury = table.unpack(pagedata.MediaBox.all) + if annotations.n > 0 then + local mediabox = pagedata.MediaBox + local llx, lly, urx, ury = mediabox[1], mediabox[2], mediabox[3], mediabox[4] local width, height = xscale * (urx - llx), yscale * (ury - lly) -- \\overlaywidth, \\overlayheight context.definelayer( { "epdflinks" }, { height = height.."bp" , width = width.."bp" }) - for i=1,annotations.size do + for i=1,annotations.n do local annotation = annotations[i] local subtype = annotation.Subtype - local a_llx, a_lly, a_urx, a_ury = table.unpack(annotation.Rect.all) + local rectangle = annotation.Rect + local a_llx, a_lly, a_urx, a_ury = rectangle[1], rectangle[2], rectangle[3], rectangle[4] local x, y = xscale * (a_llx - llx), yscale * (a_lly - lly) local w, h = xscale * (a_urx - a_llx), yscale * (a_ury - a_lly) if subtype == "Link" then local linktype = annotation.A.S if linktype == "GoTo" then - link_goto(x,y,w,h,document,annotation,pagesdata,pagedata,namespace) + link_goto(x,y,w,h,document,annotation,pagedata,namespace) elseif linktype == "GoToR" then link_file(x,y,w,h,document,annotation) elseif linktype == "URI" then @@ -154,20 +155,39 @@ function codeinjections.mergereferences(specification) end function codeinjections.mergeviewerlayers(specification) + -- todo: parse included page for layers + if true then + return + end if not specification then specification = figures and figures.current() specification = specification and specification.status end if specification then local fullname = specification.fullname - local document = lpdf.load(fullname) + local document = lpdf.epdf.load(fullname) if document then - local pagenumber = specification.page or 1 - local pagesdata = document.Catalog.Pages - local pagedata = pagesdata[pagenumber] - local resources = pagedata.Resources ---~ table.print(resources) ---~ local properties = resources.Properties + local namespace = format("lpdf:epa:%s:",file.removesuffix(file.basename(fullname))) + local layers = document.layers + if layers then + for i=1,layers.n do + local tag = layers[i] +tag = namespace .. 
string.gsub(tag," ",":") +local title = tag +if trace_links then + report_link("using layer '%s'",tag) +end + attributes.viewerlayers.define { -- also does some cleaning + tag = tag, -- todo: #3A or so + title = title, + visible = variables.start, + editable = variables.yes, + printable = variables.yes, + } + codeinjections.useviewerlayer(tag) + end + end end end end + diff --git a/tex/context/base/lpdf-epd.lua b/tex/context/base/lpdf-epd.lua index 7cd46f962..19c1f9bd8 100644 --- a/tex/context/base/lpdf-epd.lua +++ b/tex/context/base/lpdf-epd.lua @@ -6,252 +6,303 @@ if not modules then modules = { } end modules ['lpdf-epd'] = { license = "see context related readme files" } --- This is an experimental layer around the epdf library. Because that --- library is not yet finished and will get a clear api (independent of --- the underlying pdf library which has an instable api) it will take --- a while before this module is completed. Also, some integration with --- other lpdf code might happen (i.e. we might generate lpdf objects). +-- This is an experimental layer around the epdf library. The reason for +-- this layer is that I want to be independent of the library (which +-- implements a selection of what a file provides) and also because I +-- want an interface closer to Lua's table model while the API stays +-- close to the original xpdf library. Of course, after prototyping a +-- solution, we can optimize it using the low level epdf accessors. -local setmetatable, rawset = setmetatable, rawset +-- It will be handy when we have a __length and __next that can trigger +-- the resolve till then we will provide .n as #. --- used: --- --- arrayGet arrayGetNF dictLookup getTypeName arrayGetLength --- getNum getString getBool getName getRef --- getResourceDict getMediaBox getCropBox getBleedBox getTrimBox getArtBox --- getPageRef getKindName findDestgetNumPages getDests getPage getCatalog getAnnots --- --- needed: --- --- add accessor methods to the resource dict --- a function to mark objects as to be included +-- As there can be references to the parent we cannot expand a tree. I +-- played with some expansion variants but it does to pay off. -lpdf = lpdf or { } +-- Maybe we need a close(). +-- We cannot access all destinations in one run. -local lpdf = lpdf +local setmetatable, rawset, rawget, tostring, tonumber = setmetatable, rawset, rawget, tostring, tonumber +local lower, match, char = string.lower, string.match, string.char +local concat = table.concat + +function epdf.type(o) + local t = lower(match(tostring(o),"[^ :]+")) + return t or "?" 
+end --- -- -- helpers -- -- -- +lpdf = lpdf or { } +local lpdf = lpdf -local cache_lookups = false +lpdf.epdf = { } local checked_access -local array_access = { - __index = function(t,k) - local d = t.__data__ - if tonumber(k) then - return checked_access(t,k,d:arrayGetNF(k)) - elseif k == "all" then - local result = { } - for i=1,t.size do - result[i] = checked_access(t,k,d:arrayGetNF(i)) +local function prepare(document,d,t,n,k) + for i=1,n do + local v = d:getVal(i) + local r = d:getValNF(i) + if r:getTypeName() ~= "ref" then + t[d:getKey(i)] = checked_access[v:getTypeName()](v,document) + else + r = r:getRef().num + local c = document.cache[r] + if c then + -- + else + c = checked_access[v:getTypeName()](v,document,r) + document.cache[r] = c + document.xrefs[c] = r end - return result - elseif k == "width" then - return checked_access(t,k,d:arrayGetNF(3)) - checked_access(t,k,d:arrayGetNF(1)) - elseif k == "height" then - return checked_access(t,k,d:arrayGetNF(4)) - checked_access(t,k,d:arrayGetNF(2)) + t[d:getKey(i)] = c end - end, -} - -local dictionary_access = { - __index = function(t,k) - return checked_access(t,k,t.__data__:dictLookup(k)) end -} + getmetatable(t).__index = nil + return t[k] +end -checked_access = function(tab,key,v) - local n = v:getTypeName() - if n == "array" then - local t = { __data__ = v, size = v:arrayGetLength() or 0 } - setmetatable(t,array_access) - if cache_lookups then rawset(tab,key,t) end - return t - elseif n == "dictionary" then - local t = { __data__ = v, } - setmetatable(t,dictionary_access) - if cache_lookups then rawset(tab,key,t) end +local function some_dictionary(d,document,r) + local n = d and d:getLength() or 0 + if n > 0 then + local t = { } + setmetatable(t, { __index = function(t,k) return prepare(document,d,t,n,k) end } ) return t - elseif n == "real" or n == "integer" then - return v:getNum() - elseif n == "string" then - return v:getString() - elseif n == "boolean" then - return v:getBool() - elseif n == "name" then - return v:getName() - elseif n == "ref" then - return v:getRef(v.num,v.gen) - else - return v end end -local basic_annots_access = { - __index = function(t,k) - local a = { - __data__ = t.__data__:arrayGet(k), - } - setmetatable(a,dictionary_access) - if cache_lookups then rawset(t,k,a) end - return a - end -} +local done = { } -local basic_resources_access = { -- == dictionary_access - __index = function(t,k) ---~ local d = t.__data__ ---~ print(d) ---~ print(d:getTypeName()) - return checked_access(t,k,t.__data__:dictLookup(k)) +local function prepare(document,a,t,n,k) + for i=1,n do + local v = a:get(i) + local r = a:getNF(i) + if r:getTypeName() ~= "ref" then + t[i] = checked_access[v:getTypeName()](v,document) + else + r = r:getRef().num + local c = document.cache[r] + if c then + -- + else + c = checked_access[v:getTypeName()](v,document,r) + document.cache[r] = c + document.xrefs[c] = r + end + t[i] = c + end end -} + getmetatable(t).__index = nil + return t[k] +end -local basic_box_access = { -- here it makes sense to do the rawset - __index = function(t,k) - local d = t.__data__ - if k == "all" then return { d.x1, d.y1, d.x2, d.y2 } - elseif k == "width" then return d.x2 - d.x1 - elseif k == "height" then return d.y2 - d.y1 - elseif k == 1 or k == "llx" then return d.x1 - elseif k == 2 or k == "lly" then return d.y1 - elseif k == 3 or k == "urx" then return d.x2 - elseif k == 4 or k == "lly" then return d.y2 - else return 0 end +local function some_array(a,document,r) + local n = a and a:getLength() or 0 + if n 
> 0 then + local t = { n = n } + setmetatable(t, { __index = function(t,k) return prepare(document,a,t,n,k) end } ) + return t end -} - --- -- -- pages -- -- -- +end -local page_access = { - __index = function(t,k) - local d = t.__data__ - if k == "Annots" then - local annots = d:getAnnots() - local a = { - __data__ = annots, - size = annots:arrayGetLength() or 0 - } - setmetatable(a,basic_annots_access) - rawset(t,k,a) - return a - elseif k == "Resources" then - local r = { - __data__ = d:getResourceDict(), - } - setmetatable(r,basic_resources_access) - rawset(t,k,r) - return r - elseif k == "MediaBox" or k == "TrimBox" or k == "CropBox" or k == "ArtBox" or k == "BleedBox" then - local b = { - -- __data__ = d:getMediaBox(), - __data__ = d["get"..k](d), - } - setmetatable(b,basic_box_access) - rawset(t,k,b) - return b +local function streamaccess(s,_,what) + if not what or what == "all" or what == "*all" then + local t, n = { }, 0 + s:streamReset() + while true do + local c = s:streamGetChar() + if c < 0 then + break + else + n = n + 1 + t[n] = char(c) + end end + return concat(t) end +end + +local function some_stream(d,document,r) + if d then + d:streamReset() + local s = some_dictionary(d:streamGetDict(),document,r) + getmetatable(s).__call = function(...) return streamaccess(d,...) end + return s + end +end + +-- we need epdf.getBool + +checked_access = { + dictionary = function(d,document,r) + return some_dictionary(d:getDict(),document,r) + end, + array = function(a,document,r) + return some_array(a:getArray(),document,r) + end, + stream = function(v,document,r) + return some_stream(v,document,r) + end, + real = function(v) + return v:getReal() + end, + integer = function(v) + return v:getNum() + end, + string = function(v) + return v:getString() + end, + boolean = function(v) + return v:getBool() + end, + name = function(v) + return v:getName() + end, + ref = function(v) + return v:getRef() + end, } --- -- -- catalog -- -- -- +--~ checked_access.real = epdf.real +--~ checked_access.integer = epdf.integer +--~ checked_access.string = epdf.string +--~ checked_access.boolean = epdf.boolean +--~ checked_access.name = epdf.name +--~ checked_access.ref = epdf.ref -local destination_access = { - __index = function(t,k) - if k == "D" then - local d = t.__data__ - local p = { - d:getPageRef(k), d:getKindName(k) - } - if cache_lookups then rawset(t,k,p) end -- not needed - return p +local function getnames(document,n,target) -- direct + if n then + local Names = n.Names + if Names then + if not target then + target = { } + end + for i=1,Names.n,2 do + target[Names[i]] = Names[i+1] + end + else + local Kids = n.Kids + if Kids then + for i=1,Kids.n do + target = getnames(document,Kids[i],target) + end + end end + return target end -} +end -local destinations_access = { - __index = function(t,k) - local d = t.__catalog__ - local p = { - __data__ = d:findDest(k), - } - setmetatable(p,destination_access) - if cache_lookups then rawset(t,k,p) end - return p +local function getkids(document,n,target) -- direct + if n then + local Kids = n.Kids + if Kids then + for i=1,Kids.n do + target = getkids(document,Kids[i],target) + end + elseif target then + target[#target+1] = n + else + target = { n } + end + return target end -} +end -local catalog_access = { - __index = function(t,k) - local c = t.__catalog__ - if k == "Pages" then - local s = c:getNumPages() - local r = { - } - local p = { - __catalog__ = c, - size = s, - references = r, - } - -- we load all pages as we need to resolve refs - for i=1,s 
do - local di, ri = c:getPage(i), c:getPageRef(i) - local pi = { - __data__ = di, - reference = ri, - number = i, - } - setmetatable(pi,page_access) - p[i], r[ri.num] = pi, i +-- /OCProperties << +-- /OCGs [ 15 0 R 17 0 R 19 0 R 21 0 R 23 0 R 25 0 R 27 0 R ] +-- /D << +-- /Order [ 15 0 R 17 0 R 19 0 R 21 0 R 23 0 R 25 0 R 27 0 R ] +-- /ON [ 15 0 R 17 0 R 19 0 R 21 0 R 23 0 R 25 0 R 27 0 R ] +-- /OFF [ ] +-- >> +-- >> + +local function getlayers(document) + local properties = document.Catalog.OCProperties + if properties then + local layers = properties.OCGs + if layers then + local t = { } + local n = layers.n + for i=1,n do + local layer = layers[i] +--~ print(document.xrefs[layer]) + t[i] = layer.Name end - -- setmetatable(p,pages_access) - rawset(t,k,p) - return p - elseif k == "Destinations" or k == "Dest" then - local d = c:getDests() - local p = { - __catalog__ = c, - } - setmetatable(p,destinations_access) - rawset(t,k,p) - return p - elseif k == "Metadata" then - local m = c:readMetadata() - local p = { -- we fake a stream dictionary - __catalog__ = c, - stream = m, - Type = "Metadata", - Subtype = "XML", - Length = #m, - } - -- rawset(t,k,p) - return p - else - print(c:dictLookup(k)) ---~ return checked_access(t,k,t:dictLookup(k)) + t.n = n + return t end end -} +end + +local function getpages(document) + local data = document.data + local xrefs = document.xrefs + local cache = document.cache + local cata = data:getCatalog() + local xref = data:getXRef() + local pages = { } + for pagenumber=1,cata:getNumPages() do + local pagereference = cata:getPageRef(pagenumber).num + local pagedata = some_dictionary(xref:fetch(pagereference,0):getDict(),document,pagereference) + pagedata.number = pagenumber + pages[pagenumber] = pagedata + xrefs[pagedata] = pagereference + cache[pagereference] = pagedata + end + return pages +end -local document_access = { - __index = function(t,k) - if k == "Catalog" then - local c = { - __catalog__ = t.__root__:getCatalog(), +-- loader + +local function delayed(document,tag,f) + local t = { } + setmetatable(t, { __index = function(t,k) + local result = f() + if result then + document[tag] = result + return result[k] + end + end } ) + return t +end + +local loaded = { } + +function lpdf.epdf.load(filename) + local document = loaded[filename] + if not document then + statistics.starttiming(lpdf.epdf) + local data = epdf.open(filename) -- maybe resolvers.find_file + if data then + document = { + filename = filename, + cache = { }, + xrefs = { }, + data = data, } - setmetatable(c,catalog_access) - rawset(t,k,c) - return c + local Catalog = some_dictionary(data:getXRef():getCatalog():getDict(),document) + document.Catalog = Catalog + -- document.catalog = Catalog + -- a few handy helper tables + document.pages = delayed(document,"pages", function() return getpages(document) end) + document.destinations = delayed(document,"destinations", function() return getnames(document,Catalog.Names.Dests) end) + document.javascripts = delayed(document,"javascripts", function() return getnames(document,Catalog.Names.JS) end) + document.widgets = delayed(document,"widgets", function() return getnames(document,Catalog.Names.AcroForm) end) + document.embeddedfiles = delayed(document,"embeddedfiles",function() return getnames(document,Catalog.Names.EmbeddedFiles) end) + document.layers = delayed(document,"layers", function() return getlayers(document) end) + else + document = false end + loaded[filename] = document + statistics.stoptiming(lpdf.epdf) + -- 
print(statistics.elapsedtime(lpdf.epdf)) end -} - -function lpdf.load(filename) - local document = { - __root__ = epdf.open(filename), - filename = filename, - } - setmetatable(document,document_access) return document end + +-- helpers + +-- function lpdf.epdf.getdestinationpage(document,name) +-- local destination = document.data:findDest(name) +-- return destination and destination.number +-- end diff --git a/tex/context/base/lpdf-fld.lua b/tex/context/base/lpdf-fld.lua index 12f8a9f46..ec09a00ee 100644 --- a/tex/context/base/lpdf-fld.lua +++ b/tex/context/base/lpdf-fld.lua @@ -134,7 +134,7 @@ plus.auto = plus.AutoView -- some day .. lpeg with function or table local function fieldflag(specification) - local o, n = specification.options, 0 + local o, n = specification.option, 0 if o and o ~= "" then for f in gmatch(o,"[^, ]+") do n = n + (flag[f] or 0) @@ -144,7 +144,7 @@ local function fieldflag(specification) end local function fieldplus(specification) - local o, n = specification.options, 0 + local o, n = specification.option, 0 if o and o ~= "" then for p in gmatch(o,"[^, ]+") do n = n + (plus[p] or 0) @@ -638,11 +638,11 @@ local function fieldalignment(specification) end local function enhance(specification,option) - local so = specification.options + local so = specification.option if so and so ~= "" then - specification.options = so .. "," .. option + specification.option = so .. "," .. option else - specification.options = option + specification.option = option end return specification end diff --git a/tex/context/base/lpdf-ini.lua b/tex/context/base/lpdf-ini.lua index 206e44688..6d7282036 100644 --- a/tex/context/base/lpdf-ini.lua +++ b/tex/context/base/lpdf-ini.lua @@ -93,12 +93,12 @@ end lpdf.toeight = toeight ---~ local escaped = lpeg.Cs((lpeg.S("\0\t\n\r\f ()[]{}/%")/"#" + lpeg.P(1))^0) ---~ +--~ local escaped = lpeg.Cs((lpeg.S("\0\t\n\r\f ()[]{}/%")/function(s) return format("#%02X",byte(s)) end + lpeg.P(1))^0) + --~ local function cleaned(str) --~ return (str and str ~= "" and lpegmatch(escaped,str)) or "" --~ end ---~ + --~ lpdf.cleaned = cleaned -- not public yet local function merge_t(a,b) diff --git a/tex/context/base/lpdf-ren.lua b/tex/context/base/lpdf-ren.lua index 2fc1bf23c..742d42273 100644 --- a/tex/context/base/lpdf-ren.lua +++ b/tex/context/base/lpdf-ren.lua @@ -59,17 +59,22 @@ local textlayers, hidelayers, videlayers = pdfarray(), pdfarray(), pdfarray() local pagelayers, pagelayersreference, cache = nil, nil, { } local specifications = { } +local initialized = { } function codeinjections.defineviewerlayer(specification) if viewerlayers.supported and textlayers then - specifications[specification.tag] = specification + local tag = specification.tag + if not specifications[tag] then + specifications[tag] = specification + end end end local function useviewerlayer(name) local specification = specifications[name] - if specification then + if not environment.initex and specification and not initialized[name] then specifications[name] = nil -- or not + initialized [name] = true if not pagelayers then pagelayers = pdfdictionary() pagelayersreference = pdfreserveobject() diff --git a/tex/context/base/lpdf-wid.lua b/tex/context/base/lpdf-wid.lua index d986e96f6..d9885e30e 100644 --- a/tex/context/base/lpdf-wid.lua +++ b/tex/context/base/lpdf-wid.lua @@ -28,6 +28,7 @@ local v_normal = variables.normal local v_auto = variables.auto local v_embed = variables.embed local v_unknown = variables.unknown +local v_max = variables.max local pdfconstant = lpdf.constant local 
pdfdictionary = lpdf.dictionary @@ -35,6 +36,7 @@ local pdfarray = lpdf.array local pdfreference = lpdf.reference local pdfunicode = lpdf.unicode local pdfstring = lpdf.string +local pdfboolean = lpdf.boolean local pdfcolorspec = lpdf.colorspec local pdfflushobject = lpdf.flushobject local pdfreserveannotation = lpdf.reserveannotation @@ -341,7 +343,37 @@ end local nofcomments, usepopupcomments, stripleading = 0, false, true -function nodeinjections.comment(specification) +local defaultattributes = { + ["xmlns"] = "http://www.w3.org/1999/xhtml", + ["xmlns:xfa"] = "http://www.xfa.org/schema/xfa-data/1.0/", + ["xfa:contentType"] = "text/html", + ["xfa:APIVersion"] = "Acrobat:8.0.0", + ["xfa:spec"] = "2.4", +} + +local function checkcontent(text,option) + if option and option.xml then + local root = xml.convert(text) + if root and not root.er then + xml.checkbom(root) + local body = xml.first(root,"/body") + if body then + local at = body.at + for k, v in next, defaultattributes do + if not at[k] then + at[k] = v + end + end + -- local content = xml.textonly(root) + local richcontent = xml.tostring(root) + return nil, pdfunicode(richcontent) + end + end + end + return pdfunicode(text) +end + +function nodeinjections.comment(specification) -- brrr: seems to be done twice nofcomments = nofcomments + 1 local text = string.strip(specification.data or "") if stripleading then @@ -352,6 +384,7 @@ function nodeinjections.comment(specification) local title = specification.title or "" -- versions of acrobat see the title local subtitle = specification.subtitle or "" -- as author local author = specification.author or "" + local option = settings_to_hash(specification.option or "") if author == "" then if title == "" then title = tag @@ -364,10 +397,12 @@ function nodeinjections.comment(specification) end title = author end + local content, richcontent = checkcontent(text,option) local d = pdfdictionary { Subtype = pdfconstant("Text"), - -- Open = specification.open, -- now options - Contents = pdfunicode(text), + Open = option[v_max] and pdfboolean(true) or nil, + Contents = content, + RC = richcontent, T = title ~= "" and pdfunicode(title) or nil, Subj = subtitle ~= "" and pdfunicode(subtitle) or nil, C = analyzecolor(specification.colorvalue,specification.colormodel), @@ -447,7 +482,7 @@ local function insertrenderingwindow(specification) local label = specification.label --~ local openpage = specification.openpage --~ local closepage = specification.closepage - if specification.options == v_auto then + if specification.option == v_auto then if openpageaction then -- \handlereferenceactions{\v!StartRendering{#2}} end @@ -485,7 +520,7 @@ end local function insertrendering(specification) local label = specification.label - local options = utilities.parsers.settings_to_hash(specification.options) + local option = settings_to_hash(specification.option) if not mf[label] then local filename = specification.filename local isurl = find(filename,"://") @@ -523,7 +558,7 @@ local function insertrendering(specification) } if isurl then descriptor.FS = pdfconstant("URL") - elseif options[v_embed] then + elseif option[v_embed] then descriptor.EF = codeinjections.embedfile(filename) end local clip = pdfdictionary { diff --git a/tex/context/base/luat-ini.mkiv b/tex/context/base/luat-ini.mkiv index d58d9977a..a79ef6a02 100644 --- a/tex/context/base/luat-ini.mkiv +++ b/tex/context/base/luat-ini.mkiv @@ -209,11 +209,11 @@ % tree=true % \stopluaparameterset % -% options=u3d:myset:display:1 +% option=u3d:myset:display:1 
% % or: % -% options=\luaparameterset{u3d:myset:display:1}{toolbar=false,tree=true} +% option=\luaparameterset{u3d:myset:display:1}{toolbar=false,tree=true} %D A Handy helper: diff --git a/tex/context/base/lxml-xml.lua b/tex/context/base/lxml-xml.lua index 2053d2353..89fcba871 100644 --- a/tex/context/base/lxml-xml.lua +++ b/tex/context/base/lxml-xml.lua @@ -6,6 +6,8 @@ if not modules then modules = { } end modules ['lxml-xml'] = { license = "see context related readme files" } +local concat = string.concat + local xml = xml local finalizers = xml.finalizers.xml @@ -130,7 +132,7 @@ local function texts(collected) if collected then local t, n = { }, 0 for c=1,#collected do - local e = collection[c] + local e = collected[c] if e and e.dt then n = n + 1 t[n] = e.dt @@ -307,3 +309,24 @@ end xml.table = totable finalizers.table = totable + +local function textonly(e,t) + if e then + local edt = e.dt + if edt then + for i=1,#edt do + local e = edt[i] + if type(e) == "table" then + textonly(e,t) + else + t[#t+1] = e + end + end + end + end + return t +end + +function xml.textonly(e) -- no pattern + return concat(textonly(e,{})) +end diff --git a/tex/context/base/node-ser.lua b/tex/context/base/node-ser.lua index 3c2e1facc..a58b7d745 100644 --- a/tex/context/base/node-ser.lua +++ b/tex/context/base/node-ser.lua @@ -100,7 +100,7 @@ local function totable(n,flat,verbose) local tt = { } for k=1,#f do local v = f[k] - local nv = n[v] + local nv = v and n[v] if nv then if ignore[v] then -- skip diff --git a/tex/context/base/s-fnt-25.mkiv b/tex/context/base/s-fnt-25.mkiv index 1f03bab4d..6a9924616 100644 --- a/tex/context/base/s-fnt-25.mkiv +++ b/tex/context/base/s-fnt-25.mkiv @@ -88,7 +88,7 @@ function document.showmathfont(id,slot) local virtual, names = tfmdata.properties.virtualized, { } if virtual then for k, v in ipairs(tfmdata.fonts) do - local id = v.properties.id + local id = v.id local name = fontdata[id].properties.name names[k] = (name and file.basename(name)) or id end diff --git a/tex/context/base/scrn-fld.mkvi b/tex/context/base/scrn-fld.mkvi index 764bf2f16..bf84f15b7 100644 --- a/tex/context/base/scrn-fld.mkvi +++ b/tex/context/base/scrn-fld.mkvi @@ -241,7 +241,7 @@ framecolorvalue = "\currentfieldframecolorvalue", \fi layer = "\fieldbodyparameter\c!fieldlayer", - options = "\fieldbodyparameter\c!option", + option = "\fieldbodyparameter\c!option", align = "\fieldbodyparameter\c!align", clickin = "\fieldbodyparameter\c!clickin", clickout = "\fieldbodyparameter\c!clickout", diff --git a/tex/context/base/scrn-wid.mkvi b/tex/context/base/scrn-wid.mkvi index 4674dea28..7f2ea7d42 100644 --- a/tex/context/base/scrn-wid.mkvi +++ b/tex/context/base/scrn-wid.mkvi @@ -15,6 +15,8 @@ \registerctxluafile{scrn-wid}{1.001} +% todo: expansion in comments (default is expanded) + \unprotect %D Attachments (mkiv upgraded): @@ -389,7 +391,7 @@ {\doifassignmentelse{#title} {\setupcomment[\currentcomment][#title]} {\setupcomment[\currentcomment][\c!title=#title,#settings]}% - \ctxlua{buffers.assign("\v!comment",\!!bs\detokenize{#text}\!!es)}% + \ctxlua{buffers.assign("\v!comment",\!!bs#text\!!es)}% todo: expansion control, but expanded by default (xml) \scrn_comment_inject \ignorespaces} @@ -556,16 +558,16 @@ \def\useexternalrendering{\doquadrupleempty\scrn_rendering_use} \def\setinternalrendering{\dodoubleempty \scrn_rendering_set} -\def\scrn_rendering_use[#tag][#mime][#file][#options]% +\def\scrn_rendering_use[#tag][#mime][#file][#option]% {\ctxlua{interactions.renderings.register { type = "external", label 
= "#tag", mime = "#mime", filename = "#file", - options = "#options", + option = "#option", }}} -\def\scrn_rendering_set[#tag][#options]% {content} +\def\scrn_rendering_set[#tag][#option]% {content} {\bgroup \dowithnextbox {\ctxlua{interactions.renderings.register { @@ -573,15 +575,15 @@ label = "#tag", mime = "IRO", % brrr filename = "#tag", - options = "#options", + option = "#option", }}% \let\objectoffset\zeropoint \setobject{IRO}{#tag}\hbox{\box\nextbox}% \egroup}% \hbox} -\def\renderingtype #tag{\ctxlua{interactions.renderings.var("#tag","type")}} -\def\renderingoptions#tag{\ctxlua{interactions.renderings.var("#tag","options")}} +\def\renderingtype #tag{\ctxlua{interactions.renderings.var("#tag","type")}} +\def\renderingoption#tag{\ctxlua{interactions.renderings.var("#tag","option")}} \def\renderingwidth {8cm} % will become private \def\renderingheight {6cm} % will become private @@ -631,11 +633,11 @@ [\??rw\currentrenderingwindow][\c!offset=\v!overlay]% {\vfill \ctxcommand{insertrenderingwindow { - label = "\currentrendering", - width = \number\dimexpr\renderingwidth\relax, - height = \number\dimexpr\renderingheight\relax, - options = "\renderingoptions\currentrendering", - page = \number\renderingpage, + label = "\currentrendering", + width = \number\dimexpr\renderingwidth\relax, + height = \number\dimexpr\renderingheight\relax, + option = "\renderingoption\currentrendering", + page = \number\renderingpage, }}\hfill}% \egroup} diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf Binary files differindex 77031a786..a7fb4838d 100644 --- a/tex/context/base/status-files.pdf +++ b/tex/context/base/status-files.pdf diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf Binary files differindex 405fea2c5..1f67d617a 100644 --- a/tex/context/base/status-lua.pdf +++ b/tex/context/base/status-lua.pdf diff --git a/tex/context/base/strc-flt.mkiv b/tex/context/base/strc-flt.mkiv index 6e8c0b5dd..32024a276 100644 --- a/tex/context/base/strc-flt.mkiv +++ b/tex/context/base/strc-flt.mkiv @@ -768,9 +768,9 @@ \let\currentfloatattribute\empty % to be checked \def\floatcaptionattribute - {\iflocation \ifnofloatnumber \else \ifnofloatcaption \else + {\iflocation \ifnofloatnumber \else \ifnofloatcaption \else \ifinsidesplitfloat \else attr \destinationattribute \currentfloatattribute - \fi \fi \fi} + \fi \fi \fi \fi} \newconditional\usesamefloatnumber diff --git a/tex/context/base/type-set.mkiv b/tex/context/base/type-set.mkiv index 89dd68a8f..a67be14af 100644 --- a/tex/context/base/type-set.mkiv +++ b/tex/context/base/type-set.mkiv @@ -17,7 +17,7 @@ \definetypescriptprefix [sans] [Sans] \definetypescriptprefix [mono] [Mono] -\definefontsynonym [DefaultFont] [cmvtt10] +\definefontsynonym [DefaultFont] [\s!file:lmmonoprop10-regular] % was cmvtt10 \startsetups [font:fallback:serif] \definefontsynonym [Serif] [DefaultFont] diff --git a/tex/generic/context/luatex-fonts-merged.lua b/tex/generic/context/luatex-fonts-merged.lua index 22117dff6..82e90df27 100644 --- a/tex/generic/context/luatex-fonts-merged.lua +++ b/tex/generic/context/luatex-fonts-merged.lua @@ -1,6 +1,6 @@ -- merged file : luatex-fonts-merged.lua -- parent file : luatex-fonts.lua --- merge date : 04/13/11 09:23:15 +-- merge date : 04/19/11 16:38:06 do -- begin closure to overcome local limits and interference @@ -1415,9 +1415,11 @@ local function serialize(_handle,root,name,specification) -- handle wins end if root then -- The dummy access will initialize a table that has a delayed 
initialization - -- using a metatable. - local dummy = root._w_h_a_t_e_v_e_r_ - root._w_h_a_t_e_v_e_r_ = nil + -- using a metatable. (maybe explicitly test for metatable) + if getmetatable(root) then -- todo: make this an option, maybe even per subtable + local dummy = root._w_h_a_t_e_v_e_r_ + root._w_h_a_t_e_v_e_r_ = nil + end -- Let's forget about empty tables. if next(root) then do_serialize(root,name,"",0) @@ -4788,7 +4790,7 @@ local otf = fonts.handlers.otf otf.glists = { "gsub", "gpos" } -otf.version = 2.727 -- beware: also sync font-mis.lua +otf.version = 2.728 -- beware: also sync font-mis.lua otf.cache = containers.define("fonts", "otf", otf.version, true) local fontdata = fonts.hashes.identifiers @@ -4942,7 +4944,6 @@ local valid_fields = table.tohash { local ordered_enhancers = { "prepare tables", "prepare glyphs", - "prepare unicodes", "prepare lookups", "analyze glyphs", @@ -4967,6 +4968,9 @@ local ordered_enhancers = { "check metadata", "check extra features", -- after metadata + "add duplicates", + "check encoding", + "cleanup tables", } @@ -5154,7 +5158,10 @@ function otf.load(filename,format,sub,featurefile) creator = "context mkiv", unicodes = unicodes, indices = { - -- unicodes to names + -- index to unicodes + }, + duplicates = { + -- alternative unicodes }, lookuptypes = { }, @@ -5335,6 +5342,7 @@ actions["prepare glyphs"] = function(data,filename,raw) local descriptions = data.descriptions local unicodes = resources.unicodes -- name to unicode local indices = resources.indices -- index to unicode + local duplicates = resources.duplicates if rawsubfonts then @@ -5377,7 +5385,7 @@ actions["prepare glyphs"] = function(data,filename,raw) unicodes[name] = unicode nofunicodes = nofunicodes + 1 end - indices[unicode] = index -- each index in unique (at least now) + indices[index] = unicode -- each index is unique (at least now) local description = { -- width = glyph.width, @@ -5419,13 +5427,12 @@ actions["prepare glyphs"] = function(data,filename,raw) end private = private + 1 else - unicodes[name] = unicode + unicodes[name] = unicode end - indices[unicode] = index + indices[index] = unicode if not name then name = format("u%06X",unicode) end - descriptions[unicode] = { -- width = glyph.width, boundingbox = glyph.boundingbox, @@ -5433,6 +5440,14 @@ actions["prepare glyphs"] = function(data,filename,raw) index = index, glyph = glyph, } + local altuni = glyph.altuni + if altuni then + local d = { } + for i=1,#altuni do + d[#d+1] = altuni[i].unicode + end + duplicates[unicode] = d + end else report_otf("potential problem: glyph 0x%04X is used but empty",index) end @@ -5447,12 +5462,13 @@ end -- the next one is still messy but will get better when we have -- flattened map/enc tables in the font loader -actions["prepare unicodes"] = function(data,filename,raw) +actions["check encoding"] = function(data,filename,raw) local descriptions = data.descriptions local resources = data.resources local properties = data.properties local unicodes = resources.unicodes -- name to unicode local indices = resources.indices -- index to unicodes + local duplicates = resources.duplicates -- begin of messy (not needed whwn cidmap) @@ -5466,36 +5482,18 @@ actions["prepare unicodes"] = function(data,filename,raw) if find(encname,"unicode") then -- unicodebmp, unicodefull, ... 
if trace_loading then - report_otf("using embedded unicode map '%s'",encname) + report_otf("checking embedded unicode map '%s'",encname) end - local multiples, nofmultiples = { }, 0 - for unicode, index in next, unicodetoindex do + for unicode, index in next, unicodetoindex do -- altuni already covers this if unicode <= criterium and not descriptions[unicode] then - local parent = indices[index] - local description = descriptions[parent] - if description then - local c = fastcopy(description) - c.comment = format("copy of U+%05X", parent) - descriptions[unicode] = c - local name = c.name - if not unicodes[name] then - unicodes[name] = unicode - end - nofmultiples = nofmultiples + 1 - multiples[nofmultiples] = name -- we can save duplicates if needed + local parent = indices[index] -- why nil? + if parent then + report_otf("weird, unicode U+%05X points to U+%05X with index 0x%04X",unicode,parent,index) else - -- make it a notdef - report_otf("weird unicode U+%05X at index 0x%04X",unicode,index) + report_otf("weird, unicode U+%05X points to nowhere with index 0x%04X",unicode,index) end end end - if trace_loading then - if nofmultiples > 0 then - report_otf("%s glyphs are reused: %s",nofmultiples,concat(multiples," ")) - else - report_otf("no glyphs are reused") - end - end elseif properties.cidinfo then report_otf("warning: no unicode map, used cidmap '%s'",properties.cidinfo.usedname or "?") else @@ -5507,6 +5505,48 @@ actions["prepare unicodes"] = function(data,filename,raw) end end +-- for the moment we assume that a fotn with lookups will not use +-- altuni so we stick to kerns only + +actions["add duplicates"] = function(data,filename,raw) + local descriptions = data.descriptions + local resources = data.resources + local properties = data.properties + local unicodes = resources.unicodes -- name to unicode + local indices = resources.indices -- index to unicodes + local duplicates = resources.duplicates + + for unicode, d in next, duplicates do + for i=1,#d do + local u = d[i] + if not descriptions[u] then + local description = descriptions[unicode] + local duplicate = table.copy(description) -- else packing problem + duplicate.comment = format("copy of U+%05X", unicode) + descriptions[u] = duplicate + local n = 0 + for _, description in next, descriptions do + if kerns then + local kerns = description.kerns + for _, k in next, kerns do + local ku = k[unicode] + if ku then + k[u] = ku + n = n + 1 + end + end + end + -- todo: lookups etc + end + if trace_loading then + report_otf("duplicating U+%05X to U+%05X with index 0x%04X (%s kerns)",unicode,u,description.index,n) + end + end + end + end + +end + -- class : nil base mark ligature component (maybe we don't need it in description) -- boundingbox: split into ht/dp takes more memory (larger tables and less sharing) |
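The bulk of the insertions is the rewrite of lpdf-epd.lua: the old `lpdf.load` wrapper with its hand-rolled catalog, page and destination metatables is replaced by `lpdf.epdf.load`, which caches resolved objects per reference and exposes dictionaries, arrays and streams as lazily populated Lua tables (arrays carry an explicit `n` field because `#` cannot yet trigger the delayed resolve). Below is a rough usage sketch under that interface, modelled on the updated lpdf-epa.lua; the filename is invented and the calls assume a ConTeXt MkIV run where the epdf library is available:

```lua
-- rough sketch, modelled on how lpdf-epa.lua drives the reworked lpdf-epd.lua layer;
-- "figure.pdf" is an invented filename
local document = lpdf.epdf.load("figure.pdf")

if document then
    local pagedata = document.pages[1]              -- pages are resolved lazily per access
    local mediabox = pagedata.MediaBox              -- plain numeric indexing, 1..4
    local llx, lly, urx, ury = mediabox[1], mediabox[2], mediabox[3], mediabox[4]

    local annotations = pagedata.Annots
    if annotations and annotations.n > 0 then       -- arrays expose .n rather than #
        for i=1,annotations.n do
            local annotation = annotations[i]
            if annotation.Subtype == "Link" then
                local action = annotation.A
                print(i,action and action.S)        -- e.g. GoTo, GoToR, URI
            end
        end
    end

    local destinations = document.destinations     -- flattened named-destination tree
end
```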