76 files changed, 2774 insertions(+), 984 deletions(-)
diff --git a/doc/context/manuals/allkind/mkiv-publications.pdf b/doc/context/manuals/allkind/mkiv-publications.pdf Binary files differindex b00659d04..74bcb637e 100644 --- a/doc/context/manuals/allkind/mkiv-publications.pdf +++ b/doc/context/manuals/allkind/mkiv-publications.pdf diff --git a/scripts/context/lua/mtx-epub.lua b/scripts/context/lua/mtx-epub.lua index fa5a85134..6d8dfa63f 100644 --- a/scripts/context/lua/mtx-epub.lua +++ b/scripts/context/lua/mtx-epub.lua @@ -23,9 +23,8 @@ if not modules then modules = { } end modules ['mtx-epub'] = { -- OEBPS -- content.opf -- toc.ncx --- Images --- Styles --- Text +-- images +-- styles -- mimetype -- todo: diff --git a/scripts/context/lua/mtxrun.lua b/scripts/context/lua/mtxrun.lua index fcfdc1f17..ff54a0927 100644 --- a/scripts/context/lua/mtxrun.lua +++ b/scripts/context/lua/mtxrun.lua @@ -1212,7 +1212,7 @@ do -- create closure to overcome 200 locals limit package.loaded["l-table"] = package.loaded["l-table"] or true --- original size: 33477, stripped down to: 21843 +-- original size: 33499, stripped down to: 21844 if not modules then modules={} end modules ['l-table']={ version=1.001, @@ -1259,7 +1259,7 @@ local function compare(a,b) if ta==tb then return a<b else - return tostring(a)<tostring(b) + return tostring(a)<tostring(b) end end local function sortedkeys(tab) @@ -8942,7 +8942,7 @@ do -- create closure to overcome 200 locals limit package.loaded["util-tpl"] = package.loaded["util-tpl"] or true --- original size: 6251, stripped down to: 3488 +-- original size: 6621, stripped down to: 3627 if not modules then modules={} end modules ['util-tpl']={ version=1.001, @@ -8984,7 +8984,7 @@ local sqlescape=lpeg.replacer { { "\r\n","\\n" }, { "\r","\\n" }, } -local sqlquoted=lpeg.Cs(lpeg.Cc("'")*sqlescape*lpeg.Cc("'")) +local sqlquoted=Cs(Cc("'")*sqlescape*Cc("'")) lpegpatterns.sqlescape=sqlescape lpegpatterns.sqlquoted=sqlquoted local luaescape=lpegpatterns.luaescape @@ -9007,12 +9007,20 @@ local quotedescapers={ local luaescaper=escapers.lua local quotedluaescaper=quotedescapers.lua local function replacekeyunquoted(s,t,how,recurse) - local escaper=how and escapers[how] or luaescaper - return escaper(replacekey(s,t,how,recurse)) + if how==false then + return replacekey(s,t,how,recurse) + else + local escaper=how and escapers[how] or luaescaper + return escaper(replacekey(s,t,how,recurse)) + end end local function replacekeyquoted(s,t,how,recurse) - local escaper=how and quotedescapers[how] or quotedluaescaper - return escaper(replacekey(s,t,how,recurse)) + if how==false then + return replacekey(s,t,how,recurse) + else + local escaper=how and quotedescapers[how] or quotedluaescaper + return escaper(replacekey(s,t,how,recurse)) + end end local single=P("%") local double=P("%%") @@ -17585,8 +17593,8 @@ end -- of closure -- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua 
data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua -- skipped libraries : - --- original bytes : 724607 --- stripped bytes : 257595 +-- original bytes : 724999 +-- stripped bytes : 257847 -- end library merge diff --git a/scripts/context/stubs/mswin/mtxrun.lua b/scripts/context/stubs/mswin/mtxrun.lua index fcfdc1f17..ff54a0927 100644 --- a/scripts/context/stubs/mswin/mtxrun.lua +++ b/scripts/context/stubs/mswin/mtxrun.lua @@ -1212,7 +1212,7 @@ do -- create closure to overcome 200 locals limit package.loaded["l-table"] = package.loaded["l-table"] or true --- original size: 33477, stripped down to: 21843 +-- original size: 33499, stripped down to: 21844 if not modules then modules={} end modules ['l-table']={ version=1.001, @@ -1259,7 +1259,7 @@ local function compare(a,b) if ta==tb then return a<b else - return tostring(a)<tostring(b) + return tostring(a)<tostring(b) end end local function sortedkeys(tab) @@ -8942,7 +8942,7 @@ do -- create closure to overcome 200 locals limit package.loaded["util-tpl"] = package.loaded["util-tpl"] or true --- original size: 6251, stripped down to: 3488 +-- original size: 6621, stripped down to: 3627 if not modules then modules={} end modules ['util-tpl']={ version=1.001, @@ -8984,7 +8984,7 @@ local sqlescape=lpeg.replacer { { "\r\n","\\n" }, { "\r","\\n" }, } -local sqlquoted=lpeg.Cs(lpeg.Cc("'")*sqlescape*lpeg.Cc("'")) +local sqlquoted=Cs(Cc("'")*sqlescape*Cc("'")) lpegpatterns.sqlescape=sqlescape lpegpatterns.sqlquoted=sqlquoted local luaescape=lpegpatterns.luaescape @@ -9007,12 +9007,20 @@ local quotedescapers={ local luaescaper=escapers.lua local quotedluaescaper=quotedescapers.lua local function replacekeyunquoted(s,t,how,recurse) - local escaper=how and escapers[how] or luaescaper - return escaper(replacekey(s,t,how,recurse)) + if how==false then + return replacekey(s,t,how,recurse) + else + local escaper=how and escapers[how] or luaescaper + return escaper(replacekey(s,t,how,recurse)) + end end local function replacekeyquoted(s,t,how,recurse) - local escaper=how and quotedescapers[how] or quotedluaescaper - return escaper(replacekey(s,t,how,recurse)) + if how==false then + return replacekey(s,t,how,recurse) + else + local escaper=how and quotedescapers[how] or quotedluaescaper + return escaper(replacekey(s,t,how,recurse)) + end end local single=P("%") local double=P("%%") @@ -17585,8 +17593,8 @@ end -- of closure -- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua -- skipped libraries : - --- original bytes : 724607 --- stripped bytes : 257595 +-- original bytes : 724999 +-- stripped bytes : 257847 -- end library merge diff --git a/scripts/context/stubs/unix/mtxrun b/scripts/context/stubs/unix/mtxrun index fcfdc1f17..ff54a0927 100644 --- a/scripts/context/stubs/unix/mtxrun +++ b/scripts/context/stubs/unix/mtxrun @@ -1212,7 
+1212,7 @@ do -- create closure to overcome 200 locals limit package.loaded["l-table"] = package.loaded["l-table"] or true --- original size: 33477, stripped down to: 21843 +-- original size: 33499, stripped down to: 21844 if not modules then modules={} end modules ['l-table']={ version=1.001, @@ -1259,7 +1259,7 @@ local function compare(a,b) if ta==tb then return a<b else - return tostring(a)<tostring(b) + return tostring(a)<tostring(b) end end local function sortedkeys(tab) @@ -8942,7 +8942,7 @@ do -- create closure to overcome 200 locals limit package.loaded["util-tpl"] = package.loaded["util-tpl"] or true --- original size: 6251, stripped down to: 3488 +-- original size: 6621, stripped down to: 3627 if not modules then modules={} end modules ['util-tpl']={ version=1.001, @@ -8984,7 +8984,7 @@ local sqlescape=lpeg.replacer { { "\r\n","\\n" }, { "\r","\\n" }, } -local sqlquoted=lpeg.Cs(lpeg.Cc("'")*sqlescape*lpeg.Cc("'")) +local sqlquoted=Cs(Cc("'")*sqlescape*Cc("'")) lpegpatterns.sqlescape=sqlescape lpegpatterns.sqlquoted=sqlquoted local luaescape=lpegpatterns.luaescape @@ -9007,12 +9007,20 @@ local quotedescapers={ local luaescaper=escapers.lua local quotedluaescaper=quotedescapers.lua local function replacekeyunquoted(s,t,how,recurse) - local escaper=how and escapers[how] or luaescaper - return escaper(replacekey(s,t,how,recurse)) + if how==false then + return replacekey(s,t,how,recurse) + else + local escaper=how and escapers[how] or luaescaper + return escaper(replacekey(s,t,how,recurse)) + end end local function replacekeyquoted(s,t,how,recurse) - local escaper=how and quotedescapers[how] or quotedluaescaper - return escaper(replacekey(s,t,how,recurse)) + if how==false then + return replacekey(s,t,how,recurse) + else + local escaper=how and quotedescapers[how] or quotedluaescaper + return escaper(replacekey(s,t,how,recurse)) + end end local single=P("%") local double=P("%%") @@ -17585,8 +17593,8 @@ end -- of closure -- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua -- skipped libraries : - --- original bytes : 724607 --- stripped bytes : 257595 +-- original bytes : 724999 +-- stripped bytes : 257847 -- end library merge diff --git a/scripts/context/stubs/win64/mtxrun.lua b/scripts/context/stubs/win64/mtxrun.lua index fcfdc1f17..ff54a0927 100644 --- a/scripts/context/stubs/win64/mtxrun.lua +++ b/scripts/context/stubs/win64/mtxrun.lua @@ -1212,7 +1212,7 @@ do -- create closure to overcome 200 locals limit package.loaded["l-table"] = package.loaded["l-table"] or true --- original size: 33477, stripped down to: 21843 +-- original size: 33499, stripped down to: 21844 if not modules then modules={} end modules ['l-table']={ version=1.001, @@ -1259,7 +1259,7 @@ local function compare(a,b) if ta==tb then return a<b else - return tostring(a)<tostring(b) + return 
tostring(a)<tostring(b) end end local function sortedkeys(tab) @@ -8942,7 +8942,7 @@ do -- create closure to overcome 200 locals limit package.loaded["util-tpl"] = package.loaded["util-tpl"] or true --- original size: 6251, stripped down to: 3488 +-- original size: 6621, stripped down to: 3627 if not modules then modules={} end modules ['util-tpl']={ version=1.001, @@ -8984,7 +8984,7 @@ local sqlescape=lpeg.replacer { { "\r\n","\\n" }, { "\r","\\n" }, } -local sqlquoted=lpeg.Cs(lpeg.Cc("'")*sqlescape*lpeg.Cc("'")) +local sqlquoted=Cs(Cc("'")*sqlescape*Cc("'")) lpegpatterns.sqlescape=sqlescape lpegpatterns.sqlquoted=sqlquoted local luaescape=lpegpatterns.luaescape @@ -9007,12 +9007,20 @@ local quotedescapers={ local luaescaper=escapers.lua local quotedluaescaper=quotedescapers.lua local function replacekeyunquoted(s,t,how,recurse) - local escaper=how and escapers[how] or luaescaper - return escaper(replacekey(s,t,how,recurse)) + if how==false then + return replacekey(s,t,how,recurse) + else + local escaper=how and escapers[how] or luaescaper + return escaper(replacekey(s,t,how,recurse)) + end end local function replacekeyquoted(s,t,how,recurse) - local escaper=how and quotedescapers[how] or quotedluaescaper - return escaper(replacekey(s,t,how,recurse)) + if how==false then + return replacekey(s,t,how,recurse) + else + local escaper=how and quotedescapers[how] or quotedluaescaper + return escaper(replacekey(s,t,how,recurse)) + end end local single=P("%") local double=P("%%") @@ -17585,8 +17593,8 @@ end -- of closure -- used libraries : l-lua.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-mrg.lua util-tpl.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua -- skipped libraries : - --- original bytes : 724607 --- stripped bytes : 257595 +-- original bytes : 724999 +-- stripped bytes : 257847 -- end library merge diff --git a/tex/context/base/back-exp.lua b/tex/context/base/back-exp.lua index e64b7b77c..5fa765b79 100644 --- a/tex/context/base/back-exp.lua +++ b/tex/context/base/back-exp.lua @@ -38,7 +38,6 @@ local validstring = string.valid local lpegmatch = lpeg.match local utfchar, utfvalues = utf.char, utf.values local insert, remove = table.insert, table.remove -local fromunicode16 = fonts.mappings.fromunicode16 local sortedhash = table.sortedhash local formatters = string.formatters local todimen = number.todimen @@ -2507,13 +2506,18 @@ or pap if fc then fc = fc and fc[c] if fc then - local u = fc.tounicode - if u and u ~= "" then + local u = fc.unicode + if not u then nofcurrentcontent = nofcurrentcontent + 1 - currentcontent[nofcurrentcontent] = utfchar(fromunicode16(u)) + currentcontent[nofcurrentcontent] = utfchar(c) + elseif type(u) == "table" then + for i=1,#u do + nofcurrentcontent = nofcurrentcontent + 1 + currentcontent[nofcurrentcontent] = utfchar(u[i]) + end else nofcurrentcontent = nofcurrentcontent + 1 - currentcontent[nofcurrentcontent] = 
utfchar(c) + currentcontent[nofcurrentcontent] = utfchar(u) end else -- weird, happens in hz (we really need to get rid of the pseudo fonts) nofcurrentcontent = nofcurrentcontent + 1 @@ -3092,7 +3096,7 @@ end end end - local cssfile = nil directives.register("backend.export.css", function(v) cssfile = v end) + -- local cssfile = nil directives.register("backend.export.css", function(v) cssfile = v end) local function stopexport(v) starttiming(treehash) @@ -3177,6 +3181,8 @@ end local stylefilename = file.join(stylepath,stylefilebase ) local templatefilename = file.join(stylepath,templatefilebase) + local cssfile = finetuning.cssfile + -- we keep track of all used files local files = { @@ -3196,7 +3202,7 @@ end file.copy(examplefilename,defaultfilename) end - if type(cssfile) == "string" then + if cssfile then local list = table.unique(settings_to_array(cssfile)) for i=1,#list do local source = file.addsuffix(list[i],"css") diff --git a/tex/context/base/back-exp.mkiv b/tex/context/base/back-exp.mkiv index 7a9824555..dc510b2a6 100644 --- a/tex/context/base/back-exp.mkiv +++ b/tex/context/base/back-exp.mkiv @@ -165,10 +165,8 @@ \c!alternative=, % html, div \c!properties=\v!no, % no: ignore, yes: as attribute, otherwise: use as prefix \c!hyphen=\v!no, - \c!svgstyle=] - -\setupbackend - [css=] % ? + \c!svgstyle=, + \c!cssfile=] \def\dosynchronizeexport {\let\currentexport\empty @@ -184,6 +182,7 @@ firstpage = "\exportparameter\c!firstpage", lastpage = "\exportparameter\c!lastpage", svgstyle = "\exportparameter\c!svgstyle", + cssfile = "\exportparameter\c!cssfile", }}} \appendtoks @@ -194,14 +193,14 @@ \doifsomething{\backendparameter\c!export}\dosynchronizeexport % in case it is done inside \starttext \to \everysetupdocument -\appendtoks - \doifsomething{\backendparameter\c!xhtml} - {\enabledirectives[backend.export.xhtml=\backendparameter\c!xhtml]}% - \doifsomething{\backendparameter\c!css} - {\enabledirectives[backend.export.css={\backendparameter\c!css}]}% - \doifsomething{\backendparameter\c!alternative} - {\enabledirectives[backend.export.alternative={\backendparameter\c!alternative}]}% -\to \everysetupbackend +% \appendtoks +% \doifsomething{\backendparameter\c!xhtml} +% {\enabledirectives[backend.export.xhtml=\backendparameter\c!xhtml]}% +% \doifsomething{\backendparameter\c!css} +% {\enabledirectives[backend.export.css={\backendparameter\c!css}]}% +% \doifsomething{\backendparameter\c!alternative} +% {\enabledirectives[backend.export.alternative={\backendparameter\c!alternative}]}% +% \to \everysetupbackend \appendtoks \doifelsenothing{\backendparameter\c!export} diff --git a/tex/context/base/back-pdf.mkiv b/tex/context/base/back-pdf.mkiv index c91d2251d..413365539 100644 --- a/tex/context/base/back-pdf.mkiv +++ b/tex/context/base/back-pdf.mkiv @@ -50,7 +50,7 @@ %D These too and most of them will be protected as well: -\pdfminorversion \plussix +\pdfminorversion \plusseven \pdfgentounicode \plusone \let\pdfgentounicode \undefined \newcount\pdfgentounicode \pdfinclusioncopyfonts \plusone \let\pdfinclusioncopyfonts \undefined \newcount\pdfinclusioncopyfonts \pdfinclusionerrorlevel \zerocount \let\pdfinclusionerrorlevel\undefined \newcount\pdfinclusionerrorlevel diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv index 0083e7aee..93cd0ad0f 100644 --- a/tex/context/base/cont-new.mkiv +++ b/tex/context/base/cont-new.mkiv @@ -11,7 +11,7 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. 
-\newcontextversion{2014.09.27 14:46} +\newcontextversion{2014.10.02 23:07} %D This file is loaded at runtime, thereby providing an excellent place for %D hacks, patches, extensions and new features. diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf Binary files differindex ae0441b81..81a033e83 100644 --- a/tex/context/base/context-version.pdf +++ b/tex/context/base/context-version.pdf diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv index 6e183c88b..6e999630c 100644 --- a/tex/context/base/context.mkiv +++ b/tex/context/base/context.mkiv @@ -28,7 +28,7 @@ %D up and the dependencies are more consistent. \edef\contextformat {\jobname} -\edef\contextversion{2014.09.27 14:46} +\edef\contextversion{2014.10.02 23:07} \edef\contextkind {beta} %D For those who want to use this: diff --git a/tex/context/base/file-job.lua b/tex/context/base/file-job.lua index 3b67057e0..0d1986463 100644 --- a/tex/context/base/file-job.lua +++ b/tex/context/base/file-job.lua @@ -961,16 +961,24 @@ luatex.registerstopactions(function() logsnewline() report_options("start commandline options") logsnewline() - for argument, value in sortedhash(arguments) do - report_option("%s=%A",argument,value) + if arguments and next(arguments) then + for argument, value in sortedhash(arguments) do + report_option("%s=%A",argument,value) + end + else + report_file("no arguments") end logsnewline() report_options("stop commandline options") logsnewline() report_options("start commandline files") logsnewline() - for i=1,#files do - report_file("% 4i: %s",i,files[i]) + if files and #files > 0 then + for i=1,#files do + report_file("% 4i: %s",i,files[i]) + end + else + report_file("no files") end logsnewline() report_options("stop commandline files") diff --git a/tex/context/base/font-afm.lua b/tex/context/base/font-afm.lua index 46ea8a423..e5c9af759 100644 --- a/tex/context/base/font-afm.lua +++ b/tex/context/base/font-afm.lua @@ -40,6 +40,8 @@ local trace_defining = false trackers.register("fonts.defining", function(v local report_afm = logs.reporter("fonts","afm loading") +local setmetatableindex = table.setmetatableindex + local findbinfile = resolvers.findbinfile local definers = fonts.definers @@ -52,7 +54,7 @@ local pfb = constructors.newhandler("pfb") local afmfeatures = constructors.newfeatures("afm") local registerafmfeature = afmfeatures.register -afm.version = 1.410 -- incrementing this number one up will force a re-cache +afm.version = 1.500 -- incrementing this number one up will force a re-cache afm.cache = containers.define("fonts", "afm", afm.version, true) afm.autoprefixed = true -- this will become false some day (catches texnansi-blabla.*) @@ -79,6 +81,16 @@ registerafmfeature { } } +local remappednames = { + ff = { name = "f_f", unicode = { 0x66, 0x66 } }, + fi = { name = "f_i", unicode = { 0x66, 0x69 } }, + fj = { name = "f_j", unicode = { 0x66, 0x6A } }, + fk = { name = "f_k", unicode = { 0x66, 0x6B } }, + fl = { name = "f_l", unicode = { 0x66, 0x6C } }, + ffi = { name = "f_f_i", unicode = { 0x66, 0x66, 0x69 } }, + ffl = { name = "f_f_l", unicode = { 0x66, 0x66, 0x6C } }, +} + --[[ldx-- <p>We start with the basic reader which we give a name similar to the built in <l n='tfm'/> and <l n='otf'/> reader.</p> @@ -315,7 +327,7 @@ by adding ligatures and kern information to the afm derived data. 
That way we can set them faster when defining a font.</p> --ldx]]-- -local addkerns, addligatures, addtexligatures, unify, normalize -- we will implement these later +local addkerns, addligatures, addtexligatures, unify, normalize, fixnames -- we will implement these later function afm.load(filename) -- hm, for some reasons not resolved yet @@ -362,6 +374,7 @@ function afm.load(filename) addkerns(data) end normalize(data) + fixnames(data) report_afm("add tounicode data") fonts.mappings.addtounicode(data,filename) data.size = size @@ -369,6 +382,7 @@ function afm.load(filename) data.pfbsize = pfbsize data.pfbtime = pfbtime report_afm("saving %a in cache",name) + data.resources.unicodes = nil -- consistent with otf but here we save not much data = containers.write(afm.cache, name, data) data = containers.read(afm.cache,name) end @@ -432,13 +446,28 @@ unify = function(data, filename) resources.filename = resolvers.unresolve(filename) -- no shortcut resources.unicodes = unicodes -- name to unicode resources.marks = { } -- todo - resources.names = names -- name to index + -- resources.names = names -- name to index resources.private = private end normalize = function(data) end +fixnames = function(data) + for k, v in next, data.descriptions do + local n = v.name + local r = remappednames[n] + if r then + if trace_indexing then + report_afm("renaming characters %a to %a",n,r.name) + end + v.name = r.name + v.unicode = r.unicode + end + end +end + + --[[ldx-- <p>These helpers extend the basic table with extra ligatures, texligatures and extra kerns. This saves quite some lookups later.</p> @@ -449,7 +478,7 @@ local addthem = function(rawdata,ligatures) local descriptions = rawdata.descriptions local resources = rawdata.resources local unicodes = resources.unicodes - local names = resources.names + -- local names = resources.names for ligname, ligdata in next, ligatures do local one = descriptions[unicodes[ligname]] if one then @@ -608,8 +637,8 @@ local function copytotfm(data) local filename = constructors.checkedfilename(resources) local fontname = metadata.fontname or metadata.fullname local fullname = metadata.fullname or metadata.fontname - local endash = unicodes['space'] - local emdash = unicodes['emdash'] + local endash = 0x0020 -- space + local emdash = 0x2014 local spacer = "space" local spaceunits = 500 -- @@ -669,7 +698,7 @@ local function copytotfm(data) parameters.x_height = charxheight else -- same as otf - local x = unicodes['x'] + local x = 0x0078 -- x if x then local x = descriptions[x] if x then @@ -729,7 +758,34 @@ function afm.setfeatures(tfmdata,features) end end -local function checkfeatures(specification) +local function addtables(data) + local resources = data.resources + local lookuptags = resources.lookuptags + local unicodes = resources.unicodes + if not lookuptags then + lookuptags = { } + resources.lookuptags = lookuptags + end + setmetatableindex(lookuptags,function(t,k) + local v = type(k) == "number" and ("lookup " .. k) or k + t[k] = v + return v + end) + if not unicodes then + unicodes = { } + resources.unicodes = unicodes + setmetatableindex(unicodes,function(t,k) + setmetatableindex(unicodes,nil) + for u, d in next, data.descriptions do + local n = d.name + if n then + t[n] = u + end + end + return rawget(t,k) + end) + end + constructors.addcoreunicodes(unicodes) -- do we really need this? 
end local function afmtotfm(specification) @@ -759,6 +815,7 @@ local function afmtotfm(specification) if not tfmdata then local rawdata = afm.load(afmname) if rawdata and next(rawdata) then + addtables(rawdata) adddimensions(rawdata) tfmdata = copytotfm(rawdata) if tfmdata and next(tfmdata) then @@ -808,6 +865,7 @@ those that make sense for this format.</p> local function prepareligatures(tfmdata,ligatures,value) if value then local descriptions = tfmdata.descriptions + local hasligatures = false for unicode, character in next, tfmdata.characters do local description = descriptions[unicode] local dligatures = description.ligatures @@ -823,17 +881,20 @@ local function prepareligatures(tfmdata,ligatures,value) type = 0 } end + hasligatures = true end end + tfmdata.properties.hasligatures = hasligatures end end local function preparekerns(tfmdata,kerns,value) if value then - local rawdata = tfmdata.shared.rawdata - local resources = rawdata.resources - local unicodes = resources.unicodes + local rawdata = tfmdata.shared.rawdata + local resources = rawdata.resources + local unicodes = resources.unicodes local descriptions = tfmdata.descriptions + local haskerns = false for u, chr in next, tfmdata.characters do local d = descriptions[u] local newkerns = d[kerns] @@ -849,8 +910,10 @@ local function preparekerns(tfmdata,kerns,value) kerns[uk] = v end end + haskerns = true end end + tfmdata.properties.haskerns = haskerns end end diff --git a/tex/context/base/font-agl.lua b/tex/context/base/font-agl.lua index 42a41a15d..122d1adc2 100644 --- a/tex/context/base/font-agl.lua +++ b/tex/context/base/font-agl.lua @@ -656,6 +656,8 @@ end -- We load this table only when needed. We could use a loading mechanism -- return the table but there are no more vectors like this so why bother. +-- +-- Well, we currently hav ethis table preloaded anyway. local agl = { names = names, -- unicode -> name diff --git a/tex/context/base/font-con.lua b/tex/context/base/font-con.lua index aca705523..dd4cfa56a 100644 --- a/tex/context/base/font-con.lua +++ b/tex/context/base/font-con.lua @@ -394,7 +394,8 @@ function constructors.scale(tfmdata,specification) targetparameters.forcedsize = forcedsize -- context specific targetparameters.extrafactor = extrafactor -- context specific -- - local tounicode = resources.tounicode + local tounicode = fonts.mappings.tounicode + -- local defaultwidth = resources.defaultwidth or 0 local defaultheight = resources.defaultheight or 0 local defaultdepth = resources.defaultdepth or 0 @@ -500,7 +501,8 @@ function constructors.scale(tfmdata,specification) local autoitalicamount = properties.autoitalicamount local stackmath = not properties.nostackmath local nonames = properties.noglyphnames - local nodemode = properties.mode == "node" + local haskerns = properties.haskerns or properties.mode == "base" -- we can have afm in node mode + local hasligatures = properties.hasligatures or properties.mode == "base" -- we can have afm in node mode -- if changed and not next(changed) then changed = false @@ -594,39 +596,20 @@ function constructors.scale(tfmdata,specification) -- we can have a dumb mode (basemode without math etc) that skips most -- for unicode, character in next, characters do - local chr, description, index, touni + local chr, description, index if changed then - -- basemode hack (we try to catch missing tounicodes, e.g. 
needed for ssty in math cambria) local c = changed[unicode] if c then - local ligatures = character.ligatures -- the original ligatures (as we cannot rely on remapping) description = descriptions[c] or descriptions[unicode] or character character = characters[c] or character index = description.index or c - if tounicode then - touni = tounicode[index] -- nb: index! - if not touni then -- goodie - local d = descriptions[unicode] or characters[unicode] - local i = d.index or unicode - touni = tounicode[i] -- nb: index! - end - end - if ligatures and not character.ligatures then - character.ligatures = ligatures -- the original targets (for now at least.. see libertine smallcaps) - end else description = descriptions[unicode] or character index = description.index or unicode - if tounicode then - touni = tounicode[index] -- nb: index! - end end else description = descriptions[unicode] or character index = description.index or unicode - if tounicode then - touni = tounicode[index] -- nb: index! - end end local width = description.width local height = description.height @@ -669,8 +652,10 @@ function constructors.scale(tfmdata,specification) } end end - if touni then - chr.tounicode = touni + local isunicode = description.unicode + if isunicode then + chr.unicode = isunicode + chr.tounicode = tounicode(isunicode) end if hasquality then -- we could move these calculations elsewhere (saves calculations) @@ -767,7 +752,7 @@ function constructors.scale(tfmdata,specification) end end end - if not nodemode then + if haskerns then local vk = character.kerns if vk then local s = sharedkerns[vk] @@ -778,6 +763,8 @@ function constructors.scale(tfmdata,specification) end chr.kerns = s end + end + if hasligatures then local vl = character.ligatures if vl then if true then @@ -1362,3 +1349,50 @@ function constructors.applymanipulators(what,tfmdata,features,trace,report) end end end + +function constructors.addcoreunicodes(unicodes) -- maybe make this a metatable if used at all + if not unicodes then + unicodes = { } + end + unicodes.space = 0x0020 + unicodes.hyphen = 0x002D + unicodes.zwj = 0x200D + unicodes.zwnj = 0x200C + return unicodes +end + +-- -- keep for a while: old tounicode code +-- +-- if changed then +-- -- basemode hack (we try to catch missing tounicodes, e.g. needed for ssty in math cambria) +-- local c = changed[unicode] +-- if c then +-- -- local ligatures = character.ligatures -- the original ligatures (as we cannot rely on remapping) +-- description = descriptions[c] or descriptions[unicode] or character +-- character = characters[c] or character +-- index = description.index or c +-- if tounicode then +-- touni = tounicode[index] -- nb: index! +-- if not touni then -- goodie +-- local d = descriptions[unicode] or characters[unicode] +-- local i = d.index or unicode +-- touni = tounicode[i] -- nb: index! +-- end +-- end +-- -- if ligatures and not character.ligatures then +-- -- character.ligatures = ligatures -- the original targets (for now at least.. see libertine smallcaps) +-- -- end +-- else +-- description = descriptions[unicode] or character +-- index = description.index or unicode +-- if tounicode then +-- touni = tounicode[index] -- nb: index! +-- end +-- end +-- else +-- description = descriptions[unicode] or character +-- index = description.index or unicode +-- if tounicode then +-- touni = tounicode[index] -- nb: index! 
+-- end +-- end diff --git a/tex/context/base/font-ctx.lua b/tex/context/base/font-ctx.lua index 5920501dd..51f152baf 100644 --- a/tex/context/base/font-ctx.lua +++ b/tex/context/base/font-ctx.lua @@ -57,6 +57,8 @@ local helpers = fonts.helpers local hashes = fonts.hashes local currentfont = font.current +local aglunicodes = fonts.encodings.agl.unicodes + local nuts = nodes.nuts local tonut = nuts.tonut @@ -82,6 +84,7 @@ local characters = hashes.characters local descriptions = hashes.descriptions local properties = hashes.properties local resources = hashes.resources +local unicodes = hashes.unicodes local csnames = hashes.csnames local lastmathids = hashes.lastmathids local exheights = hashes.exheights @@ -89,6 +92,9 @@ local emwidths = hashes.emwidths local designsizefilename = fontgoodies.designsizes.filename +local context_char = context.char +local context_getvalue = context.getvalue + local otffeatures = otf.features local otftables = otf.tables @@ -1439,12 +1445,27 @@ mappings.reset() -- resets the default file -- => commands + local function nametoslot(name) local t = type(name) + local s = nil if t == "string" then - return resources[true].unicodes[name] + local slot = unicodes[true][name] + if slot then + return slot + end + slot = aglunicodes[name] + if characters[true][slot] then + return slot + else + -- not in font + end elseif t == "number" then - return n + if characters[true][name] then + return slot + else + -- not in font + end end end @@ -1472,14 +1493,14 @@ do -- else too many locals local entities = characters.entities local lowered = { } -- delayed initialization - table.setmetatableindex(lowered,function(t,k) + setmetatableindex(lowered,function(t,k) for k, v in next, entities do local l = lower(k) if not entities[l] then lowered[l] = v end end - table.setmetatableindex(lowered,nil) + setmetatableindex(lowered,nil) return lowered[k] end) @@ -1523,7 +1544,7 @@ do -- else too many locals -- -- nicer: -- - -- table.setmetatableindex(methods,function(t,k) return methods.c end) + -- setmetatableindex(methods,function(t,k) return methods.c end) -- -- local splitter = (C(1) * P(":") + Cc("c")) * C(P(1)^1) / function(method,name) -- return methods[method](name) @@ -1712,9 +1733,6 @@ end -- interfaces -local context_char = context.char -local context_getvalue = context.getvalue - local commands_doifelse = commands.doifelse function commands.doifelsecurrentfonthasfeature(name) -- can be made faster with a supportedfeatures hash diff --git a/tex/context/base/font-enh.lua b/tex/context/base/font-enh.lua index 2bf0741f5..3439a434a 100644 --- a/tex/context/base/font-enh.lua +++ b/tex/context/base/font-enh.lua @@ -114,24 +114,24 @@ local registerotffeature = otffeatures.register -- unicodes = { -- a1 = 0x2701, -local tosixteen = fonts.mappings.tounicode16 +----- tosixteen = fonts.mappings.tounicode16 local function initializeunicoding(tfmdata) local goodies = tfmdata.goodies local newcoding = nil - local tounicode = false + -- local tounicode = false for i=1,#goodies do local remapping = goodies[i].remapping if remapping and remapping.unicodes then - newcoding = remapping.unicodes -- names to unicodes - tounicode = remapping.tounicode + newcoding = remapping.unicodes -- names to unicodes + -- tounicode = remapping.tounicode -- not used end end if newcoding then local characters = tfmdata.characters local descriptions = tfmdata.descriptions local oldcoding = tfmdata.resources.unicodes - local tounicodes = tfmdata.resources.tounicode -- index to unicode + -- local tounicodes = 
tfmdata.resources.tounicode -- index to unicode local originals = { } for name, newcode in next, newcoding do local oldcode = oldcoding[name] @@ -153,15 +153,15 @@ local function initializeunicoding(tfmdata) else oldcoding[name] = newcode end - if tounicode then - local description = descriptions[newcode] - if description then - local index = description.index - if not tounicodes[index] then - tounicodes[index] = tosixteen(newcode) -- shared (we could have a metatable) - end - end - end + -- if tounicode then + -- local description = descriptions[newcode] + -- if description then + -- local index = description.index + -- if not tounicodes[index] then + -- tounicodes[index] = tosixteen(newcode) -- shared (we could have a metatable) + -- end + -- end + -- end if trace_unicoding then if oldcode then report_unicoding("aliasing glyph %a from %U to %U",name,oldcode,newcode) diff --git a/tex/context/base/font-ext.lua b/tex/context/base/font-ext.lua index ede2151d6..68dab3c46 100644 --- a/tex/context/base/font-ext.lua +++ b/tex/context/base/font-ext.lua @@ -328,8 +328,10 @@ local function map_opbd_onto_protrusion(tfmdata,value,opbd) local characters = tfmdata.characters local descriptions = tfmdata.descriptions local properties = tfmdata.properties + local resources = tfmdata.resources local rawdata = tfmdata.shared.rawdata local lookuphash = rawdata.lookuphash + local lookuptags = resources.lookuptags local script = properties.script local language = properties.language local done, factor, left, right = false, 1, 1, 1 @@ -349,14 +351,14 @@ local function map_opbd_onto_protrusion(tfmdata,value,opbd) local data = lookuphash[lookup] if data then if trace_protrusion then - report_protrusions("setting left using lfbd lookup %a",lookup) + report_protrusions("setting left using lfbd lookup %a",lookuptags[lookup]) end for k, v in next, data do -- local p = - v[3] / descriptions[k].width-- or 1 ~= 0 too but the same local p = - (v[1] / 1000) * factor * left characters[k].left_protruding = p if trace_protrusion then - report_protrusions("lfbd -> %s -> %C -> %0.03f (% t)",lookup,k,p,v) + report_protrusions("lfbd -> %s -> %C -> %0.03f (% t)",lookuptags[lookup],k,p,v) end end done = true @@ -372,14 +374,14 @@ local function map_opbd_onto_protrusion(tfmdata,value,opbd) local data = lookuphash[lookup] if data then if trace_protrusion then - report_protrusions("setting right using rtbd lookup %a",lookup) + report_protrusions("setting right using rtbd lookup %a",lookuptags[lookup]) end for k, v in next, data do -- local p = v[3] / descriptions[k].width -- or 3 local p = (v[1] / 1000) * factor * right characters[k].right_protruding = p if trace_protrusion then - report_protrusions("rtbd -> %s -> %C -> %0.03f (% t)",lookup,k,p,v) + report_protrusions("rtbd -> %s -> %C -> %0.03f (% t)",lookuptags[lookup],k,p,v) end end end diff --git a/tex/context/base/font-gds.lua b/tex/context/base/font-gds.lua index 9e7cb841e..c2c506b7a 100644 --- a/tex/context/base/font-gds.lua +++ b/tex/context/base/font-gds.lua @@ -853,7 +853,7 @@ local function setkeepligatures(tfmdata,value) if letterspacing then local keptligatures = letterspacing.keptligatures if keptligatures then - local unicodes = tfmdata.resources.unicodes + local unicodes = tfmdata.resources.unicodes -- so we accept names local hash = { } for k, v in next, keptligatures do local u = unicodes[k] diff --git a/tex/context/base/font-hsh.lua b/tex/context/base/font-hsh.lua index 1b0dd08b8..2be84165a 100644 --- a/tex/context/base/font-hsh.lua +++ 
b/tex/context/base/font-hsh.lua @@ -35,6 +35,7 @@ local italics = hashes.italics or allocate() local lastmathids = hashes.lastmathids or allocate() local dynamics = hashes.dynamics or allocate() local unicodes = hashes.unicodes or allocate() +local originals = hashes.originals or allocate() hashes.characters = characters hashes.descriptions = descriptions @@ -52,6 +53,7 @@ hashes.italics = italics hashes.lastmathids = lastmathids hashes.dynamics = dynamics hashes.unicodes = unicodes +hashes.originals = originals local nodepool = nodes.pool local dummyglyph = nodepool.register(nodepool.glyph()) @@ -261,21 +263,31 @@ setmetatableindex(dynamics, function(t,k) end end) -setmetatableindex(unicodes, function(t,k) +setmetatableindex(unicodes, function(t,k) -- always a unicode + if k == true then + return unicodes[currentfont()] + else + local resources = resources[k] + local unicodes = resources and resources.unicodes or { } + t[k] = unicodes + return unicodes + end +end) + +setmetatableindex(originals, function(t,k) -- always a unicode if k == true then return originals[currentfont()] else - local resources = resources[k] - local originals = resources and resources.originals or { } - local characters = characters[k] - local unicodes = { } - setmetatableindex(unicodes,function(t,k) - local v = originals[characters[k].index] or k - t[k] = v + local resolved = { } + setmetatableindex(resolved,function(t,name) + local u = unicodes[k][name] + local d = u and descriptions[k][u] + local v = d and d.unicode or u or 0 -- so we return notdef (at least for the moment) + t[name] = u return v end) - t[k] = unicodes - return unicodes + t[k] = resolved + return resolved end end) diff --git a/tex/context/base/font-map.lua b/tex/context/base/font-map.lua index 429c73597..309435e0d 100644 --- a/tex/context/base/font-map.lua +++ b/tex/context/base/font-map.lua @@ -79,18 +79,46 @@ end local function tounicode16sequence(unicodes,name) local t = { } for l=1,#unicodes do - local unicode = unicodes[l] - if unicode < 0x10000 then - t[l] = format("%04X",unicode) + local u = unicodes[l] + if u < 0x10000 then + t[l] = format("%04X",u) elseif unicode < 0x1FFFFFFFFF then - t[l] = format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00) + t[l] = format("%04X%04X",floor(u/1024),u%1024+0xDC00) else - report_fonts ("can't convert %a in %a into tounicode",unicode,name) + report_fonts ("can't convert %a in %a into tounicode",u,name) + return end end return concat(t) end +local function tounicode(unicode,name) + if type(unicode) == "table" then + local t = { } + for l=1,#unicode do + local u = unicode[l] + if u < 0x10000 then + t[l] = format("%04X",u) + elseif u < 0x1FFFFFFFFF then + t[l] = format("%04X%04X",floor(u/1024),u%1024+0xDC00) + else + report_fonts ("can't convert %a in %a into tounicode",u,name) + return + end + end + return concat(t) + else + if unicode < 0x10000 then + return format("%04X",unicode) + elseif unicode < 0x1FFFFFFFFF then + return format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00) + else + report_fonts("can't convert %a in %a into tounicode",unicode,name) + end + end +end + + local function fromunicode16(str) if #str == 4 then return tonumber(str,16) @@ -136,6 +164,7 @@ end mappings.loadlumtable = loadlumtable mappings.makenameparser = makenameparser +mappings.tounicode = tounicode mappings.tounicode16 = tounicode16 mappings.tounicode16sequence = tounicode16sequence mappings.fromunicode16 = fromunicode16 @@ -158,6 +187,322 @@ local namesplitter = Ct(C((1 - ligseparator - varseparator)^1) * 
(ligseparator * -- test("such_so_more") -- test("such_so_more.that") +-- function mappings.addtounicode(data,filename) +-- local resources = data.resources +-- local properties = data.properties +-- local descriptions = data.descriptions +-- local unicodes = resources.unicodes +-- local lookuptypes = resources.lookuptypes +-- if not unicodes then +-- return +-- end +-- -- we need to move this code +-- unicodes['space'] = unicodes['space'] or 32 +-- unicodes['hyphen'] = unicodes['hyphen'] or 45 +-- unicodes['zwj'] = unicodes['zwj'] or 0x200D +-- unicodes['zwnj'] = unicodes['zwnj'] or 0x200C +-- -- the tounicode mapping is sparse and only needed for alternatives +-- local private = fonts.constructors.privateoffset +-- local unknown = format("%04X",utfbyte("?")) +-- local unicodevector = fonts.encodings.agl.unicodes -- loaded runtime in context +-- ----- namevector = fonts.encodings.agl.names -- loaded runtime in context +-- local tounicode = { } +-- local originals = { } +-- local missing = { } +-- resources.tounicode = tounicode +-- resources.originals = originals +-- local lumunic, uparser, oparser +-- local cidinfo, cidnames, cidcodes, usedmap +-- -- if false then -- will become an option +-- -- lumunic = loadlumtable(filename) +-- -- lumunic = lumunic and lumunic.tounicode +-- -- end +-- -- +-- cidinfo = properties.cidinfo +-- usedmap = cidinfo and fonts.cid.getmap(cidinfo) +-- -- +-- if usedmap then +-- oparser = usedmap and makenameparser(cidinfo.ordering) +-- cidnames = usedmap.names +-- cidcodes = usedmap.unicodes +-- end +-- uparser = makenameparser() +-- local ns, nl = 0, 0 +-- for unic, glyph in next, descriptions do +-- local index = glyph.index +-- local name = glyph.name +-- if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then +-- local unicode = lumunic and lumunic[name] or unicodevector[name] +-- if unicode then +-- originals[index] = unicode +-- tounicode[index] = tounicode16(unicode,name) +-- ns = ns + 1 +-- end +-- -- cidmap heuristics, beware, there is no guarantee for a match unless +-- -- the chain resolves +-- if (not unicode) and usedmap then +-- local foundindex = lpegmatch(oparser,name) +-- if foundindex then +-- unicode = cidcodes[foundindex] -- name to number +-- if unicode then +-- originals[index] = unicode +-- tounicode[index] = tounicode16(unicode,name) +-- ns = ns + 1 +-- else +-- local reference = cidnames[foundindex] -- number to name +-- if reference then +-- local foundindex = lpegmatch(oparser,reference) +-- if foundindex then +-- unicode = cidcodes[foundindex] +-- if unicode then +-- originals[index] = unicode +-- tounicode[index] = tounicode16(unicode,name) +-- ns = ns + 1 +-- end +-- end +-- if not unicode or unicode == "" then +-- local foundcodes, multiple = lpegmatch(uparser,reference) +-- if foundcodes then +-- originals[index] = foundcodes +-- if multiple then +-- tounicode[index] = tounicode16sequence(foundcodes) +-- nl = nl + 1 +-- unicode = true +-- else +-- tounicode[index] = tounicode16(foundcodes,name) +-- ns = ns + 1 +-- unicode = foundcodes +-- end +-- end +-- end +-- end +-- end +-- end +-- end +-- -- a.whatever or a_b_c.whatever or a_b_c (no numbers) a.b_ +-- -- +-- -- It is not trivial to find a solution that suits all fonts. We tried several alternatives +-- -- and this one seems to work reasonable also with fonts that use less standardized naming +-- -- schemes. The extra private test is tested by KE and seems to work okay with non-typical +-- -- fonts as well. 
+-- -- +-- -- The next time I look into this, I'll add an extra analysis step to the otf loader (we can +-- -- resolve some tounicodes by looking into the gsub data tables that are bound to glyphs. +-- -- +-- if not unicode or unicode == "" then +-- local split = lpegmatch(namesplitter,name) +-- local nsplit = split and #split or 0 +-- local t, n = { }, 0 +-- unicode = true +-- for l=1,nsplit do +-- local base = split[l] +-- local u = unicodes[base] or unicodevector[base] +-- if not u then +-- break +-- elseif type(u) == "table" then +-- if u[1] >= private then +-- unicode = false +-- break +-- end +-- n = n + 1 +-- t[n] = u[1] +-- else +-- if u >= private then +-- unicode = false +-- break +-- end +-- n = n + 1 +-- t[n] = u +-- end +-- end +-- if n == 0 then -- done then +-- -- nothing +-- elseif n == 1 then +-- local unicode = t[1] +-- originals[index] = unicode +-- tounicode[index] = tounicode16(unicode,name) +-- else +-- originals[index] = t +-- tounicode[index] = tounicode16sequence(t) +-- end +-- nl = nl + 1 +-- end +-- -- last resort (we might need to catch private here as well) +-- if not unicode or unicode == "" then +-- local foundcodes, multiple = lpegmatch(uparser,name) +-- if foundcodes then +-- if multiple then +-- originals[index] = foundcodes +-- tounicode[index] = tounicode16sequence(foundcodes,name) +-- nl = nl + 1 +-- unicode = true +-- else +-- originals[index] = foundcodes +-- tounicode[index] = tounicode16(foundcodes,name) +-- ns = ns + 1 +-- unicode = foundcodes +-- end +-- end +-- end +-- -- check using substitutes and alternates +-- -- +-- if not unicode then +-- missing[name] = true +-- end +-- -- if not unicode then +-- -- originals[index] = 0xFFFD +-- -- tounicode[index] = "FFFD" +-- -- end +-- end +-- end +-- if next(missing) then +-- local guess = { } +-- -- helper +-- local function check(gname,code,unicode) +-- local description = descriptions[code] +-- -- no need to add a self reference +-- local variant = description.name +-- if variant == gname then +-- return +-- end +-- -- the variant already has a unicode (normally that resultrs in a default tounicode to self) +-- local unic = unicodes[variant] +-- if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then +-- -- no default mapping and therefore maybe no tounicode yet +-- else +-- return +-- end +-- -- the variant already has a tounicode +-- local index = descriptions[code].index +-- if tounicode[index] then +-- return +-- end +-- -- add to the list +-- local g = guess[variant] +-- if g then +-- g[gname] = unicode +-- else +-- guess[variant] = { [gname] = unicode } +-- end +-- end +-- -- +-- for unicode, description in next, descriptions do +-- local slookups = description.slookups +-- if slookups then +-- local gname = description.name +-- for tag, data in next, slookups do +-- local lookuptype = lookuptypes[tag] +-- if lookuptype == "alternate" then +-- for i=1,#data do +-- check(gname,data[i],unicode) +-- end +-- elseif lookuptype == "substitution" then +-- check(gname,data,unicode) +-- end +-- end +-- end +-- local mlookups = description.mlookups +-- if mlookups then +-- local gname = description.name +-- for tag, list in next, mlookups do +-- local lookuptype = lookuptypes[tag] +-- if lookuptype == "alternate" then +-- for i=1,#list do +-- local data = list[i] +-- for i=1,#data do +-- check(gname,data[i],unicode) +-- end +-- end +-- elseif lookuptype == "substitution" then +-- for i=1,#list do +-- check(gname,list[i],unicode) +-- end +-- end 
+-- end +-- end +-- end +-- -- resolve references +-- local done = true +-- while done do +-- done = false +-- for k, v in next, guess do +-- if type(v) ~= "number" then +-- for kk, vv in next, v do +-- if vv == -1 or vv >= private or (vv >= 0xE000 and vv <= 0xF8FF) or vv == 0xFFFE or vv == 0xFFFF then +-- local uu = guess[kk] +-- if type(uu) == "number" then +-- guess[k] = uu +-- done = true +-- end +-- else +-- guess[k] = vv +-- done = true +-- end +-- end +-- end +-- end +-- end +-- -- generate tounicodes +-- for k, v in next, guess do +-- if type(v) == "number" then +-- guess[k] = tounicode16(v) +-- else +-- local t = nil +-- local l = lower(k) +-- local u = unicodes[l] +-- if not u then +-- -- forget about it +-- elseif u == -1 or u >= private or (u >= 0xE000 and u <= 0xF8FF) or u == 0xFFFE or u == 0xFFFF then +-- local du = descriptions[u] +-- local index = du.index +-- t = tounicode[index] +-- if t then +-- tounicode[index] = v +-- originals[index] = unicode +-- end +-- else +-- -- t = u +-- end +-- if t then +-- guess[k] = t +-- else +-- guess[k] = "FFFD" +-- end +-- end +-- end +-- local orphans = 0 +-- local guessed = 0 +-- for k, v in next, guess do +-- if v == "FFFD" then +-- orphans = orphans + 1 +-- guess[k] = false +-- else +-- guessed = guessed + 1 +-- guess[k] = true +-- end +-- end +-- -- resources.nounicode = guess -- only when we test things +-- if trace_loading and orphans > 0 or guessed > 0 then +-- report_fonts("%s glyphs with no related unicode, %s guessed, %s orphans",guessed+orphans,guessed,orphans) +-- end +-- end +-- if trace_mapping then +-- for unic, glyph in table.sortedhash(descriptions) do +-- local name = glyph.name +-- local index = glyph.index +-- local toun = tounicode[index] +-- if toun then +-- report_fonts("internal slot %U, name %a, unicode %U, tounicode %a",index,name,unic,toun) +-- else +-- report_fonts("internal slot %U, name %a, unicode %U",index,name,unic) +-- end +-- end +-- end +-- if trace_loading and (ns > 0 or nl > 0) then +-- report_fonts("%s tounicode entries added, ligatures %s",nl+ns,ns) +-- end +-- end + function mappings.addtounicode(data,filename) local resources = data.resources local properties = data.properties @@ -172,22 +517,13 @@ function mappings.addtounicode(data,filename) unicodes['hyphen'] = unicodes['hyphen'] or 45 unicodes['zwj'] = unicodes['zwj'] or 0x200D unicodes['zwnj'] = unicodes['zwnj'] or 0x200C - -- the tounicode mapping is sparse and only needed for alternatives local private = fonts.constructors.privateoffset local unknown = format("%04X",utfbyte("?")) local unicodevector = fonts.encodings.agl.unicodes -- loaded runtime in context ----- namevector = fonts.encodings.agl.names -- loaded runtime in context - local tounicode = { } - local originals = { } local missing = { } - resources.tounicode = tounicode - resources.originals = originals local lumunic, uparser, oparser local cidinfo, cidnames, cidcodes, usedmap - if false then -- will become an option - lumunic = loadlumtable(filename) - lumunic = lumunic and lumunic.tounicode - end -- cidinfo = properties.cidinfo usedmap = cidinfo and fonts.cid.getmap(cidinfo) @@ -205,9 +541,8 @@ function mappings.addtounicode(data,filename) if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then local unicode = lumunic and lumunic[name] or unicodevector[name] if unicode then - originals[index] = unicode - tounicode[index] = tounicode16(unicode,name) - ns = ns + 1 + glyph.unicode = unicode + ns = ns + 1 end -- cidmap 
heuristics, beware, there is no guarantee for a match unless -- the chain resolves @@ -216,9 +551,8 @@ function mappings.addtounicode(data,filename) if foundindex then unicode = cidcodes[foundindex] -- name to number if unicode then - originals[index] = unicode - tounicode[index] = tounicode16(unicode,name) - ns = ns + 1 + glyph.unicode = unicode + ns = ns + 1 else local reference = cidnames[foundindex] -- number to name if reference then @@ -226,23 +560,20 @@ function mappings.addtounicode(data,filename) if foundindex then unicode = cidcodes[foundindex] if unicode then - originals[index] = unicode - tounicode[index] = tounicode16(unicode,name) - ns = ns + 1 + glyph.unicode = unicode + ns = ns + 1 end end if not unicode or unicode == "" then local foundcodes, multiple = lpegmatch(uparser,reference) if foundcodes then - originals[index] = foundcodes + glyph.unicode = foundcodes if multiple then - tounicode[index] = tounicode16sequence(foundcodes) - nl = nl + 1 - unicode = true + nl = nl + 1 + unicode = true else - tounicode[index] = tounicode16(foundcodes,name) - ns = ns + 1 - unicode = foundcodes + ns = ns + 1 + unicode = foundcodes end end end @@ -289,11 +620,9 @@ function mappings.addtounicode(data,filename) if n == 0 then -- done then -- nothing elseif n == 1 then - originals[index] = t[1] - tounicode[index] = tounicode16(t[1],name) + glyph.unicode = t[1] else - originals[index] = t - tounicode[index] = tounicode16sequence(t) + glyph.unicode = t end nl = nl + 1 end @@ -301,16 +630,13 @@ function mappings.addtounicode(data,filename) if not unicode or unicode == "" then local foundcodes, multiple = lpegmatch(uparser,name) if foundcodes then + glyph.unicode = foundcodes if multiple then - originals[index] = foundcodes - tounicode[index] = tounicode16sequence(foundcodes,name) - nl = nl + 1 - unicode = true + nl = nl + 1 + unicode = true else - originals[index] = foundcodes - tounicode[index] = tounicode16(foundcodes,name) - ns = ns + 1 - unicode = foundcodes + ns = ns + 1 + unicode = foundcodes end end end @@ -319,14 +645,9 @@ function mappings.addtounicode(data,filename) if not unicode then missing[name] = true end - -- if not unicode then - -- originals[index] = 0xFFFD - -- tounicode[index] = "FFFD" - -- end end end if next(missing) then --- inspect(missing) local guess = { } -- helper local function check(gname,code,unicode) @@ -344,8 +665,7 @@ function mappings.addtounicode(data,filename) return end -- the variant already has a tounicode - local index = descriptions[code].index - if tounicode[index] then + if descriptions[code].unicode then return end -- add to the list @@ -413,52 +733,51 @@ function mappings.addtounicode(data,filename) end end end - -- generate tounicodes + -- wrap up + local orphans = 0 + local guessed = 0 for k, v in next, guess do if type(v) == "number" then - guess[k] = tounicode16(v) + descriptions[unicodes[k]].unicode = descriptions[v].unicode or v -- can also be a table + guessed = guessed + 1 else local t = nil local l = lower(k) local u = unicodes[l] if not u then - -- forget about it + orphans = orphans + 1 elseif u == -1 or u >= private or (u >= 0xE000 and u <= 0xF8FF) or u == 0xFFFE or u == 0xFFFF then - t = tounicode[descriptions[u].index] - else - -- t = u - end - if t then - guess[k] = t + local unicode = descriptions[u].unicode + if unicode then + descriptions[unicodes[k]].unicode = unicode + guessed = guessed + 1 + else + orphans = orphans + 1 + end else - guess[k] = "FFFD" + orphans = orphans + 1 end end end - local orphans = 0 - local guessed = 0 - 
for k, v in next, guess do - tounicode[descriptions[unicodes[k]].index] = v - if v == "FFFD" then - orphans = orphans + 1 - guess[k] = false - else - guessed = guessed + 1 - guess[k] = true - end - end - -- resources.nounicode = guess -- only when we test things if trace_loading and orphans > 0 or guessed > 0 then report_fonts("%s glyphs with no related unicode, %s guessed, %s orphans",guessed+orphans,guessed,orphans) end end if trace_mapping then for unic, glyph in table.sortedhash(descriptions) do - local name = glyph.name - local index = glyph.index - local toun = tounicode[index] - if toun then - report_fonts("internal slot %U, name %a, unicode %U, tounicode %a",index,name,unic,toun) + local name = glyph.name + local index = glyph.index + local unicode = glyph.unicode + if unicode then + if type(unicode) == "table" then + local unicodes = { } + for i=1,#unicode do + unicodes[i] = formatters("%U",unicode[i]) + end + report_fonts("internal slot %U, name %a, unicode %U, tounicode % t",index,name,unic,unicodes) + else + report_fonts("internal slot %U, name %a, unicode %U, tounicode %U",index,name,unic,unicode) + end else report_fonts("internal slot %U, name %a, unicode %U",index,name,unic) end diff --git a/tex/context/base/font-mis.lua b/tex/context/base/font-mis.lua index b934837f4..96d240300 100644 --- a/tex/context/base/font-mis.lua +++ b/tex/context/base/font-mis.lua @@ -22,7 +22,7 @@ local handlers = fonts.handlers handlers.otf = handlers.otf or { } local otf = handlers.otf -otf.version = otf.version or 2.762 +otf.version = otf.version or 2.801 otf.cache = otf.cache or containers.define("fonts", "otf", otf.version, true) function otf.loadcached(filename,format,sub) diff --git a/tex/context/base/font-nod.lua b/tex/context/base/font-nod.lua index 2311cebeb..da3d9def9 100644 --- a/tex/context/base/font-nod.lua +++ b/tex/context/base/font-nod.lua @@ -407,16 +407,18 @@ local function toutf(list,result,nofresult,stopcriterium) if fc then local fcc = fc[c] if fcc then - -- == fromunicode - local u = fcc.tounicode - if u then - for s in gmatch(u,"....") do + local u = fcc.unicode + if not u then + nofresult = nofresult + 1 + result[nofresult] = utfchar(c) + elseif type(u) == "table" then + for i=1,#u do nofresult = nofresult + 1 - result[nofresult] = utfchar(tonumber(s,16)) + result[nofresult] = utfchar(u[i]) end else nofresult = nofresult + 1 - result[nofresult] = utfchar(c) + result[nofresult] = utfchar(u) end else nofresult = nofresult + 1 diff --git a/tex/context/base/font-otb.lua b/tex/context/base/font-otb.lua index a68b57c8a..4e955a197 100644 --- a/tex/context/base/font-otb.lua +++ b/tex/context/base/font-otb.lua @@ -7,7 +7,7 @@ if not modules then modules = { } end modules ['font-otb'] = { } local concat = table.concat local format, gmatch, gsub, find, match, lower, strip = string.format, string.gmatch, string.gsub, string.find, string.match, string.lower, string.strip -local type, next, tonumber, tostring = type, next, tonumber, tostring +local type, next, tonumber, tostring, rawget = type, next, tonumber, tostring, rawget local lpegmatch = lpeg.match local utfchar = utf.char @@ -63,40 +63,40 @@ local function gref(descriptions,n) end end -local function cref(feature,lookupname) +local function cref(feature,lookuptags,lookupname) if lookupname then - return formatters["feature %a, lookup %a"](feature,lookupname) + return formatters["feature %a, lookup %a"](feature,lookuptags[lookupname]) else return formatters["feature %a"](feature) end end -local function 
report_alternate(feature,lookupname,descriptions,unicode,replacement,value,comment) +local function report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,comment) report_prepare("%s: base alternate %s => %s (%S => %S)", - cref(feature,lookupname), + cref(feature,lookuptags,lookupname), gref(descriptions,unicode), replacement and gref(descriptions,replacement), value, comment) end -local function report_substitution(feature,lookupname,descriptions,unicode,substitution) +local function report_substitution(feature,lookuptags,lookupname,descriptions,unicode,substitution) report_prepare("%s: base substitution %s => %S", - cref(feature,lookupname), + cref(feature,lookuptags,lookupname), gref(descriptions,unicode), gref(descriptions,substitution)) end -local function report_ligature(feature,lookupname,descriptions,unicode,ligature) +local function report_ligature(feature,lookuptags,lookupname,descriptions,unicode,ligature) report_prepare("%s: base ligature %s => %S", - cref(feature,lookupname), + cref(feature,lookuptags,lookupname), gref(descriptions,ligature), gref(descriptions,unicode)) end -local function report_kern(feature,lookupname,descriptions,unicode,otherunicode,value) +local function report_kern(feature,lookuptags,lookupname,descriptions,unicode,otherunicode,value) report_prepare("%s: base kern %s + %s => %S", - cref(feature,lookupname), + cref(feature,lookuptags,lookupname), gref(descriptions,unicode), gref(descriptions,otherunicode), value) @@ -181,7 +181,7 @@ local function finalize_ligatures(tfmdata,ligatures) local characters = tfmdata.characters local descriptions = tfmdata.descriptions local resources = tfmdata.resources - local unicodes = resources.unicodes + local unicodes = resources.unicodes -- we use rawget in order to avoid bulding the table local private = resources.private local alldone = false while not alldone do @@ -217,12 +217,12 @@ local function finalize_ligatures(tfmdata,ligatures) local secondname = firstname .. "_" .. 
secondcode if i == size - 1 then target = unicode - if not unicodes[secondname] then + if not rawget(unicodes,secondname) then unicodes[secondname] = unicode -- map final ligature onto intermediates end okay = true else - target = unicodes[secondname] + target = rawget(unicodes,secondname) if not target then break end @@ -258,6 +258,7 @@ local function finalize_ligatures(tfmdata,ligatures) end end resources.private = private + return true end end @@ -265,10 +266,11 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis local characters = tfmdata.characters local descriptions = tfmdata.descriptions local resources = tfmdata.resources + local properties = tfmdata.properties local changed = tfmdata.changed - local unicodes = resources.unicodes local lookuphash = resources.lookuphash local lookuptypes = resources.lookuptypes + local lookuptags = resources.lookuptags local ligatures = { } local alternate = tonumber(value) or true and 1 @@ -279,39 +281,39 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis local trace_ligatures = trace_baseinit and trace_ligatures local actions = { - substitution = function(lookupdata,lookupname,description,unicode) + substitution = function(lookupdata,lookuptags,lookupname,description,unicode) if trace_singles then - report_substitution(feature,lookupname,descriptions,unicode,lookupdata) + report_substitution(feature,lookuptags,lookupname,descriptions,unicode,lookupdata) end changed[unicode] = lookupdata end, - alternate = function(lookupdata,lookupname,description,unicode) + alternate = function(lookupdata,lookuptags,lookupname,description,unicode) local replacement = lookupdata[alternate] if replacement then changed[unicode] = replacement if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal") + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal") end elseif defaultalt == "first" then replacement = lookupdata[1] changed[unicode] = replacement if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt) + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) end elseif defaultalt == "last" then replacement = lookupdata[#data] if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt) + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) end else if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown") + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown") end end end, - ligature = function(lookupdata,lookupname,description,unicode) + ligature = function(lookupdata,lookuptags,lookupname,description,unicode) if trace_ligatures then - report_ligature(feature,lookupname,descriptions,unicode,lookupdata) + report_ligature(feature,lookuptags,lookupname,descriptions,unicode,lookupdata) end ligatures[#ligatures+1] = { unicode, lookupdata } end, @@ -328,7 +330,7 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis local lookuptype = lookuptypes[lookupname] local action = actions[lookuptype] if action then - action(lookupdata,lookupname,description,unicode) + action(lookupdata,lookuptags,lookupname,description,unicode) end end end @@ -343,24 +345,25 @@ local function 
preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis local action = actions[lookuptype] if action then for i=1,#lookuplist do - action(lookuplist[i],lookupname,description,unicode) + action(lookuplist[i],lookuptags,lookupname,description,unicode) end end end end end end - - finalize_ligatures(tfmdata,ligatures) + properties.hasligatures = finalize_ligatures(tfmdata,ligatures) end local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist) -- todo what kind of kerns, currently all local characters = tfmdata.characters local descriptions = tfmdata.descriptions local resources = tfmdata.resources - local unicodes = resources.unicodes + local properties = tfmdata.properties + local lookuptags = resources.lookuptags local sharedkerns = { } local traceindeed = trace_baseinit and trace_kerns + local haskerns = false for unicode, character in next, characters do local description = descriptions[unicode] local rawkerns = description.kerns -- shared @@ -384,13 +387,13 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist newkerns = { [otherunicode] = value } done = true if traceindeed then - report_kern(feature,lookup,descriptions,unicode,otherunicode,value) + report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value) end elseif not newkerns[otherunicode] then -- first wins newkerns[otherunicode] = value done = true if traceindeed then - report_kern(feature,lookup,descriptions,unicode,otherunicode,value) + report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value) end end end @@ -399,12 +402,14 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist if done then sharedkerns[rawkerns] = newkerns character.kerns = newkerns -- no empty assignments + haskerns = true else sharedkerns[rawkerns] = false end end end end + properties.haskerns = haskerns end basemethods.independent = { @@ -434,13 +439,13 @@ local function make_1(present,tree,name) end end -local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookupname) +local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookuptags,lookupname) for k, v in next, tree do if k == "ligature" then local character = characters[preceding] if not character then if trace_baseinit then - report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookupname,v,preceding) + report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookuptags[lookupname],v,preceding) end character = makefake(tfmdata,name,present) end @@ -461,7 +466,7 @@ local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,don else local code = present[name] or unicode local name = name .. "_" .. 
k - make_2(present,tfmdata,characters,v,name,code,k,done,lookupname) + make_2(present,tfmdata,characters,v,name,code,k,done,lookuptags,lookupname) end end end @@ -473,6 +478,7 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis local changed = tfmdata.changed local lookuphash = resources.lookuphash local lookuptypes = resources.lookuptypes + local lookuptags = resources.lookuptags local ligatures = { } local alternate = tonumber(value) or true and 1 @@ -489,7 +495,7 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis for unicode, data in next, lookupdata do if lookuptype == "substitution" then if trace_singles then - report_substitution(feature,lookupname,descriptions,unicode,data) + report_substitution(feature,lookuptags,lookupname,descriptions,unicode,data) end changed[unicode] = data elseif lookuptype == "alternate" then @@ -497,28 +503,28 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis if replacement then changed[unicode] = replacement if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal") + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal") end elseif defaultalt == "first" then replacement = data[1] changed[unicode] = replacement if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt) + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) end elseif defaultalt == "last" then replacement = data[#data] if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt) + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) end else if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown") + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown") end end elseif lookuptype == "ligature" then ligatures[#ligatures+1] = { unicode, data, lookupname } if trace_ligatures then - report_ligature(feature,lookupname,descriptions,unicode,data) + report_ligature(feature,lookuptags,lookupname,descriptions,unicode,data) end end end @@ -541,7 +547,7 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis for i=1,nofligatures do local ligature = ligatures[i] local unicode, tree, lookupname = ligature[1], ligature[2], ligature[3] - make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookupname) + make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookuptags,lookupname) end end @@ -552,11 +558,11 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist local characters = tfmdata.characters local descriptions = tfmdata.descriptions local resources = tfmdata.resources + local properties = tfmdata.properties local lookuphash = resources.lookuphash + local lookuptags = resources.lookuptags local traceindeed = trace_baseinit and trace_kerns - -- check out this sharedkerns trickery - for l=1,#lookuplist do local lookupname = lookuplist[l] local lookupdata = lookuphash[lookupname] @@ -571,7 +577,7 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist for otherunicode, kern in next, data do if not kerns[otherunicode] and kern ~= 0 then kerns[otherunicode] = kern - 
report_kern(feature,lookup,descriptions,unicode,otherunicode,kern) + report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,kern) end end else diff --git a/tex/context/base/font-otf.lua b/tex/context/base/font-otf.lua index c1bb4419c..58a72508a 100644 --- a/tex/context/base/font-otf.lua +++ b/tex/context/base/font-otf.lua @@ -24,7 +24,9 @@ local reversed, concat, remove, sortedkeys = table.reversed, table.concat, table local ioflush = io.flush local fastcopy, tohash, derivetable = table.fastcopy, table.tohash, table.derive local formatters = string.formatters +local P, R, S, C, Ct, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Ct, lpeg.match +local setmetatableindex = table.setmetatableindex local allocate = utilities.storage.allocate local registertracker = trackers.register local registerdirective = directives.register @@ -33,13 +35,16 @@ local stoptiming = statistics.stoptiming local elapsedtime = statistics.elapsedtime local findbinfile = resolvers.findbinfile -local trace_private = false registertracker("otf.private", function(v) trace_private = v end) -local trace_loading = false registertracker("otf.loading", function(v) trace_loading = v end) -local trace_features = false registertracker("otf.features", function(v) trace_features = v end) -local trace_dynamics = false registertracker("otf.dynamics", function(v) trace_dynamics = v end) -local trace_sequences = false registertracker("otf.sequences", function(v) trace_sequences = v end) -local trace_markwidth = false registertracker("otf.markwidth", function(v) trace_markwidth = v end) -local trace_defining = false registertracker("fonts.defining", function(v) trace_defining = v end) +local trace_private = false registertracker("otf.private", function(v) trace_private = v end) +local trace_loading = false registertracker("otf.loading", function(v) trace_loading = v end) +local trace_features = false registertracker("otf.features", function(v) trace_features = v end) +local trace_dynamics = false registertracker("otf.dynamics", function(v) trace_dynamics = v end) +local trace_sequences = false registertracker("otf.sequences", function(v) trace_sequences = v end) +local trace_markwidth = false registertracker("otf.markwidth", function(v) trace_markwidth = v end) +local trace_defining = false registertracker("fonts.defining", function(v) trace_defining = v end) + +local compact_lookups = true registertracker("otf.compactlookups", function(v) compact_lookups = v end) +local purge_names = true registertracker("otf.purgenames", function(v) purge_names = v end) local report_otf = logs.reporter("fonts","otf loading") @@ -48,13 +53,17 @@ local otf = fonts.handlers.otf otf.glists = { "gsub", "gpos" } -otf.version = 2.762 -- beware: also sync font-mis.lua +otf.version = 2.801 -- beware: also sync font-mis.lua otf.cache = containers.define("fonts", "otf", otf.version, true) local fontdata = fonts.hashes.identifiers local chardata = characters and characters.data -- not used -local otffeatures = fonts.constructors.newfeatures("otf") +local definers = fonts.definers +local readers = fonts.readers +local constructors = fonts.constructors + +local otffeatures = constructors.newfeatures("otf") local registerotffeature = otffeatures.register local enhancers = allocate() @@ -62,13 +71,8 @@ otf.enhancers = enhancers local patches = { } enhancers.patches = patches -local definers = fonts.definers -local readers = fonts.readers -local constructors = fonts.constructors - local forceload = false local cleanup = 0 -- mk: 0=885M 1=765M 
2=735M (regular run 730M) -local usemetatables = false -- .4 slower on mk but 30 M less mem so we might change the default -- will be directive local packdata = true local syncspace = true local forcenotdef = false @@ -93,7 +97,6 @@ formats.dfont = "truetype" registerdirective("fonts.otf.loader.cleanup", function(v) cleanup = tonumber(v) or (v and 1) or 0 end) registerdirective("fonts.otf.loader.force", function(v) forceload = v end) -registerdirective("fonts.otf.loader.usemetatables", function(v) usemetatables = v end) registerdirective("fonts.otf.loader.pack", function(v) packdata = v end) registerdirective("fonts.otf.loader.syncspace", function(v) syncspace = v end) registerdirective("fonts.otf.loader.forcenotdef", function(v) forcenotdef = v end) @@ -280,6 +283,9 @@ local ordered_enhancers = { "add duplicates", "cleanup tables", + + "compact lookups", + "purge names", } --[[ldx-- @@ -495,7 +501,7 @@ function otf.load(filename,sub,featurefile) -- second argument (format) is gone }, helpers = { -- might go away tounicodelist = splitter, - tounicodetable = lpeg.Ct(splitter), + tounicodetable = Ct(splitter), }, } starttiming(data) @@ -538,6 +544,39 @@ function otf.load(filename,sub,featurefile) -- second argument (format) is gone report_otf("loading from cache using hash %a",hash) end enhance("unpack",data,filename,nil,false) + -- + local resources = data.resources + local lookuptags = resources.lookuptags + local unicodes = resources.unicodes + if not lookuptags then + lookuptags = { } + resources.lookuptags = lookuptags + end + setmetatableindex(lookuptags,function(t,k) + local v = type(k) == "number" and ("lookup " .. k) or k + t[k] = v + return v + end) + if not unicodes then + unicodes = { } + resources.unicodes = unicodes + setmetatableindex(unicodes,function(t,k) + -- use rawget when no table has to be built + setmetatableindex(unicodes,nil) + for u, d in next, data.descriptions do + local n = d.name + if n then + t[n] = u + -- report_otf("accessing known name %a",k) + else + -- report_otf("accessing unknown name %a",k) + end + end + return rawget(t,k) + end) + end + constructors.addcoreunicodes(unicodes) -- do we really need this? + -- if applyruntimefixes then applyruntimefixes(filename,data) end @@ -579,41 +618,29 @@ actions["add dimensions"] = function(data,filename) local defaultheight = resources.defaultheight or 0 local defaultdepth = resources.defaultdepth or 0 local basename = trace_markwidth and file.basename(filename) - if usemetatables then - for _, d in next, descriptions do - local wd = d.width - if not wd then - d.width = defaultwidth - elseif trace_markwidth and wd ~= 0 and d.class == "mark" then - report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename) - -- d.width = -wd - end - setmetatable(d,mt) + for _, d in next, descriptions do + local bb, wd = d.boundingbox, d.width + if not wd then + -- or bb? 
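-- [editorial sketch, not part of the diff] The "add dimensions" action in this hunk
-- reduces to the following per-glyph fixup; the helper name is made up and
-- `description` is assumed to have the loader's shape, with
-- boundingbox = { llx, lly, urx, ury } and an optional width.
local function adddimensions(description,defaultwidth)
    if not description.width then
        description.width = defaultwidth
    end
    local bb = description.boundingbox
    if bb then
        local ht, dp = bb[4], -bb[2]
        if ht > 0 then
            description.height = ht -- only set when the boundingbox gives a positive height
        end
        if dp > 0 then
            description.depth = dp -- idem for the depth (the negated lower y value)
        end
    end
end
-- [end editorial sketch]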
+ d.width = defaultwidth + elseif trace_markwidth and wd ~= 0 and d.class == "mark" then + report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename) + -- d.width = -wd end - else - for _, d in next, descriptions do - local bb, wd = d.boundingbox, d.width - if not wd then - d.width = defaultwidth - elseif trace_markwidth and wd ~= 0 and d.class == "mark" then - report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename) - -- d.width = -wd + -- if forcenotdef and not d.name then + -- d.name = ".notdef" + -- end + if bb then + local ht, dp = bb[4], -bb[2] + if ht == 0 or ht < 0 then + -- not set + else + d.height = ht end - -- if forcenotdef and not d.name then - -- d.name = ".notdef" - -- end - if bb then - local ht, dp = bb[4], -bb[2] - if ht == 0 or ht < 0 then - -- not set - else - d.height = ht - end - if dp == 0 or dp < 0 then - -- not set - else - d.depth = dp - end + if dp == 0 or dp < 0 then + -- not set + else + d.depth = dp end end end @@ -1301,9 +1328,9 @@ local function s_uncover(splitter,cache,cover) local uncovered = cache[cover] if not uncovered then uncovered = lpegmatch(splitter,cover) --- for i=1,#uncovered do --- uncovered[i] = { [uncovered[i]] = true } --- end + -- for i=1,#uncovered do + -- uncovered[i] = { [uncovered[i]] = true } + -- end cache[cover] = uncovered end return { uncovered } @@ -1317,9 +1344,14 @@ local function t_hashed(t,cache) local ti = t[i] local tih = cache[ti] if not tih then - tih = { } - for i=1,#ti do - tih[ti[i]] = true + local tn = #ti + if tn == 1 then + tih = { [ti[1]] = true } + else + tih = { } + for i=1,tn do + tih[ti[i]] = true + end end cache[ti] = tih end @@ -1335,12 +1367,17 @@ end local function s_hashed(t,cache) if t then - local ht = { } local tf = t[1] - for i=1,#tf do - ht[i] = { [tf[i]] = true } + local nf = #tf + if nf == 1 then + return { [tf[1]] = true } + else + local ht = { } + for i=1,nf do + ht[i] = { [tf[i]] = true } + end + return ht end - return ht else return nil end @@ -1791,7 +1828,7 @@ end -- future versions will remove _ -local valid = (lpeg.R("\x00\x7E") - lpeg.S("(){}[]<>%/ \n\r\f\v"))^0 * lpeg.P(-1) +local valid = (R("\x00\x7E") - S("(){}[]<>%/ \n\r\f\v"))^0 * P(-1) local function valid_ps_name(str) return str and str ~= "" and #str < 64 and lpegmatch(valid,str) and true or false @@ -1853,8 +1890,17 @@ actions["check metadata"] = function(data,filename,raw) end actions["cleanup tables"] = function(data,filename,raw) - data.resources.indices = nil -- not needed - data.helpers = nil + local duplicates = data.resources.duplicates + if duplicates then + for k, v in next, duplicates do + if #v == 1 then + duplicates[k] = v[1] + end + end + end + data.resources.indices = nil -- not needed + data.resources.unicodes = nil -- delayed + data.helpers = nil -- tricky as we have no unicodes any more end -- kern: ttf has a table with kerns @@ -1976,6 +2022,164 @@ actions["reorganize glyph anchors"] = function(data,filename,raw) -- when we rep end end +local bogusname = (P("uni") + P("u")) * R("AF","09")^4 + + (P("index") + P("glyph") + S("Ii") * P("dentity") * P(".")^0) * R("09")^1 +local uselessname = (1-bogusname)^0 * bogusname + +actions["purge names"] = function(data,filename,raw) -- not used yet + if purge_names then + local n = 0 + for u, d in next, data.descriptions do + if lpegmatch(uselessname,d.name) then + n = n + 1 + d.name = nil + end + -- d.comment = nil + end + if n > 0 then + report_otf("%s bogus names removed",n) + end + end +end + +actions["compact lookups"] = 
function(data,filename,raw) + if not compact_lookups then + report_otf("not compacting") + return + end + -- create keyhash + local last = 0 + local tags = table.setmetatableindex({ }, + function(t,k) + last = last + 1 + t[k] = last + return last + end + ) + -- + local descriptions = data.descriptions + local resources = data.resources + -- + for u, d in next, descriptions do + -- + -- -- we can also compact anchors and cursives (basechar basemark baselig mark) + -- + local slookups = d.slookups + if type(slookups) == "table" then + local s = { } + for k, v in next, slookups do + s[tags[k]] = v + end + d.slookups = s + end + -- + local mlookups = d.mlookups + if type(mlookups) == "table" then + local m = { } + for k, v in next, mlookups do + m[tags[k]] = v + end + d.mlookups = m + end + -- + local kerns = d.kerns + if type(kerns) == "table" then + local t = { } + for k, v in next, kerns do + t[tags[k]] = v + end + d.kerns = t + end + end + -- + local lookups = data.lookups + if lookups then + local l = { } + for k, v in next, lookups do + local rules = v.rules + if rules then + for i=1,#rules do + local l = rules[i].lookups + if type(l) == "table" then + for i=1,#l do + l[i] = tags[l[i]] + end + end + end + end + l[tags[k]] = v + end + data.lookups = l + end + -- + local lookups = resources.lookups + if lookups then + local l = { } + for k, v in next, lookups do + local s = v.subtables + if type(s) == "table" then + for i=1,#s do + s[i] = tags[s[i]] + end + end + l[tags[k]] = v + end + resources.lookups = l + end + -- + local sequences = resources.sequences + if sequences then + for i=1,#sequences do + local s = sequences[i] + local n = s.name + if n then + s.name = tags[n] + end + local t = s.subtables + if type(t) == "table" then + for i=1,#t do + t[i] = tags[t[i]] + end + end + end + end + -- + local lookuptypes = resources.lookuptypes + if lookuptypes then + local l = { } + for k, v in next, lookuptypes do + l[tags[k]] = v + end + resources.lookuptypes = l + end + -- + local anchor_to_lookup = resources.anchor_to_lookup + if anchor_to_lookup then + for anchor, lookups in next, anchor_to_lookup do + local l = { } + for lookup, value in next, lookups do + l[tags[lookup]] = value + end + anchor_to_lookup[anchor] = l + end + end + -- + local lookup_to_anchor = resources.lookup_to_anchor + if lookup_to_anchor then + local l = { } + for lookup, value in next, lookup_to_anchor do + l[tags[lookup]] = value + end + resources.lookup_to_anchor = l + end + -- + tags = table.swapped(tags) + -- + report_otf("%s lookup tags compacted",#tags) + -- + resources.lookuptags = tags +end + -- modes: node, base, none function otf.setfeatures(tfmdata,features) @@ -2116,8 +2320,8 @@ local function copytotfm(data,cache_id) parameters.charwidth = charwidth parameters.charxheight = charxheight -- - local space = 0x0020 -- unicodes['space'], unicodes['emdash'] - local emdash = 0x2014 -- unicodes['space'], unicodes['emdash'] + local space = 0x0020 + local emdash = 0x2014 if monospaced then if descriptions[space] then spaceunits, spacer = descriptions[space].width, "space" @@ -2166,7 +2370,7 @@ local function copytotfm(data,cache_id) if charxheight then parameters.x_height = charxheight else - local x = 0x78 -- unicodes['x'] + local x = 0x0078 if x then local x = descriptions[x] if x then @@ -2204,7 +2408,6 @@ local function copytotfm(data,cache_id) end report_otf() end - -- return { characters = characters, descriptions = descriptions, @@ -2234,14 +2437,23 @@ local function otftotfm(specification) if duplicates 
then local nofduplicates, nofduplicated = 0, 0 for parent, list in next, duplicates do - for i=1,#list do - local unicode = list[i] - if not descriptions[unicode] then - descriptions[unicode] = descriptions[parent] -- or copy + if type(list) == "table" then + local n = #list + for i=1,n do + local unicode = list[i] + if not descriptions[unicode] then + descriptions[unicode] = descriptions[parent] -- or copy + nofduplicated = nofduplicated + 1 + end + end + nofduplicates = nofduplicates + n + else + if not descriptions[list] then + descriptions[list] = descriptions[parent] -- or copy nofduplicated = nofduplicated + 1 end + nofduplicates = nofduplicates + 1 end - nofduplicates = nofduplicates + #list end if trace_otf and nofduplicated ~= nofduplicates then report_otf("%i extra duplicates copied out of %i",nofduplicated,nofduplicates) diff --git a/tex/context/base/font-otn.lua b/tex/context/base/font-otn.lua index c35dcf27c..32dc820d3 100644 --- a/tex/context/base/font-otn.lua +++ b/tex/context/base/font-otn.lua @@ -269,6 +269,7 @@ local currentfont = false local lookuptable = false local anchorlookups = false local lookuptypes = false +local lookuptags = false local handlers = { } local rlmode = 0 local featurevalue = false @@ -323,20 +324,20 @@ end local function cref(kind,chainname,chainlookupname,lookupname,index) -- not in the mood to alias f_ if index then - return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookupname,index) + return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookuptags[lookupname],index) elseif lookupname then - return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookupname) + return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookuptags[lookupname]) elseif chainlookupname then - return formatters["feature %a, chain %a, sub %a"](kind,chainname,chainlookupname) + return formatters["feature %a, chain %a, sub %a"](kind,lookuptags[chainname],lookuptags[chainlookupname]) elseif chainname then - return formatters["feature %a, chain %a"](kind,chainname) + return formatters["feature %a, chain %a"](kind,lookuptags[chainname]) else return formatters["feature %a"](kind) end end local function pref(kind,lookupname) - return formatters["feature %a, lookup %a"](kind,lookupname) + return formatters["feature %a, lookup %a"](kind,lookuptags[lookupname]) end -- We can assume that languages that use marks are not hyphenated. 
We can also assume @@ -1924,7 +1925,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq end else local i = 1 - repeat + while true do if skipped then while true do local char = getchar(start) @@ -1965,12 +1966,14 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq end end end - if start then + if i > nofchainlookups then + break + elseif start then start = getnext(start) else -- weird end - until i > nofchainlookups + end end else local replacements = ck[7] @@ -2169,6 +2172,7 @@ local function featuresprocessor(head,font,attr) anchorlookups = resources.lookup_to_anchor lookuptable = resources.lookups lookuptypes = resources.lookuptypes + lookuptags = resources.lookuptags currentfont = font rlmode = 0 @@ -2769,6 +2773,7 @@ local function prepare_contextchains(tfmdata) local rawdata = tfmdata.shared.rawdata local resources = rawdata.resources local lookuphash = resources.lookuphash + local lookuptags = resources.lookuptags local lookups = rawdata.lookups if lookups then for lookupname, lookupdata in next, rawdata.lookups do @@ -2782,7 +2787,7 @@ local function prepare_contextchains(tfmdata) report_prepare("unsupported format %a",format) elseif not validformat[lookuptype] then -- todo: dejavu-serif has one (but i need to see what use it has) - report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookupname) + report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookuptags[lookupname]) else local contexts = lookuphash[lookupname] if not contexts then @@ -2838,7 +2843,7 @@ local function prepare_contextchains(tfmdata) -- no rules end else - report_prepare("missing lookuptype for lookupname %a",lookupname) + report_prepare("missing lookuptype for lookupname %a",lookuptags[lookupname]) end end end diff --git a/tex/context/base/font-otp.lua b/tex/context/base/font-otp.lua index 60eee0738..63e4184c1 100644 --- a/tex/context/base/font-otp.lua +++ b/tex/context/base/font-otp.lua @@ -140,6 +140,11 @@ end -- return b -- end +-- beware: we cannot unpack and repack the same table because then sharing +-- interferes (we could catch this if needed) .. so for now: save, reload +-- and repack in such cases (never needed anyway) .. 
a tricky aspect is that +-- we then need to sort more thanks to random hashing + local function packdata(data) if data then -- stripdata(data) @@ -898,3 +903,4 @@ if otf.enhancers.register then end otf.enhancers.unpack = unpackdata -- used elsewhere +otf.enhancers.pack = packdata -- used elsewhere diff --git a/tex/context/base/font-tfm.lua b/tex/context/base/font-tfm.lua index 827d70586..14c130d10 100644 --- a/tex/context/base/font-tfm.lua +++ b/tex/context/base/font-tfm.lua @@ -114,6 +114,11 @@ local function read_from_tfm(specification) features.encoding = encoding end end + -- let's play safe: + properties.haskerns = true + properties.haslogatures = true + resources.unicodes = { } + resources.lookuptags = { } -- return tfmdata end diff --git a/tex/context/base/grph-epd.lua b/tex/context/base/grph-epd.lua index 4f9d46097..8dcb58b3d 100644 --- a/tex/context/base/grph-epd.lua +++ b/tex/context/base/grph-epd.lua @@ -22,4 +22,7 @@ function figures.mergegoodies(optionlist) if all or options[variables.layer] then codeinjections.mergeviewerlayers() end + if all or options[variables.bookmark] then + codeinjections.mergebookmarks() + end end diff --git a/tex/context/base/grph-epd.mkiv b/tex/context/base/grph-epd.mkiv index 58526fd44..444fa55a6 100644 --- a/tex/context/base/grph-epd.mkiv +++ b/tex/context/base/grph-epd.mkiv @@ -32,7 +32,7 @@ \c!offset=\v!overlay, \c!background={\v!foreground,system:graphics:epdf}] -\def\grph_epdf_add_overlay +\unexpanded\def\grph_epdf_add_overlay {\global\setbox\foundexternalfigure\vbox\bgroup \system_graphics_epdf{\box\foundexternalfigure}% \egroup} diff --git a/tex/context/base/grph-inc.lua b/tex/context/base/grph-inc.lua index f83c759b3..28ef5d462 100644 --- a/tex/context/base/grph-inc.lua +++ b/tex/context/base/grph-inc.lua @@ -38,6 +38,8 @@ The TeX-Lua mix is suboptimal. This has to do with the fact that we cannot run TeX code from within Lua. Some more functionality will move to Lua. ]]-- +-- todo: store loaded pages per pdf file someplace + local format, lower, find, match, gsub, gmatch = string.format, string.lower, string.find, string.match, string.gsub, string.gmatch local contains = table.contains local concat, insert, remove = table.concat, table.insert, table.remove @@ -67,6 +69,8 @@ local texsetbox = tex.setbox local hpack = node.hpack +local new_latelua = nodes.pool.latelua + local context = context local variables = interfaces.variables @@ -1172,6 +1176,13 @@ function checkers.generic(data) return data end +local nofimages = 0 +local pofimages = { } + +function figures.getrealpage(index) + return pofimages[index] or 0 +end + function includers.generic(data) local dr, du, ds = data.request, data.used, data.status -- here we set the 'natural dimensions' @@ -1195,7 +1206,18 @@ function includers.generic(data) if figure then local nr = figures.boxnumber -- it looks like we have a leak in attributes here .. 
todo - local box = hpack(images.node(figure)) -- images.node(figure) not longer valid + + nofimages = nofimages + 1 + ds.pageindex = nofimages + local image = images.node(figure) + local pager = new_latelua(function() + pofimages[nofimages] = pofimages[nofimages] or tex.count.realpageno -- so when reused we register the first one only + end) + image.next = pager + pager.prev = image + + local box = hpack(image) -- images.node(figure) not longer valid + indexed[figure.index] = figure box.width, box.height, box.depth = figure.width, figure.height, 0 -- new, hm, tricky, we need to do that in tex (yet) texsetbox(nr,box) diff --git a/tex/context/base/l-table.lua b/tex/context/base/l-table.lua index e642106cc..3eb8b8514 100644 --- a/tex/context/base/l-table.lua +++ b/tex/context/base/l-table.lua @@ -54,7 +54,7 @@ local function compare(a,b) if ta == tb then return a < b else - return tostring(a) < tostring(b) + return tostring(a) < tostring(b) -- not that efficient end end diff --git a/tex/context/base/lang-def.mkiv b/tex/context/base/lang-def.mkiv index 5c1d6de9c..088f86eb8 100644 --- a/tex/context/base/lang-def.mkiv +++ b/tex/context/base/lang-def.mkiv @@ -510,7 +510,8 @@ \c!rightquote=’, \c!leftquotation=“, \c!rightquotation=”, - \c!date={서기,\v!year,년,\v!month,월,\v!day,일}] + % \c!date={서기,\space,\v!year,\labeltext{\v!year},\space,\v!month,\labeltext{\v!month},\space,\v!day,\labeltext{\v!day}}] + \c!date={\v!year,\labeltext{\v!year},\space,\v!month,\labeltext{\v!month},\space,\v!day,\labeltext{\v!day}}] % Greek diff --git a/tex/context/base/lang-txt.lua b/tex/context/base/lang-txt.lua index 4f9f242e6..0cfb5bcea 100644 --- a/tex/context/base/lang-txt.lua +++ b/tex/context/base/lang-txt.lua @@ -415,6 +415,27 @@ data.labels={ }, }, texts={ + ["year"]={ + labels={ + en="year", + nl="jaar", + kr="년", + }, + }, + ["month"]={ + labels={ + en="month", + nl="maand", + kr="월", + }, + }, + ["day"]={ + labels={ + en="day", + nl="dag", + kr="일", + }, + }, ["and"]={ labels={ af="", @@ -506,7 +527,7 @@ data.labels={ hu="április", it="aprile", ja="4", - kr="4월", + kr="4", la="Aprilis", lt="balandžio", nb="april", @@ -613,7 +634,7 @@ data.labels={ hu="augusztus", it="agosto", ja="8", - kr="8월", + kr="8", la="Augustus", lt="rugpjūčio", nb="august", @@ -756,7 +777,7 @@ data.labels={ hu="december", it="dicembre", ja="12", - kr="12월", + kr="12", la="December", lt="gruodžio", nb="desember", @@ -828,7 +849,7 @@ data.labels={ hu="február", it="febbraio", ja="2", - kr="2월", + kr="2", la="Februarius", lt="vasario", nb="februar", @@ -898,7 +919,7 @@ data.labels={ hu=",. ábra:", it="Fig. 
", ja="図", - kr="그림", + kr="그림 ", la="Imago ", lt=", pav.", nb="Figur ", @@ -1053,6 +1074,7 @@ data.labels={ hr="vidi ispod", hu="lásd lejjebb", it="come mostrato sotto", + kr="이후로", la="", lt="kaip parodyta žemiau", nb="som vist under", @@ -1128,7 +1150,7 @@ data.labels={ hu="január", it="gennaio", ja="1", - kr="1월", + kr="1", la="Ianuarius", lt="sausio", nb="januar", @@ -1201,7 +1223,7 @@ data.labels={ hu="július", it="luglio", ja="7", - kr="7월", + kr="7", la="Iulius", lt="liepos", nb="juli", @@ -1273,7 +1295,7 @@ data.labels={ hu="június", it="giugno", ja="6", - kr="6월", + kr="6", la="Iunius", lt="birželio", nb="juni", @@ -1418,7 +1440,7 @@ data.labels={ hu="március", it="marzo", ja="3", - kr="3월", + kr="3", la="Martius", lt="kovo", nb="mars", @@ -1491,7 +1513,7 @@ data.labels={ hu="május", it="maggio", ja="5", - kr="5월", + kr="5", la="Maius", lt="gegužės", nb="mai", @@ -1600,7 +1622,7 @@ data.labels={ hu="november", it="novembre", ja="11", - kr="11월", + kr="11", la="November", lt="lapkričio", nb="november", @@ -1671,7 +1693,7 @@ data.labels={ hu="október", it="ottobre", ja="10", - kr="10월", + kr="10", la="October", lt="spalio", nb="oktober", @@ -1925,7 +1947,7 @@ data.labels={ hu="szeptember", it="settembre", ja="9", - kr="9월", + kr="9", la="September", lt="rugsėjo", nb="september", @@ -2143,7 +2165,7 @@ data.labels={ hu=",. táblázat:", it="Tabella ", ja="表", - kr="표", + kr="표 ", la="Tabula ", lt=", lentelė.", nb="Tabell ", @@ -2367,7 +2389,7 @@ data.labels={ hu="Ábrák", it="Figure", ja="図", - kr="그림", + kr="그림 ", la="Imagines", lt="Iliustracijos", nb="Figurer", @@ -2404,7 +2426,7 @@ data.labels={ hu="Grafikák", it="Grafici", ja="グラフ", - kr="그래픽", + kr="그래픽 ", la="Typi", lt="Graphics", nb="Bilde", @@ -2441,7 +2463,7 @@ data.labels={ hu="Index", it="Indice", ja="目次", - kr="색인", + kr="찾아보기", la="Indices", lt="Rodyklė", nb="Register", @@ -2549,6 +2571,7 @@ data.labels={ hr="Literatura", hu="Bibliográfia", it="Bibliografia", + kr="참고문헌", la="", lt="Literatūra", nb="", @@ -2585,7 +2608,7 @@ data.labels={ hu="Táblázatok", it="Tabelle", ja="机", - kr="표", + kr="표 ", la="Tabulae", lt="Lentelės", nb="Tabeller", diff --git a/tex/context/base/lpdf-ano.lua b/tex/context/base/lpdf-ano.lua index 14359e2c7..ab78ec0a1 100644 --- a/tex/context/base/lpdf-ano.lua +++ b/tex/context/base/lpdf-ano.lua @@ -1032,52 +1032,103 @@ function specials.action(var) end local function build(levels,start,parent,method) - local startlevel = levels[start][1] + local startlevel = levels[start].level local i, n = start, 0 local child, entry, m, prev, first, last, f, l while i and i <= #levels do - local li = levels[i] - local level, title, reference, open = li[1], li[2], li[3], li[4] - if level < startlevel then - pdfflushobject(child,entry) - return i, n, first, last - elseif level == startlevel then - if trace_bookmarks then - report_bookmark("%3i %w%s %s",reference.realpage,(level-1)*2,(open and "+") or "-",title) - end - local prev = child - child = pdfreserveobject() - if entry then - entry.Next = child and pdfreference(child) - pdfflushobject(prev,entry) - end - entry = pdfdictionary { - Title = pdfunicode(title), - Parent = parent, - Prev = prev and pdfreference(prev), - A = somedestination(reference.internal,reference.internal,reference.realpage), - } - -- entry.Dest = somedestination(reference.internal,reference.internal,reference.realpage) - if not first then first, last = child, child end - prev = child - last = prev - n = n + 1 + local current = levels[i] + if current.usedpage == false then + -- safeguard i = i 
+ 1 - elseif i < #levels and level > startlevel then - i, m, f, l = build(levels,i,pdfreference(child),method) - entry.Count = (open and m) or -m - if m > 0 then - entry.First, entry.Last = pdfreference(f), pdfreference(l) - end else - -- missing intermediate level but ok - i, m, f, l = build(levels,i,pdfreference(child),method) - entry.Count = (open and m) or -m - if m > 0 then - entry.First, entry.Last = pdfreference(f), pdfreference(l) + local level = current.level + local title = current.title + local reference = current.reference + local opened = current.opened + local reftype = type(reference) + local variant = "unknown" + if reftype == "table" then + -- we're okay + variant = "list" + elseif reftype == "string" then + local resolved = references.identify("",reference) + local realpage = resolved and structures.references.setreferencerealpage(resolved) or 0 + if realpage > 0 then + variant = "realpage" + realpage = realpage + end + elseif reftype == "number" then + if reference > 0 then + variant = "realpage" + realpage = reference + end + else + -- error + end + if variant == "unknown" then + -- error, ignore + i = i + 1 + elseif level < startlevel then + if entry then + pdfflushobject(child,entry) + else + -- some error + end + return i, n, first, last + elseif level == startlevel then + if trace_bookmarks then + report_bookmark("%3i %w%s %s",reference.realpage,(level-1)*2,(opened and "+") or "-",title) + end + local prev = child + child = pdfreserveobject() + if entry then + entry.Next = child and pdfreference(child) + pdfflushobject(prev,entry) + end + local action = nil + if variant == "list" then + action = somedestination(reference.internal,reference.internal,reference.realpage) + elseif variant == "realpage" then + action = pagereferences[realpage] + end + entry = pdfdictionary { + Title = pdfunicode(title), + Parent = parent, + Prev = prev and pdfreference(prev), + A = action, + } + -- entry.Dest = somedestination(reference.internal,reference.internal,reference.realpage) + if not first then first, last = child, child end + prev = child + last = prev + n = n + 1 + i = i + 1 + elseif i < #levels and level > startlevel then + i, m, f, l = build(levels,i,pdfreference(child),method) + if entry then + entry.Count = (opened and m) or -m + if m > 0 then + entry.First = pdfreference(f) + entry.Last = pdfreference(l) + end + else + -- some error + end + else + -- missing intermediate level but ok + i, m, f, l = build(levels,i,pdfreference(child),method) + if entry then + entry.Count = (opened and m) or -m + if m > 0 then + entry.First = pdfreference(f) + entry.Last = pdfreference(l) + end + pdfflushobject(child,entry) + else + -- some error + end + return i, n, first, last end - pdfflushobject(child,entry) - return i, n, first, last end end pdfflushobject(child,entry) @@ -1085,8 +1136,7 @@ local function build(levels,start,parent,method) end function codeinjections.addbookmarks(levels,method) - if #levels > 0 then - structures.bookmarks.flatten(levels) -- dirty trick for lack of structure + if levels and #levels > 0 then local parent = pdfreserveobject() local _, m, first, last = build(levels,1,pdfreference(parent),method or "internal") local dict = pdfdictionary { diff --git a/tex/context/base/lpdf-epa.lua b/tex/context/base/lpdf-epa.lua index fd4d9eb7e..0440ba2cd 100644 --- a/tex/context/base/lpdf-epa.lua +++ b/tex/context/base/lpdf-epa.lua @@ -10,30 +10,40 @@ if not modules then modules = { } end modules ['lpdf-epa'] = { -- change. 
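-- [editorial sketch, not part of the diff] The reworked makenamespace below derives a
-- stable prefix from the figure filename (lowercased base name with runs of
-- non-alphanumeric characters collapsed to "-"); page specific references are that
-- prefix plus the page number. A minimal standalone version, mirroring the code in
-- this file:
local f_namespace = string.formatters["lpdf-epa-%s-"]
local function makenamespace(filename)
    filename = string.gsub(string.lower(file.nameonly(filename)),"[^%a%d]+","-")
    return f_namespace(filename)
end
-- for an assumed filename "My Manual v2.pdf" and page 3 this gives the
-- reference "lpdf-epa-my-manual-v2-3"
-- [end editorial sketch]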
local type, tonumber = type, tonumber -local format, gsub = string.format, string.gsub +local format, gsub, lower = string.format, string.gsub, string.lower local formatters = string.formatters ----- lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns -local trace_links = false trackers.register("figures.links", function(v) trace_links = v end) -local report_link = logs.reporter("backend","merging") +local trace_links = false trackers.register("figures.links", function(v) trace_links = v end) +local trace_outlines = false trackers.register("figures.outliness", function(v) trace_outlines = v end) + +local report_link = logs.reporter("backend","link") +local report_outline = logs.reporter("backend","outline") local backends = backends local lpdf = lpdf local context = context +local nameonly = file.nameonly + local variables = interfaces.variables local codeinjections = backends.pdf.codeinjections ----- urlescaper = lpegpatterns.urlescaper ----- utftohigh = lpegpatterns.utftohigh local escapetex = characters.filters.utf.private.escape +local bookmarks = structures.bookmarks + local layerspec = { -- predefining saves time "epdflinks" } +local f_namespace = formatters["lpdf-epa-%s-"] + local function makenamespace(filename) - return format("lpdf-epa-%s-",file.removesuffix(file.basename(filename))) + filename = gsub(lower(nameonly(filename)),"[^%a%d]+","-") + return f_namespace(filename) end local function add_link(x,y,w,h,destination,what) @@ -71,7 +81,7 @@ local function link_goto(x,y,w,h,document,annotation,pagedata,namespace) if type(destination) == "string" then local destinations = document.destinations local wanted = destinations[destination] - destination = wanted and wanted.D + destination = wanted and wanted.D -- is this ok? isn't it destination already a string? if destination then what = "named" end end local pagedata = destination and destination[1] @@ -131,7 +141,7 @@ function codeinjections.mergereferences(specification) end if specification then local fullname = specification.fullname - local document = lpdf.epdf.load(fullname) + local document = lpdf.epdf.load(fullname) -- costs time if document then local pagenumber = specification.page or 1 local xscale = specification.yscale or 1 @@ -139,22 +149,33 @@ function codeinjections.mergereferences(specification) local size = specification.size or "crop" -- todo local pagedata = document.pages[pagenumber] local annotations = pagedata and pagedata.Annots +local namespace = makenamespace(fullname) +local reference = namespace .. pagenumber if annotations and annotations.n > 0 then - local namespace = format("lpdf-epa-%s-",file.removesuffix(file.basename(fullname))) - local reference = namespace .. pagenumber - local mediabox = pagedata.MediaBox - local llx, lly, urx, ury = mediabox[1], mediabox[2], mediabox[3], mediabox[4] - local width, height = xscale * (urx - llx), yscale * (ury - lly) -- \\overlaywidth, \\overlayheight +-- local namespace = makenamespace(fullname) +-- local reference = namespace .. 
pagenumber + local mediabox = pagedata.MediaBox + local llx = mediabox[1] + local lly = mediabox[2] + local urx = mediabox[3] + local ury = mediabox[4] + local width = xscale * (urx - llx) -- \\overlaywidth, \\overlayheight + local height = yscale * (ury - lly) -- \\overlaywidth, \\overlayheight context.definelayer( { "epdflinks" }, { height = height.."bp" , width = width.."bp" }) for i=1,annotations.n do local annotation = annotations[i] if annotation then - local subtype = annotation.Subtype + local subtype = annotation.Subtype local rectangle = annotation.Rect - local a_llx, a_lly, a_urx, a_ury = rectangle[1], rectangle[2], rectangle[3], rectangle[4] - local x, y = xscale * (a_llx - llx), yscale * (a_lly - lly) - local w, h = xscale * (a_urx - a_llx), yscale * (a_ury - a_lly) - if subtype == "Link" then + local a_llx = rectangle[1] + local a_lly = rectangle[2] + local a_urx = rectangle[3] + local a_ury = rectangle[4] + local x = xscale * (a_llx - llx) + local y = yscale * (a_lly - lly) + local w = xscale * (a_urx - a_llx) + local h = yscale * (a_ury - a_lly) + if subtype == "Link" then local a = annotation.A if a then local linktype = a.S @@ -168,7 +189,7 @@ function codeinjections.mergereferences(specification) report_link("unsupported link annotation %a",linktype) end else - report_link("mising link annotation") + report_link("missing link annotation") end elseif trace_links then report_link("unsupported annotation %a",subtype) @@ -178,21 +199,21 @@ function codeinjections.mergereferences(specification) end end context.flushlayer { "epdflinks" } - -- context("\\gdef\\figurereference{%s}",reference) -- global +end context.setgvalue("figurereference",reference) -- global if trace_links then report_link("setting figure reference to %a",reference) end specification.reference = reference return namespace - end +-- end end end return ""-- no namespace, empty, not nil end function codeinjections.mergeviewerlayers(specification) - -- todo: parse included page for layers + -- todo: parse included page for layers .. or only for whole document inclusion if true then return end @@ -204,7 +225,7 @@ function codeinjections.mergeviewerlayers(specification) local fullname = specification.fullname local document = lpdf.epdf.load(fullname) if document then - local namespace = format("lpdf:epa:%s:",file.removesuffix(file.basename(fullname))) + local namespace = makenamespace(fullname) local layers = document.layers if layers then for i=1,layers.n do @@ -232,3 +253,155 @@ function codeinjections.mergeviewerlayers(specification) end end +-- new: for taco + +function codeinjections.getbookmarks(filename) + + -- The first version built a nested tree and flattened that afterwards ... but I decided + -- to keep it simple and flat. + + local list = bookmarks.extras.get(filename) + + if list then + return list + else + list = { } + end + + local document = nil + + if lfs.isfile(filename) then + document = lpdf.epdf.load(filename) + else + report_outline("unknown file %a",filename) + bookmarks.extras.register(filename,list) + return list + end + + local outlines = document.Catalog.Outlines + local pages = document.pages + local nofpages = pages.n -- we need to access once in order to initialize + local destinations = document.destinations + + -- I need to check this destination analyzer with the one in annotations .. best share + -- code (and not it's inconsistent). On the todo list ... 
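-- [editorial sketch, not part of the diff] getbookmarks deliberately returns a flat
-- list instead of a nested tree: every outline entry carries a level and a title,
-- plus a destination and/or realpage when setdestination can resolve them, and
-- mergebookmarks later marks matching entries with usedpage, section and pageindex.
-- A typical entry (values are made up for illustration) looks like this:
local example_entry = {
    level       = 0,             -- nesting depth, counted from the outline root
    title       = "Introduction",
    destination = "chapter.1",   -- named destination, when the action is a GoTo
    realpage    = 3,             -- resolved page number, when known
    -- added later by mergebookmarks:
    -- usedpage  = true,
    -- section   = structures.sections.currentsectionindex(),
    -- pageindex = specification.pageindex,
}
-- [end editorial sketch]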
+ + local function setdestination(current,entry) + local destination = nil + local action = current.A + if action then + local subtype = action.S + if subtype == "GoTo" then + destination = action.D + if type(destination) == "string" then + entry.destination = destination + destination = destinations[destination] + local pagedata = destination and destination[1] + if pagedata then + entry.realpage = pagedata.number + end + else + -- maybe + end + else + -- maybe + end + else + local destination = current.Dest + if destination then + if type(destination) == "string" then + local wanted = destinations[destination] + destination = wanted and wanted.D + if destination then + entry.destination = destination + end + else + local pagedata = destination and destination[1] + if pagedata and pagedata.Type == "Page" then + entry.realpage = pagedata.number + end + end + end + end + end + + local function traverse(current,depth) + while current do + local title = current.Title + if title then + local entry = { + level = depth, + title = title, + } + list[#list+1] = entry + setdestination(current,entry) + if trace_outlines then + report_outline("%w%s",2*depth,title) + end + end + local first = current.First + if first then + local current = first + while current do + local title = current.Title + if title and trace_outlines then + report_outline("%w%s",2*depth,title) + end + local entry = { + level = depth, + title = title, + } + setdestination(current,entry) + list[#list+1] = entry + traverse(current.First,depth+1) + current = current.Next + end + end + current = current.Next + end + end + + if outlines then + if trace_outlines then + report_outline("outline of %a:",document.filename) + report_outline() + end + traverse(outlines,0) + if trace_outlines then + report_outline() + end + elseif trace_outlines then + report_outline("no outline in %a",document.filename) + end + + bookmarks.extras.register(filename,list) + + return list + +end + +function codeinjections.mergebookmarks(specification) + -- codeinjections.getbookmarks(document) + if not specification then + specification = figures and figures.current() + specification = specification and specification.status + end + if specification then + local fullname = specification.fullname + local bookmarks = backends.codeinjections.getbookmarks(fullname) + local realpage = tonumber(specification.page) or 1 + for i=1,#bookmarks do + local b = bookmarks[i] + if not b.usedpage then + if b.realpage == realpage then + if trace_options then + report_outline("using %a at page %a of file %a",b.title,realpage,fullname) + end + b.usedpage = true + b.section = structures.sections.currentsectionindex() + b.pageindex = specification.pageindex + end + end + end + end +end diff --git a/tex/context/base/lpdf-epd.lua b/tex/context/base/lpdf-epd.lua index c37336d07..c13f09614 100644 --- a/tex/context/base/lpdf-epd.lua +++ b/tex/context/base/lpdf-epd.lua @@ -6,22 +6,28 @@ if not modules then modules = { } end modules ['lpdf-epd'] = { license = "see context related readme files" } --- This is an experimental layer around the epdf library. The reason for --- this layer is that I want to be independent of the library (which --- implements a selection of what a file provides) and also because I --- want an interface closer to Lua's table model while the API stays --- close to the original xpdf library. Of course, after prototyping a --- solution, we can optimize it using the low level epdf accessors. +-- This is an experimental layer around the epdf library. 
The reason for this layer is that +-- I want to be independent of the library (which implements a selection of what a file +-- provides) and also because I want an interface closer to Lua's table model while the API +-- stays close to the original xpdf library. Of course, after prototyping a solution, we can +-- optimize it using the low level epdf accessors. --- It will be handy when we have a __length and __next that can trigger --- the resolve till then we will provide .n as #. +-- It will be handy when we have a __length and __next that can trigger the resolve till then +-- we will provide .n as #. --- As there can be references to the parent we cannot expand a tree. I --- played with some expansion variants but it does to pay off. +-- As there can be references to the parent we cannot expand a tree. I played with some +-- expansion variants but it does to pay off. + +-- Maybe we need a close(). In fact, nilling the document root will result in a gc at some +-- point. --- Maybe we need a close(). -- We cannot access all destinations in one run. +-- We have much more checking then needed in the prepare functions because occasionally +-- we run into bugs in poppler or the epdf interface. It took us a while to realize that +-- there was a long standing gc issue the on long runs with including many pages could +-- crash the analyzer. + local setmetatable, rawset, rawget, tostring, tonumber = setmetatable, rawset, rawget, tostring, tonumber local lower, match, char, find, sub = string.lower, string.match, string.char, string.find, string.sub local concat = table.concat @@ -29,9 +35,39 @@ local toutf = string.toutf local report_epdf = logs.reporter("epdf") --- a bit of protection +-- v:getTypeName(), versus types[v:getType()], the last variant is about twice as fast + +local typenames = { [0] = + "boolean", + "integer", + "real", + "string", + "name", + "null", + "array", + "dictionary", + "stream", + "ref", + "cmd", + "error", + "eof", + "none", + "integer64", +} + +local typenumbers = table.swapped(typenames) -local limited = false +local null_code = typenumbers.null +local ref_code = typenumbers.ref + +local function fatal_error(...) + report_epdf(...) + -- we exit as we will crash anyway + report_epdf("aborting job in order to avoid crash") + os.exit() +end + +local limited = false -- abit of protection directives.register("system.inputmode", function(v) if not limited then @@ -57,29 +93,75 @@ lpdf.epdf = { } local checked_access +-- dictionaries + +-- local function prepare(document,d,t,n,k,mt) +-- for i=1,n do +-- local v = d:getVal(i) +-- local r = d:getValNF(i) +-- local key = d:getKey(i) +-- if r and r:getTypeName() == "ref" then +-- r = r:getRef().num +-- local c = document.cache[r] +-- if c then +-- -- +-- else +-- c = checked_access[v:getTypeName()](v,document,r) +-- if c then +-- document.cache[r] = c +-- document.xrefs[c] = r +-- end +-- end +-- t[key] = c +-- elseif v then +-- t[key] = checked_access[v:getTypeName()](v,document) +-- else +-- fatal_error("fatal error: no data for key %s in dictionary",key) +-- end +-- end +-- getmetatable(t).__index = nil -- ?? 
weird +-- setmetatable(t,mt) +-- return t[k] +-- end + local function prepare(document,d,t,n,k,mt) +-- print("start prepare dict, requesting key ",k,"out of",n) for i=1,n do local v = d:getVal(i) - local r = d:getValNF(i) - if r:getTypeName() == "ref" then - r = r:getRef().num - local c = document.cache[r] - if c then - -- + if v then + local r = d:getValNF(i) + local kind = v:getType() +-- print("checking",i,d:getKey(i),v:getTypeName()) + if kind == null_code then + -- report_epdf("warning: null value for key %a in dictionary",key) else - c = checked_access[v:getTypeName()](v,document,r) - if c then - document.cache[r] = c - document.xrefs[c] = r + local key = d:getKey(i) + if kind then + if r and r:getType() == ref_code then + local objnum = r:getRef().num + local cached = document.cache[objnum] + if not cached then + cached = checked_access[kind](v,document,objnum) + if c then + document.cache[objnum] = cached + document.xrefs[cached] = objnum + end + end + t[key] = cached + else + t[key] = checked_access[kind](v,document) + end + else + report_epdf("warning: nil value for key %a in dictionary",key) end end - t[d:getKey(i)] = c else - t[d:getKey(i)] = checked_access[v:getTypeName()](v,document) + fatal_error("error: invalid value at index %a in dictionary of %a",i,document.filename) end end +-- print("done") getmetatable(t).__index = nil -- ?? weird -setmetatable(t,mt) + setmetatable(t,mt) return t[k] end @@ -92,27 +174,62 @@ local function some_dictionary(d,document,r,mt) end end +-- arrays + local done = { } +-- local function prepare(document,a,t,n,k) +-- for i=1,n do +-- local v = a:get(i) +-- local r = a:getNF(i) +-- local kind = v:getTypeName() +-- if kind == "null" then +-- -- TH: weird, but appears possible +-- elseif r:getTypeName() == "ref" then +-- r = r:getRef().num +-- local c = document.cache[r] +-- if c then +-- -- +-- else +-- c = checked_access[kind](v,document,r) +-- document.cache[r] = c +-- document.xrefs[c] = r +-- end +-- t[i] = c +-- else +-- t[i] = checked_access[kind](v,document) +-- end +-- end +-- getmetatable(t).__index = nil +-- return t[k] +-- end + local function prepare(document,a,t,n,k) for i=1,n do local v = a:get(i) - local r = a:getNF(i) - if v:getTypeName() == "null" then - -- TH: weird, but appears possible - elseif r:getTypeName() == "ref" then - r = r:getRef().num - local c = document.cache[r] - if c then - -- + if v then + local kind = v:getType() + if kind == null_code then + -- report_epdf("warning: null value for index %a in array",i) + elseif kind then + local r = a:getNF(i) + if r and r:getType() == ref_code then + local objnum = r:getRef().num + local cached = document.cache[objnum] + if not cached then + cached = checked_access[kind](v,document,objnum) + document.cache[objnum] = cached + document.xrefs[cached] = objnum + end + t[i] = cached + else + t[i] = checked_access[kind](v,document) + end else - c = checked_access[v:getTypeName()](v,document,r) - document.cache[r] = c - document.xrefs[c] = r + report_epdf("warning: nil value for index %a in array",i) end - t[i] = c else - t[i] = checked_access[v:getTypeName()](v,document) + fatal_error("error: invalid value at index %a in array of %a",i,document.filename) end end getmetatable(t).__index = nil @@ -156,38 +273,167 @@ end -- we need epdf.boolean(v) in addition to v:getBool() [dictionary, array, stream, real, integer, string, boolean, name, ref, null] -checked_access = { - dictionary = function(d,document,r) - return some_dictionary(d:getDict(),document,r) - end, - array = 
function(a,document,r) - return some_array(a:getArray(),document,r) - end, - stream = function(v,document,r) - return some_stream(v,document,r) - end, - real = function(v) - return v:getReal() - end, - integer = function(v) - return v:getNum() - end, - string = function(v) - return toutf(v:getString()) - end, - boolean = function(v) - return v:getBool() - end, - name = function(v) - return v:getName() - end, - ref = function(v) - return v:getRef() - end, - null = function() - return nil - end, -} +-- checked_access = { +-- dictionary = function(d,document,r) +-- return some_dictionary(d:getDict(),document,r) +-- end, +-- array = function(a,document,r) +-- return some_array(a:getArray(),document,r) +-- end, +-- stream = function(v,document,r) +-- return some_stream(v,document,r) +-- end, +-- real = function(v) +-- return v:getReal() +-- end, +-- integer = function(v) +-- return v:getNum() +-- end, +-- -- integer64 = function(v) +-- -- return v:getNum() +-- -- end, +-- string = function(v) +-- return toutf(v:getString()) +-- end, +-- boolean = function(v) +-- return v:getBool() +-- end, +-- name = function(v) +-- return v:getName() +-- end, +-- ref = function(v) +-- return v:getRef() +-- end, +-- null = function() +-- return nil +-- end, +-- none = function() +-- -- why not null +-- return nil +-- end, +-- -- error = function() +-- -- -- shouldn't happen +-- -- return nil +-- -- end, +-- -- eof = function() +-- -- -- we don't care +-- -- return nil +-- -- end, +-- -- cmd = function() +-- -- -- shouldn't happen +-- -- return nil +-- -- end +-- } + +-- a bit of a speedup in case we want to play with large pdf's and have millions +-- of access .. it might not be worth the trouble + +-- we have dual access: by typenumber and by typename + +local function invalidaccess(k,document) + local fullname = type(document) == "table" and document.fullname + if fullname then + fatal_error("error, asking for key %a in checker of %a",k,fullname) + else + fatal_error("error, asking for key %a in checker",k) + end +end + +checked_access = table.setmetatableindex(function(t,k) + return function(v,document) + invalidaccess(k,document) + end +end) + +for i=0,#typenames do + checked_access[i] = function() + return function(v,document) + invalidaccess(i,document) + end + end +end + +checked_access[typenumbers.dictionary] = function(d,document,r) + local getDict = d.getDict + local getter = function(d,document,r) + return some_dictionary(getDict(d),document,r) + end + checked_access.dictionary = getter + checked_access[typenumbers.dictionary] = getter + return getter(d,document,r) +end + +checked_access[typenumbers.array] = function(a,document,r) + local getArray = a.getArray + local getter = function(a,document,r) + return some_array(getArray(a),document,r) + end + checked_access.array = getter + checked_access[typenumbers.array] = getter + return getter(a,document,r) +end + +checked_access[typenumbers.stream] = function(v,document,r) + return some_stream(v,document,r) -- or just an equivalent +end + +checked_access[typenumbers.real] = function(v) + local getReal = v.getReal + checked_access.real = getReal + checked_access[typenumbers.real] = getReal + return getReal(v) +end + +checked_access[typenumbers.integer] = function(v) + local getNum = v.getNum + checked_access.integer = getNum + checked_access[typenumbers.integer] = getNum + return getNum(v) +end + +checked_access[typenumbers.string] = function(v) + local getString = v.getString + local function getter(v) + return toutf(getString(v)) + end + 
checked_access.string = getter + checked_access[typenumbers.string] = getter + return toutf(getString(v)) +end + +checked_access[typenumbers.boolean] = function(v) + local getBool = v.getBool + checked_access.boolean = getBool + checked_access[typenumbers.boolean] = getBool + return getBool(v) +end + +checked_access[typenumbers.name] = function(v) + local getName = v.getName + checked_access.name = getName + checked_access[typenumbers.name] = getName + return getName(v) +end + +checked_access[typenumbers.ref] = function(v) + local getRef = v.getRef + checked_access.ref = getRef + checked_access[typenumbers.ref] = getRef + return getRef(v) +end + +checked_access[typenumbers.null] = function() + return nil +end + +checked_access[typenumbers.none] = function() + -- is actually an error + return nil +end + +for i=0,#typenames do + checked_access[typenames[i]] = checked_access[i] +end -- checked_access.real = epdf.real -- checked_access.integer = epdf.integer @@ -345,6 +591,14 @@ function lpdf.epdf.load(filename) xrefs = { }, data = data, } + -- table.setmetatablenewindex(document.cache,function(t,k,v) + -- if rawget(t,k) then + -- report_epdf("updating object %a in cache",k) + -- else + -- report_epdf("storing object %a in cache",k) + -- end + -- rawset(t,k,v) + -- end) local Catalog = some_dictionary(data:getXRef():getCatalog():getDict(),document) local Info = some_dictionary(data:getXRef():getDocInfo():getDict(),document) document.Catalog = Catalog @@ -362,10 +616,19 @@ function lpdf.epdf.load(filename) document = false end loaded[filename] = document + loaded[document] = document statistics.stoptiming(lpdf.epdf) -- print(statistics.elapsedtime(lpdf.epdf)) end - return document + return document or nil +end + +function lpdf.epdf.unload(filename) + local document = loaded[filename] + if document then + loaded[document] = nil + loaded[filename] = nil + end end -- for k, v in next, expand(t) do diff --git a/tex/context/base/m-pstricks.mkii b/tex/context/base/m-pstricks.mkii index bdcf13b24..d41f19871 100644 --- a/tex/context/base/m-pstricks.mkii +++ b/tex/context/base/m-pstricks.mkii @@ -43,7 +43,7 @@ {\input multido \relax \input pstricks \relax \input pst-plot \relax - \loadpstrickscolors{colo-rgb}} + \loadpstrickscolors{colo-rgb.mkii}} {\writestatus{pstricks}{using indirect method; enable write18}} \catcode`\|=\oldbarcode diff --git a/tex/context/base/math-fbk.lua b/tex/context/base/math-fbk.lua index 63a0e9f88..7c97249d6 100644 --- a/tex/context/base/math-fbk.lua +++ b/tex/context/base/math-fbk.lua @@ -20,7 +20,6 @@ local virtualcharacters = { } local identifiers = fonts.hashes.identifiers local lastmathids = fonts.hashes.lastmathids -local tounicode16 = fonts.mappings.tounicode16 -- we need a trick (todo): if we define scriptscript, script and text in -- that order we could use their id's .. i.e. 
we could always add a font @@ -346,11 +345,11 @@ local function accent_to_extensible(target,newchr,original,oldchr,height,depth,s end local correction = swap and { "down", (olddata.height or 0) - height } or { "down", olddata.height + (offset or 0)} local newdata = { - commands = { correction, { "slot", 1, oldchr } }, - width = olddata.width, - height = height, - depth = depth, - tounicode = tounicode16(unicode), + commands = { correction, { "slot", 1, oldchr } }, + width = olddata.width, + height = height, + depth = depth, + unicode = unicode, } local glyphdata = newdata local nextglyph = olddata.next @@ -401,9 +400,6 @@ local function accent_to_extensible(target,newchr,original,oldchr,height,depth,s end return glyphdata, true else --- if not olddata.tounicode then --- olddata.tounicode = tounicode16(unicode), --- end return olddata, false end end @@ -448,9 +444,9 @@ addextra(0xFE3DF, { description="EXTENSIBLE OF 0x03DF", unicodeslot=0xFE3DF, mat addextra(0xFE3DD, { description="EXTENSIBLE OF 0x03DD", unicodeslot=0xFE3DD, mathextensible = "r", mathstretch = "h", mathclass = "botaccent" } ) addextra(0xFE3B5, { description="EXTENSIBLE OF 0x03B5", unicodeslot=0xFE3B5, mathextensible = "r", mathstretch = "h", mathclass = "botaccent" } ) -virtualcharacters[0xFE3DF] = function(data) local c = data.target.characters[0x23DF] if c then c.tounicode = tounicode16(0x23DF) return c end end -virtualcharacters[0xFE3DD] = function(data) local c = data.target.characters[0x23DD] if c then c.tounicode = tounicode16(0x23DD) return c end end -virtualcharacters[0xFE3B5] = function(data) local c = data.target.characters[0x23B5] if c then c.tounicode = tounicode16(0x23B5) return c end end +virtualcharacters[0xFE3DF] = function(data) local c = data.target.characters[0x23DF] if c then c.unicode = 0x23DF return c end end +virtualcharacters[0xFE3DD] = function(data) local c = data.target.characters[0x23DD] if c then c.unicode = 0x23DD return c end end +virtualcharacters[0xFE3B5] = function(data) local c = data.target.characters[0x23B5] if c then c.unicode = 0x23B5 return c end end -- todo: add some more .. numbers might change @@ -524,7 +520,7 @@ local function actuarian(data) -- todo: add alttext -- compromise: lm has large hooks e.g. 
\actuarial{a} width = basewidth + 4 * linewidth, - tounicode = tounicode16(0x20E7), + unicode = 0x20E7, commands = { { "right", 2 * linewidth }, { "down", - baseheight - 3 * linewidth }, diff --git a/tex/context/base/math-tag.lua b/tex/context/base/math-tag.lua index 638c4629c..6b555650e 100644 --- a/tex/context/base/math-tag.lua +++ b/tex/context/base/math-tag.lua @@ -141,8 +141,7 @@ local function getunicode(n) -- instead of getchar local char = getchar(n) local font = font_of_family(getfield(n,"fam")) -- font_of_family local data = fontcharacters[font][char] - local unic = data.tounicode - return unic and fromunicode16(unic) or char + return data.unicode or char end process = function(start) -- we cannot use the processor as we have no finalizers (yet) diff --git a/tex/context/base/mult-de.mkii b/tex/context/base/mult-de.mkii index 1751f4780..53f0dfd6a 100644 --- a/tex/context/base/mult-de.mkii +++ b/tex/context/base/mult-de.mkii @@ -653,7 +653,7 @@ \setinterfaceconstant{coupling}{verknuepfung} \setinterfaceconstant{couplingway}{verkopplungsart} \setinterfaceconstant{criterium}{kriterium} -\setinterfaceconstant{css}{css} +\setinterfaceconstant{cssfile}{cssfile} \setinterfaceconstant{current}{aktuell} \setinterfaceconstant{cutspace}{cutspace} \setinterfaceconstant{dash}{strich} diff --git a/tex/context/base/mult-def.lua b/tex/context/base/mult-def.lua index ffb95f76b..b41053dd5 100644 --- a/tex/context/base/mult-def.lua +++ b/tex/context/base/mult-def.lua @@ -6508,8 +6508,8 @@ return { ["export"] = { ["en"]="export", }, - ["css"] = { - ["en"]="css", + ["cssfile"] = { + ["en"]="cssfile", }, ["xhtml"] = { ["en"]="xhtml", diff --git a/tex/context/base/mult-def.mkiv b/tex/context/base/mult-def.mkiv index 5761e6cfb..30a21d377 100644 --- a/tex/context/base/mult-def.mkiv +++ b/tex/context/base/mult-def.mkiv @@ -96,6 +96,8 @@ \def\v!long {long} \def\v!box {box} +\def\v!bookmark {bookmark} + \def\v!vfenced {vfenced} \def\v!bothtext {bothtext} diff --git a/tex/context/base/mult-en.mkii b/tex/context/base/mult-en.mkii index 72185f3ab..346f94cad 100644 --- a/tex/context/base/mult-en.mkii +++ b/tex/context/base/mult-en.mkii @@ -653,7 +653,7 @@ \setinterfaceconstant{coupling}{coupling} \setinterfaceconstant{couplingway}{couplingway} \setinterfaceconstant{criterium}{criterium} -\setinterfaceconstant{css}{css} +\setinterfaceconstant{cssfile}{cssfile} \setinterfaceconstant{current}{current} \setinterfaceconstant{cutspace}{cutspace} \setinterfaceconstant{dash}{dash} diff --git a/tex/context/base/mult-fr.mkii b/tex/context/base/mult-fr.mkii index 2a6d85c91..681c67bbc 100644 --- a/tex/context/base/mult-fr.mkii +++ b/tex/context/base/mult-fr.mkii @@ -653,7 +653,7 @@ \setinterfaceconstant{coupling}{couplage} \setinterfaceconstant{couplingway}{modecouplage} \setinterfaceconstant{criterium}{critere} -\setinterfaceconstant{css}{css} +\setinterfaceconstant{cssfile}{cssfile} \setinterfaceconstant{current}{courant} \setinterfaceconstant{cutspace}{cutspace} \setinterfaceconstant{dash}{pointille} diff --git a/tex/context/base/mult-it.mkii b/tex/context/base/mult-it.mkii index 9eb0139a1..4f9941a04 100644 --- a/tex/context/base/mult-it.mkii +++ b/tex/context/base/mult-it.mkii @@ -653,7 +653,7 @@ \setinterfaceconstant{coupling}{accoppiamento} \setinterfaceconstant{couplingway}{modoaccoppiamento} \setinterfaceconstant{criterium}{criterio} -\setinterfaceconstant{css}{css} +\setinterfaceconstant{cssfile}{cssfile} \setinterfaceconstant{current}{corrente} \setinterfaceconstant{cutspace}{cutspace} 
\setinterfaceconstant{dash}{dash} diff --git a/tex/context/base/mult-nl.mkii b/tex/context/base/mult-nl.mkii index 881b4f467..771d48c3c 100644 --- a/tex/context/base/mult-nl.mkii +++ b/tex/context/base/mult-nl.mkii @@ -653,7 +653,7 @@ \setinterfaceconstant{coupling}{koppeling} \setinterfaceconstant{couplingway}{koppelwijze} \setinterfaceconstant{criterium}{criterium} -\setinterfaceconstant{css}{css} +\setinterfaceconstant{cssfile}{cssfile} \setinterfaceconstant{current}{huidige} \setinterfaceconstant{cutspace}{snijwit} \setinterfaceconstant{dash}{streep} diff --git a/tex/context/base/mult-pe.mkii b/tex/context/base/mult-pe.mkii index 076342282..7fa8bb772 100644 --- a/tex/context/base/mult-pe.mkii +++ b/tex/context/base/mult-pe.mkii @@ -653,7 +653,7 @@ \setinterfaceconstant{coupling}{تزویج} \setinterfaceconstant{couplingway}{روشتزویج} \setinterfaceconstant{criterium}{criterium} -\setinterfaceconstant{css}{css} +\setinterfaceconstant{cssfile}{cssfile} \setinterfaceconstant{current}{جاری} \setinterfaceconstant{cutspace}{فضایبرش} \setinterfaceconstant{dash}{دش} diff --git a/tex/context/base/mult-ro.mkii b/tex/context/base/mult-ro.mkii index ec372c6ba..0ed0df4f1 100644 --- a/tex/context/base/mult-ro.mkii +++ b/tex/context/base/mult-ro.mkii @@ -653,7 +653,7 @@ \setinterfaceconstant{coupling}{cuplare} \setinterfaceconstant{couplingway}{modcuplare} \setinterfaceconstant{criterium}{criteriu} -\setinterfaceconstant{css}{css} +\setinterfaceconstant{cssfile}{cssfile} \setinterfaceconstant{current}{curent} \setinterfaceconstant{cutspace}{cutspace} \setinterfaceconstant{dash}{dash} diff --git a/tex/context/base/publ-dat.lua b/tex/context/base/publ-dat.lua index f35ae2fa9..20f545d8b 100644 --- a/tex/context/base/publ-dat.lua +++ b/tex/context/base/publ-dat.lua @@ -322,7 +322,7 @@ local value = Cs((somevalue * ((spacing * hash * spacing)/"" * somevalue)^0 local forget = percent^1 * (1-lineending)^0 local spacing = spacing * forget^0 * spacing local assignment = spacing * key * spacing * equal * spacing * value * spacing -local shortcut = P("@") * (P("string") + P("STRING")) * spacing * left * ((assignment * Carg(1))/do_shortcut * comma^0)^0 * spacing * right +local shortcut = P("@") * (P("string") + P("STRING") + P("String")) * spacing * left * ((assignment * Carg(1))/do_shortcut * comma^0)^0 * spacing * right local definition = category * spacing * left * spacing * tag * spacing * comma * Ct((assignment * comma^0)^0) * spacing * right * Carg(1) / do_definition local comment = keyword * spacing * left * (1-right)^0 * spacing * right diff --git a/tex/context/base/publ-imp-apa.mkvi b/tex/context/base/publ-imp-apa.mkvi index 38ea0c74f..8732e782e 100644 --- a/tex/context/base/publ-imp-apa.mkvi +++ b/tex/context/base/publ-imp-apa.mkvi @@ -21,6 +21,20 @@ \startbtxrenderingdefinitions[apa] +%D Reference: +%D \startTEX +%D @Book{APA2010, +%D title ={Publication Manual of the American Psychological Association}, +%D year ={2010}, +%D edition ={Sixth}, +%D address ={Washington, DC}, +%D publisher={American Psychological Association}, +%D pages ={291}, +%D url ={http://www.apa.org/books/}, +%D } +%D \stopTEX + + %D In order to get journals expanded (or normalized or abbreviated) you need to load %D a list: %D @@ -35,22 +49,30 @@ [en] [apa:mastersthesis={Master's thesis}, apa:phdthesis={PhD thesis}, - apa:technicalreport={Technical report}, + apa:technicalreport={Tech. 
Rep.}, % Technical report + apa:supplement={Suppl.}, % Supplement apa:patent=patent, - apa:editor=editor, - apa:editors=editors, - apa:edition=edition, + apa:Translator={Trans.}, % Translator(s) + apa:editor={Ed.}, % editor + apa:editors={Eds.}, % editors + apa:edition={ed.}, % edition apa:volume=volume, - apa:Volume=Volume, + apa:Volume={Vol.}, % Volume + apa:Volumes={Vols.}, % Volumes apa:number=number, - apa:Number=Number, + apa:Number={No.}, % Number + apa:nd={n.d.}, % no date apa:in=in, apa:of=of, apa:In=In, + apa:Part={Pt.}, % Part apa:p=p, apa:pp=pp, apa:pages=pages, apa:and=and, + apa:Author=Author, % TODO, should be typeset in italic... + apa:Advanced={Advanced online publication}, + apa:Retrieved={Retrieved from}, apa:others={et al.}] \setupbtxlabeltext @@ -59,8 +81,8 @@ apa:phdthesis={Thèse de doctorat}, apa:technicalreport={Rapport technique}, apa:patent=brevet, - apa:editor=éditeur, - apa:editors=éditeurs, + apa:editor={Éd.}, % éditeur + apa:editors={Éds.}, % éditeurs apa:edition=édition, apa:volume=volume, apa:Volume=Volume, @@ -73,6 +95,9 @@ apa:pp=pp, apa:pages=pages, apa:and=et, + apa:Author=Auteur, + apa:Advanced={Publication en ligne anticipée}, + apa:Retrieved={Téléchargé de}, apa:others={et al.}] \setupbtxlabeltext @@ -95,6 +120,9 @@ apa:pp=S, apa:pages=Seiten, apa:and=und, + apa:Author=Autor, + apa:Advanced={Erweiterte Online-Publikation}, % Check this German! + apa:Retrieved={aus abgerufen}, % heruntergeladen? Check this German! apa:others={et al.}] %D The variables control the shared code for which we use a tex definition with @@ -123,39 +151,35 @@ otherwise=\btxperiod] \starttexdefinition btx:apa:wherefrom #field +% TODO: for publisher, if =author use "Author" \btxdoifelse {address} { - \getvariable{btx:apa:\currentbtxcategory}{left} - \btxdoifelse {country} { - \btxflush{address} + \btxspace + \btxflush{address} + \btxdoif {country} { \btxcomma \btxflush{country} - \btxdoif {#field} { - \btxcolon - \btxflush{field} - } - } { - \btxflush{address} - \btxdoif {#field} { - \btxcomma - \btxflush{#field} - } } - \getvariable{btx:apa:\currentbtxcategory}{right} + \btxdoif {#field} { + \btxcolon + \btxflush{#field} + } + \btxperiod } { \btxdoifelse {country} { - \getvariable{btx:apa:\currentbtxcategory}{left} + \btxspace \btxflush{country} \btxdoif {#field} { \btxcolon \btxflush{#field} } - \getvariable{btx:apa:\currentbtxcategory}{right} + \btxperiod } { \btxdoifelse {#field} { \getvariable{btx:apa:#field}{left} \btxflush{#field} \getvariable{btx:apa:#field}{right} } { + % check that this is needed! \getvariable{btx:apa:#field}{otherwise} } } @@ -172,25 +196,27 @@ \starttexdefinition btx:apa:title-and-series \btxdoif {title} { - %btxflush{converters.Word -> title} + \btxspace \btxflush{Word -> title} \btxdoif {series} { - \btxlparent + \btxlparenthesis \btxflush{series} - \btxrparent + \btxrparenthesis } \btxperiod } \stoptexdefinition +% can these two be elegantly collapsed somehow using #it, for example? 
+ \starttexdefinition btx:apa:title-it-and-series \btxdoif {title} { - %texdefinition{btx:apa:italic}{converters.Word -> title} + \btxspace \texdefinition{btx:apa:italic}{Word -> title} \btxdoif {series} { - \btxlparent + \btxlparenthesis \btxflush{series} - \btxrparent + \btxrparenthesis } \btxperiod } @@ -198,19 +224,22 @@ \disablemode[btx:apa:edited-book] % hm, ugly -\starttexdefinition btx:apa:author-and-year - \btxdoif {author} { - \btxflushauthor{author} - } +\starttexdefinition btx:apa:suffixedyear \btxdoif {year} { - \btxlparent - % \btxflush{suffixedyear} + \btxlparenthesis \btxflush{year} \btxdoif {suffix} { \btxflush{suffix} } - \btxrparent + \btxrparenthesis + } +\stoptexdefinition + +\starttexdefinition btx:apa:author-and-year + \btxdoif {author} { + \btxflushauthor{author} } + \texdefinition{btx:apa:suffixedyear} \btxperiod \stoptexdefinition @@ -224,22 +253,15 @@ \btxrbracket } } - \btxdoif {year} { - \btxlparent - % \btxflush{suffixedyear} - \btxflush{year} - \btxdoif {suffix} { - \btxflush{suffix} - } - \btxrparent - } + \texdefinition{btx:apa:suffixedyear} \btxperiod \stoptexdefinition \starttexdefinition btx:apa:author-editors-crossref-year - \btxdoif {author} { + % TODO: if there is no author or editor, then use publisher... + \btxdoifelse {author} { \btxflushauthor{author} - } { + } { \btxdoifelse {editor} { \setmode{btx:apa:edited-book} \btxflushauthor{editor} @@ -259,15 +281,7 @@ } } } - \btxdoif {year} { - \btxlparent - % \btxflush{suffixedyear} - \btxflush{year} - \btxdoif {suffix} { - \btxflush{suffix} - } - \btxrparent - } + \texdefinition{btx:apa:suffixedyear} \btxperiod \stoptexdefinition @@ -288,21 +302,58 @@ \btxrbracket } } - \btxspace - \btxdoif {year} { - \btxlparent - % \btxflush{suffixedyear} - \btxflush{year} - \btxdoif {suffix} { - \btxflush{suffix} + \texdefinition{btx:apa:suffixedyear} + \btxperiod +\stoptexdefinition + +% No longer used (not conforming to APA style) +\starttexdefinition btx:apa:title-it + \btxdoif {title} { + \btxspace + \texdefinition{btx:apa:italic}{Word -> title} + \btxflush{Word -> title} + \btxperiod + } +\stoptexdefinition + +\starttexdefinition btx:apa:journal-volume-issue + \btxdoifelse {journal} { + % expandedjournal abbreviatedjournal + \texdefinition{btx:apa:italic}{expandedjournal -> journal} + } { + \btxdoif {crossref} { + \btxlabeltext{apa:In} + \btxspace + \btxflush{crossref} + } + } + \btxdoifelse {volume} { + \btxspace + \texdefinition{btx:apa:italic}{volume} + \btxdoif {issue} { + \btxlparenthesis + \btxflush{issue} + \btxrparenthesis + } + \btxcomma + } { + \btxdoifelse {doi} { + \btxspace + \btxlabeltext{apa:Advanced} + \btxperiod + } { + \btxdoif {url} { + \btxspace + \btxlabeltext{apa:Advanced} + \btxperiod + } } - \btxrparent } - \btxperiod \stoptexdefinition \starttexdefinition btx:apa:note \btxdoif {note} { + % Note: no punctuation \btxspace \btxflush{note} \btxperiod @@ -311,18 +362,48 @@ \starttexdefinition btx:apa:comment \btxdoif {comment} { + % Note: no punctuation \btxspace \btxflush{comment} \btxperiod } \stoptexdefinition -\starttexdefinition btx:apa:pages:p +% if interaction, we should make these active hyperlinks! 
+ +\starttexdefinition btx:apa:doi-or-url + \btxdoifelse {doi} { + \btxspace + \hyphenatedurl{doi:\btxflush{doi}} + } { + \btxdoif {url} { + \btxspace + \btxlabeltext{apa:Retrieved} + \btxspace + \hyphenatedurl{\btxflush{url}} + } + } +\stoptexdefinition + +\starttexdefinition btx:apa:pages \btxdoif {pages} { \btxspace \btxflush{pages} + \btxperiod + } +\stoptexdefinition + +\starttexdefinition btx:apa:pages:p + \btxdoif {pages} { \btxspace - \btxlabeltext{apa:p} + \btxoneorrange {pages} { + \btxlabeltext{apa:p} + } { + \btxlabeltext{apa:pp} + } + \btxperiod + \btxnbsp + \btxflush{pages} \btxperiod } \stoptexdefinition @@ -331,22 +412,25 @@ \btxdoif {pages} { \btxspace \btxflush{pages} - \btxspace + \btxnbsp \btxlabeltext{apa:pp} \btxperiod } \stoptexdefinition +% this does not seem to comply with APA style - need to verify! + \starttexdefinition btx:apa:pages:pages \btxdoif {pages} { \btxcomma \btxlabeltext{apa:pages} \btxnbsp \btxflush{pages} + \btxperiod } \stoptexdefinition -\starttexdefinition btx:apa:edition:sentense +\starttexdefinition btx:apa:edition:sentence \btxdoif {edition} { \btxspace \btxflush{edition} @@ -371,37 +455,10 @@ \startsetups btx:apa:article \texdefinition{btx:apa:author-or-key-and-year} - \btxdoif {title} { - %btxflush{converters.Word -> title} - \btxflush{Word -> title} - \btxperiod - } - \btxdoifelse {journal} { - % expandedjournal abbreviatedjournal - \texdefinition{btx:apa:italic}{expandedjournal -> journal} - } { - \btxdoif {crossref} { - \btxlabeltext{apa:In} - \btxspace - \btxflush{crossref} - } - } - \btxdoifelse {volume} { - \btxspace - \texdefinition{btx:apa:italic}{volume} - \btxdoif {issue} { - \btxlparent - \btxflush{issue} - \btxrparent - } - \btxdoif {pages} { - \btxcomma - \btxflush{pages} - } - \btxperiod - } { - \texdefinition{btx:apa:pages:pp} - } + \texdefinition{btx:apa:title} + \texdefinition{btx:apa:journal-volume-issue} + \texdefinition{btx:apa:pages} + \texdefinition{btx:apa:doi-or-url} \texdefinition{btx:apa:note} \texdefinition{btx:apa:comment} \stopsetups @@ -481,7 +538,8 @@ } \texdefinition{btx:apa:edition:sentence} \texdefinition{btx:apa:wherefrom}{publisher} - \texdefinition{btx:apa:pages:p}% twice? + \texdefinition{btx:apa:pages:pp}% twice? 
+ \texdefinition{btx:apa:doi-or-url} \texdefinition{btx:apa:note} \stopsetups @@ -775,6 +833,7 @@ \btxcomma \texdefinition{btx:apa:wherefrom}{institution} \texdefinition{btx:apa:pages:p} + \texdefinition{btx:apa:doi-or-url} \texdefinition{btx:apa:note} \stopsetups @@ -812,7 +871,7 @@ \texdefinition{btx:apa:title-and-series} \texdefinition{btx:apa:pages:p} \btxdoif {type} { - \btxlparent + \btxlparenthesis \btxflush{type} \btxrparent } diff --git a/tex/context/base/publ-ini.lua b/tex/context/base/publ-ini.lua index 63da84576..018126573 100644 --- a/tex/context/base/publ-ini.lua +++ b/tex/context/base/publ-ini.lua @@ -134,7 +134,11 @@ statistics.register("publications load time", function() local nofbytes = publicationsstats.nofbytes if nofbytes > 0 then return string.format("%s seconds, %s bytes, %s definitions, %s shortcuts", - statistics.elapsedtime(publications),nofbytes,publicationsstats.nofdefinitions,publicationsstats.nofshortcuts) + statistics.elapsedtime(publications), + nofbytes, + publicationsstats.nofdefinitions or 0, + publicationsstats.nofshortcuts or 0 + ) else return nil end @@ -562,7 +566,7 @@ function commands.oneorrange(dataset,tag,name) commands.doifelse(not d) -- so singular is default end -function commands.firstinrange(dataset,tag,name) +function commands.firstofrange(dataset,tag,name) local d = datasets[dataset].luadata[tag] -- details ? if d then d = d[name] diff --git a/tex/context/base/publ-ini.mkiv b/tex/context/base/publ-ini.mkiv index 46fe46543..161934384 100644 --- a/tex/context/base/publ-ini.mkiv +++ b/tex/context/base/publ-ini.mkiv @@ -229,6 +229,7 @@ \definebtxdataset [\v!standard] +% [\c!language=] % nothing set so use current % \usebtxdataset % [standard] @@ -346,8 +347,10 @@ \unexpanded\def\btxcomma {\removeunwantedspaces,\space} \unexpanded\def\btxcolon {\removeunwantedspaces:\space} \unexpanded\def\btxsemicolon {\removeunwantedspaces;\space} -\unexpanded\def\btxlparent {\removeunwantedspaces\space(} -\unexpanded\def\btxrparent {\removeunwantedspaces)\space} +\unexpanded\def\btxlparent {\removeunwantedspaces\space(} % obsolete +\unexpanded\def\btxrparent {\removeunwantedspaces)\space} % obsolete +\unexpanded\def\btxlparenthesis{\removeunwantedspaces\space(} +\unexpanded\def\btxrparenthesis{\removeunwantedspaces)\space} \unexpanded\def\btxlbracket {\removeunwantedspaces\space[} \unexpanded\def\btxrbracket {\removeunwantedspaces]\space} @@ -589,6 +592,7 @@ % \determinelistcharacteristics[\currentbtxrendering]% \btx_set_rendering_alternative \edef\currentbtxdataset{\btxrenderingparameter\c!dataset}% + \uselanguageparameter\btxdatasetparameter % new \let\currentlist\s!btx \let\currentbtxlist\currentbtxrendering \the\everysetupbtxlistplacement @@ -984,6 +988,7 @@ \def\publ_cite_handle_variant_indeed[#1]% {\usebtxcitevariantstyleandcolor\c!style\c!color + \uselanguageparameter\btxdatasetparameter % new \letbtxcitevariantparameter\c!alternative\currentbtxcitevariant \btxcitevariantparameter\v!left \ctxcommand{btxhandlecite{% @@ -1060,8 +1065,8 @@ %D Whatever helpers: \unexpanded\def\btxsingularplural#1{\ctxcommand{btxsingularorplural("\currentbtxdataset","\currentbtxtag","#1")}} -\unexpanded\def\btxoneorrange #1{\ctxcommand{btxoneorrange("\currentbtxdataset","\currentbtxtag","#1")}} -\unexpanded\def\btxfirstofrange #1{\ctxcommand{btxfirstofrange("\currentbtxdataset","\currentbtxtag","#1")}} +\unexpanded\def\btxoneorrange #1{\ctxcommand{oneorrange("\currentbtxdataset","\currentbtxtag","#1")}} +\unexpanded\def\btxfirstofrange 
#1{\ctxcommand{firstofrange("\currentbtxdataset","\currentbtxtag","#1")}} \let\btxsingularorplural\btxsingularplural diff --git a/tex/context/base/scrp-ini.lua b/tex/context/base/scrp-ini.lua index a6bfe4cf9..fa2bc771f 100644 --- a/tex/context/base/scrp-ini.lua +++ b/tex/context/base/scrp-ini.lua @@ -475,18 +475,19 @@ function scripts.injectors.handler(head) normal_process = handler.injector end if normal_process then + -- wrong: originals are indices ! local f = getfont(start) if f ~= lastfont then originals = fontdata[f].resources if resources then originals = resources.originals else - -- can't happen + originals = nil -- can't happen end lastfont = f end local c = getchar(start) - if originals then + if originals and type(originals) == "number" then c = originals[c] or c end local h = hash[c] diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf Binary files differindex 540ac5255..5da0e7556 100644 --- a/tex/context/base/status-files.pdf +++ b/tex/context/base/status-files.pdf diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf Binary files differindex 0b92a158c..09c5ba3ad 100644 --- a/tex/context/base/status-lua.pdf +++ b/tex/context/base/status-lua.pdf diff --git a/tex/context/base/strc-bkm.lua b/tex/context/base/strc-bkm.lua index c38ab3c2e..848fe8b93 100644 --- a/tex/context/base/strc-bkm.lua +++ b/tex/context/base/strc-bkm.lua @@ -13,7 +13,9 @@ if not modules then modules = { } end modules ['strc-bkm'] = { -- we should hook the placement into everystoptext ... needs checking -local format, concat, gsub = string.format, table.concat, string.gsub +-- todo: make an lpeg for stripped + +local format, concat, gsub, lower = string.format, table.concat, string.gsub, string.lower local utfvalues = utf.values local settings_to_hash = utilities.parsers.settings_to_hash @@ -101,54 +103,6 @@ function bookmarks.setup(spec) end end --- function bookmarks.place() --- if next(names) then --- local list = lists.filtercollected(names,"all",nil,lists.collected,forced) --- if #list > 0 then --- local levels, noflevels, lastlevel = { }, 0, 1 --- for i=1,#list do --- local li = list[i] --- local metadata = li.metadata --- local name = metadata.name --- if not metadata.nolist or forced[name] then -- and levelmap[name] then --- local titledata = li.titledata --- if titledata then --- local structural = levelmap[name] --- lastlevel = structural or lastlevel --- local title = titledata.bookmark --- if not title or title == "" then --- -- We could typeset the title and then convert it. --- if not structural then --- -- placeholder, todo: bookmarklabel --- title = name .. ": " .. (titledata.title or "?") --- else --- title = titledata.title or "?" --- end --- end --- if numbered[name] then --- local sectiondata = sections.collected[li.references.section] --- local numberdata = li.numberdata --- if sectiondata and numberdata and not numberdata.hidenumber then --- -- we could typeset the number and convert it --- title = concat(sections.typesetnumber(sectiondata,"direct",numberspec,sectiondata)) .. " " .. 
title --- end --- end --- noflevels = noflevels + 1 --- levels[noflevels] = { --- lastlevel, --- stripped(title), -- can be replaced by converter --- li.references, -- has internal and realpage --- allopen or opened[name] --- } --- end --- end --- end --- bookmarks.finalize(levels) --- end --- function bookmarks.place() end -- prevent second run --- end --- end - function bookmarks.place() if next(names) then local levels = { } @@ -172,11 +126,14 @@ function bookmarks.place() -- add block entry local blockdata = sections.sectionblockdata[block] noflevels = noflevels + 1 + local references = li.references levels[noflevels] = { - 1, -- toplevel - stripped(blockdata.bookmark ~= "" and blockdata.bookmark or block), - li.references, - allopen or opened[name] -- same as first entry + level = 1, -- toplevel + title = stripped(blockdata.bookmark ~= "" and blockdata.bookmark or block), + reference = references, + opened = allopen or opened[name], -- same as first entry + realpage = references and references.realpage or 0, -- handy for later + usedpage = true, } end blockdone = true @@ -206,11 +163,14 @@ function bookmarks.place() end end noflevels = noflevels + 1 + local references = li.references levels[noflevels] = { - lastlevel, - stripped(title), -- can be replaced by converter - li.references, -- has internal and realpage - allopen or opened[name] + level = lastlevel, + title = stripped(title), -- can be replaced by converter + reference = references, -- has internal and realpage + opened = allopen or opened[name], + realpage = references and references.realpage or 0, -- handy for later + usedpage = true, } end end @@ -222,43 +182,238 @@ function bookmarks.place() end function bookmarks.flatten(levels) + if not levels then + -- a plugin messed up + return { } + end -- This function promotes leading structurelements with a higher level -- to the next lower level. Such situations are the result of lack of -- structure: a subject preceding a chapter in a sectionblock. So, the -- following code runs over section blocks as well. 
(bookmarks-007.tex) local noflevels = #levels if noflevels > 1 then - local skip, start, one = false, 1, levels[1] - local first, block = one[1], one[3].block + local skip = false + local start = 1 + local one = levels[1] + local first = one.level + local block = one.reference.block for i=2,noflevels do - local li = levels[i] - local new, newblock = li[1], li[3].block + local current = levels[i] + local new = current.level + local reference = current.reference + local newblock = type(reference) == "table" and current.reference.block or block if newblock ~= block then - first, block, start, skip = new, newblock, i, false + first = new + block = newblock + start = i + skip = false elseif skip then -- go on elseif new > first then skip = true elseif new < first then for j=start,i-1 do - local lj = levels[j] - local old = lj[1] - lj[1] = new + local previous = levels[j] + local old = previous.level + previous.level = new if trace_bookmarks then - report_bookmarks("promoting entry %a from level %a to %a: %s",j,old,new,lj[2]) + report_bookmarks("promoting entry %a from level %a to %a: %s",j,old,new,previous.title) end end skip = true end end end + return levels +end + +local extras = { } +local lists = { } +local names = { } + +bookmarks.extras = extras + +local function cleanname(name) + return lower(file.basename(name)) +end + +function extras.register(name,levels) + if name and levels then + name = cleanname(name) + local found = names[name] + if found then + lists[found].levels = levels + else + lists[#lists+1] = { + name = name, + levels = levels, + } + names[name] = #lists + end + end +end + +function extras.get(name) + if name then + local found = names[cleanname(name)] + if found then + return lists[found].levels + end + else + return lists + end +end + +function extras.reset(name) + local l, n = { }, { } + if name then + name = cleanname(name) + for i=1,#lists do + local li = lists[i] + local ln = li.name + if name == ln then + -- skip + else + local m = #l + 1 + l[m] = li + n[ln] = m + end + end + end + lists, names = l, n +end + +local function checklists() + for i=1,#lists do + local levels = lists[i].levels + for j=1,#levels do + local entry = levels[j] + local pageindex = entry.pageindex + if pageindex then + entry.reference = figures.getrealpage(pageindex) + entry.pageindex = nil + end + end + end +end + +function extras.tosections(levels) + local sections = { } + local noflists = #lists + for i=1,noflists do + local levels = lists[i].levels + local data = { } + sections[i] = data + for j=1,#levels do + local entry = levels[j] + if entry.usedpage then + local section = entry.section + local d = data[section] + if d then + d[#d+1] = entry + else + data[section] = { entry } + end + end + end + end + return sections +end + +function extras.mergesections(levels,sections) + if not sections or #sections == 0 then + return levels + elseif not levels then + return { } + else + local merge = { } + local noflists = #lists + if #levels == 0 then + local level = 0 + local section = 0 + for i=1,noflists do + local entries = sections[i][0] + if entries then + for i=1,#entries do + local entry = entries[i] + merge[#merge+1] = entry + entry.level = entry.level + level + end + end + end + else + for j=1,#levels do + local entry = levels[j] + merge[#merge+1] = entry + local section = entry.reference.section + local level = entry.level + entry.section = section -- for tracing + for i=1,noflists do + local entries = sections[i][section] + if entries then + for i=1,#entries do + local entry = 
entries[i] + merge[#merge+1] = entry + entry.level = entry.level + level + end + end + end + end + end + return merge + end +end + +function bookmarks.merge(levels,mode) + return extras.mergesections(levels,extras.tosections()) end +local sequencers = utilities.sequencers +local appendgroup = sequencers.appendgroup +local appendaction = sequencers.appendaction + +local bookmarkactions = sequencers.new { + arguments = "levels,method", + returnvalues = "levels", + results = "levels", +} + +appendgroup(bookmarkactions,"before") -- user +appendgroup(bookmarkactions,"system") -- private +appendgroup(bookmarkactions,"after" ) -- user + +appendaction(bookmarkactions,"system",bookmarks.flatten) +appendaction(bookmarkactions,"system",bookmarks.merge) + function bookmarks.finalize(levels) - -- This function can be overloaded by an optional converter - -- that uses nodes.toutf on a typeset stream. This is something - -- that we will support when the main loop has become a coroutine. - codeinjections.addbookmarks(levels,bookmarks.method) + local method = bookmarks.method or "internal" + checklists() -- so that plugins have the adapted page number + levels = bookmarkactions.runner(levels,method) + if levels and #levels > 0 then + -- normally this is not needed + local purged = { } + for i=1,#levels do + local l = levels[i] + if l.usedpage ~= false then + purged[#purged+1] = l + end + end + -- + codeinjections.addbookmarks(purged,method) + else + -- maybe a plugin messed up + end +end + +function bookmarks.installhandler(what,where,func) + if not func then + where, func = "after", where + end + if where == "before" or where == "after" then + sequencers.appendaction(bookmarkactions,where,func) + else + report_tex("installing bookmark %a handlers in %a is not possible",what,tostring(where)) + end end -- interface diff --git a/tex/context/base/strc-doc.lua b/tex/context/base/strc-doc.lua index 38830a4e7..7d3be1620 100644 --- a/tex/context/base/strc-doc.lua +++ b/tex/context/base/strc-doc.lua @@ -136,20 +136,27 @@ function sections.currentid() return #tobesaved end +local lastsaved = 0 + function sections.save(sectiondata) -- local sectionnumber = helpers.simplify(section.sectiondata) -- maybe done earlier local numberdata = sectiondata.numberdata local ntobesaved = #tobesaved if not numberdata or sectiondata.metadata.nolist then - return ntobesaved + -- stay else ntobesaved = ntobesaved + 1 tobesaved[ntobesaved] = numberdata if not collected[ntobesaved] then collected[ntobesaved] = numberdata end - return ntobesaved end + lastsaved = ntobesaved + return ntobesaved +end + +function sections.currentsectionindex() + return lastsaved -- only for special controlled situations end function sections.load() diff --git a/tex/context/base/strc-ref.lua b/tex/context/base/strc-ref.lua index 8a2a668c0..fb1c98c32 100644 --- a/tex/context/base/strc-ref.lua +++ b/tex/context/base/strc-ref.lua @@ -2179,6 +2179,8 @@ local function setreferencerealpage(actions) end end +references.setreferencerealpage = setreferencerealpage + -- we store some analysis data alongside the indexed array -- at this moment only the real reference page is analyzed -- normally such an analysis happens in the backend code diff --git a/tex/context/base/syst-ini.mkiv b/tex/context/base/syst-ini.mkiv index ff74efecc..29a97b6bd 100644 --- a/tex/context/base/syst-ini.mkiv +++ b/tex/context/base/syst-ini.mkiv @@ -1020,7 +1020,7 @@ %D \PDFTEX, we default to \DVI. Why? 
\pdfoutput \zerocount -\pdfminorversion \plussix +\pdfminorversion \plusseven \pdfgentounicode \plusone \pdfinclusioncopyfonts \plusone \pdfinclusionerrorlevel \zerocount diff --git a/tex/context/base/typo-dig.lua b/tex/context/base/typo-dig.lua index 67849c6d4..f5b8a6ddd 100644 --- a/tex/context/base/typo-dig.lua +++ b/tex/context/base/typo-dig.lua @@ -103,15 +103,14 @@ end actions[1] = function(head,start,attr) local font = getfont(start) local char = getchar(start) - local unic = chardata[font][char].tounicode - local what = unic and tonumber(unic,16) or char - if charbase[what].category == "nd" then + local unic = chardata[font][char].unicode or char + if charbase[unic].category == "nd" then -- ignore unic tables local oldwidth = getfield(start,"width") local newwidth = getdigitwidth(font) if newwidth ~= oldwidth then if trace_digits then report_digits("digit trigger %a, instance %a, char %C, unicode %U, delta %s", - attr%100,div(attr,100),char,what,newwidth-oldwidth) + attr%100,div(attr,100),char,unic,newwidth-oldwidth) end head, start = nodes.aligned(head,start,start,newwidth,"middle") return head, start, true diff --git a/tex/context/base/typo-tal.lua b/tex/context/base/typo-tal.lua index 1e9c815b3..d41a63dd5 100644 --- a/tex/context/base/typo-tal.lua +++ b/tex/context/base/typo-tal.lua @@ -12,6 +12,8 @@ if not modules then modules = { } end modules ['typo-tal'] = { -- Currently we have two methods: text and number with some downward compatible -- defaulting. +-- We can speed up by saving the current fontcharacters[font] + lastfont. + local next, type = next, type local div = math.div local utfbyte = utf.byte @@ -23,7 +25,7 @@ local glyph_code = nodecodes.glyph local glue_code = nodecodes.glue local fontcharacters = fonts.hashes.characters -local unicodes = fonts.hashes.unicodes +----- unicodes = fonts.hashes.unicodes local categories = characters.categories -- nd local variables = interfaces.variables @@ -135,7 +137,8 @@ function characteralign.handler(originalhead,where) if id == glyph_code then local char = getchar(current) local font = getfont(current) - local unicode = unicodes[font][char] + -- local unicode = unicodes[font][char] + local unicode = fontcharacters[font][char].unicode or char -- ignore tables if not unicode then -- no unicode so forget about it elseif unicode == separator then @@ -213,7 +216,8 @@ function characteralign.handler(originalhead,where) if id == glyph_code then local char = getchar(current) local font = getfont(current) - local unicode = unicodes[font][char] + -- local unicode = unicodes[font][char] + local unicode = fontcharacters[font][char].unicode or char -- ignore tables if not unicode then -- no unicode so forget about it elseif unicode == separator then diff --git a/tex/context/base/util-tpl.lua b/tex/context/base/util-tpl.lua index 67d058221..bd0e261a9 100644 --- a/tex/context/base/util-tpl.lua +++ b/tex/context/base/util-tpl.lua @@ -52,7 +52,7 @@ local sqlescape = lpeg.replacer { -- { "\t", "\\t" }, } -local sqlquoted = lpeg.Cs(lpeg.Cc("'") * sqlescape * lpeg.Cc("'")) +local sqlquoted = Cs(Cc("'") * sqlescape * Cc("'")) lpegpatterns.sqlescape = sqlescape lpegpatterns.sqlquoted = sqlquoted @@ -111,13 +111,21 @@ local luaescaper = escapers.lua local quotedluaescaper = quotedescapers.lua local function replacekeyunquoted(s,t,how,recurse) -- ".. 
\" " - local escaper = how and escapers[how] or luaescaper - return escaper(replacekey(s,t,how,recurse)) + if how == false then + return replacekey(s,t,how,recurse) + else + local escaper = how and escapers[how] or luaescaper + return escaper(replacekey(s,t,how,recurse)) + end end local function replacekeyquoted(s,t,how,recurse) -- ".. \" " - local escaper = how and quotedescapers[how] or quotedluaescaper - return escaper(replacekey(s,t,how,recurse)) + if how == false then + return replacekey(s,t,how,recurse) + else + local escaper = how and quotedescapers[how] or quotedluaescaper + return escaper(replacekey(s,t,how,recurse)) + end end local single = P("%") -- test %test% test : resolves test @@ -188,3 +196,5 @@ end -- inspect(utilities.templates.replace("test %one% test", { one = "%two%", two = "two" })) -- inspect(utilities.templates.resolve({ one = "%two%", two = "two", three = "%three%" })) +-- inspect(utilities.templates.replace("test %one% test", { one = "%two%", two = "two" },false,true)) +-- inspect(utilities.templates.replace("test %one% test", { one = "%two%", two = "two" },false)) diff --git a/tex/context/fonts/treatments.lfg b/tex/context/fonts/treatments.lfg index 07bb51def..40bac427c 100644 --- a/tex/context/fonts/treatments.lfg +++ b/tex/context/fonts/treatments.lfg @@ -25,50 +25,50 @@ local fix_unifraktur = { end, } -local fix_lmmonoregular = { - -- - -- there are now some extra safeguards for idris - -- - comment = "wrong widths of some glyphs", - fixes = function(data) - report("fixing some wrong widths") - local unicodes = data.resources.unicodes - local descriptions = data.descriptions - local function getdescription(name) - local unicode = unicodes[name] - if not unicode then - report("no valid unicode for %a",name) - return - end - local description = descriptions[unicode] - if not description then - report("no glyph names %a in font",name) - return - end - return description - end - local zero = getdescription("zero") - if not zero then - return - end - local defaultwidth = zero.width - local function setwidth(name) - local data = getdescription(name) - if data then - data.width = defaultwidth - end - end - setwidth("six") - setwidth("nine") - setwidth("caron") - setwidth("perthousand") - setwidth("numero") - setwidth("caron.cap") - setwidth("six.taboldstyle") - setwidth("nine.taboldstyle") - setwidth("dollar.oldstyle") - end -} +-- local fix_lmmonoregular = { +-- -- +-- -- there are now some extra safeguards for idris +-- -- +-- comment = "wrong widths of some glyphs", +-- fixes = function(data) +-- report("fixing some wrong widths") +-- local unicodes = data.resources.unicodes +-- local descriptions = data.descriptions +-- local function getdescription(name) +-- local unicode = unicodes[name] +-- if not unicode then +-- report("no valid unicode for %a",name) +-- return +-- end +-- local description = descriptions[unicode] +-- if not description then +-- report("no glyph names %a in font",name) +-- return +-- end +-- return description +-- end +-- local zero = getdescription("zero") +-- if not zero then +-- return +-- end +-- local defaultwidth = zero.width +-- local function setwidth(name) +-- local data = getdescription(name) +-- if data then +-- data.width = defaultwidth +-- end +-- end +-- setwidth("six") +-- setwidth("nine") +-- setwidth("caron") +-- setwidth("perthousand") +-- setwidth("numero") +-- setwidth("caron.cap") +-- setwidth("six.taboldstyle") +-- setwidth("nine.taboldstyle") +-- setwidth("dollar.oldstyle") +-- end +-- } return { name = "treatments", 
diff --git a/tex/context/interface/keys-cs.xml b/tex/context/interface/keys-cs.xml index e32918566..b261a74e4 100644 --- a/tex/context/interface/keys-cs.xml +++ b/tex/context/interface/keys-cs.xml @@ -659,7 +659,7 @@ <cd:constant name='coupling' value='propojeni'/> <cd:constant name='couplingway' value='zpusobpropojeni'/> <cd:constant name='criterium' value='kriterium'/> - <cd:constant name='css' value='css'/> + <cd:constant name='cssfile' value='cssfile'/> <cd:constant name='current' value='aktualni'/> <cd:constant name='cutspace' value='cutspace'/> <cd:constant name='dash' value='pomlcka'/> diff --git a/tex/context/interface/keys-de.xml b/tex/context/interface/keys-de.xml index 48329fabf..b9f6c400b 100644 --- a/tex/context/interface/keys-de.xml +++ b/tex/context/interface/keys-de.xml @@ -659,7 +659,7 @@ <cd:constant name='coupling' value='verknuepfung'/> <cd:constant name='couplingway' value='verkopplungsart'/> <cd:constant name='criterium' value='kriterium'/> - <cd:constant name='css' value='css'/> + <cd:constant name='cssfile' value='cssfile'/> <cd:constant name='current' value='aktuell'/> <cd:constant name='cutspace' value='cutspace'/> <cd:constant name='dash' value='strich'/> diff --git a/tex/context/interface/keys-en.xml b/tex/context/interface/keys-en.xml index dd8de7312..50e15c05b 100644 --- a/tex/context/interface/keys-en.xml +++ b/tex/context/interface/keys-en.xml @@ -659,7 +659,7 @@ <cd:constant name='coupling' value='coupling'/> <cd:constant name='couplingway' value='couplingway'/> <cd:constant name='criterium' value='criterium'/> - <cd:constant name='css' value='css'/> + <cd:constant name='cssfile' value='cssfile'/> <cd:constant name='current' value='current'/> <cd:constant name='cutspace' value='cutspace'/> <cd:constant name='dash' value='dash'/> diff --git a/tex/context/interface/keys-fr.xml b/tex/context/interface/keys-fr.xml index 4a9f2b78b..15ebc93c6 100644 --- a/tex/context/interface/keys-fr.xml +++ b/tex/context/interface/keys-fr.xml @@ -659,7 +659,7 @@ <cd:constant name='coupling' value='couplage'/> <cd:constant name='couplingway' value='modecouplage'/> <cd:constant name='criterium' value='critere'/> - <cd:constant name='css' value='css'/> + <cd:constant name='cssfile' value='cssfile'/> <cd:constant name='current' value='courant'/> <cd:constant name='cutspace' value='cutspace'/> <cd:constant name='dash' value='pointille'/> diff --git a/tex/context/interface/keys-it.xml b/tex/context/interface/keys-it.xml index ce1be9a61..e256c965b 100644 --- a/tex/context/interface/keys-it.xml +++ b/tex/context/interface/keys-it.xml @@ -659,7 +659,7 @@ <cd:constant name='coupling' value='accoppiamento'/> <cd:constant name='couplingway' value='modoaccoppiamento'/> <cd:constant name='criterium' value='criterio'/> - <cd:constant name='css' value='css'/> + <cd:constant name='cssfile' value='cssfile'/> <cd:constant name='current' value='corrente'/> <cd:constant name='cutspace' value='cutspace'/> <cd:constant name='dash' value='dash'/> diff --git a/tex/context/interface/keys-nl.xml b/tex/context/interface/keys-nl.xml index bdcf2a96a..eebc048c0 100644 --- a/tex/context/interface/keys-nl.xml +++ b/tex/context/interface/keys-nl.xml @@ -659,7 +659,7 @@ <cd:constant name='coupling' value='koppeling'/> <cd:constant name='couplingway' value='koppelwijze'/> <cd:constant name='criterium' value='criterium'/> - <cd:constant name='css' value='css'/> + <cd:constant name='cssfile' value='cssfile'/> <cd:constant name='current' value='huidige'/> <cd:constant name='cutspace' value='snijwit'/> <cd:constant 
name='dash' value='streep'/> diff --git a/tex/context/interface/keys-pe.xml b/tex/context/interface/keys-pe.xml index 77b137293..926cb4f1e 100644 --- a/tex/context/interface/keys-pe.xml +++ b/tex/context/interface/keys-pe.xml @@ -659,7 +659,7 @@ <cd:constant name='coupling' value='تزویج'/> <cd:constant name='couplingway' value='روشتزویج'/> <cd:constant name='criterium' value='criterium'/> - <cd:constant name='css' value='css'/> + <cd:constant name='cssfile' value='cssfile'/> <cd:constant name='current' value='جاری'/> <cd:constant name='cutspace' value='فضایبرش'/> <cd:constant name='dash' value='دش'/> diff --git a/tex/context/interface/keys-ro.xml b/tex/context/interface/keys-ro.xml index ad07ea880..f07634521 100644 --- a/tex/context/interface/keys-ro.xml +++ b/tex/context/interface/keys-ro.xml @@ -659,7 +659,7 @@ <cd:constant name='coupling' value='cuplare'/> <cd:constant name='couplingway' value='modcuplare'/> <cd:constant name='criterium' value='criteriu'/> - <cd:constant name='css' value='css'/> + <cd:constant name='cssfile' value='cssfile'/> <cd:constant name='current' value='curent'/> <cd:constant name='cutspace' value='cutspace'/> <cd:constant name='dash' value='dash'/> diff --git a/tex/generic/context/luatex/luatex-basics-gen.lua b/tex/generic/context/luatex/luatex-basics-gen.lua index c19a49af3..e7cdc7b39 100644 --- a/tex/generic/context/luatex/luatex-basics-gen.lua +++ b/tex/generic/context/luatex/luatex-basics-gen.lua @@ -351,7 +351,12 @@ end -- function table.setmetatableindex(t,f) + if type(t) ~= "table" then + f = f or t + t = { } + end setmetatable(t,{ __index = f }) + return t end -- helper for plain: diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua index 98e98d806..42f85340e 100644 --- a/tex/generic/context/luatex/luatex-fonts-merged.lua +++ b/tex/generic/context/luatex/luatex-fonts-merged.lua @@ -1,6 +1,6 @@ -- merged file : luatex-fonts-merged.lua -- parent file : luatex-fonts.lua --- merge date : 09/27/14 14:46:07 +-- merge date : 10/02/14 23:07:34 do -- begin closure to overcome local limits and interference @@ -895,7 +895,7 @@ local function compare(a,b) if ta==tb then return a<b else - return tostring(a)<tostring(b) + return tostring(a)<tostring(b) end end local function sortedkeys(tab) @@ -3560,7 +3560,12 @@ function caches.compile(data,luaname,lucname) end end function table.setmetatableindex(t,f) + if type(t)~="table" then + f=f or t + t={} + end setmetatable(t,{ __index=f }) + return t end arguments={} if arg then @@ -4134,7 +4139,7 @@ function constructors.scale(tfmdata,specification) targetparameters.textsize=textsize targetparameters.forcedsize=forcedsize targetparameters.extrafactor=extrafactor - local tounicode=resources.tounicode + local tounicode=fonts.mappings.tounicode local defaultwidth=resources.defaultwidth or 0 local defaultheight=resources.defaultheight or 0 local defaultdepth=resources.defaultdepth or 0 @@ -4214,7 +4219,8 @@ function constructors.scale(tfmdata,specification) local autoitalicamount=properties.autoitalicamount local stackmath=not properties.nostackmath local nonames=properties.noglyphnames - local nodemode=properties.mode=="node" + local haskerns=properties.haskerns or properties.mode=="base" + local hasligatures=properties.hasligatures or properties.mode=="base" if changed and not next(changed) then changed=false end @@ -4277,38 +4283,20 @@ function constructors.scale(tfmdata,specification) constructors.beforecopyingcharacters(target,tfmdata) local sharedkerns={} for 
unicode,character in next,characters do - local chr,description,index,touni + local chr,description,index if changed then local c=changed[unicode] if c then - local ligatures=character.ligatures description=descriptions[c] or descriptions[unicode] or character character=characters[c] or character index=description.index or c - if tounicode then - touni=tounicode[index] - if not touni then - local d=descriptions[unicode] or characters[unicode] - local i=d.index or unicode - touni=tounicode[i] - end - end - if ligatures and not character.ligatures then - character.ligatures=ligatures - end else description=descriptions[unicode] or character index=description.index or unicode - if tounicode then - touni=tounicode[index] - end end else description=descriptions[unicode] or character index=description.index or unicode - if tounicode then - touni=tounicode[index] - end end local width=description.width local height=description.height @@ -4349,8 +4337,10 @@ function constructors.scale(tfmdata,specification) } end end - if touni then - chr.tounicode=touni + local isunicode=description.unicode + if isunicode then + chr.unicode=isunicode + chr.tounicode=tounicode(isunicode) end if hasquality then local ve=character.expansion_factor @@ -4443,7 +4433,7 @@ function constructors.scale(tfmdata,specification) end end end - if not nodemode then + if haskerns then local vk=character.kerns if vk then local s=sharedkerns[vk] @@ -4454,6 +4444,8 @@ function constructors.scale(tfmdata,specification) end chr.kerns=s end + end + if hasligatures then local vl=character.ligatures if vl then if true then @@ -4954,6 +4946,16 @@ function constructors.applymanipulators(what,tfmdata,features,trace,report) end end end +function constructors.addcoreunicodes(unicodes) + if not unicodes then + unicodes={} + end + unicodes.space=0x0020 + unicodes.hyphen=0x002D + unicodes.zwj=0x200D + unicodes.zwnj=0x200C + return unicodes +end end -- closure @@ -5205,17 +5207,43 @@ end local function tounicode16sequence(unicodes,name) local t={} for l=1,#unicodes do - local unicode=unicodes[l] - if unicode<0x10000 then - t[l]=format("%04X",unicode) + local u=unicodes[l] + if u<0x10000 then + t[l]=format("%04X",u) elseif unicode<0x1FFFFFFFFF then - t[l]=format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00) + t[l]=format("%04X%04X",floor(u/1024),u%1024+0xDC00) else - report_fonts ("can't convert %a in %a into tounicode",unicode,name) + report_fonts ("can't convert %a in %a into tounicode",u,name) + return end end return concat(t) end +local function tounicode(unicode,name) + if type(unicode)=="table" then + local t={} + for l=1,#unicode do + local u=unicode[l] + if u<0x10000 then + t[l]=format("%04X",u) + elseif u<0x1FFFFFFFFF then + t[l]=format("%04X%04X",floor(u/1024),u%1024+0xDC00) + else + report_fonts ("can't convert %a in %a into tounicode",u,name) + return + end + end + return concat(t) + else + if unicode<0x10000 then + return format("%04X",unicode) + elseif unicode<0x1FFFFFFFFF then + return format("%04X%04X",floor(unicode/1024),unicode%1024+0xDC00) + else + report_fonts("can't convert %a in %a into tounicode",unicode,name) + end + end +end local function fromunicode16(str) if #str==4 then return tonumber(str,16) @@ -5226,6 +5254,7 @@ local function fromunicode16(str) end mappings.loadlumtable=loadlumtable mappings.makenameparser=makenameparser +mappings.tounicode=tounicode mappings.tounicode16=tounicode16 mappings.tounicode16sequence=tounicode16sequence mappings.fromunicode16=fromunicode16 @@ -5248,17 +5277,9 @@ function 
mappings.addtounicode(data,filename) local private=fonts.constructors.privateoffset local unknown=format("%04X",utfbyte("?")) local unicodevector=fonts.encodings.agl.unicodes - local tounicode={} - local originals={} local missing={} - resources.tounicode=tounicode - resources.originals=originals local lumunic,uparser,oparser local cidinfo,cidnames,cidcodes,usedmap - if false then - lumunic=loadlumtable(filename) - lumunic=lumunic and lumunic.tounicode - end cidinfo=properties.cidinfo usedmap=cidinfo and fonts.cid.getmap(cidinfo) if usedmap then @@ -5274,8 +5295,7 @@ function mappings.addtounicode(data,filename) if unic==-1 or unic>=private or (unic>=0xE000 and unic<=0xF8FF) or unic==0xFFFE or unic==0xFFFF then local unicode=lumunic and lumunic[name] or unicodevector[name] if unicode then - originals[index]=unicode - tounicode[index]=tounicode16(unicode,name) + glyph.unicode=unicode ns=ns+1 end if (not unicode) and usedmap then @@ -5283,8 +5303,7 @@ function mappings.addtounicode(data,filename) if foundindex then unicode=cidcodes[foundindex] if unicode then - originals[index]=unicode - tounicode[index]=tounicode16(unicode,name) + glyph.unicode=unicode ns=ns+1 else local reference=cidnames[foundindex] @@ -5293,21 +5312,18 @@ function mappings.addtounicode(data,filename) if foundindex then unicode=cidcodes[foundindex] if unicode then - originals[index]=unicode - tounicode[index]=tounicode16(unicode,name) + glyph.unicode=unicode ns=ns+1 end end if not unicode or unicode=="" then local foundcodes,multiple=lpegmatch(uparser,reference) if foundcodes then - originals[index]=foundcodes + glyph.unicode=foundcodes if multiple then - tounicode[index]=tounicode16sequence(foundcodes) nl=nl+1 unicode=true else - tounicode[index]=tounicode16(foundcodes,name) ns=ns+1 unicode=foundcodes end @@ -5345,25 +5361,20 @@ function mappings.addtounicode(data,filename) end if n==0 then elseif n==1 then - originals[index]=t[1] - tounicode[index]=tounicode16(t[1],name) + glyph.unicode=t[1] else - originals[index]=t - tounicode[index]=tounicode16sequence(t) + glyph.unicode=t end nl=nl+1 end if not unicode or unicode=="" then local foundcodes,multiple=lpegmatch(uparser,name) if foundcodes then + glyph.unicode=foundcodes if multiple then - originals[index]=foundcodes - tounicode[index]=tounicode16sequence(foundcodes,name) nl=nl+1 unicode=true else - originals[index]=foundcodes - tounicode[index]=tounicode16(foundcodes,name) ns=ns+1 unicode=foundcodes end @@ -5387,8 +5398,7 @@ function mappings.addtounicode(data,filename) else return end - local index=descriptions[code].index - if tounicode[index] then + if descriptions[code].unicode then return end local g=guess[variant] @@ -5453,37 +5463,31 @@ function mappings.addtounicode(data,filename) end end end + local orphans=0 + local guessed=0 for k,v in next,guess do if type(v)=="number" then - guess[k]=tounicode16(v) + descriptions[unicodes[k]].unicode=descriptions[v].unicode or v + guessed=guessed+1 else local t=nil local l=lower(k) local u=unicodes[l] if not u then + orphans=orphans+1 elseif u==-1 or u>=private or (u>=0xE000 and u<=0xF8FF) or u==0xFFFE or u==0xFFFF then - t=tounicode[descriptions[u].index] - else - end - if t then - guess[k]=t + local unicode=descriptions[u].unicode + if unicode then + descriptions[unicodes[k]].unicode=unicode + guessed=guessed+1 + else + orphans=orphans+1 + end else - guess[k]="FFFD" + orphans=orphans+1 end end end - local orphans=0 - local guessed=0 - for k,v in next,guess do - tounicode[descriptions[unicodes[k]].index]=v - if v=="FFFD" 
then - orphans=orphans+1 - guess[k]=false - else - guessed=guessed+1 - guess[k]=true - end - end if trace_loading and orphans>0 or guessed>0 then report_fonts("%s glyphs with no related unicode, %s guessed, %s orphans",guessed+orphans,guessed,orphans) end @@ -5492,9 +5496,17 @@ function mappings.addtounicode(data,filename) for unic,glyph in table.sortedhash(descriptions) do local name=glyph.name local index=glyph.index - local toun=tounicode[index] - if toun then - report_fonts("internal slot %U, name %a, unicode %U, tounicode %a",index,name,unic,toun) + local unicode=glyph.unicode + if unicode then + if type(unicode)=="table" then + local unicodes={} + for i=1,#unicode do + unicodes[i]=formatters("%U",unicode[i]) + end + report_fonts("internal slot %U, name %a, unicode %U, tounicode % t",index,name,unic,unicodes) + else + report_fonts("internal slot %U, name %a, unicode %U, tounicode %U",index,name,unic,unicode) + end else report_fonts("internal slot %U, name %a, unicode %U",index,name,unic) end @@ -5675,6 +5687,10 @@ local function read_from_tfm(specification) features.encoding=encoding end end + properties.haskerns=true + properties.haslogatures=true + resources.unicodes={} + resources.lookuptags={} return tfmdata end end @@ -5730,6 +5746,7 @@ local trace_indexing=false trackers.register("afm.indexing",function(v) trace_in local trace_loading=false trackers.register("afm.loading",function(v) trace_loading=v end) local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end) local report_afm=logs.reporter("fonts","afm loading") +local setmetatableindex=table.setmetatableindex local findbinfile=resolvers.findbinfile local definers=fonts.definers local readers=fonts.readers @@ -5738,7 +5755,7 @@ local afm=constructors.newhandler("afm") local pfb=constructors.newhandler("pfb") local afmfeatures=constructors.newfeatures("afm") local registerafmfeature=afmfeatures.register -afm.version=1.410 +afm.version=1.500 afm.cache=containers.define("fonts","afm",afm.version,true) afm.autoprefixed=true afm.helpdata={} @@ -5760,6 +5777,15 @@ registerafmfeature { node=setmode, } } +local remappednames={ + ff={ name="f_f",unicode={ 0x66,0x66 } }, + fi={ name="f_i",unicode={ 0x66,0x69 } }, + fj={ name="f_j",unicode={ 0x66,0x6A } }, + fk={ name="f_k",unicode={ 0x66,0x6B } }, + fl={ name="f_l",unicode={ 0x66,0x6C } }, + ffi={ name="f_f_i",unicode={ 0x66,0x66,0x69 } }, + ffl={ name="f_f_l",unicode={ 0x66,0x66,0x6C } }, +} local comment=P("Comment") local spacing=patterns.spacer local lineend=patterns.newline @@ -5933,7 +5959,7 @@ local function readafm(filename) return nil end end -local addkerns,addligatures,addtexligatures,unify,normalize +local addkerns,addligatures,addtexligatures,unify,normalize,fixnames function afm.load(filename) filename=resolvers.findfile(filename,'afm') or "" if filename~="" and not fonts.names.ignoredfile(filename) then @@ -5976,6 +6002,7 @@ function afm.load(filename) addkerns(data) end normalize(data) + fixnames(data) report_afm("add tounicode data") fonts.mappings.addtounicode(data,filename) data.size=size @@ -5983,6 +6010,7 @@ function afm.load(filename) data.pfbsize=pfbsize data.pfbtime=pfbtime report_afm("saving %a in cache",name) + data.resources.unicodes=nil data=containers.write(afm.cache,name,data) data=containers.read(afm.cache,name) end @@ -6042,18 +6070,29 @@ unify=function(data,filename) local filename=resources.filename or file.removesuffix(file.basename(filename)) resources.filename=resolvers.unresolve(filename) 
resources.unicodes=unicodes - resources.marks={} - resources.names=names + resources.marks={} resources.private=private end normalize=function(data) end +fixnames=function(data) + for k,v in next,data.descriptions do + local n=v.name + local r=remappednames[n] + if r then + if trace_indexing then + report_afm("renaming characters %a to %a",n,r.name) + end + v.name=r.name + v.unicode=r.unicode + end + end +end local addthem=function(rawdata,ligatures) if ligatures then local descriptions=rawdata.descriptions local resources=rawdata.resources local unicodes=resources.unicodes - local names=resources.names for ligname,ligdata in next,ligatures do local one=descriptions[unicodes[ligname]] if one then @@ -6186,8 +6225,8 @@ local function copytotfm(data) local filename=constructors.checkedfilename(resources) local fontname=metadata.fontname or metadata.fullname local fullname=metadata.fullname or metadata.fontname - local endash=unicodes['space'] - local emdash=unicodes['emdash'] + local endash=0x0020 + local emdash=0x2014 local spacer="space" local spaceunits=500 local monospaced=metadata.isfixedpitch @@ -6241,7 +6280,7 @@ local function copytotfm(data) if charxheight then parameters.x_height=charxheight else - local x=unicodes['x'] + local x=0x0078 if x then local x=descriptions[x] if x then @@ -6288,7 +6327,34 @@ function afm.setfeatures(tfmdata,features) return {} end end -local function checkfeatures(specification) +local function addtables(data) + local resources=data.resources + local lookuptags=resources.lookuptags + local unicodes=resources.unicodes + if not lookuptags then + lookuptags={} + resources.lookuptags=lookuptags + end + setmetatableindex(lookuptags,function(t,k) + local v=type(k)=="number" and ("lookup "..k) or k + t[k]=v + return v + end) + if not unicodes then + unicodes={} + resources.unicodes=unicodes + setmetatableindex(unicodes,function(t,k) + setmetatableindex(unicodes,nil) + for u,d in next,data.descriptions do + local n=d.name + if n then + t[n]=u + end + end + return rawget(t,k) + end) + end + constructors.addcoreunicodes(unicodes) end local function afmtotfm(specification) local afmname=specification.filename or specification.name @@ -6315,6 +6381,7 @@ local function afmtotfm(specification) if not tfmdata then local rawdata=afm.load(afmname) if rawdata and next(rawdata) then + addtables(rawdata) adddimensions(rawdata) tfmdata=copytotfm(rawdata) if tfmdata and next(tfmdata) then @@ -6349,6 +6416,7 @@ end local function prepareligatures(tfmdata,ligatures,value) if value then local descriptions=tfmdata.descriptions + local hasligatures=false for unicode,character in next,tfmdata.characters do local description=descriptions[unicode] local dligatures=description.ligatures @@ -6364,8 +6432,10 @@ local function prepareligatures(tfmdata,ligatures,value) type=0 } end + hasligatures=true end end + tfmdata.properties.hasligatures=hasligatures end end local function preparekerns(tfmdata,kerns,value) @@ -6374,6 +6444,7 @@ local function preparekerns(tfmdata,kerns,value) local resources=rawdata.resources local unicodes=resources.unicodes local descriptions=tfmdata.descriptions + local haskerns=false for u,chr in next,tfmdata.characters do local d=descriptions[u] local newkerns=d[kerns] @@ -6389,8 +6460,10 @@ local function preparekerns(tfmdata,kerns,value) kerns[uk]=v end end + haskerns=true end end + tfmdata.properties.haskerns=haskerns end end local list={ @@ -6820,6 +6893,8 @@ local reversed,concat,remove,sortedkeys=table.reversed,table.concat,table.remove local 
ioflush=io.flush local fastcopy,tohash,derivetable=table.fastcopy,table.tohash,table.derive local formatters=string.formatters +local P,R,S,C,Ct,lpegmatch=lpeg.P,lpeg.R,lpeg.S,lpeg.C,lpeg.Ct,lpeg.match +local setmetatableindex=table.setmetatableindex local allocate=utilities.storage.allocate local registertracker=trackers.register local registerdirective=directives.register @@ -6834,26 +6909,27 @@ local trace_dynamics=false registertracker("otf.dynamics",function(v) trace_dyna local trace_sequences=false registertracker("otf.sequences",function(v) trace_sequences=v end) local trace_markwidth=false registertracker("otf.markwidth",function(v) trace_markwidth=v end) local trace_defining=false registertracker("fonts.defining",function(v) trace_defining=v end) +local compact_lookups=true registertracker("otf.compactlookups",function(v) compact_lookups=v end) +local purge_names=true registertracker("otf.purgenames",function(v) purge_names=v end) local report_otf=logs.reporter("fonts","otf loading") local fonts=fonts local otf=fonts.handlers.otf otf.glists={ "gsub","gpos" } -otf.version=2.762 +otf.version=2.801 otf.cache=containers.define("fonts","otf",otf.version,true) local fontdata=fonts.hashes.identifiers local chardata=characters and characters.data -local otffeatures=fonts.constructors.newfeatures("otf") +local definers=fonts.definers +local readers=fonts.readers +local constructors=fonts.constructors +local otffeatures=constructors.newfeatures("otf") local registerotffeature=otffeatures.register local enhancers=allocate() otf.enhancers=enhancers local patches={} enhancers.patches=patches -local definers=fonts.definers -local readers=fonts.readers -local constructors=fonts.constructors local forceload=false local cleanup=0 -local usemetatables=false local packdata=true local syncspace=true local forcenotdef=false @@ -6872,7 +6948,6 @@ formats.ttc="truetype" formats.dfont="truetype" registerdirective("fonts.otf.loader.cleanup",function(v) cleanup=tonumber(v) or (v and 1) or 0 end) registerdirective("fonts.otf.loader.force",function(v) forceload=v end) -registerdirective("fonts.otf.loader.usemetatables",function(v) usemetatables=v end) registerdirective("fonts.otf.loader.pack",function(v) packdata=v end) registerdirective("fonts.otf.loader.syncspace",function(v) syncspace=v end) registerdirective("fonts.otf.loader.forcenotdef",function(v) forcenotdef=v end) @@ -7017,6 +7092,8 @@ local ordered_enhancers={ "check encoding", "add duplicates", "cleanup tables", + "compact lookups", + "purge names", } local actions=allocate() local before=allocate() @@ -7207,7 +7284,7 @@ function otf.load(filename,sub,featurefile) goodies={}, helpers={ tounicodelist=splitter, - tounicodetable=lpeg.Ct(splitter), + tounicodetable=Ct(splitter), }, } starttiming(data) @@ -7250,6 +7327,34 @@ function otf.load(filename,sub,featurefile) report_otf("loading from cache using hash %a",hash) end enhance("unpack",data,filename,nil,false) + local resources=data.resources + local lookuptags=resources.lookuptags + local unicodes=resources.unicodes + if not lookuptags then + lookuptags={} + resources.lookuptags=lookuptags + end + setmetatableindex(lookuptags,function(t,k) + local v=type(k)=="number" and ("lookup "..k) or k + t[k]=v + return v + end) + if not unicodes then + unicodes={} + resources.unicodes=unicodes + setmetatableindex(unicodes,function(t,k) + setmetatableindex(unicodes,nil) + for u,d in next,data.descriptions do + local n=d.name + if n then + t[n]=u + else + end + end + return rawget(t,k) + end) + end + 
constructors.addcoreunicodes(unicodes) if applyruntimefixes then applyruntimefixes(filename,data) end @@ -7286,34 +7391,22 @@ actions["add dimensions"]=function(data,filename) local defaultheight=resources.defaultheight or 0 local defaultdepth=resources.defaultdepth or 0 local basename=trace_markwidth and file.basename(filename) - if usemetatables then - for _,d in next,descriptions do - local wd=d.width - if not wd then - d.width=defaultwidth - elseif trace_markwidth and wd~=0 and d.class=="mark" then - report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename) - end - setmetatable(d,mt) + for _,d in next,descriptions do + local bb,wd=d.boundingbox,d.width + if not wd then + d.width=defaultwidth + elseif trace_markwidth and wd~=0 and d.class=="mark" then + report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename) end - else - for _,d in next,descriptions do - local bb,wd=d.boundingbox,d.width - if not wd then - d.width=defaultwidth - elseif trace_markwidth and wd~=0 and d.class=="mark" then - report_otf("mark %a with width %b found in %a",d.name or "<noname>",wd,basename) - end - if bb then - local ht,dp=bb[4],-bb[2] - if ht==0 or ht<0 then - else - d.height=ht - end - if dp==0 or dp<0 then - else - d.depth=dp - end + if bb then + local ht,dp=bb[4],-bb[2] + if ht==0 or ht<0 then + else + d.height=ht + end + if dp==0 or dp<0 then + else + d.depth=dp end end end @@ -7878,9 +7971,14 @@ local function t_hashed(t,cache) local ti=t[i] local tih=cache[ti] if not tih then - tih={} - for i=1,#ti do - tih[ti[i]]=true + local tn=#ti + if tn==1 then + tih={ [ti[1]]=true } + else + tih={} + for i=1,tn do + tih[ti[i]]=true + end end cache[ti]=tih end @@ -7893,12 +7991,17 @@ local function t_hashed(t,cache) end local function s_hashed(t,cache) if t then - local ht={} local tf=t[1] - for i=1,#tf do - ht[i]={ [tf[i]]=true } + local nf=#tf + if nf==1 then + return { [tf[1]]=true } + else + local ht={} + for i=1,nf do + ht[i]={ [tf[i]]=true } + end + return ht end - return ht else return nil end @@ -8326,7 +8429,7 @@ actions["check glyphs"]=function(data,filename,raw) description.glyph=nil end end -local valid=(lpeg.R("\x00\x7E")-lpeg.S("(){}[]<>%/ \n\r\f\v"))^0*lpeg.P(-1) +local valid=(R("\x00\x7E")-S("(){}[]<>%/ \n\r\f\v"))^0*P(-1) local function valid_ps_name(str) return str and str~="" and #str<64 and lpegmatch(valid,str) and true or false end @@ -8380,8 +8483,17 @@ actions["check metadata"]=function(data,filename,raw) end end actions["cleanup tables"]=function(data,filename,raw) + local duplicates=data.resources.duplicates + if duplicates then + for k,v in next,duplicates do + if #v==1 then + duplicates[k]=v[1] + end + end + end data.resources.indices=nil - data.helpers=nil + data.resources.unicodes=nil + data.helpers=nil end actions["reorganize glyph lookups"]=function(data,filename,raw) local resources=data.resources @@ -8486,6 +8598,142 @@ actions["reorganize glyph anchors"]=function(data,filename,raw) end end end +local bogusname=(P("uni")+P("u"))*R("AF","09")^4+(P("index")+P("glyph")+S("Ii")*P("dentity")*P(".")^0)*R("09")^1 +local uselessname=(1-bogusname)^0*bogusname +actions["purge names"]=function(data,filename,raw) + if purge_names then + local n=0 + for u,d in next,data.descriptions do + if lpegmatch(uselessname,d.name) then + n=n+1 + d.name=nil + end + end + if n>0 then + report_otf("%s bogus names removed",n) + end + end +end +actions["compact lookups"]=function(data,filename,raw) + if not compact_lookups then + report_otf("not compacting") + 
return + end + local last=0 + local tags=table.setmetatableindex({}, + function(t,k) + last=last+1 + t[k]=last + return last + end + ) + local descriptions=data.descriptions + local resources=data.resources + for u,d in next,descriptions do + local slookups=d.slookups + if type(slookups)=="table" then + local s={} + for k,v in next,slookups do + s[tags[k]]=v + end + d.slookups=s + end + local mlookups=d.mlookups + if type(mlookups)=="table" then + local m={} + for k,v in next,mlookups do + m[tags[k]]=v + end + d.mlookups=m + end + local kerns=d.kerns + if type(kerns)=="table" then + local t={} + for k,v in next,kerns do + t[tags[k]]=v + end + d.kerns=t + end + end + local lookups=data.lookups + if lookups then + local l={} + for k,v in next,lookups do + local rules=v.rules + if rules then + for i=1,#rules do + local l=rules[i].lookups + if type(l)=="table" then + for i=1,#l do + l[i]=tags[l[i]] + end + end + end + end + l[tags[k]]=v + end + data.lookups=l + end + local lookups=resources.lookups + if lookups then + local l={} + for k,v in next,lookups do + local s=v.subtables + if type(s)=="table" then + for i=1,#s do + s[i]=tags[s[i]] + end + end + l[tags[k]]=v + end + resources.lookups=l + end + local sequences=resources.sequences + if sequences then + for i=1,#sequences do + local s=sequences[i] + local n=s.name + if n then + s.name=tags[n] + end + local t=s.subtables + if type(t)=="table" then + for i=1,#t do + t[i]=tags[t[i]] + end + end + end + end + local lookuptypes=resources.lookuptypes + if lookuptypes then + local l={} + for k,v in next,lookuptypes do + l[tags[k]]=v + end + resources.lookuptypes=l + end + local anchor_to_lookup=resources.anchor_to_lookup + if anchor_to_lookup then + for anchor,lookups in next,anchor_to_lookup do + local l={} + for lookup,value in next,lookups do + l[tags[lookup]]=value + end + anchor_to_lookup[anchor]=l + end + end + local lookup_to_anchor=resources.lookup_to_anchor + if lookup_to_anchor then + local l={} + for lookup,value in next,lookup_to_anchor do + l[tags[lookup]]=value + end + resources.lookup_to_anchor=l + end + tags=table.swapped(tags) + report_otf("%s lookup tags compacted",#tags) + resources.lookuptags=tags +end function otf.setfeatures(tfmdata,features) local okay=constructors.initializefeatures("otf",tfmdata,features,trace_features,report_otf) if okay then @@ -8587,8 +8835,8 @@ local function copytotfm(data,cache_id) parameters.italicangle=italicangle parameters.charwidth=charwidth parameters.charxheight=charxheight - local space=0x0020 - local emdash=0x2014 + local space=0x0020 + local emdash=0x2014 if monospaced then if descriptions[space] then spaceunits,spacer=descriptions[space].width,"space" @@ -8635,7 +8883,7 @@ local function copytotfm(data,cache_id) if charxheight then parameters.x_height=charxheight else - local x=0x78 + local x=0x0078 if x then local x=descriptions[x] if x then @@ -8691,14 +8939,23 @@ local function otftotfm(specification) if duplicates then local nofduplicates,nofduplicated=0,0 for parent,list in next,duplicates do - for i=1,#list do - local unicode=list[i] - if not descriptions[unicode] then - descriptions[unicode]=descriptions[parent] + if type(list)=="table" then + local n=#list + for i=1,n do + local unicode=list[i] + if not descriptions[unicode] then + descriptions[unicode]=descriptions[parent] + nofduplicated=nofduplicated+1 + end + end + nofduplicates=nofduplicates+n + else + if not descriptions[list] then + descriptions[list]=descriptions[parent] nofduplicated=nofduplicated+1 end + 
nofduplicates=nofduplicates+1 end - nofduplicates=nofduplicates+#list end if trace_otf and nofduplicated~=nofduplicates then report_otf("%i extra duplicates copied out of %i",nofduplicated,nofduplicates) @@ -8829,7 +9086,7 @@ if not modules then modules={} end modules ['font-otb']={ } local concat=table.concat local format,gmatch,gsub,find,match,lower,strip=string.format,string.gmatch,string.gsub,string.find,string.match,string.lower,string.strip -local type,next,tonumber,tostring=type,next,tonumber,tostring +local type,next,tonumber,tostring,rawget=type,next,tonumber,tostring,rawget local lpegmatch=lpeg.match local utfchar=utf.char local trace_baseinit=false trackers.register("otf.baseinit",function(v) trace_baseinit=v end) @@ -8876,36 +9133,36 @@ local function gref(descriptions,n) return "<error in base mode tracing>" end end -local function cref(feature,lookupname) +local function cref(feature,lookuptags,lookupname) if lookupname then - return formatters["feature %a, lookup %a"](feature,lookupname) + return formatters["feature %a, lookup %a"](feature,lookuptags[lookupname]) else return formatters["feature %a"](feature) end end -local function report_alternate(feature,lookupname,descriptions,unicode,replacement,value,comment) +local function report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,comment) report_prepare("%s: base alternate %s => %s (%S => %S)", - cref(feature,lookupname), + cref(feature,lookuptags,lookupname), gref(descriptions,unicode), replacement and gref(descriptions,replacement), value, comment) end -local function report_substitution(feature,lookupname,descriptions,unicode,substitution) +local function report_substitution(feature,lookuptags,lookupname,descriptions,unicode,substitution) report_prepare("%s: base substitution %s => %S", - cref(feature,lookupname), + cref(feature,lookuptags,lookupname), gref(descriptions,unicode), gref(descriptions,substitution)) end -local function report_ligature(feature,lookupname,descriptions,unicode,ligature) +local function report_ligature(feature,lookuptags,lookupname,descriptions,unicode,ligature) report_prepare("%s: base ligature %s => %S", - cref(feature,lookupname), + cref(feature,lookuptags,lookupname), gref(descriptions,ligature), gref(descriptions,unicode)) end -local function report_kern(feature,lookupname,descriptions,unicode,otherunicode,value) +local function report_kern(feature,lookuptags,lookupname,descriptions,unicode,otherunicode,value) report_prepare("%s: base kern %s + %s => %S", - cref(feature,lookupname), + cref(feature,lookuptags,lookupname), gref(descriptions,unicode), gref(descriptions,otherunicode), value) @@ -8942,7 +9199,7 @@ local function finalize_ligatures(tfmdata,ligatures) local characters=tfmdata.characters local descriptions=tfmdata.descriptions local resources=tfmdata.resources - local unicodes=resources.unicodes + local unicodes=resources.unicodes local private=resources.private local alldone=false while not alldone do @@ -8978,12 +9235,12 @@ local function finalize_ligatures(tfmdata,ligatures) local secondname=firstname.."_"..secondcode if i==size-1 then target=unicode - if not unicodes[secondname] then + if not rawget(unicodes,secondname) then unicodes[secondname]=unicode end okay=true else - target=unicodes[secondname] + target=rawget(unicodes,secondname) if not target then break end @@ -9019,16 +9276,18 @@ local function finalize_ligatures(tfmdata,ligatures) end end resources.private=private + return true end end local function 
preparesubstitutions(tfmdata,feature,value,validlookups,lookuplist) local characters=tfmdata.characters local descriptions=tfmdata.descriptions local resources=tfmdata.resources + local properties=tfmdata.properties local changed=tfmdata.changed - local unicodes=resources.unicodes local lookuphash=resources.lookuphash local lookuptypes=resources.lookuptypes + local lookuptags=resources.lookuptags local ligatures={} local alternate=tonumber(value) or true and 1 local defaultalt=otf.defaultbasealternate @@ -9036,39 +9295,39 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis local trace_alternatives=trace_baseinit and trace_alternatives local trace_ligatures=trace_baseinit and trace_ligatures local actions={ - substitution=function(lookupdata,lookupname,description,unicode) + substitution=function(lookupdata,lookuptags,lookupname,description,unicode) if trace_singles then - report_substitution(feature,lookupname,descriptions,unicode,lookupdata) + report_substitution(feature,lookuptags,lookupname,descriptions,unicode,lookupdata) end changed[unicode]=lookupdata end, - alternate=function(lookupdata,lookupname,description,unicode) + alternate=function(lookupdata,lookuptags,lookupname,description,unicode) local replacement=lookupdata[alternate] if replacement then changed[unicode]=replacement if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal") + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal") end elseif defaultalt=="first" then replacement=lookupdata[1] changed[unicode]=replacement if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt) + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) end elseif defaultalt=="last" then replacement=lookupdata[#data] if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt) + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) end else if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown") + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown") end end end, - ligature=function(lookupdata,lookupname,description,unicode) + ligature=function(lookupdata,lookuptags,lookupname,description,unicode) if trace_ligatures then - report_ligature(feature,lookupname,descriptions,unicode,lookupdata) + report_ligature(feature,lookuptags,lookupname,descriptions,unicode,lookupdata) end ligatures[#ligatures+1]={ unicode,lookupdata } end, @@ -9084,7 +9343,7 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis local lookuptype=lookuptypes[lookupname] local action=actions[lookuptype] if action then - action(lookupdata,lookupname,description,unicode) + action(lookupdata,lookuptags,lookupname,description,unicode) end end end @@ -9099,22 +9358,24 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis local action=actions[lookuptype] if action then for i=1,#lookuplist do - action(lookuplist[i],lookupname,description,unicode) + action(lookuplist[i],lookuptags,lookupname,description,unicode) end end end end end end - finalize_ligatures(tfmdata,ligatures) + properties.hasligatures=finalize_ligatures(tfmdata,ligatures) end local function 
preparepositionings(tfmdata,feature,value,validlookups,lookuplist) local characters=tfmdata.characters local descriptions=tfmdata.descriptions local resources=tfmdata.resources - local unicodes=resources.unicodes + local properties=tfmdata.properties + local lookuptags=resources.lookuptags local sharedkerns={} local traceindeed=trace_baseinit and trace_kerns + local haskerns=false for unicode,character in next,characters do local description=descriptions[unicode] local rawkerns=description.kerns @@ -9136,13 +9397,13 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist newkerns={ [otherunicode]=value } done=true if traceindeed then - report_kern(feature,lookup,descriptions,unicode,otherunicode,value) + report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value) end elseif not newkerns[otherunicode] then newkerns[otherunicode]=value done=true if traceindeed then - report_kern(feature,lookup,descriptions,unicode,otherunicode,value) + report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,value) end end end @@ -9151,12 +9412,14 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist if done then sharedkerns[rawkerns]=newkerns character.kerns=newkerns + haskerns=true else sharedkerns[rawkerns]=false end end end end + properties.haskerns=haskerns end basemethods.independent={ preparesubstitutions=preparesubstitutions, @@ -9182,13 +9445,13 @@ local function make_1(present,tree,name) end end end -local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookupname) +local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,done,lookuptags,lookupname) for k,v in next,tree do if k=="ligature" then local character=characters[preceding] if not character then if trace_baseinit then - report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookupname,v,preceding) + report_prepare("weird ligature in lookup %a, current %C, preceding %C",lookuptags[lookupname],v,preceding) end character=makefake(tfmdata,name,present) end @@ -9209,7 +9472,7 @@ local function make_2(present,tfmdata,characters,tree,name,preceding,unicode,don else local code=present[name] or unicode local name=name.."_"..k - make_2(present,tfmdata,characters,v,name,code,k,done,lookupname) + make_2(present,tfmdata,characters,v,name,code,k,done,lookuptags,lookupname) end end end @@ -9220,6 +9483,7 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis local changed=tfmdata.changed local lookuphash=resources.lookuphash local lookuptypes=resources.lookuptypes + local lookuptags=resources.lookuptags local ligatures={} local alternate=tonumber(value) or true and 1 local defaultalt=otf.defaultbasealternate @@ -9233,7 +9497,7 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis for unicode,data in next,lookupdata do if lookuptype=="substitution" then if trace_singles then - report_substitution(feature,lookupname,descriptions,unicode,data) + report_substitution(feature,lookuptags,lookupname,descriptions,unicode,data) end changed[unicode]=data elseif lookuptype=="alternate" then @@ -9241,28 +9505,28 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis if replacement then changed[unicode]=replacement if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"normal") + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"normal") end 
elseif defaultalt=="first" then replacement=data[1] changed[unicode]=replacement if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt) + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) end elseif defaultalt=="last" then replacement=data[#data] if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,defaultalt) + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,defaultalt) end else if trace_alternatives then - report_alternate(feature,lookupname,descriptions,unicode,replacement,value,"unknown") + report_alternate(feature,lookuptags,lookupname,descriptions,unicode,replacement,value,"unknown") end end elseif lookuptype=="ligature" then ligatures[#ligatures+1]={ unicode,data,lookupname } if trace_ligatures then - report_ligature(feature,lookupname,descriptions,unicode,data) + report_ligature(feature,lookuptags,lookupname,descriptions,unicode,data) end end end @@ -9280,7 +9544,7 @@ local function preparesubstitutions(tfmdata,feature,value,validlookups,lookuplis for i=1,nofligatures do local ligature=ligatures[i] local unicode,tree,lookupname=ligature[1],ligature[2],ligature[3] - make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookupname) + make_2(present,tfmdata,characters,tree,"ctx_"..unicode,unicode,unicode,done,lookuptags,lookupname) end end end @@ -9288,7 +9552,9 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist local characters=tfmdata.characters local descriptions=tfmdata.descriptions local resources=tfmdata.resources + local properties=tfmdata.properties local lookuphash=resources.lookuphash + local lookuptags=resources.lookuptags local traceindeed=trace_baseinit and trace_kerns for l=1,#lookuplist do local lookupname=lookuplist[l] @@ -9304,7 +9570,7 @@ local function preparepositionings(tfmdata,feature,value,validlookups,lookuplist for otherunicode,kern in next,data do if not kerns[otherunicode] and kern~=0 then kerns[otherunicode]=kern - report_kern(feature,lookup,descriptions,unicode,otherunicode,kern) + report_kern(feature,lookuptags,lookup,descriptions,unicode,otherunicode,kern) end end else @@ -10318,6 +10584,7 @@ local currentfont=false local lookuptable=false local anchorlookups=false local lookuptypes=false +local lookuptags=false local handlers={} local rlmode=0 local featurevalue=false @@ -10362,19 +10629,19 @@ local function gref(n) end local function cref(kind,chainname,chainlookupname,lookupname,index) if index then - return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookupname,index) + return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookuptags[lookupname],index) elseif lookupname then - return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookupname) + return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookuptags[lookupname]) elseif chainlookupname then - return formatters["feature %a, chain %a, sub %a"](kind,chainname,chainlookupname) + return formatters["feature %a, chain %a, sub %a"](kind,lookuptags[chainname],lookuptags[chainlookupname]) elseif chainname then - return formatters["feature %a, chain %a"](kind,chainname) + return formatters["feature %a, chain %a"](kind,lookuptags[chainname]) else return formatters["feature %a"](kind) 
end end local function pref(kind,lookupname) - return formatters["feature %a, lookup %a"](kind,lookupname) + return formatters["feature %a, lookup %a"](kind,lookuptags[lookupname]) end local function copy_glyph(g) local components=g.components @@ -11728,7 +11995,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq end else local i=1 - repeat + while true do if skipped then while true do local char=start.char @@ -11765,11 +12032,13 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq end end end - if start then + if i>nofchainlookups then + break + elseif start then start=start.next else end - until i>nofchainlookups + end end else local replacements=ck[7] @@ -11910,6 +12179,7 @@ local function featuresprocessor(head,font,attr) anchorlookups=resources.lookup_to_anchor lookuptable=resources.lookups lookuptypes=resources.lookuptypes + lookuptags=resources.lookuptags currentfont=font rlmode=0 local sequences=resources.sequences @@ -12441,6 +12711,7 @@ local function prepare_contextchains(tfmdata) local rawdata=tfmdata.shared.rawdata local resources=rawdata.resources local lookuphash=resources.lookuphash + local lookuptags=resources.lookuptags local lookups=rawdata.lookups if lookups then for lookupname,lookupdata in next,rawdata.lookups do @@ -12453,7 +12724,7 @@ local function prepare_contextchains(tfmdata) if not validformat then report_prepare("unsupported format %a",format) elseif not validformat[lookuptype] then - report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookupname) + report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookuptags[lookupname]) else local contexts=lookuphash[lookupname] if not contexts then @@ -12502,7 +12773,7 @@ local function prepare_contextchains(tfmdata) else end else - report_prepare("missing lookuptype for lookupname %a",lookupname) + report_prepare("missing lookuptype for lookupname %a",lookuptags[lookupname]) end end end @@ -13374,6 +13645,7 @@ if otf.enhancers.register then otf.enhancers.register("unpack",unpackdata) end otf.enhancers.unpack=unpackdata +otf.enhancers.pack=packdata end -- closure diff --git a/tex/generic/context/luatex/luatex-fonts-otn.lua b/tex/generic/context/luatex/luatex-fonts-otn.lua index 068f0a9b9..831b23350 100644 --- a/tex/generic/context/luatex/luatex-fonts-otn.lua +++ b/tex/generic/context/luatex/luatex-fonts-otn.lua @@ -252,6 +252,7 @@ local currentfont = false local lookuptable = false local anchorlookups = false local lookuptypes = false +local lookuptags = false local handlers = { } local rlmode = 0 local featurevalue = false @@ -306,20 +307,20 @@ end local function cref(kind,chainname,chainlookupname,lookupname,index) -- not in the mood to alias f_ if index then - return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookupname,index) + return formatters["feature %a, chain %a, sub %a, lookup %a, index %a"](kind,chainname,chainlookupname,lookuptags[lookupname],index) elseif lookupname then - return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookupname) + return formatters["feature %a, chain %a, sub %a, lookup %a"](kind,chainname,chainlookupname,lookuptags[lookupname]) elseif chainlookupname then - return formatters["feature %a, chain %a, sub %a"](kind,chainname,chainlookupname) + return formatters["feature %a, chain %a, sub %a"](kind,lookuptags[chainname],lookuptags[chainlookupname]) elseif chainname 
then - return formatters["feature %a, chain %a"](kind,chainname) + return formatters["feature %a, chain %a"](kind,lookuptags[chainname]) else return formatters["feature %a"](kind) end end local function pref(kind,lookupname) - return formatters["feature %a, lookup %a"](kind,lookupname) + return formatters["feature %a, lookup %a"](kind,lookuptags[lookupname]) end -- We can assume that languages that use marks are not hyphenated. We can also assume @@ -1896,7 +1897,7 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq end else local i = 1 - repeat + while true do if skipped then while true do local char = start.char @@ -1937,12 +1938,14 @@ local function normal_handle_contextchain(head,start,kind,chainname,contexts,seq end end end - if start then + if i > nofchainlookups then + break + elseif start then start = start.next else -- weird end - until i > nofchainlookups + end end else local replacements = ck[7] @@ -2139,6 +2142,7 @@ local function featuresprocessor(head,font,attr) anchorlookups = resources.lookup_to_anchor lookuptable = resources.lookups lookuptypes = resources.lookuptypes + lookuptags = resources.lookuptags currentfont = font rlmode = 0 @@ -2734,6 +2738,7 @@ local function prepare_contextchains(tfmdata) local rawdata = tfmdata.shared.rawdata local resources = rawdata.resources local lookuphash = resources.lookuphash + local lookuptags = resources.lookuptags local lookups = rawdata.lookups if lookups then for lookupname, lookupdata in next, rawdata.lookups do @@ -2747,7 +2752,7 @@ local function prepare_contextchains(tfmdata) report_prepare("unsupported format %a",format) elseif not validformat[lookuptype] then -- todo: dejavu-serif has one (but i need to see what use it has) - report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookupname) + report_prepare("unsupported format %a, lookuptype %a, lookupname %a",format,lookuptype,lookuptags[lookupname]) else local contexts = lookuphash[lookupname] if not contexts then @@ -2803,7 +2808,7 @@ local function prepare_contextchains(tfmdata) -- no rules end else - report_prepare("missing lookuptype for lookupname %a",lookupname) + report_prepare("missing lookuptype for lookupname %a",lookuptags[lookupname]) end end end |
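A few self-contained Lua sketches of the patterns introduced in the hunks above may help when reading them. First, the more tolerant table.setmetatableindex in luatex-basics-gen.lua (and the merged file): when the first argument is not a table it is taken to be the index handler, a fresh table is created, and the table is returned so the call can be used inline. The sketch below follows the patched helper; the usage example and its names are only illustrative.

-- sketch of the tolerant helper, following the patched version
local function setmetatableindex(t,f)
    if type(t) ~= "table" then
        f = f or t              -- first argument was the handler (or nil)
        t = { }                 -- so create the table here
    end
    setmetatable(t, { __index = f })
    return t                    -- returning t permits inline use
end

-- usage: an auto-filling cache created in one call
local cache = setmetatableindex(function(t,k)
    local v = k .. "!"          -- compute a value for the missing key
    t[k] = v
    return v
end)
print(cache.hello)              -- hello!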
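The scaling code now derives chr.tounicode from the glyph's unicode field via fonts.mappings.tounicode, which returns UTF-16BE hex digits (one or more 4-digit groups). For reference, the textbook surrogate-pair arithmetic for codepoints beyond the BMP is sketched below; this is a standalone illustration of the encoding, not a copy of the in-tree helper, and the function name is made up.

local format, floor = string.format, math.floor

-- standard UTF-16BE hex encoding of one Unicode scalar value
local function utf16hex(u)
    if u < 0x10000 then
        return format("%04X", u)
    else
        local v  = u - 0x10000
        local hi = 0xD800 + floor(v / 0x400)    -- high (lead) surrogate
        local lo = 0xDC00 + v % 0x400           -- low (trail) surrogate
        return format("%04X%04X", hi, lo)
    end
end

print(utf16hex(0x0041))    -- 0041
print(utf16hex(0x1D49C))   -- D835DC9C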
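Both the AFM loader (addtables) and otf.load now install resources.unicodes as a lazy table: the name-to-codepoint map is only built from the descriptions on the first miss, after which the metatable is removed and the table behaves like a plain one. A reduced sketch of that pattern with made-up sample data:

local function lazyunicodes(descriptions)
    local unicodes = { }
    setmetatable(unicodes, { __index = function(t,k)
        setmetatable(unicodes, nil)          -- build once, then act as a plain table
        for u, d in next, descriptions do
            local n = d.name
            if n then
                t[n] = u
            end
        end
        return rawget(t, k)                  -- nil when the name is really unknown
    end })
    return unicodes
end

-- hypothetical sample data
local descriptions = {
    [0x0041] = { name = "A" },
    [0x0066] = { name = "f" },
}
local unicodes = lazyunicodes(descriptions)
print(unicodes.A)          -- 65, the map is materialized by this first access
print(unicodes.unknown)    -- nil

This also puts the switch to rawget(unicodes,secondname) in finalize_ligatures in context: reading with rawget presumably avoids triggering the full build for ligature names that are only being synthesized at that point.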
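The new "purge names" enhancer removes glyph names that encode nothing beyond the slot itself (uniXXXX, uXXXXX, indexNN, glyphNN, Identity.NN), presumably to keep the cached font data smaller. A reduced sketch of the recognizer, assuming the lpeg library that ships with LuaTeX:

local lpeg = require("lpeg")
local P, R, S, lpegmatch = lpeg.P, lpeg.R, lpeg.S, lpeg.match

-- names like uni0041, u1D49C, index23, glyph7 or Identity.15 are considered bogus
local bogusname   = (P("uni") + P("u")) * R("AF","09")^4
                  + (P("index") + P("glyph") + S("Ii") * P("dentity") * P(".")^0) * R("09")^1
local uselessname = (1 - bogusname)^0 * bogusname    -- a bogus part anywhere in the name

print(lpegmatch(uselessname,"uni0041") ~= nil)   -- true:  such a name gets dropped
print(lpegmatch(uselessname,"f_f_i")   ~= nil)   -- false: a real name is kept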
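Finally, the "compact lookups" enhancer renumbers the long FontForge lookup names to small integers throughout the cached data and stores the swapped table as resources.lookuptags, which is why cref, pref and the report_* helpers now take a lookuptags argument to translate tags back when tracing. The numbering device is an auto-counting index metatable; a reduced sketch with invented lookup names:

-- auto-numbering table: every new key receives the next integer tag
local last = 0
local tags = setmetatable({ }, { __index = function(t,k)
    last = last + 1
    t[k] = last
    return last
end })

-- invented lookup names of the kind FontForge produces
local slookups = {
    ["'liga' Standard Ligatures in Latin lookup 12"] = "f_i",
    ["'kern' Horizontal Kerning in Latin lookup 3"]  = -30,
}

local compacted = { }
for name, data in next, slookups do
    compacted[tags[name]] = data      -- the data now lives under a small integer
end

-- swapped copy (tag -> original name), kept only so tracing can print real names
local lookuptags = { }
for name, tag in next, tags do
    lookuptags[tag] = name
end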