author    | Context Git Mirror Bot <phg42.2a@gmail.com> | 2016-05-07 15:20:36 +0200
committer | Context Git Mirror Bot <phg42.2a@gmail.com> | 2016-05-07 15:20:36 +0200
commit    | 00a715fb30c2765bffae1bc07a9249359aeea38a (patch)
tree      | 70f9bff572256afbb15bf15a0ffaae3708f965c4
parent    | b162038956eefaaa786603e8faceff35016bdc82 (diff)
download  | context-00a715fb30c2765bffae1bc07a9249359aeea38a.tar.gz
2016-05-07 14:41:00
53 files changed, 1771 insertions(+), 1485 deletions(-)
diff --git a/doc/context/documents/general/manuals/luatex.pdf b/doc/context/documents/general/manuals/luatex.pdf Binary files differindex 61ff94bfb..a63c4d917 100644 --- a/doc/context/documents/general/manuals/luatex.pdf +++ b/doc/context/documents/general/manuals/luatex.pdf diff --git a/doc/context/scripts/mkiv/context.html b/doc/context/scripts/mkiv/context.html index 3161eac72..5fbf49d2b 100644 --- a/doc/context/scripts/mkiv/context.html +++ b/doc/context/scripts/mkiv/context.html @@ -82,7 +82,8 @@ <tr><th>--batchmode</th><td></td><td>run without stopping and do not show messages on the console</td></tr> <tr><th>--nonstopmode</th><td></td><td>run without stopping</td></tr> <tr><th>--synctex</th><td></td><td>run with synctex enabled (optional value: zipped, unzipped, 1, -1)</td></tr> - <tr><th>--nodates</th><td></td><td>omit runtime dates in pdf file</td></tr> + <tr><th>--nodates</th><td></td><td>omit runtime dates in pdf file (optional value: a number (this 1970 offset time) or string "YYYY-MM-DD HH:MM")</td></tr> + <tr><th>--nocompression</th><td></td><td>forcefully turns off compression in the backend</td></tr> <tr><th>--trailerid</th><td></td><td>alternative trailer id (or constant one)</td></tr> <tr><th/><td/><td/></tr> <tr><th>--generate</th><td></td><td>generate file database etc. (as luatools does)</td></tr> diff --git a/doc/context/scripts/mkiv/context.man b/doc/context/scripts/mkiv/context.man index f1c552832..c8359e9e8 100644 --- a/doc/context/scripts/mkiv/context.man +++ b/doc/context/scripts/mkiv/context.man @@ -114,7 +114,10 @@ run without stopping run with synctex enabled (optional value: zipped, unzipped, 1, -1) .TP .B --nodates -omit runtime dates in pdf file +omit runtime dates in pdf file (optional value: a number (this 1970 offset time) or string "YYYY-MM-DD HH:MM") +.TP +.B --nocompression +forcefully turns off compression in the backend .TP .B --trailerid alternative trailer id (or constant one) diff --git a/doc/context/scripts/mkiv/context.xml b/doc/context/scripts/mkiv/context.xml index f5a674241..2ba7ee59e 100644 --- a/doc/context/scripts/mkiv/context.xml +++ b/doc/context/scripts/mkiv/context.xml @@ -129,7 +129,10 @@ <short>run with synctex enabled (optional value: zipped, unzipped, 1, -1)</short> </flag> <flag name="nodates"> - <short>omit runtime dates in pdf file</short> + <short>omit runtime dates in pdf file (optional value: a number (this 1970 offset time) or string "YYYY-MM-DD HH:MM")</short> + </flag> + <flag name="nocompression"> + <short>forcefully turns off compression in the backend</short> </flag> <flag name="trailerid"> <short>alternative trailer id (or constant one)</short> diff --git a/doc/context/scripts/mkiv/mtx-context.html b/doc/context/scripts/mkiv/mtx-context.html index 3161eac72..5fbf49d2b 100644 --- a/doc/context/scripts/mkiv/mtx-context.html +++ b/doc/context/scripts/mkiv/mtx-context.html @@ -82,7 +82,8 @@ <tr><th>--batchmode</th><td></td><td>run without stopping and do not show messages on the console</td></tr> <tr><th>--nonstopmode</th><td></td><td>run without stopping</td></tr> <tr><th>--synctex</th><td></td><td>run with synctex enabled (optional value: zipped, unzipped, 1, -1)</td></tr> - <tr><th>--nodates</th><td></td><td>omit runtime dates in pdf file</td></tr> + <tr><th>--nodates</th><td></td><td>omit runtime dates in pdf file (optional value: a number (this 1970 offset time) or string "YYYY-MM-DD HH:MM")</td></tr> + <tr><th>--nocompression</th><td></td><td>forcefully turns off compression in the backend</td></tr> 
<tr><th>--trailerid</th><td></td><td>alternative trailer id (or constant one)</td></tr> <tr><th/><td/><td/></tr> <tr><th>--generate</th><td></td><td>generate file database etc. (as luatools does)</td></tr> diff --git a/doc/context/scripts/mkiv/mtx-context.man b/doc/context/scripts/mkiv/mtx-context.man index f1c552832..c8359e9e8 100644 --- a/doc/context/scripts/mkiv/mtx-context.man +++ b/doc/context/scripts/mkiv/mtx-context.man @@ -114,7 +114,10 @@ run without stopping run with synctex enabled (optional value: zipped, unzipped, 1, -1) .TP .B --nodates -omit runtime dates in pdf file +omit runtime dates in pdf file (optional value: a number (this 1970 offset time) or string "YYYY-MM-DD HH:MM") +.TP +.B --nocompression +forcefully turns off compression in the backend .TP .B --trailerid alternative trailer id (or constant one) diff --git a/doc/context/scripts/mkiv/mtx-context.xml b/doc/context/scripts/mkiv/mtx-context.xml index f5a674241..2ba7ee59e 100644 --- a/doc/context/scripts/mkiv/mtx-context.xml +++ b/doc/context/scripts/mkiv/mtx-context.xml @@ -129,7 +129,10 @@ <short>run with synctex enabled (optional value: zipped, unzipped, 1, -1)</short> </flag> <flag name="nodates"> - <short>omit runtime dates in pdf file</short> + <short>omit runtime dates in pdf file (optional value: a number (this 1970 offset time) or string "YYYY-MM-DD HH:MM")</short> + </flag> + <flag name="nocompression"> + <short>forcefully turns off compression in the backend</short> </flag> <flag name="trailerid"> <short>alternative trailer id (or constant one)</short> diff --git a/doc/context/sources/general/manuals/luatex/luatex-lua.tex b/doc/context/sources/general/manuals/luatex/luatex-lua.tex index 79c4e08ed..8a9c26789 100644 --- a/doc/context/sources/general/manuals/luatex/luatex-lua.tex +++ b/doc/context/sources/general/manuals/luatex/luatex-lua.tex @@ -56,44 +56,32 @@ similar fashion as the other \TEX\ engines. Some options are accepted but have n consequence. 
The following command|-|line options are understood: \starttabulate[|lT|p|] +\NC --credits \NC display credits and exit \NC \NR +\NC --debug-format \NC enable format debugging \NC \NR +\NC --draftmode \NC switch on draft mode i.e.\ generate no output in \PDF\ mode \NC \NR +\NC --[no-]file-line-error \NC disable/enable \type {file:line:error} style messages \NC \NR +\NC --[no-]file-line-error-style \NC aliases of \type {--[no-]file-line-error} \NC \NR \NC --fmt=FORMAT \NC load the format file \type {FORMAT} \NC\NR -\NC --lua=FILE \NC load and execute a \LUA\ initialization script\NC\NR -\NC --safer \NC disable easily exploitable \LUA\ commands \NC\NR -\NC --nosocket \NC disable the \LUA\ socket library \NC\NR +\NC --halt-on-error \NC stop processing at the first error\NC \NR \NC --help \NC display help and exit \NC\NR \NC --ini \NC be \type {iniluatex}, for dumping formats \NC\NR -\NC --interaction=STRING \NC set interaction mode: \type {batchmode}, \type {nonstopmode}, - \type {scrollmode} or \type {errorstopmode} \NC \NR -\NC --halt-on-error \NC stop processing at the first error\NC \NR -\NC --kpathsea-debug=NUMBER \NC set path searching debugging flags according to - the bits of \type {NUMBER} \NC \NR +\NC --interaction=STRING \NC set interaction mode: \type {batchmode}, \type {nonstopmode}, \type {scrollmode} or \type {errorstopmode} \NC \NR +\NC --jobname=STRING \NC set the job name to \type {STRING} \NC \NR +\NC --kpathsea-debug=NUMBER \NC set path searching debugging flags according to the bits of \type {NUMBER} \NC \NR +\NC --lua=FILE \NC load and execute a \LUA\ initialization script \NC\NR +\NC --[no-]mktex=FMT \NC disable/enable \type {mktexFMT} generation with \type {FMT} is \type {tex} or \type {tfm} \NC \NR +\NC --nosocket \NC disable the \LUA\ socket library \NC\NR +\NC --output-comment=STRING \NC use \type {STRING} for \DVI\ file comment instead of date (no effect for \PDF) \NC \NR +\NC --output-directory=DIR \NC use \type {DIR} as the directory to write files to \NC \NR +\NC --output-format=FORMAT \NC use \type {FORMAT} for job output; \type {FORMAT} is \type {dvi} or \type {pdf} \NC \NR \NC --progname=STRING \NC set the program name to \type {STRING} \NC \NR -\NC --version \NC display version and exit \NC \NR -\NC --credits \NC display credits and exit \NC \NR \NC --recorder \NC enable filename recorder \NC \NR -\NC --output-comment=STRING \NC use \type {STRING} for \DVI\ file comment - instead of date (no effect for \PDF) \NC \NR -\NC --output-directory=DIR \NC use \type {DIR} as the directory to write - files to \NC \NR -\NC --draftmode \NC switch on draft mode i.e.\ generate no - output in \PDF\ mode \NC \NR -\NC --output-format=FORMAT \NC use \type {FORMAT} for job output; \type - {FORMAT} is \type {dvi} or \type {pdf} \NC - \NR +\NC --safer \NC disable easily exploitable \LUA\ commands \NC\NR \NC --[no-]shell-escape \NC disable/enable system calls \NC \NR -\NC --enable-write18 \NC enable system calls \NC \NR -\NC --disable-write18 \NC disable system calls \NC \NR -\NC --shell-restricted \NC restrict system calls to a list of commands - given in \type {texmf.cnf} \NC \NR -\NC --debug-format \NC enable format debugging \NC \NR -\NC --[no-]file-line-error \NC disable/enable \type {file:line:error} style - messages \NC \NR -\NC --[no-]file-line-error-style \NC aliases of \type {--[no-]file-line-error} \NC \NR -\NC --jobname=STRING \NC set the job name to \type {STRING} \NC \NR -\NC --[no-]mktex=FMT \NC disable/enable \type {mktexFMT} generation - with \type {FMT} is \type 
{tex} or \type - {tfm} \NC \NR +\NC --shell-restricted \NC restrict system calls to a list of commands given in \type {texmf.cnf} \NC \NR \NC --synctex=NUMBER \NC enable \type {synctex} \NC \NR +\NC --utc \NC use utc times when applicable \NC \NR +\NC --version \NC display version and exit \NC \NR \stoptabulate Some of the traditional flags are just ignored: \type {--etex}, \type @@ -543,6 +531,42 @@ At some point (this also depends on distributions) \LUATEX\ might have these libraries loaded on demand. For this reason you can best use \type {require} to make sure they are loaded. +\section{Testing} + +For development reasons you can influence the used startup date and time. This can +be done in two ways. + +\startitemize[n] + +\startitem + By setting the environmment variable \type {SOURCE_DATE_EPOCH}. This will + influence the \TEX\ parameters \type {time} and \type {date}, the random seed, + the \PDF\ timestamp and the \PDF\ id that is derived from the time as well. This + variable is consulted when the \KPSE\ library is enabled. Resolving is + delegated to this library. +\stopitem + +\startitem + By setting the \type {start_time} variable in the \type {texconfig} table; as + with other variables we use the internal name there. For compatibility + reasons we also honour a \type {SOURCE_DATE_EPOCH} entry. It should be noted + that there are no such variables in other engines and this method is only + relevant in case the while setup happens in \LUA. +\stopitem + +\stopitemize + +When Universal Time is needed, you can pass the flag \type {utc} to the engine. This +property also works when the date and time are set by \LUATEX\ itself. It has a +complementary entry \type {use_utf_time} in the \type {texconfig} table. + +{\em To some extend a cleaner solution would be to have a flag that disables all +variable data in one go (like filenames and so) but we just follow the method +implemented in \PDFTEX\ where primitives are used to influence other properties.} + +{\em In \CONTEXT\ we provide the command line argument \type {--nodates} that +does bit more disabling of dates.} + \stopchapter \stopcomponent diff --git a/doc/context/sources/general/manuals/luatex/luatex-style.tex b/doc/context/sources/general/manuals/luatex/luatex-style.tex index b07190e21..26688abaf 100644 --- a/doc/context/sources/general/manuals/luatex/luatex-style.tex +++ b/doc/context/sources/general/manuals/luatex/luatex-style.tex @@ -198,7 +198,7 @@ withpen pencircle scaled (p/2) withcolor (luaorbitfactor * luaholecolor) ; fill fullcircle scaled r shifted (d+1/8,d+1/8) - rotated luaextraangle + rotated - luaextraangle withcolor luaplanetcolor ; fill fullcircle scaled r shifted (d-1/8,d-1/8) withcolor luaholecolor ; @@ -228,7 +228,7 @@ \startuseMPgraphic{luanumber} % luaextraangle := \luaextraangle; - luaextraangle := if (LastPageNumber == 0) : 0 else : RealPageNumber * 360 / LastPageNumber fi; + luaextraangle := if (LastPageNumber == 0) : 0 else : (RealPageNumber / LastPageNumber) * 360 fi; luaorbitfactor := 0.25 ; picture p ; p := lualogo ; setbounds p to boundingbox fullcircle ; diff --git a/doc/context/sources/general/manuals/luatex/luatex.tex b/doc/context/sources/general/manuals/luatex/luatex.tex index a9524246b..59a969396 100644 --- a/doc/context/sources/general/manuals/luatex/luatex.tex +++ b/doc/context/sources/general/manuals/luatex/luatex.tex @@ -3,6 +3,8 @@ % \tex vs \type vs \syntax vs. 
\luatex % \em \it \/ +% "context --nodates --nocompression luatex" can be used for comparison runs + \environment luatex-style \environment luatex-logos diff --git a/scripts/context/lua/mtx-context.lua b/scripts/context/lua/mtx-context.lua index dce9170cc..26d7e008b 100644 --- a/scripts/context/lua/mtx-context.lua +++ b/scripts/context/lua/mtx-context.lua @@ -572,29 +572,30 @@ function scripts.context.run(ctxdata,filename) return end -- - local a_mkii = getargument("mkii") or getargument("pdftex") or getargument("xetex") - local a_purge = getargument("purge") - local a_purgeall = getargument("purgeall") - local a_purgeresult = getargument("purgeresult") - local a_global = getargument("global") - local a_timing = getargument("timing") - local a_profile = getargument("profile") - local a_batchmode = getargument("batchmode") - local a_nonstopmode = getargument("nonstopmode") - local a_scollmode = getargument("scrollmode") - local a_once = getargument("once") - local a_synctex = getargument("synctex") - local a_backend = getargument("backend") - local a_arrange = getargument("arrange") - local a_noarrange = getargument("noarrange") - local a_jiton = getargument("jiton") - local a_jithash = getargument("jithash") - local a_texformat = getargument("texformat") - local a_keeptuc = getargument("keeptuc") - local a_keeplog = getargument("keeplog") - local a_export = getargument("export") - local a_nodates = getargument("nodates") - local a_trailerid = getargument("trailerid") + local a_mkii = getargument("mkii") or getargument("pdftex") or getargument("xetex") + local a_purge = getargument("purge") + local a_purgeall = getargument("purgeall") + local a_purgeresult = getargument("purgeresult") + local a_global = getargument("global") + local a_timing = getargument("timing") + local a_profile = getargument("profile") + local a_batchmode = getargument("batchmode") + local a_nonstopmode = getargument("nonstopmode") + local a_scollmode = getargument("scrollmode") + local a_once = getargument("once") + local a_synctex = getargument("synctex") + local a_backend = getargument("backend") + local a_arrange = getargument("arrange") + local a_noarrange = getargument("noarrange") + local a_jiton = getargument("jiton") + local a_jithash = getargument("jithash") + local a_texformat = getargument("texformat") + local a_keeptuc = getargument("keeptuc") + local a_keeplog = getargument("keeplog") + local a_export = getargument("export") + local a_nodates = getargument("nodates") + local a_trailerid = getargument("trailerid") + local a_nocompression = getargument("nocompression") -- the following flag is not officially supported because i cannot forsee -- side effects (so no bug reports please) .. 
we provide --sandbox that @@ -712,16 +713,17 @@ function scripts.context.run(ctxdata,filename) local maxnofruns = once and 1 or multipass_nofruns -- local c_flags = { - directives = directives, -- gets passed via mtxrun - trackers = trackers, -- gets passed via mtxrun - experiments = experiments, -- gets passed via mtxrun + directives = directives, -- gets passed via mtxrun + trackers = trackers, -- gets passed via mtxrun + experiments = experiments, -- gets passed via mtxrun -- - result = validstring(resultname), - input = validstring(getargument("input") or filename), -- alternative input - fulljobname = validstring(filename), - files = concat(files,","), - ctx = validstring(ctxname), - export = a_export and true or nil, + result = validstring(resultname), + input = validstring(getargument("input") or filename), -- alternative input + fulljobname = validstring(filename), + files = concat(files,","), + ctx = validstring(ctxname), + export = a_export and true or nil, + nocompression = a_nocompression and true or nil, } -- for k, v in next, environment.arguments do @@ -758,7 +760,7 @@ function scripts.context.run(ctxdata,filename) local directives = { } -- if a_nodates then - directives[#directives+1] = "backend.nodates" + directives[#directives+1] = format("backend.date=%s",type(a_nodates) == "string" and a_nodates or " no") end -- if a_trailerid then diff --git a/scripts/context/lua/mtx-context.xml b/scripts/context/lua/mtx-context.xml index f5a674241..2ba7ee59e 100644 --- a/scripts/context/lua/mtx-context.xml +++ b/scripts/context/lua/mtx-context.xml @@ -129,7 +129,10 @@ <short>run with synctex enabled (optional value: zipped, unzipped, 1, -1)</short> </flag> <flag name="nodates"> - <short>omit runtime dates in pdf file</short> + <short>omit runtime dates in pdf file (optional value: a number (this 1970 offset time) or string "YYYY-MM-DD HH:MM")</short> + </flag> + <flag name="nocompression"> + <short>forcefully turns off compression in the backend</short> </flag> <flag name="trailerid"> <short>alternative trailer id (or constant one)</short> diff --git a/scripts/context/lua/mtx-patterns.lua b/scripts/context/lua/mtx-patterns.lua index 716fed281..b3f5f5bb1 100644 --- a/scripts/context/lua/mtx-patterns.lua +++ b/scripts/context/lua/mtx-patterns.lua @@ -441,13 +441,13 @@ function scripts.patterns.save(destination,mnemonic,name,patternsnew,hyphenation if nofpatternsnew > 0 then local data = concat(patternsnew," ") patterndata = { - n = nofpatternsnew, - compression = compression, - length = #data, - data = compression and zlib.compress(data,9) or data, - characters = concat(table.sortedkeys(pusednew),""), - minhyphenmin = 1, -- determined by pattern author - minhyphenmax = 1, -- determined by pattern author + n = nofpatternsnew, + compression = compression, + length = #data, + data = compression and zlib.compress(data,9) or data, + characters = concat(table.sortedkeys(pusednew),""), + lefthyphenmin = 1, -- determined by pattern author + righthyphenmax = 1, -- determined by pattern author } else patterndata = { diff --git a/scripts/context/lua/mtx-plain.lua b/scripts/context/lua/mtx-plain.lua index 949b57952..347f63f1d 100644 --- a/scripts/context/lua/mtx-plain.lua +++ b/scripts/context/lua/mtx-plain.lua @@ -12,6 +12,8 @@ if not modules then modules = { } end modules ['mtx-plain'] = { -- instead of kpse here, just like with the font database code (as that -- one also works with kpse runtime) +local format = string.format + local helpinfo = [[ <?xml version="1.0"?> <application> @@ -46,14 
+48,18 @@ local report = application.report scripts = scripts or { } scripts.plain = scripts.plain or { } +local passed_options = table.tohash { + "utc" +} + local function execute(...) - local command = string.format(...) + local command = format(...) report("running command %a\n",command) return os.execute(command) end local function resultof(...) - local command = string.format(...) + local command = format(...) report("running command %a",command) local result = os.resultof(command) or "" result = string.gsub(result,"[\n\r]+","") @@ -72,7 +78,7 @@ function scripts.plain.make(texengine,texformat) report("using path expansion %a",fmtpathspec) else report("no valid path reported, trying alternative") --- fmtpathspec = resultof("kpsewhich --show-path=fmt --engine=%s",texengine) + -- fmtpathspec = resultof("kpsewhich --show-path=fmt --engine=%s",texengine) if fmtpathspec ~= "" then report("using path expansion %a",fmtpathspec) else @@ -108,7 +114,13 @@ end function scripts.plain.run(texengine,texformat,filename) local t = { } for k, v in next, environment.arguments do - t[#t+1] = string.format("--mtx:%s=%s",k,v) + local m = passed_options[k] and "" or "mtx:" + if type(v) == "string" and v ~= "" then + v = format("--%s%s=%s",m,k,v) + elseif v then + v = format("--%s%s",m,k) + end + t[#t+1] = v end execute('%s --fmt=%s %s "%s"',texengine,file.removesuffix(texformat),table.concat(t," "),filename) end diff --git a/tex/context/base/context-version.pdf b/tex/context/base/context-version.pdf Binary files differindex a1a04de92..4cae0a462 100644 --- a/tex/context/base/context-version.pdf +++ b/tex/context/base/context-version.pdf diff --git a/tex/context/base/mkiv/back-pdf.lua b/tex/context/base/mkiv/back-pdf.lua index 0821abb28..323f1d57f 100644 --- a/tex/context/base/mkiv/back-pdf.lua +++ b/tex/context/base/mkiv/back-pdf.lua @@ -25,6 +25,8 @@ local scankeyword = scanners.keyword local scanners = interfaces.scanners local implement = interfaces.implement +local report = logs.reporter("backend") + local outputfilename function codeinjections.getoutputfilename() @@ -145,6 +147,15 @@ scanners.pdfstartmirroring = function() context(pdfsetmatrix(-1,0,0,1)) end +if environment.arguments.nocompression then + pdf.setcompresslevel(0) + pdf.setobjcompresslevel(0) + function pdf.setcompresslevel() + -- blocked from now on + end + pdf.setobjcompresslevel = pdf.setcompresslevel +end + scanners.pdfstopmirroring = scanners.pdfstartmirroring -- todo, change the above to implement too -- diff --git a/tex/context/base/mkiv/char-ini.lua b/tex/context/base/mkiv/char-ini.lua index ad53cae8f..63328a177 100644 --- a/tex/context/base/mkiv/char-ini.lua +++ b/tex/context/base/mkiv/char-ini.lua @@ -1083,8 +1083,8 @@ if not characters.lhash then lhash[utfchar(k)] = utfchar(l) elseif #l == 2 then lhash[utfchar(k)] = utfchar(l[1]) .. utfchar(l[2]) - else - inspect(v) + -- else + -- inspect(v) end else local u = v.uccode @@ -1094,8 +1094,8 @@ if not characters.lhash then uhash[utfchar(k)] = utfchar(u) elseif #u == 2 then uhash[utfchar(k)] = utfchar(u[1]) .. utfchar(u[2]) - else - inspect(v) + -- else + -- inspect(v) end end end @@ -1105,8 +1105,8 @@ if not characters.lhash then shash[utfchar(k)] = utfchar(s) elseif #s == 2 then shash[utfchar(k)] = utfchar(s[1]) .. 
utfchar(s[2]) - else - inspect(v) + -- else + -- inspect(v) end end -- end diff --git a/tex/context/base/mkiv/cont-new.mkiv b/tex/context/base/mkiv/cont-new.mkiv index 3e5ff1ffc..975d8dc5c 100644 --- a/tex/context/base/mkiv/cont-new.mkiv +++ b/tex/context/base/mkiv/cont-new.mkiv @@ -11,7 +11,7 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. -\newcontextversion{2016.05.01 09:52} +\newcontextversion{2016.05.07 14:37} %D This file is loaded at runtime, thereby providing an excellent place for %D hacks, patches, extensions and new features. diff --git a/tex/context/base/mkiv/context-todo.tex b/tex/context/base/mkiv/context-todo.tex index 0674ad9fb..66889c4d3 100644 --- a/tex/context/base/mkiv/context-todo.tex +++ b/tex/context/base/mkiv/context-todo.tex @@ -18,6 +18,13 @@ \startitem add \type {--output-filename} for \PDF\ filename \stopitem + \startitem + more consistent \type {lang_variables} and \type {tex_language} in \type + {texlang.w} and also store the \type {*mins} + \stopitem + \startitem + get rid of \type {temp} node in hyphenator i.e. postpone to when needed + \stopitem \stopitemize \subsubject{\CONTEXT} @@ -33,7 +40,7 @@ play with box attributes \stopitem \startitem - check consistency between foonotes and running text (main color, + check consistency between footnotes and running text (main color, styles, properties) \stopitem \startitem diff --git a/tex/context/base/mkiv/context.mkiv b/tex/context/base/mkiv/context.mkiv index 7ec285ed1..ab8eafd4d 100644 --- a/tex/context/base/mkiv/context.mkiv +++ b/tex/context/base/mkiv/context.mkiv @@ -39,7 +39,7 @@ %D up and the dependencies are more consistent. \edef\contextformat {\jobname} -\edef\contextversion{2016.05.01 09:52} +\edef\contextversion{2016.05.07 14:37} \edef\contextkind {beta} %D For those who want to use this: diff --git a/tex/context/base/mkiv/core-con.lua b/tex/context/base/mkiv/core-con.lua index bfe5357a6..6913ac569 100644 --- a/tex/context/base/mkiv/core-con.lua +++ b/tex/context/base/mkiv/core-con.lua @@ -19,8 +19,9 @@ slower but look nicer this way.</p> local floor, date, time, concat = math.floor, os.date, os.time, table.concat local lower, upper, rep, match, gsub = string.lower, string.upper, string.rep, string.match, string.gsub local utfchar, utfbyte = utf.char, utf.byte -local tonumber, tostring = tonumber, tostring -local P, C, Cs, lpegmatch = lpeg.P, lpeg.C, lpeg.Cs, lpeg.match +local tonumber, tostring, type, rawset = tonumber, tostring, type, rawset +local P, S, R, Cc, Cf, Cg, Ct, Cs, C = lpeg.P, lpeg.S, lpeg.R, lpeg.Cc, lpeg.Cf, lpeg.Cg, lpeg.Ct, lpeg.Cs, lpeg.C +local lpegmatch = lpeg.match local context = context local commands = commands @@ -33,6 +34,8 @@ local formatters = string.formatters local variables = interfaces.variables local constants = interfaces.constants +local texset = tex.set + converters = converters or { } local converters = converters @@ -1355,3 +1358,39 @@ implement { actions = { formatters["U+%05X"], context }, arguments = "integer" } + +local n = lpeg.R("09")^1 / tonumber + +local p = Cf( Ct("") + * Cg(Cc("year") * (n )) * P("-")^-1 + * Cg(Cc("month") * (n + Cc( 1))) * P("-")^-1 + * Cg(Cc("day") * (n + Cc( 1))) * lpeg.patterns.whitespace^-1 + * Cg(Cc("hour") * (n + Cc( 0))) * P(":")^-1 + * Cg(Cc("min") * (n + Cc( 0))) + , rawset) + +function converters.totime(s) + if not s then + return + elseif type(s) == "table" then + return s + elseif type(s) == "string" then + return lpegmatch(p,s) + end + local n = tonumber(s) + if n and n >= 0 then + return 
date("*t",n) + end +end + +function converters.settime(t) + if type(t) ~= "table" then + t = converters.totime(t) + end + if t then + texset("year", t.year or 1000) + texset("month", t.month or 1) + texset("day", t.day or 1) + texset("time", (t.hour or 0) * 60 + (t.min or 0)) + end +end diff --git a/tex/context/base/mkiv/font-chk.lua b/tex/context/base/mkiv/font-chk.lua index 5b1ad9920..15291052f 100644 --- a/tex/context/base/mkiv/font-chk.lua +++ b/tex/context/base/mkiv/font-chk.lua @@ -437,7 +437,7 @@ local function adddummysymbols(tfmdata,...) -- end end -registerotffeature { +local dummies_specification = { name = "dummies", description = "dummy symbols", default = true, @@ -447,15 +447,8 @@ registerotffeature { } } -registerafmfeature { - name = "dummies", - description = "dummy symbols", - default = true, - manipulators = { - base = adddummysymbols, - node = adddummysymbols, - } -} +registerotffeature(dummies_specification) +registerafmfeature(dummies_specification) -- callback.register("char_exists",function(f,c) -- to slow anyway as called often so we should flag in tfmdata -- return true diff --git a/tex/context/base/mkiv/font-enh.lua b/tex/context/base/mkiv/font-enh.lua index 3439a434a..f3209f5ee 100644 --- a/tex/context/base/mkiv/font-enh.lua +++ b/tex/context/base/mkiv/font-enh.lua @@ -173,7 +173,7 @@ local function initializeunicoding(tfmdata) end end -registerafmfeature { +local unicoding_specification = { name = "unicoding", description = "adapt unicode table", initializers = { @@ -186,15 +186,5 @@ registerafmfeature { -- } } -registerotffeature { - name = "unicoding", - description = "adapt unicode table", - initializers = { - base = initializeunicoding, - node = initializeunicoding, - }, - -- manipulators = { - -- base = finalizeunicoding, - -- node = finalizeunicoding, - -- } -} +registerotffeature(unicoding_specification) +registerafmfeature(unicoding_specification) diff --git a/tex/context/base/mkiv/font-ext.lua b/tex/context/base/mkiv/font-ext.lua index 79144aa70..189a588f1 100644 --- a/tex/context/base/mkiv/font-ext.lua +++ b/tex/context/base/mkiv/font-ext.lua @@ -149,7 +149,7 @@ local function initializeexpansion(tfmdata,value) end end -registerotffeature { +local expansion_specification = { name = "expansion", description = "apply hz optimization", initializers = { @@ -158,14 +158,8 @@ registerotffeature { } } -registerafmfeature { - name = "expansion", - description = "apply hz optimization", - initializers = { - base = initializeexpansion, - node = initializeexpansion, - } -} +registerotffeature(expansion_specification) +registerafmfeature(expansion_specification) fonts.goodies.register("expansions", function(...) return fonts.goodies.report("expansions", trace_expansion, ...) end) @@ -466,7 +460,7 @@ local function initializeprotrusion(tfmdata,value) end end -registerotffeature { +local protrusion_specification = { name = "protrusion", description = "l/r margin character protrusion", initializers = { @@ -475,14 +469,8 @@ registerotffeature { } } -registerafmfeature { - name = "protrusion", - description = "shift characters into the left and or right margin", - initializers = { - base = initializeprotrusion, - node = initializeprotrusion, - } -} +registerotffeature(protrusion_specification) +registerafmfeature(protrusion_specification) fonts.goodies.register("protrusions", function(...) return fonts.goodies.report("protrusions", trace_protrusion, ...) 
end) @@ -534,7 +522,7 @@ local function initializeitlc(tfmdata,value) -- hm, always value end end -registerotffeature { +local italic_specification = { name = "itlc", description = "italic correction", initializers = { @@ -543,20 +531,14 @@ registerotffeature { } } -registerafmfeature { - name = "itlc", - description = "italic correction", - initializers = { - base = initializeitlc, - node = initializeitlc, - } -} +registerotffeature(italic_specification) +registerafmfeature(italic_specification) local function initializetextitalics(tfmdata,value) -- yes no delay tfmdata.properties.textitalics = toboolean(value) end -registerotffeature { +local textitalics_specification = { name = "textitalics", description = "use alternative text italic correction", initializers = { @@ -565,20 +547,14 @@ registerotffeature { } } -registerafmfeature { - name = "textitalics", - description = "use alternative text italic correction", - initializers = { - base = initializetextitalics, - node = initializetextitalics, - } -} +registerotffeature(textitalics_specification) +registerafmfeature(textitalics_specification) local function initializemathitalics(tfmdata,value) -- yes no delay tfmdata.properties.mathitalics = toboolean(value) end -registerotffeature { +local mathitalics_specification = { name = "mathitalics", description = "use alternative math italic correction", initializers = { @@ -587,14 +563,8 @@ registerotffeature { } } -registerafmfeature { - name = "mathitalics", - description = "use alternative math italic correction", - initializers = { - base = initializemathitalics, - node = initializemathitalics, - } -} +registerotffeature(mathitalics_specification) +registerafmfeature(mathitalics_specification) -- slanting @@ -610,7 +580,7 @@ local function initializeslant(tfmdata,value) tfmdata.parameters.slantfactor = value end -registerotffeature { +local slant_specification = { name = "slant", description = "slant glyphs", initializers = { @@ -619,14 +589,8 @@ registerotffeature { } } -registerafmfeature { - name = "slant", - description = "slant glyphs", - initializers = { - base = initializeslant, - node = initializeslant, - } -} +registerotffeature(slant_specification) +registerafmfeature(slant_specification) local function initializeextend(tfmdata,value) value = tonumber(value) @@ -640,7 +604,7 @@ local function initializeextend(tfmdata,value) tfmdata.parameters.extendfactor = value end -registerotffeature { +local extend_specification = { name = "extend", description = "scale glyphs horizontally", initializers = { @@ -649,14 +613,8 @@ registerotffeature { } } -registerafmfeature { - name = "extend", - description = "scale glyphs horizontally", - initializers = { - base = initializeextend, - node = initializeextend, - } -} +registerotffeature(extend_specification) +registerafmfeature(extend_specification) -- For Wolfgang Schuster: -- @@ -745,7 +703,7 @@ local function manipulatedimensions(tfmdata,key,value) end end -registerotffeature { +local dimensions_specification = { name = "dimensions", description = "force dimensions", manipulators = { @@ -754,6 +712,9 @@ registerotffeature { } } +registerotffeature(dimensions_specification) +registerafmfeature(dimensions_specification) + -- for zhichu chen (see mailing list archive): we might add a few more variants -- in due time -- diff --git a/tex/context/base/mkiv/font-fbk.lua b/tex/context/base/mkiv/font-fbk.lua index 9ef0706d2..3734e8071 100644 --- a/tex/context/base/mkiv/font-fbk.lua +++ b/tex/context/base/mkiv/font-fbk.lua @@ -251,7 +251,7 @@ local 
function composecharacters(tfmdata) end end -registerotffeature { +local compose_specification = { name = "compose", description = "additional composed characters", manipulators = { @@ -260,14 +260,8 @@ registerotffeature { } } -registerafmfeature { - name = "compose", - description = "additional composed characters", - manipulators = { - base = composecharacters, - node = composecharacters, - } -} +registerotffeature(compose_specification) +registerafmfeature(compose_specification) vf.helpers.composecharacters = composecharacters diff --git a/tex/context/base/mkiv/font-gbn.lua b/tex/context/base/mkiv/font-gbn.lua index daa072b4b..a02406b75 100644 --- a/tex/context/base/mkiv/font-gbn.lua +++ b/tex/context/base/mkiv/font-gbn.lua @@ -126,17 +126,19 @@ function nodes.handlers.nodepass(head) local variant = hash[getchar(p)] if variant then setchar(p,variant) - if not redundant then - redundant = { n } - else - redundant[#redundant+1] = n - end end end end + -- per generic user request we always remove selectors + if not redundant then + redundant = { n } + else + redundant[#redundant+1] = n + end end end end + local nofbasefonts = #basefonts if redundant then for i=1,#redundant do local r = redundant[i] @@ -147,8 +149,8 @@ function nodes.handlers.nodepass(head) else setlink(p,n) end - if b > 0 then - for i=1,b do + if nofbasefonts > 0 then + for i=1,nofbasefonts do local bi = basefonts[i] if r == bi[1] then bi[1] = n @@ -192,8 +194,8 @@ function nodes.handlers.nodepass(head) end end end - if basemodepass and #basefonts > 0 then - for i=1,#basefonts do + if basemodepass and nofbasefonts > 0 then + for i=1,nofbasefonts do local range = basefonts[i] local start = range[1] local stop = range[2] diff --git a/tex/context/base/mkiv/font-gds.lua b/tex/context/base/mkiv/font-gds.lua index 23bbad42f..52bb9c983 100644 --- a/tex/context/base/mkiv/font-gds.lua +++ b/tex/context/base/mkiv/font-gds.lua @@ -465,9 +465,9 @@ local function setextensions(tfmdata) end end --- installation (collected to keep the overview) -- also for type 1 +-- installation -registerotffeature { +local goodies_specification = { name = "goodies", description = "goodies on top of built in features", initializers = { @@ -477,6 +477,12 @@ registerotffeature { } } +registerotffeature(goodies_specification) +registerafmfeature(goodies_specification) +registertfmfeature(goodies_specification) + +-- maybe more of the following could be for type one too + registerotffeature { name = "extrafeatures", description = "extra features", @@ -527,30 +533,6 @@ registerotffeature { } } --- afm - -registerafmfeature { - name = "goodies", - description = "goodies on top of built in features", - initializers = { - position = 1, - base = setgoodies, - node = setgoodies, - } -} - --- tfm - -registertfmfeature { - name = "goodies", - description = "goodies on top of built in features", - initializers = { - position = 1, - base = setgoodies, - node = setgoodies, - } -} - -- experiment, we have to load the definitions immediately as they precede -- the definition so they need to be initialized in the typescript diff --git a/tex/context/base/mkiv/font-lib.mkvi b/tex/context/base/mkiv/font-lib.mkvi index 7bb042a59..90de409d1 100644 --- a/tex/context/base/mkiv/font-lib.mkvi +++ b/tex/context/base/mkiv/font-lib.mkvi @@ -36,9 +36,6 @@ \registerctxluafile{font-tfm}{1.001} -\registerctxluafile{font-afm}{1.001} -\registerctxluafile{font-afk}{1.001} - \registerctxluafile{font-hsh}{1.001} % hashes used by context \registerctxluafile{font-nod}{1.001} @@ -58,6 +55,12 
@@ \registerctxluafile{font-oth}{1.001} \registerctxluafile{font-osd}{1.001} +% we use otf code for type one + +\registerctxluafile{font-one}{1.001} +%registerctxluafile{font-afm}{1.001} +\registerctxluafile{font-afk}{1.001} + % so far \registerctxluafile{font-pat}{1.001} % patchers diff --git a/tex/context/base/mkiv/font-otc.lua b/tex/context/base/mkiv/font-otc.lua index 871b6f13c..be9ba8660 100644 --- a/tex/context/base/mkiv/font-otc.lua +++ b/tex/context/base/mkiv/font-otc.lua @@ -61,8 +61,6 @@ local function addfeature(data,feature,specifications) end -- feature has to be unique but the name entry wins eventually - -- todo alse gpos - local fontfeatures = resources.features or everywhere local unicodes = resources.unicodes local splitter = lpeg.splitter(" ",unicodes) @@ -529,6 +527,7 @@ end otf.enhancers.addfeature = addfeature local extrafeatures = { } +local knownfeatures = { } function otf.addfeature(name,specification) if type(name) == "table" then @@ -536,16 +535,31 @@ function otf.addfeature(name,specification) name = specification.name end if type(name) == "string" then - extrafeatures[name] = specification + local slot = knownfeatures[name] + if slot then + -- we overload one + else + slot = #extrafeatures + 1 + knownfeatures[name] = slot + end + specification.name = name -- to be sure + extrafeatures[slot] = specification end end +-- for feature, specification in next, extrafeatures do +-- addfeature(data,feature,specification) +-- end + local function enhance(data,filename,raw) - for feature, specification in next, extrafeatures do - addfeature(data,feature,specification) + for slot=1,#extrafeatures do + local specification = extrafeatures[slot] + addfeature(data,specification.name,specification) end end +-- otf.enhancers.enhance = enhance + otf.enhancers.register("check extra features",enhance) -- tlig -- @@ -574,6 +588,7 @@ local tlig_specification = { otf.addfeature("tlig",tlig_specification) registerotffeature { + -- this makes it a known feature (in tables) name = 'tlig', description = 'tex ligatures', } @@ -598,6 +613,7 @@ local trep_specification = { otf.addfeature("trep",trep_specification) registerotffeature { + -- this makes it a known feature (in tables) name = 'trep', description = 'tex replacements', } @@ -699,6 +715,7 @@ local anum_specification = { otf.addfeature("anum",anum_specification) -- todo: only when there is already an arab script feature registerotffeature { + -- this makes it a known feature (in tables) name = 'anum', description = 'arabic digits', } diff --git a/tex/context/base/mkiv/font-otj.lua b/tex/context/base/mkiv/font-otj.lua index 6ff80d88d..b65a9db66 100644 --- a/tex/context/base/mkiv/font-otj.lua +++ b/tex/context/base/mkiv/font-otj.lua @@ -1255,11 +1255,11 @@ local function inject_everything(head,where) insert_node_after(pre,n,newkern(rightkern)) done = true end - end - if hasmarks then - local pm = i.markbasenode - if pm then - processmark(pm,current,i) + if hasmarks then + local pm = i.markbasenode + if pm then + processmark(pm,current,i) + end end end end @@ -1287,11 +1287,11 @@ local function inject_everything(head,where) insert_node_after(post,n,newkern(rightkern)) done = true end - end - if hasmarks then - local pm = i.markbasenode - if pm then - processmark(pm,current,i) + if hasmarks then + local pm = i.markbasenode + if pm then + processmark(pm,current,i) + end end end end @@ -1319,11 +1319,11 @@ local function inject_everything(head,where) insert_node_after(replace,n,newkern(rightkern)) done = true end - end - if hasmarks then - 
local pm = i.markbasenode - if pm then - processmark(pm,current,i) + if hasmarks then + local pm = i.markbasenode + if pm then + processmark(pm,current,i) + end end end end diff --git a/tex/context/base/mkiv/font-otl.lua b/tex/context/base/mkiv/font-otl.lua index f7b6eb5ae..01342a9be 100644 --- a/tex/context/base/mkiv/font-otl.lua +++ b/tex/context/base/mkiv/font-otl.lua @@ -101,6 +101,12 @@ registerdirective("fonts.otf.loader.forcenotdef", function(v) forcenotdef = -- end -- end +-- Enhancers are used to apply fixes and extensions to fonts. For instance, we use them +-- to implement tlig and trep features. They are not neccessarily bound to opentype +-- fonts but can also apply to type one fonts, given that they obey the structure of an +-- opentype font. They are not to be confused with format specific features but maybe +-- some are so generic that they might eventually move to this mechanism. + local ordered_enhancers = { "check extra features", } @@ -302,7 +308,7 @@ function otf.load(filename,sub,featurefile) -- second argument (format) is gone -- enhancers.apply(data,filename,data) -- - constructors.addcoreunicodes(unicodes) + -- constructors.addcoreunicodes(data.resources.unicodes) -- still needed ? -- if applyruntimefixes then applyruntimefixes(filename,data) diff --git a/tex/context/base/mkiv/font-oto.lua b/tex/context/base/mkiv/font-oto.lua index b7ee717c9..23beba787 100644 --- a/tex/context/base/mkiv/font-oto.lua +++ b/tex/context/base/mkiv/font-oto.lua @@ -450,3 +450,5 @@ registerotffeature { base = featuresinitializer, } } + +otf.basemodeinitializer = featuresinitializer diff --git a/tex/context/base/mkiv/font-ots.lua b/tex/context/base/mkiv/font-ots.lua index 21225c227..c173de2be 100644 --- a/tex/context/base/mkiv/font-ots.lua +++ b/tex/context/base/mkiv/font-ots.lua @@ -3652,6 +3652,9 @@ registerotffeature { } } +otf.nodemodeinitializer = featuresinitializer +otf.featuresprocessor = featuresprocessor + -- This can be used for extra handlers, but should be used with care! 
otf.handlers = handlers -- used in devanagari diff --git a/tex/context/base/mkiv/lang-def.mkiv b/tex/context/base/mkiv/lang-def.mkiv index e4906096e..5e40a33b0 100644 --- a/tex/context/base/mkiv/lang-def.mkiv +++ b/tex/context/base/mkiv/lang-def.mkiv @@ -341,7 +341,7 @@ \c!rightquotation=\upperrightdoubleninequote, \c!date={\v!year,.,\space,\v!month,\space,\v!day,.}] -\installlanguage [\s!finish] [\s!fi] +\installlanguage [\s!finnish] [\s!fi] \installlanguage [\s!hungarian] [\s!hu] % Altaic Languages: Uigur, Uzbek, Azeri/Azerbaijani, Chuvash, diff --git a/tex/context/base/mkiv/lang-ini.lua b/tex/context/base/mkiv/lang-ini.lua index eb7e32b89..00fdb3f09 100644 --- a/tex/context/base/mkiv/lang-ini.lua +++ b/tex/context/base/mkiv/lang-ini.lua @@ -40,8 +40,8 @@ local prehyphenchar = lang.prehyphenchar -- global per language local posthyphenchar = lang.posthyphenchar -- global per language local preexhyphenchar = lang.preexhyphenchar -- global per language local postexhyphenchar = lang.postexhyphenchar -- global per language -local lefthyphenmin = lang.lefthyphenmin -local righthyphenmin = lang.righthyphenmin +----- lefthyphenmin = lang.lefthyphenmin +----- righthyphenmin = lang.righthyphenmin local sethjcode = lang.sethjcode local uccodes = characters.uccodes @@ -344,8 +344,8 @@ function languages.prehyphenchar (what) return prehyphenchar (tolang(what)) function languages.posthyphenchar (what) return posthyphenchar (tolang(what)) end function languages.preexhyphenchar (what) return preexhyphenchar (tolang(what)) end function languages.postexhyphenchar(what) return postexhyphenchar(tolang(what)) end -function languages.lefthyphenmin (what) return lefthyphenmin (tolang(what)) end -function languages.righthyphenmin (what) return righthyphenmin (tolang(what)) end +-------- languages.lefthyphenmin (what) return lefthyphenmin (tolang(what)) end +-------- languages.righthyphenmin (what) return righthyphenmin (tolang(what)) end -- e['implementer']= 'imple{m}{-}{-}menter' -- e['manual'] = 'man{}{}{}' diff --git a/tex/context/base/mkiv/lang-ini.mkiv b/tex/context/base/mkiv/lang-ini.mkiv index 8ff41b287..214ce8ca3 100644 --- a/tex/context/base/mkiv/lang-ini.mkiv +++ b/tex/context/base/mkiv/lang-ini.mkiv @@ -518,7 +518,7 @@ \let\dohyphens\relax -\unexpanded\def\lang_basics_synchronize_min_max +\unexpanded\def\lang_basics_synchronize_min_max % maybe store this at the lua end {% these values are stored along with glyph nodes \lefthyphenmin \numexpr0\languageparameter\s!lefthyphenmin +\hyphenminoffset\relax \righthyphenmin\numexpr0\languageparameter\s!righthyphenmin+\hyphenminoffset\relax diff --git a/tex/context/base/mkiv/lpdf-ini.lua b/tex/context/base/mkiv/lpdf-ini.lua index 2149f4729..f0b919d4e 100644 --- a/tex/context/base/mkiv/lpdf-ini.lua +++ b/tex/context/base/mkiv/lpdf-ini.lua @@ -1059,13 +1059,30 @@ do return timestamp end + function lpdf.settime(n) + if n then + n = converters.totime(n) + if n then + converters.settime(n) + timestamp = os.date("%Y-%m-%dT%X",os.time(n)) .. 
os.timezone(true) + end + end + return timestamp + end + + lpdf.settime(tonumber(resolvers.variable("start_time")) or tonumber(resolvers.variable("SOURCE_DATE_EPOCH"))) -- bah + function lpdf.pdftimestamp(str) local Y, M, D, h, m, s, Zs, Zh, Zm = match(str,"^(%d%d%d%d)%-(%d%d)%-(%d%d)T(%d%d):(%d%d):(%d%d)([%+%-])(%d%d):(%d%d)$") return Y and format("D:%s%s%s%s%s%s%s%s'%s'",Y,M,D,h,m,s,Zs,Zh,Zm) end - function lpdf.id() - return format("%s.%s",tex.jobname,timestamp) + function lpdf.id(nodate) + if nodate then + return tex.jobname + else + return format("%s.%s",tex.jobname,timestamp) + end end end diff --git a/tex/context/base/mkiv/lpdf-xmp.lua b/tex/context/base/mkiv/lpdf-xmp.lua index b8170319c..c8b86d384 100644 --- a/tex/context/base/mkiv/lpdf-xmp.lua +++ b/tex/context/base/mkiv/lpdf-xmp.lua @@ -41,7 +41,7 @@ local mapping = { ["ConTeXt.Time"] = { "date", "rdf:Description/pdfx:ConTeXt.Time" }, ["ConTeXt.Url"] = { "context", "rdf:Description/pdfx:ConTeXt.Url" }, ["ConTeXt.Version"] = { "context", "rdf:Description/pdfx:ConTeXt.Version" }, - ["ID"] = { "date", "rdf:Description/pdfx:ID" }, -- has date + ["ID"] = { "id", "rdf:Description/pdfx:ID" }, -- has date ["PTEX.Fullbanner"] = { "metadata","rdf:Description/pdfx:PTEX.Fullbanner" }, -- Adobe PDF schema ["Keywords"] = { "metadata","rdf:Description/pdf:Keywords" }, @@ -60,8 +60,8 @@ local mapping = { ["ModDate"] = { "date", "rdf:Description/xmp:ModDate" }, -- dummy ["ModifyDate"] = { "date", "rdf:Description/xmp:ModifyDate" }, -- XMP Media Management schema - ["DocumentID"] = { "date", "rdf:Description/xmpMM:DocumentID" }, -- uuid - ["InstanceID"] = { "date", "rdf:Description/xmpMM:InstanceID" }, -- uuid + ["DocumentID"] = { "id", "rdf:Description/xmpMM:DocumentID" }, -- uuid + ["InstanceID"] = { "id", "rdf:Description/xmpMM:InstanceID" }, -- uuid ["RenditionClass"] = { "pdf", "rdf:Description/xmpMM:RenditionClass" }, -- PDF/X-4 ["VersionID"] = { "pdf", "rdf:Description/xmpMM:VersionID" }, -- PDF/X-4 -- additional entries @@ -105,32 +105,66 @@ local included = table.setmetatableindex( { return true end) -directives.register("backend.nodates", function(v) - included.date = not v +function lpdf.settrailerid(v) if v then - report_info("no date/time information will be added to the PDF file") - end -end) - -directives.register("backend.trailerid", function(v) - if v then - if toboolean(v) or v == "" then + local b = toboolean(v) or v == "" + if b then v = "This file is processed by ConTeXt and LuaTeX." 
else v = tostring(v) end local h = md5.HEX(v) - report_info("using hashed trailer id %a (%a)",v,h) + if b then + report_info("using frozen trailer id") + else + report_info("using hashed trailer id %a (%a)",v,h) + end pdf.settrailerid(format("[<%s> <%s>]",h,h)) end -end) +end + +function lpdf.setdates(v) + local t = type(v) + if t == "number" or t == "string" then + t = converters.totime(v) + if t then + included.date = true + included.id = "fake" + report_info("forced date/time information %a will be used",lpdf.settime(t)) + lpdf.settrailerid(false) + return + end + end + v = toboolean(v) + included.date = v + if v then + included.id = true + else + report_info("no date/time but fake id information will be added") + lpdf.settrailerid(true) + included.id = "fake" + -- maybe: lpdf.settime(231631200) -- 1975-05-05 % first entry of knuth about tex mentioned in DT + end +end + +function lpdf.id() -- overload of ini + local banner = tex.jobname + if included.date then + return format("%s.%s",banner,lpdf.timestamp()) + else + return banner + end +end + +directives.register("backend.trailerid", lpdf.settrailerid) +directives.register("backend.date", lpdf.setdates) local function permitdetail(what) local m = mapping[what] if m then return included[m[1]] and m[2] else - return included[what] + return included[what] and true or false end end @@ -214,22 +248,34 @@ end -- flushing -local t = { } for i=1,24 do t[i] = random() end +local function randomstring(n) + local t = { } + for i=1,n do + t[i] = char(96 + random(26)) + end + return concat(t) +end + +randomstring(26) -- kind of initializes and kicks off random local function flushxmpinfo() commands.pushrandomseed() commands.setrandomseed(os.time()) - local t = { } for i=1,24 do t[i] = char(96 + random(26)) end - local packetid = concat(t) - - local documentid = format("uuid:%s",os.uuid()) - local instanceid = format("uuid:%s",os.uuid()) + local packetid = "no unique packet id here" -- 24 chars + local documentid = "no unique document id here" + local instanceid = "no unique instance id here" local producer = format("LuaTeX-%0.2f.%s",status.luatex_version/100,status.luatex_revision) local creator = "LuaTeX + ConTeXt MkIV" local time = lpdf.timestamp() local fullbanner = status.banner + if included.id ~= "fake" then + packetid = randomstring(24) + documentid = "uuid:%s" .. os.uuid() + instanceid = "uuid:%s" .. 
os.uuid() + end + pdfaddxmpinfo("DocumentID", documentid) pdfaddxmpinfo("InstanceID", instanceid) pdfaddxmpinfo("Producer", producer) diff --git a/tex/context/base/mkiv/luat-cnf.lua b/tex/context/base/mkiv/luat-cnf.lua index 83622ef53..9d37df7bb 100644 --- a/tex/context/base/mkiv/luat-cnf.lua +++ b/tex/context/base/mkiv/luat-cnf.lua @@ -42,6 +42,7 @@ luatex = luatex or { } texconfig.kpse_init = false texconfig.shell_escape = 't' +---------.start_time = tonumber(os.getenv("SOURCE_DATE_EPOCH")) -- not used in context -- as soon as possible diff --git a/tex/context/base/mkiv/m-oldotf.mkiv b/tex/context/base/mkiv/m-oldotf.mkiv index c7c468d93..313f9f484 100644 --- a/tex/context/base/mkiv/m-oldotf.mkiv +++ b/tex/context/base/mkiv/m-oldotf.mkiv @@ -25,6 +25,7 @@ "font-otc", "font-oth", "font-odv", + "font-one", "font-map", "font-fbk", "font-gds", diff --git a/tex/context/base/mkiv/meta-ini.mkiv b/tex/context/base/mkiv/meta-ini.mkiv index 9f53aea69..d0fff83df 100644 --- a/tex/context/base/mkiv/meta-ini.mkiv +++ b/tex/context/base/mkiv/meta-ini.mkiv @@ -486,6 +486,10 @@ \startMPextensions string contextversion; contextversion:="\contextversion"; % expanded + minute:=\the\normaltime mod 60; + hour:=\the\normaltime div 60; + year:=\the\normalyear; + month:=\the\normalmonth; \stopMPextensions %D \macros diff --git a/tex/context/base/mkiv/mult-sys.mkiv b/tex/context/base/mkiv/mult-sys.mkiv index 01a1674dc..88e956d66 100644 --- a/tex/context/base/mkiv/mult-sys.mkiv +++ b/tex/context/base/mkiv/mult-sys.mkiv @@ -48,7 +48,7 @@ \definesystemconstant {danish} \definesystemconstant {da} \definesystemconstant {dutch} \definesystemconstant {nl} \definesystemconstant {english} \definesystemconstant {en} -\definesystemconstant {finish} \definesystemconstant {fi} +\definesystemconstant {finnish} \definesystemconstant {fi} \definesystemconstant {french} \definesystemconstant {fr} \definesystemconstant {german} \definesystemconstant {de} \definesystemconstant {hungarian} \definesystemconstant {hu} diff --git a/tex/context/base/mkiv/node-fnt.lua b/tex/context/base/mkiv/node-fnt.lua index 7f0d23997..e77280c37 100644 --- a/tex/context/base/mkiv/node-fnt.lua +++ b/tex/context/base/mkiv/node-fnt.lua @@ -17,9 +17,12 @@ local trace_characters = false trackers.register("nodes.characters", function( local trace_fontrun = false trackers.register("nodes.fontrun", function(v) trace_fontrun = v end) local trace_variants = false trackers.register("nodes.variants", function(v) trace_variants = v end) -local force_discrun = true directives.register("nodes.discrun", function(v) force_discrun = v end) -local force_boundaryrun = true directives.register("nodes.boundaryrun", function(v) force_boundaryrun = v end) -local force_basepass = true directives.register("nodes.basepass", function(v) force_basepass = v end) +-- bad namespace for directives + +local force_discrun = true directives.register("nodes.discrun", function(v) force_discrun = v end) +local force_boundaryrun = true directives.register("nodes.boundaryrun", function(v) force_boundaryrun = v end) +local force_basepass = true directives.register("nodes.basepass", function(v) force_basepass = v end) +local keep_redundant = false directives.register("nodes.keepredundant",function(v) keep_redundant = v end) local report_fonts = logs.reporter("fonts","processing") @@ -271,6 +274,12 @@ function handlers.characters(head) end end end + elseif keep_redundant then + -- go on, can be used for tracing + elseif not redundant then + redundant = { n } + else + redundant[#redundant+1] = n 
end end end diff --git a/tex/context/base/mkiv/status-files.pdf b/tex/context/base/mkiv/status-files.pdf Binary files differindex 5d4c84ce1..169d8e0a3 100644 --- a/tex/context/base/mkiv/status-files.pdf +++ b/tex/context/base/mkiv/status-files.pdf diff --git a/tex/context/base/mkiv/status-lua.pdf b/tex/context/base/mkiv/status-lua.pdf Binary files differindex 84aba5a7f..469f94361 100644 --- a/tex/context/base/mkiv/status-lua.pdf +++ b/tex/context/base/mkiv/status-lua.pdf diff --git a/tex/context/base/mkiv/typo-tal.lua b/tex/context/base/mkiv/typo-tal.lua index a67cd0a4b..21c6794c4 100644 --- a/tex/context/base/mkiv/typo-tal.lua +++ b/tex/context/base/mkiv/typo-tal.lua @@ -200,10 +200,10 @@ function characteralign.handler(originalhead,where) while current do local char, id = isglyph(current) if char then - local font = getfont(current) - -- local unicode = unicodes[font][char] - local unicode = fontcharacters[font][char].unicode or char -- ignore tables - if not unicode then + local font = getfont(current) + local data = fontcharacters[font][char] + local unicode = data and data.unicode or char -- ignore tables + if not unicode then -- type(unicode) ~= "number" -- no unicode so forget about it elseif unicode == separator then c = current diff --git a/tex/context/base/mkiv/typo-wrp.mkiv b/tex/context/base/mkiv/typo-wrp.mkiv index 4b18785bd..081349050 100644 --- a/tex/context/base/mkiv/typo-wrp.mkiv +++ b/tex/context/base/mkiv/typo-wrp.mkiv @@ -44,6 +44,7 @@ \spac_crlf_placeholder \ifcase\raggedstatus\hfil\or\or\or\hfil\fi \break + \hskip\zeropoint % new so that the next word also hyphenates \ignorespaces} \unexpanded\def\spac_crlf_placeholder diff --git a/tex/context/interface/mkiv/i-context.pdf b/tex/context/interface/mkiv/i-context.pdf Binary files differindex 6d0041d25..c3cc5715c 100644 --- a/tex/context/interface/mkiv/i-context.pdf +++ b/tex/context/interface/mkiv/i-context.pdf diff --git a/tex/context/interface/mkiv/i-readme.pdf b/tex/context/interface/mkiv/i-readme.pdf Binary files differindex 4d61ab7a8..62fd9a034 100644 --- a/tex/context/interface/mkiv/i-readme.pdf +++ b/tex/context/interface/mkiv/i-readme.pdf diff --git a/tex/context/modules/mkiv/s-languages-hyphenation.lua b/tex/context/modules/mkiv/s-languages-hyphenation.lua index c5a4f91f1..b8de773a0 100644 --- a/tex/context/modules/mkiv/s-languages-hyphenation.lua +++ b/tex/context/modules/mkiv/s-languages-hyphenation.lua @@ -11,101 +11,182 @@ moduledata.languages.hyphenation = moduledata.languages.hyphenation or { } local a_colormodel = attributes.private('colormodel') +local tex = tex +local context = context + local nodecodes = nodes.nodecodes -local nodepool = nodes.pool +local nuts = nodes.nuts +local nodepool = nuts.pool + local disc_code = nodecodes.disc local glyph_code = nodecodes.glyph + local emwidths = fonts.hashes.emwidths local exheights = fonts.hashes.exheights + local newkern = nodepool.kern local newrule = nodepool.rule local newglue = nodepool.glue -local insert_node_after = node.insert_after -local traverse_by_id = node.traverse_id -local hyphenate = languages.hyphenators.handler -- lang.hyphenate -local find_tail = node.tail -local remove_node = nodes.remove +local insert_node_after = nuts.insert_after +local traverse_by_id = nuts.traverse_id + +local tonut = nodes.tonut +local tonode = nodes.tonode +local getid = nuts.getid +local getnext = nuts.getnext +local getdisc = nuts.getdisc +local getattr = nuts.getattr +local getfont = nuts.getfont +local getfield = nuts.getfield +local setlink = nuts.setlink 
+local setdisc = nuts.setdisc +local setfield = nuts.setfield +local free_node = nuts.free local tracers = nodes.tracers local colortracers = tracers and tracers.colors local setnodecolor = colortracers.set +-- maybe this will become code code + +local states = table.setmetatableindex(function(t,k) + return { + lefthyphenmin = tex.lefthyphenmin, + righthyphenmin = tex.righthyphenmin, + hyphenationmin = tex.hyphenationmin, + prehyphenchar = tex.prehyphenchar, + posthyphenchar = tex.posthyphenchar, + } +end) + +interfaces.implement { + name = "storelanguagestate", + actions = function() + states[tex.language] = { + lefthyphenmin = tex.lefthyphenmin, + righthyphenmin = tex.righthyphenmin, + hyphenationmin = tex.hyphenationmin, + prehyphenchar = tex.prehyphenchar, + posthyphenchar = tex.posthyphenchar, + } + end +} + +function moduledata.languages.getstate(l) + return states[l] -- code +end + +-- end + local function identify(head,marked) - local current, prev = head, nil + local current = tonut(head) + local prev = nil while current do - local id = current.id - local next = current.next + local id = getid(current) + local next = getnext(current) if id == disc_code then - if prev and next then -- and next.id == glyph_code then -- catch other usage of disc + if prev and next then -- asume glyphs marked[#marked+1] = prev + local pre, post, replace, pre_tail, post_tail, replace_tail = getdisc(current,true) + if replace then + setlink(prev,replace) + setlink(replace_tail,next) + setdisc(pre,post,nil) + prev = tail + else + setlink(prev,next) + end + free_node(current) end elseif id == glyph_code then prev = current + else + prev = nil end current = next end end -local function strip(head,marked) +local function mark(head,marked,w,h,d,how) + head = tonut(head) for i=1,#marked do - local prev = marked[i] - remove_node(head,prev.next,true) + local current = marked[i] + local font = getfont(current) + local em = emwidths[font] + local ex = exheights[font] + local width = w*em + local rule = newrule(width,h*ex,d*ex) + head, current = insert_node_after(head,current,newkern(-width/2)) + head, current = insert_node_after(head,current,rule) + head, current = insert_node_after(head,current,newkern(-width/2)) + head, current = insert_node_after(head,current,newglue(0)) + setnodecolor(rule,how) -- ,getattr(current,a_colormodel)) end end -local function mark(head,marked,w,h,d,how) - for i=1,#marked do - local prev = marked[i] - local font = prev.font - local em = emwidths[font] - local ex = exheights[font] - local width = w*em - local rule = newrule(width,h*ex,d*ex) - head, prev = insert_node_after(head,prev,newkern(-width/2)) - head, prev = insert_node_after(head,prev,rule) - head, prev = insert_node_after(head,prev,newkern(-width/2)) - head, prev = insert_node_after(head,prev,newglue(0)) - setnodecolor(rule,how,prev[a_colormodel]) +local function getlanguage(head,l,left,right) + local t = { } + for n in traverse_by_id(glyph_code,tonut(head)) do + t[n] = { + getfield(n,"lang"), + getfield(n,"left"), + getfield(n,"right"), + } end end -local langs, tags, noflanguages = { }, { }, 0 - -local colorbytag = false +local langs = { } +local tags = { } +local noflanguages = 0 +local colorbytag = false function moduledata.languages.hyphenation.showhyphens(head) if noflanguages > 0 then local marked = { } + local cached = { } + -- somehow assigning -1 fails + for n in traverse_by_id(glyph_code,tonut(head)) do + cached[n] = { + getfield(n,"lang"), + getfield(n,"left"), + getfield(n,"right") + } + end for i=1,noflanguages 
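
Note: the new states table above snapshots the current hyphenation parameters the first time a language number is looked up, and the storelanguagestate implement call refreshes that snapshot explicitly. A standalone sketch of the lazy-snapshot idiom, assuming plain Lua: setmetatable with __index replaces ConTeXt's table.setmetatableindex, and mock defaults replace the tex.* parameters.

-- Lazy per-language state sketch (plain Lua, mock defaults).
local defaults = { lefthyphenmin = 2, righthyphenmin = 3, hyphenationmin = -1 }

local states = setmetatable({ }, {
    __index = function(t, language)
        -- unseen language: take a snapshot of the current defaults
        local s = {
            lefthyphenmin  = defaults.lefthyphenmin,
            righthyphenmin = defaults.righthyphenmin,
            hyphenationmin = defaults.hyphenationmin,
        }
        t[language] = s   -- cache it so later lookups reuse the snapshot
        return s
    end,
})

-- explicit store, in the spirit of \clf_storelanguagestate at \everylanguage time
local function storestate(language, lmin, rmin)
    states[language] = { lefthyphenmin = lmin, righthyphenmin = rmin, hyphenationmin = -1 }
end

storestate(7, 1, 2)
print(states[7].lefthyphenmin, states[99].lefthyphenmin) --> 1    2
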
do local m = { } local l = langs[i] + local s = states[l] marked[i] = m - for n in traverse_by_id(glyph_code,head) do - n.lang = l + local lmin = s.lefthyphenmin + local rmin = s.righthyphenmin + for n in next, cached do + setfield(n,"lang",l) + setfield(n,"left",lmin) + setfield(n,"right",rmin) end languages.hyphenators.methods.original(head) identify(head,m) - strip(head,m) end for i=noflanguages,1,-1 do local l = noflanguages - i + 1 mark(head,marked[i],1/16,l/2,l/4,"hyphenation:"..(colorbytag and tags[i] or i)) end + for n, d in next, cached do + setfield(n,"lang",d[1]) + setfield(n,"left",d[2]) + setfield(n,"right",d[3]) + end return head, true else return head, false end end -local savedlanguage - function moduledata.languages.hyphenation.startcomparepatterns(list) if list and list ~= "" then tags = utilities.parsers.settings_to_array(list) end - savedlanguage = tex.language - tex.language = 0 noflanguages = #tags for i=1,noflanguages do langs[i] = tags[i] and languages.getnumber(tags[i]) @@ -115,7 +196,6 @@ end function moduledata.languages.hyphenation.stopcomparepatterns() noflanguages = 0 - tex.language = savedlanguage or tex.language nodes.tasks.disableaction("processors","moduledata.languages.hyphenation.showhyphens") end diff --git a/tex/context/modules/mkiv/s-languages-hyphenation.mkiv b/tex/context/modules/mkiv/s-languages-hyphenation.mkiv index 6662dbf2f..9ff5720a0 100644 --- a/tex/context/modules/mkiv/s-languages-hyphenation.mkiv +++ b/tex/context/modules/mkiv/s-languages-hyphenation.mkiv @@ -34,6 +34,12 @@ \ctxlua{moduledata.languages.hyphenation.stopcomparepatterns()}% \endgroup} +\appendtoks + \clf_storelanguagestate % global +\to \everylanguage + +\clf_storelanguagestate % initialize + \installmodulecommandluasingle \showcomparepatternslegend {moduledata.languages.hyphenation.showcomparelegend} \protect @@ -42,11 +48,13 @@ \definecolor[hyphenation:2] [g=.8] \definecolor[hyphenation:3] [b=.8] \definecolor[hyphenation:4] [r=.4,g=.4] +\definecolor[hyphenation:5] [r=.4,b=.4] \definecolor[hyphenation:en] [hyphenation:1] \definecolor[hyphenation:de] [hyphenation:2] \definecolor[hyphenation:nl] [hyphenation:3] \definecolor[hyphenation:fr] [hyphenation:4] +\definecolor[hyphenation:sl] [hyphenation:5] \stopmodule @@ -56,10 +64,33 @@ \starttext -\def|#1|{-} + \startcomparepatterns[en,sl] + {For Mojca Miklavec} + {For Mojca Miklavec} + \stopcomparepatterns + + \startcomparepatterns[en] + {For Mojca Miklavec} + {For Mojca Miklavec} + \stopcomparepatterns + + \startcomparepatterns[sl] + {For Mojca Miklavec} + {For Mojca Miklavec} + \stopcomparepatterns + + \startcomparepatterns[en,sl] + {For Mojca Miklavec}\crlf + {For Mojca Miklavec}\crlf + {\de For Mojca Miklavec}\crlf + {For Mojca Miklavec}\crlf + {\fr For Mojca Miklavec} + \stopcomparepatterns + + \page \startsubject{Normal text} - \input tufte + \input tufte \stopsubject \startsubject{Compare hyphenation points of \showcomparepatternslegend[en,de]} diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua index 5f35deddf..eef11d297 100644 --- a/tex/generic/context/luatex/luatex-fonts-merged.lua +++ b/tex/generic/context/luatex/luatex-fonts-merged.lua @@ -1,6 +1,6 @@ -- merged file : c:/data/develop/context/sources/luatex-fonts-merged.lua -- parent file : c:/data/develop/context/sources/luatex-fonts.lua --- merge date : 05/01/16 09:52:32 +-- merge date : 05/07/16 14:37:15 do -- begin closure to overcome local limits and interference @@ -6973,1137 +6973,6 @@ end -- 
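
Note: showhyphens above caches each glyph's lang/left/right fields, switches them per compared language, reruns the hyphenator, and finally restores the cached values. A standalone sketch of that save-mutate-restore pattern, with plain tables standing in for nodes, ordinary field access standing in for nuts.getfield/setfield, and the hyphenator call mocked.

-- Save/mutate/restore sketch: mock glyph records instead of real nodes.
local glyphs = {
    { char = "h", lang = 0, left = 2, right = 3 },
    { char = "y", lang = 0, left = 2, right = 3 },
}

local function with_language(list, lang, lmin, rmin, action)
    local cached = { }
    for i, g in ipairs(list) do
        cached[i] = { g.lang, g.left, g.right }      -- snapshot the original fields
        g.lang, g.left, g.right = lang, lmin, rmin   -- switch to the test language
    end
    action(list)                                     -- e.g. run the hyphenator
    for i, g in ipairs(list) do
        local c = cached[i]
        g.lang, g.left, g.right = c[1], c[2], c[3]   -- put everything back
    end
end

with_language(glyphs, 42, 1, 2, function(list)
    print("hyphenating with lang", list[1].lang)     --> 42
end)
print(glyphs[1].lang)                                --> 0 (restored)
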
closure do -- begin closure to overcome local limits and interference -if not modules then modules={} end modules ['font-afm']={ - version=1.001, - comment="companion to font-ini.mkiv", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files" -} -local fonts,logs,trackers,containers,resolvers=fonts,logs,trackers,containers,resolvers -local next,type,tonumber=next,type,tonumber -local match,gmatch,lower,gsub,strip,find=string.match,string.gmatch,string.lower,string.gsub,string.strip,string.find -local char,byte,sub=string.char,string.byte,string.sub -local abs=math.abs -local bxor,rshift=bit32.bxor,bit32.rshift -local P,S,R,Cmt,C,Ct,Cs,lpegmatch,patterns=lpeg.P,lpeg.S,lpeg.R,lpeg.Cmt,lpeg.C,lpeg.Ct,lpeg.Cs,lpeg.match,lpeg.patterns -local derivetable=table.derive -local trace_features=false trackers.register("afm.features",function(v) trace_features=v end) -local trace_indexing=false trackers.register("afm.indexing",function(v) trace_indexing=v end) -local trace_loading=false trackers.register("afm.loading",function(v) trace_loading=v end) -local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end) -local report_afm=logs.reporter("fonts","afm loading") -local setmetatableindex=table.setmetatableindex -local findbinfile=resolvers.findbinfile -local definers=fonts.definers -local readers=fonts.readers -local constructors=fonts.constructors -local afm=constructors.newhandler("afm") -local pfb=constructors.newhandler("pfb") -local afmfeatures=constructors.newfeatures("afm") -local registerafmfeature=afmfeatures.register -afm.version=1.501 -afm.cache=containers.define("fonts","afm",afm.version,true) -afm.autoprefixed=true -afm.helpdata={} -afm.syncspace=true -afm.addligatures=true -afm.addtexligatures=true -afm.addkerns=true -local overloads=fonts.mappings.overloads -local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes -local function setmode(tfmdata,value) - if value then - tfmdata.properties.mode=lower(value) - end -end -registerafmfeature { - name="mode", - description="mode", - initializers={ - base=setmode, - node=setmode, - } -} -local comment=P("Comment") -local spacing=patterns.spacer -local lineend=patterns.newline -local words=C((1-lineend)^1) -local number=C((R("09")+S("."))^1)/tonumber*spacing^0 -local data=lpeg.Carg(1) -local pattern=( - comment*spacing*( - data*( - ("CODINGSCHEME"*spacing*words )/function(fd,a) end+("DESIGNSIZE"*spacing*number*words )/function(fd,a) fd[ 1]=a end+("CHECKSUM"*spacing*number*words )/function(fd,a) fd[ 2]=a end+("SPACE"*spacing*number*"plus"*number*"minus"*number)/function(fd,a,b,c) fd[ 3],fd[ 4],fd[ 5]=a,b,c end+("QUAD"*spacing*number )/function(fd,a) fd[ 6]=a end+("EXTRASPACE"*spacing*number )/function(fd,a) fd[ 7]=a end+("NUM"*spacing*number*number*number )/function(fd,a,b,c) fd[ 8],fd[ 9],fd[10]=a,b,c end+("DENOM"*spacing*number*number )/function(fd,a,b ) fd[11],fd[12]=a,b end+("SUP"*spacing*number*number*number )/function(fd,a,b,c) fd[13],fd[14],fd[15]=a,b,c end+("SUB"*spacing*number*number )/function(fd,a,b) fd[16],fd[17]=a,b end+("SUPDROP"*spacing*number )/function(fd,a) fd[18]=a end+("SUBDROP"*spacing*number )/function(fd,a) fd[19]=a end+("DELIM"*spacing*number*number )/function(fd,a,b) fd[20],fd[21]=a,b end+("AXISHEIGHT"*spacing*number )/function(fd,a) fd[22]=a end - )+(1-lineend)^0 - )+(1-comment)^1 -)^0 -local function scan_comment(str) - local fd={} - lpegmatch(pattern,str,1,fd) - return fd -end 
-local keys={} -function keys.FontName (data,line) data.metadata.fontname=strip (line) - data.metadata.fullname=strip (line) end -function keys.ItalicAngle (data,line) data.metadata.italicangle=tonumber (line) end -function keys.IsFixedPitch(data,line) data.metadata.monospaced=toboolean(line,true) end -function keys.CharWidth (data,line) data.metadata.charwidth=tonumber (line) end -function keys.XHeight (data,line) data.metadata.xheight=tonumber (line) end -function keys.Descender (data,line) data.metadata.descender=tonumber (line) end -function keys.Ascender (data,line) data.metadata.ascender=tonumber (line) end -function keys.Comment (data,line) - line=lower(line) - local designsize=match(line,"designsize[^%d]*(%d+)") - if designsize then data.metadata.designsize=tonumber(designsize) end -end -local function get_charmetrics(data,charmetrics,vector) - local characters=data.characters - local chr,ind={},0 - for k,v in gmatch(charmetrics,"([%a]+) +(.-) *;") do - if k=='C' then - v=tonumber(v) - if v<0 then - ind=ind+1 - else - ind=v - end - chr={ - index=ind - } - elseif k=='WX' then - chr.width=tonumber(v) - elseif k=='N' then - characters[v]=chr - elseif k=='B' then - local llx,lly,urx,ury=match(v,"^ *(.-) +(.-) +(.-) +(.-)$") - chr.boundingbox={ tonumber(llx),tonumber(lly),tonumber(urx),tonumber(ury) } - elseif k=='L' then - local plus,becomes=match(v,"^(.-) +(.-)$") - local ligatures=chr.ligatures - if ligatures then - ligatures[plus]=becomes - else - chr.ligatures={ [plus]=becomes } - end - end - end -end -local function get_kernpairs(data,kernpairs) - local characters=data.characters - for one,two,value in gmatch(kernpairs,"KPX +(.-) +(.-) +(.-)\n") do - local chr=characters[one] - if chr then - local kerns=chr.kerns - if kerns then - kerns[two]=tonumber(value) - else - chr.kerns={ [two]=tonumber(value) } - end - end - end -end -local function get_variables(data,fontmetrics) - for key,rest in gmatch(fontmetrics,"(%a+) *(.-)[\n\r]") do - local keyhandler=keys[key] - if keyhandler then - keyhandler(data,rest) - end - end -end -local get_indexes -do - local fontloader=fontloader - local get_indexes_old=false - if fontloader then - local font_to_table=fontloader.to_table - local open_font=fontloader.open - local close_font=fontloader.close - get_indexes_old=function(data,pfbname) - local pfbblob=open_font(pfbname) - if pfbblob then - local characters=data.characters - local pfbdata=font_to_table(pfbblob) - if pfbdata then - local glyphs=pfbdata.glyphs - if glyphs then - if trace_loading then - report_afm("getting index data from %a",pfbname) - end - for index,glyph in next,glyphs do - local name=glyph.name - if name then - local char=characters[name] - if char then - if trace_indexing then - report_afm("glyph %a has index %a",name,index) - end - char.index=index - end - end - end - elseif trace_loading then - report_afm("no glyph data in pfb file %a",pfbname) - end - elseif trace_loading then - report_afm("no data in pfb file %a",pfbname) - end - close_font(pfbblob) - elseif trace_loading then - report_afm("invalid pfb file %a",pfbname) - end - end - end - local n,m - local progress=function(str,position,name,size) - local forward=position+tonumber(size)+3+2 - n=n+1 - if n>=m then - return #str,name - elseif forward<#str then - return forward,name - else - return #str,name - end - end - local initialize=function(str,position,size) - n=0 - m=tonumber(size) - return position+1 - end - local charstrings=P("/CharStrings") - local name=P("/")*C((R("az")+R("AZ")+R("09")+S("-_."))^1) - local 
size=C(R("09")^1) - local spaces=P(" ")^1 - local p_filternames=Ct ( - (1-charstrings)^0*charstrings*spaces*Cmt(size,initialize)*(Cmt(name*P(" ")^1*C(R("09")^1),progress)+P(1))^1 - ) - local decrypt - do - local r,c1,c2,n=0,0,0,0 - local function step(c) - local cipher=byte(c) - local plain=bxor(cipher,rshift(r,8)) - r=((cipher+r)*c1+c2)%65536 - return char(plain) - end - decrypt=function(binary) - r,c1,c2,n=55665,52845,22719,4 - binary=gsub(binary,".",step) - return sub(binary,n+1) - end - end - local function loadpfbvector(filename) - local data=io.loaddata(resolvers.findfile(filename)) - if not find(data,"!PS%-AdobeFont%-") then - print("no font",filename) - return - end - if not data then - print("no data",filename) - return - end - local ascii,binary=match(data,"(.*)eexec%s+......(.*)") - if not binary then - print("no binary",filename) - return - end - binary=decrypt(binary,4) - local vector=lpegmatch(p_filternames,binary) - vector[0]=table.remove(vector,1) - if not vector then - print("no vector",filename) - return - end - return vector - end - get_indexes=function(data,pfbname) - local vector=loadpfbvector(pfbname) - if vector then - local characters=data.characters - if trace_loading then - report_afm("getting index data from %a",pfbname) - end - for index=1,#vector do - local name=vector[index] - local char=characters[name] - if char then - if trace_indexing then - report_afm("glyph %a has index %a",name,index) - end - char.index=index - end - end - end - end - if get_indexes_old then - afm.use_new_indexer=true - get_indexes_new=get_indexes - get_indexes=function(data,pfbname) - if afm.use_new_indexer then - return get_indexes_new(data,pfbname) - else - return get_indexes_old(data,pfbname) - end - end - end -end -local function readafm(filename) - local ok,afmblob,size=resolvers.loadbinfile(filename) - if ok and afmblob then - local data={ - resources={ - filename=resolvers.unresolve(filename), - version=afm.version, - creator="context mkiv", - }, - properties={ - hasitalics=false, - }, - goodies={}, - metadata={ - filename=file.removesuffix(file.basename(filename)) - }, - characters={ - }, - descriptions={ - }, - } - afmblob=gsub(afmblob,"StartCharMetrics(.-)EndCharMetrics",function(charmetrics) - if trace_loading then - report_afm("loading char metrics") - end - get_charmetrics(data,charmetrics,vector) - return "" - end) - afmblob=gsub(afmblob,"StartKernPairs(.-)EndKernPairs",function(kernpairs) - if trace_loading then - report_afm("loading kern pairs") - end - get_kernpairs(data,kernpairs) - return "" - end) - afmblob=gsub(afmblob,"StartFontMetrics%s+([%d%.]+)(.-)EndFontMetrics",function(version,fontmetrics) - if trace_loading then - report_afm("loading variables") - end - data.afmversion=version - get_variables(data,fontmetrics) - data.fontdimens=scan_comment(fontmetrics) - return "" - end) - return data - else - if trace_loading then - report_afm("no valid afm file %a",filename) - end - return nil - end -end -local addkerns,addligatures,addtexligatures,unify,normalize,fixnames -function afm.load(filename) - filename=resolvers.findfile(filename,'afm') or "" - if filename~="" and not fonts.names.ignoredfile(filename) then - local name=file.removesuffix(file.basename(filename)) - local data=containers.read(afm.cache,name) - local attr=lfs.attributes(filename) - local size,time=attr.size or 0,attr.modification or 0 - local pfbfile=file.replacesuffix(name,"pfb") - local pfbname=resolvers.findfile(pfbfile,"pfb") or "" - if pfbname=="" then - 
pfbname=resolvers.findfile(file.basename(pfbfile),"pfb") or "" - end - local pfbsize,pfbtime=0,0 - if pfbname~="" then - local attr=lfs.attributes(pfbname) - pfbsize=attr.size or 0 - pfbtime=attr.modification or 0 - end - if not data or data.size~=size or data.time~=time or data.pfbsize~=pfbsize or data.pfbtime~=pfbtime then - report_afm("reading %a",filename) - data=readafm(filename) - if data then - if pfbname~="" then - data.resources.filename=resolvers.unresolve(pfbname) - get_indexes(data,pfbname) - elseif trace_loading then - report_afm("no pfb file for %a",filename) - end - report_afm("unifying %a",filename) - unify(data,filename) - if afm.addligatures then - report_afm("add ligatures") - addligatures(data) - end - if afm.addtexligatures then - report_afm("add tex ligatures") - addtexligatures(data) - end - if afm.addkerns then - report_afm("add extra kerns") - addkerns(data) - end - normalize(data) - fixnames(data) - report_afm("add tounicode data") - fonts.mappings.addtounicode(data,filename) - data.size=size - data.time=time - data.pfbsize=pfbsize - data.pfbtime=pfbtime - report_afm("saving %a in cache",name) - data.resources.unicodes=nil - data=containers.write(afm.cache,name,data) - data=containers.read(afm.cache,name) - end - if applyruntimefixes and data then - applyruntimefixes(filename,data) - end - end - return data - else - return nil - end -end -local uparser=fonts.mappings.makenameparser() -unify=function(data,filename) - local unicodevector=fonts.encodings.agl.unicodes - local unicodes={} - local names={} - local private=constructors.privateoffset - local descriptions=data.descriptions - for name,blob in next,data.characters do - local code=unicodevector[name] - if not code then - code=lpegmatch(uparser,name) - if not code then - code=private - private=private+1 - report_afm("assigning private slot %U for unknown glyph name %a",code,name) - end - end - local index=blob.index - unicodes[name]=code - names[name]=index - blob.name=name - descriptions[code]={ - boundingbox=blob.boundingbox, - width=blob.width, - kerns=blob.kerns, - index=index, - name=name, - } - end - for unicode,description in next,descriptions do - local kerns=description.kerns - if kerns then - local krn={} - for name,kern in next,kerns do - local unicode=unicodes[name] - if unicode then - krn[unicode]=kern - else - end - end - description.kerns=krn - end - end - data.characters=nil - local resources=data.resources - local filename=resources.filename or file.removesuffix(file.basename(filename)) - resources.filename=resolvers.unresolve(filename) - resources.unicodes=unicodes - resources.marks={} - resources.private=private -end -local everywhere={ ["*"]={ ["*"]=true } } -local noflags={ false,false,false,false } -afm.experimental_normalize=false -normalize=function(data) - if type(afm.experimental_normalize)=="function" then - afm.experimental_normalize(data) - end -end -fixnames=function(data) - for k,v in next,data.descriptions do - local n=v.name - local r=overloads[n] - if r then - local name=r.name - if trace_indexing then - report_afm("renaming characters %a to %a",n,name) - end - v.name=name - v.unicode=r.unicode - end - end -end -local addthem=function(rawdata,ligatures) - if ligatures then - local descriptions=rawdata.descriptions - local resources=rawdata.resources - local unicodes=resources.unicodes - for ligname,ligdata in next,ligatures do - local one=descriptions[unicodes[ligname]] - if one then - for _,pair in next,ligdata do - local two,three=unicodes[pair[1]],unicodes[pair[2]] - if two 
and three then - local ol=one.ligatures - if ol then - if not ol[two] then - ol[two]=three - end - else - one.ligatures={ [two]=three } - end - end - end - end - end - end -end -addligatures=function(rawdata) addthem(rawdata,afm.helpdata.ligatures ) end -addtexligatures=function(rawdata) addthem(rawdata,afm.helpdata.texligatures) end -addkerns=function(rawdata) - local descriptions=rawdata.descriptions - local resources=rawdata.resources - local unicodes=resources.unicodes - local function do_it_left(what) - if what then - for unicode,description in next,descriptions do - local kerns=description.kerns - if kerns then - local extrakerns - for complex,simple in next,what do - complex=unicodes[complex] - simple=unicodes[simple] - if complex and simple then - local ks=kerns[simple] - if ks and not kerns[complex] then - if extrakerns then - extrakerns[complex]=ks - else - extrakerns={ [complex]=ks } - end - end - end - end - if extrakerns then - description.extrakerns=extrakerns - end - end - end - end - end - local function do_it_copy(what) - if what then - for complex,simple in next,what do - complex=unicodes[complex] - simple=unicodes[simple] - if complex and simple then - local complexdescription=descriptions[complex] - if complexdescription then - local simpledescription=descriptions[complex] - if simpledescription then - local extrakerns - local kerns=simpledescription.kerns - if kerns then - for unicode,kern in next,kerns do - if extrakerns then - extrakerns[unicode]=kern - else - extrakerns={ [unicode]=kern } - end - end - end - local extrakerns=simpledescription.extrakerns - if extrakerns then - for unicode,kern in next,extrakerns do - if extrakerns then - extrakerns[unicode]=kern - else - extrakerns={ [unicode]=kern } - end - end - end - if extrakerns then - complexdescription.extrakerns=extrakerns - end - end - end - end - end - end - end - do_it_left(afm.helpdata.leftkerned) - do_it_left(afm.helpdata.bothkerned) - do_it_copy(afm.helpdata.bothkerned) - do_it_copy(afm.helpdata.rightkerned) -end -local function adddimensions(data) - if data then - for unicode,description in next,data.descriptions do - local bb=description.boundingbox - if bb then - local ht,dp=bb[4],-bb[2] - if ht==0 or ht<0 then - else - description.height=ht - end - if dp==0 or dp<0 then - else - description.depth=dp - end - end - end - end -end -local function copytotfm(data) - if data and data.descriptions then - local metadata=data.metadata - local resources=data.resources - local properties=derivetable(data.properties) - local descriptions=derivetable(data.descriptions) - local goodies=derivetable(data.goodies) - local characters={} - local parameters={} - local unicodes=resources.unicodes - for unicode,description in next,data.descriptions do - characters[unicode]={} - end - local filename=constructors.checkedfilename(resources) - local fontname=metadata.fontname or metadata.fullname - local fullname=metadata.fullname or metadata.fontname - local endash=0x0020 - local emdash=0x2014 - local spacer="space" - local spaceunits=500 - local monospaced=metadata.monospaced - local charwidth=metadata.charwidth - local italicangle=metadata.italicangle - local charxheight=metadata.xheight and metadata.xheight>0 and metadata.xheight - properties.monospaced=monospaced - parameters.italicangle=italicangle - parameters.charwidth=charwidth - parameters.charxheight=charxheight - if properties.monospaced then - if descriptions[endash] then - spaceunits,spacer=descriptions[endash].width,"space" - end - if not spaceunits and 
descriptions[emdash] then - spaceunits,spacer=descriptions[emdash].width,"emdash" - end - if not spaceunits and charwidth then - spaceunits,spacer=charwidth,"charwidth" - end - else - if descriptions[endash] then - spaceunits,spacer=descriptions[endash].width,"space" - end - if not spaceunits and charwidth then - spaceunits,spacer=charwidth,"charwidth" - end - end - spaceunits=tonumber(spaceunits) - if spaceunits<200 then - end - parameters.slant=0 - parameters.space=spaceunits - parameters.space_stretch=500 - parameters.space_shrink=333 - parameters.x_height=400 - parameters.quad=1000 - if italicangle and italicangle~=0 then - parameters.italicangle=italicangle - parameters.italicfactor=math.cos(math.rad(90+italicangle)) - parameters.slant=- math.tan(italicangle*math.pi/180) - end - if monospaced then - parameters.space_stretch=0 - parameters.space_shrink=0 - elseif afm.syncspace then - parameters.space_stretch=spaceunits/2 - parameters.space_shrink=spaceunits/3 - end - parameters.extra_space=parameters.space_shrink - if charxheight then - parameters.x_height=charxheight - else - local x=0x0078 - if x then - local x=descriptions[x] - if x then - parameters.x_height=x.height - end - end - end - local fd=data.fontdimens - if fd and fd[8] and fd[9] and fd[10] then - for k,v in next,fd do - parameters[k]=v - end - end - parameters.designsize=(metadata.designsize or 10)*65536 - parameters.ascender=abs(metadata.ascender or 0) - parameters.descender=abs(metadata.descender or 0) - parameters.units=1000 - properties.spacer=spacer - properties.encodingbytes=2 - properties.format=fonts.formats[filename] or "type1" - properties.filename=filename - properties.fontname=fontname - properties.fullname=fullname - properties.psname=fullname - properties.name=filename or fullname or fontname - if next(characters) then - return { - characters=characters, - descriptions=descriptions, - parameters=parameters, - resources=resources, - properties=properties, - goodies=goodies, - } - end - end - return nil -end -function afm.setfeatures(tfmdata,features) - local okay=constructors.initializefeatures("afm",tfmdata,features,trace_features,report_afm) - if okay then - return constructors.collectprocessors("afm",tfmdata,features,trace_features,report_afm) - else - return {} - end -end -local function addtables(data) - local resources=data.resources - local lookuptags=resources.lookuptags - local unicodes=resources.unicodes - if not lookuptags then - lookuptags={} - resources.lookuptags=lookuptags - end - setmetatableindex(lookuptags,function(t,k) - local v=type(k)=="number" and ("lookup "..k) or k - t[k]=v - return v - end) - if not unicodes then - unicodes={} - resources.unicodes=unicodes - setmetatableindex(unicodes,function(t,k) - setmetatableindex(unicodes,nil) - for u,d in next,data.descriptions do - local n=d.name - if n then - t[n]=u - end - end - return rawget(t,k) - end) - end - constructors.addcoreunicodes(unicodes) -end -local function afmtotfm(specification) - local afmname=specification.filename or specification.name - if specification.forced=="afm" or specification.format=="afm" then - if trace_loading then - report_afm("forcing afm format for %a",afmname) - end - else - local tfmname=findbinfile(afmname,"ofm") or "" - if tfmname~="" then - if trace_loading then - report_afm("fallback from afm to tfm for %a",afmname) - end - return - end - end - if afmname~="" then - local features=constructors.checkedfeatures("afm",specification.features.normal) - specification.features.normal=features - 
constructors.hashinstance(specification,true) - specification=definers.resolve(specification) - local cache_id=specification.hash - local tfmdata=containers.read(constructors.cache,cache_id) - if not tfmdata then - local rawdata=afm.load(afmname) - if rawdata and next(rawdata) then - addtables(rawdata) - adddimensions(rawdata) - tfmdata=copytotfm(rawdata) - if tfmdata and next(tfmdata) then - local shared=tfmdata.shared - if not shared then - shared={} - tfmdata.shared=shared - end - shared.rawdata=rawdata - shared.features=features - shared.processes=afm.setfeatures(tfmdata,features) - end - elseif trace_loading then - report_afm("no (valid) afm file found with name %a",afmname) - end - tfmdata=containers.write(constructors.cache,cache_id,tfmdata) - end - return tfmdata - end -end -local function read_from_afm(specification) - local tfmdata=afmtotfm(specification) - if tfmdata then - tfmdata.properties.name=specification.name - tfmdata=constructors.scale(tfmdata,specification) - local allfeatures=tfmdata.shared.features or specification.features.normal - constructors.applymanipulators("afm",tfmdata,allfeatures,trace_features,report_afm) - fonts.loggers.register(tfmdata,'afm',specification) - end - return tfmdata -end -local function prepareligatures(tfmdata,ligatures,value) - if value then - local descriptions=tfmdata.descriptions - local hasligatures=false - for unicode,character in next,tfmdata.characters do - local description=descriptions[unicode] - local dligatures=description.ligatures - if dligatures then - local cligatures=character.ligatures - if not cligatures then - cligatures={} - character.ligatures=cligatures - end - for unicode,ligature in next,dligatures do - cligatures[unicode]={ - char=ligature, - type=0 - } - end - hasligatures=true - end - end - tfmdata.properties.hasligatures=hasligatures - end -end -local function preparekerns(tfmdata,kerns,value) - if value then - local rawdata=tfmdata.shared.rawdata - local resources=rawdata.resources - local unicodes=resources.unicodes - local descriptions=tfmdata.descriptions - local haskerns=false - for u,chr in next,tfmdata.characters do - local d=descriptions[u] - local newkerns=d[kerns] - if newkerns then - local kerns=chr.kerns - if not kerns then - kerns={} - chr.kerns=kerns - end - for k,v in next,newkerns do - local uk=unicodes[k] - if uk then - kerns[uk]=v - end - end - haskerns=true - end - end - tfmdata.properties.haskerns=haskerns - end -end -local list={ - [0x0027]=0x2019, -} -local function texreplacements(tfmdata,value) - local descriptions=tfmdata.descriptions - local characters=tfmdata.characters - for k,v in next,list do - characters [k]=characters [v] - descriptions[k]=descriptions[v] - end -end -local function ligatures (tfmdata,value) prepareligatures(tfmdata,'ligatures',value) end -local function texligatures(tfmdata,value) prepareligatures(tfmdata,'texligatures',value) end -local function kerns (tfmdata,value) preparekerns (tfmdata,'kerns',value) end -local function extrakerns (tfmdata,value) preparekerns (tfmdata,'extrakerns',value) end -registerafmfeature { - name="liga", - description="traditional ligatures", - initializers={ - base=ligatures, - node=ligatures, - } -} -registerafmfeature { - name="kern", - description="intercharacter kerning", - initializers={ - base=kerns, - node=kerns, - } -} -registerafmfeature { - name="extrakerns", - description="additional intercharacter kerning", - initializers={ - base=extrakerns, - node=extrakerns, - } -} -registerafmfeature { - name='tlig', - description='tex 
ligatures', - initializers={ - base=texligatures, - node=texligatures, - } -} -registerafmfeature { - name='trep', - description='tex replacements', - initializers={ - base=texreplacements, - node=texreplacements, - } -} -local check_tfm=readers.check_tfm -fonts.formats.afm="type1" -fonts.formats.pfb="type1" -local function check_afm(specification,fullname) - local foundname=findbinfile(fullname,'afm') or "" - if foundname=="" then - foundname=fonts.names.getfilename(fullname,"afm") or "" - end - if foundname=="" and afm.autoprefixed then - local encoding,shortname=match(fullname,"^(.-)%-(.*)$") - if encoding and shortname and fonts.encodings.known[encoding] then - shortname=findbinfile(shortname,'afm') or "" - if shortname~="" then - foundname=shortname - if trace_defining then - report_afm("stripping encoding prefix from filename %a",afmname) - end - end - end - end - if foundname~="" then - specification.filename=foundname - specification.format="afm" - return read_from_afm(specification) - end -end -function readers.afm(specification,method) - local fullname,tfmdata=specification.filename or "",nil - if fullname=="" then - local forced=specification.forced or "" - if forced~="" then - tfmdata=check_afm(specification,specification.name.."."..forced) - end - if not tfmdata then - method=method or definers.method or "afm or tfm" - if method=="tfm" then - tfmdata=check_tfm(specification,specification.name) - elseif method=="afm" then - tfmdata=check_afm(specification,specification.name) - elseif method=="tfm or afm" then - tfmdata=check_tfm(specification,specification.name) or check_afm(specification,specification.name) - else - tfmdata=check_afm(specification,specification.name) or check_tfm(specification,specification.name) - end - end - else - tfmdata=check_afm(specification,fullname) - end - return tfmdata -end -function readers.pfb(specification,method) - local original=specification.specification - if trace_defining then - report_afm("using afm reader for %a",original) - end - specification.specification=gsub(original,"%.pfb",".afm") - specification.forced="afm" - return readers.afm(specification,method) -end - -end -- closure - -do -- begin closure to overcome local limits and interference - -if not modules then modules={} end modules ['font-afk']={ - version=1.001, - comment="companion to font-afm.lua", - author="Hans Hagen, PRAGMA-ADE, Hasselt NL", - copyright="PRAGMA ADE / ConTeXt Development Team", - license="see context related readme files", - dataonly=true, -} -local allocate=utilities.storage.allocate -fonts.handlers.afm.helpdata={ - ligatures=allocate { - ['f']={ - { 'f','ff' }, - { 'i','fi' }, - { 'l','fl' }, - }, - ['ff']={ - { 'i','ffi' } - }, - ['fi']={ - { 'i','fii' } - }, - ['fl']={ - { 'i','fli' } - }, - ['s']={ - { 't','st' } - }, - ['i']={ - { 'j','ij' } - }, - }, - texligatures=allocate { - ['quoteleft']={ - { 'quoteleft','quotedblleft' } - }, - ['quoteright']={ - { 'quoteright','quotedblright' } - }, - ['hyphen']={ - { 'hyphen','endash' } - }, - ['endash']={ - { 'hyphen','emdash' } - } - }, - leftkerned=allocate { - AEligature="A",aeligature="a", - OEligature="O",oeligature="o", - IJligature="I",ijligature="i", - AE="A",ae="a", - OE="O",oe="o", - IJ="I",ij="i", - Ssharp="S",ssharp="s", - }, - rightkerned=allocate { - AEligature="E",aeligature="e", - OEligature="E",oeligature="e", - IJligature="J",ijligature="j", - AE="E",ae="e", - OE="E",oe="e", - IJ="J",ij="j", - Ssharp="S",ssharp="s", - }, - bothkerned=allocate { - Acircumflex="A",acircumflex="a", - 
Ccircumflex="C",ccircumflex="c", - Ecircumflex="E",ecircumflex="e", - Gcircumflex="G",gcircumflex="g", - Hcircumflex="H",hcircumflex="h", - Icircumflex="I",icircumflex="i", - Jcircumflex="J",jcircumflex="j", - Ocircumflex="O",ocircumflex="o", - Scircumflex="S",scircumflex="s", - Ucircumflex="U",ucircumflex="u", - Wcircumflex="W",wcircumflex="w", - Ycircumflex="Y",ycircumflex="y", - Agrave="A",agrave="a", - Egrave="E",egrave="e", - Igrave="I",igrave="i", - Ograve="O",ograve="o", - Ugrave="U",ugrave="u", - Ygrave="Y",ygrave="y", - Atilde="A",atilde="a", - Itilde="I",itilde="i", - Otilde="O",otilde="o", - Utilde="U",utilde="u", - Ntilde="N",ntilde="n", - Adiaeresis="A",adiaeresis="a",Adieresis="A",adieresis="a", - Ediaeresis="E",ediaeresis="e",Edieresis="E",edieresis="e", - Idiaeresis="I",idiaeresis="i",Idieresis="I",idieresis="i", - Odiaeresis="O",odiaeresis="o",Odieresis="O",odieresis="o", - Udiaeresis="U",udiaeresis="u",Udieresis="U",udieresis="u", - Ydiaeresis="Y",ydiaeresis="y",Ydieresis="Y",ydieresis="y", - Aacute="A",aacute="a", - Cacute="C",cacute="c", - Eacute="E",eacute="e", - Iacute="I",iacute="i", - Lacute="L",lacute="l", - Nacute="N",nacute="n", - Oacute="O",oacute="o", - Racute="R",racute="r", - Sacute="S",sacute="s", - Uacute="U",uacute="u", - Yacute="Y",yacute="y", - Zacute="Z",zacute="z", - Dstroke="D",dstroke="d", - Hstroke="H",hstroke="h", - Tstroke="T",tstroke="t", - Cdotaccent="C",cdotaccent="c", - Edotaccent="E",edotaccent="e", - Gdotaccent="G",gdotaccent="g", - Idotaccent="I",idotaccent="i", - Zdotaccent="Z",zdotaccent="z", - Amacron="A",amacron="a", - Emacron="E",emacron="e", - Imacron="I",imacron="i", - Omacron="O",omacron="o", - Umacron="U",umacron="u", - Ccedilla="C",ccedilla="c", - Kcedilla="K",kcedilla="k", - Lcedilla="L",lcedilla="l", - Ncedilla="N",ncedilla="n", - Rcedilla="R",rcedilla="r", - Scedilla="S",scedilla="s", - Tcedilla="T",tcedilla="t", - Ohungarumlaut="O",ohungarumlaut="o", - Uhungarumlaut="U",uhungarumlaut="u", - Aogonek="A",aogonek="a", - Eogonek="E",eogonek="e", - Iogonek="I",iogonek="i", - Uogonek="U",uogonek="u", - Aring="A",aring="a", - Uring="U",uring="u", - Abreve="A",abreve="a", - Ebreve="E",ebreve="e", - Gbreve="G",gbreve="g", - Ibreve="I",ibreve="i", - Obreve="O",obreve="o", - Ubreve="U",ubreve="u", - Ccaron="C",ccaron="c", - Dcaron="D",dcaron="d", - Ecaron="E",ecaron="e", - Lcaron="L",lcaron="l", - Ncaron="N",ncaron="n", - Rcaron="R",rcaron="r", - Scaron="S",scaron="s", - Tcaron="T",tcaron="t", - Zcaron="Z",zcaron="z", - dotlessI="I",dotlessi="i", - dotlessJ="J",dotlessj="j", - AEligature="AE",aeligature="ae",AE="AE",ae="ae", - OEligature="OE",oeligature="oe",OE="OE",oe="oe", - IJligature="IJ",ijligature="ij",IJ="IJ",ij="ij", - Lstroke="L",lstroke="l",Lslash="L",lslash="l", - Ostroke="O",ostroke="o",Oslash="O",oslash="o", - Ssharp="SS",ssharp="ss", - Aumlaut="A",aumlaut="a", - Eumlaut="E",eumlaut="e", - Iumlaut="I",iumlaut="i", - Oumlaut="O",oumlaut="o", - Uumlaut="U",uumlaut="u", - } -} - -end -- closure - -do -- begin closure to overcome local limits and interference - if not modules then modules={} end modules ['font-oti']={ version=1.001, comment="companion to font-ini.mkiv", @@ -15827,7 +14696,6 @@ function otf.load(filename,sub,featurefile) otfreaders.expand(data) otfreaders.addunicodetable(data) enhancers.apply(data,filename,data) - constructors.addcoreunicodes(unicodes) if applyruntimefixes then applyruntimefixes(filename,data) end @@ -16658,6 +15526,7 @@ registerotffeature { base=featuresinitializer, } } 
+otf.basemodeinitializer=featuresinitializer end -- closure @@ -17713,11 +16582,11 @@ local function inject_everything(head,where) insert_node_after(pre,n,newkern(rightkern)) done=true end - end - if hasmarks then - local pm=i.markbasenode - if pm then - processmark(pm,current,i) + if hasmarks then + local pm=i.markbasenode + if pm then + processmark(pm,current,i) + end end end end @@ -17743,11 +16612,11 @@ local function inject_everything(head,where) insert_node_after(post,n,newkern(rightkern)) done=true end - end - if hasmarks then - local pm=i.markbasenode - if pm then - processmark(pm,current,i) + if hasmarks then + local pm=i.markbasenode + if pm then + processmark(pm,current,i) + end end end end @@ -17773,11 +16642,11 @@ local function inject_everything(head,where) insert_node_after(replace,n,newkern(rightkern)) done=true end - end - if hasmarks then - local pm=i.markbasenode - if pm then - processmark(pm,current,i) + if hasmarks then + local pm=i.markbasenode + if pm then + processmark(pm,current,i) + end end end end @@ -21284,6 +20153,8 @@ registerotffeature { node=featuresprocessor, } } +otf.nodemodeinitializer=featuresinitializer +otf.featuresprocessor=featuresprocessor otf.handlers=handlers local setspacekerns=nodes.injections.setspacekerns if not setspacekerns then os.exit() end function otf.handlers.trigger_space_kerns(head,start,dataset,sequence,_,_,_,_,font,attr) @@ -23381,6 +22252,1160 @@ end -- closure do -- begin closure to overcome local limits and interference +if not modules then modules={} end modules ['font-one']={ + version=1.001, + comment="companion to font-ini.mkiv", + author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files" +} +local fonts,logs,trackers,containers,resolvers=fonts,logs,trackers,containers,resolvers +local next,type,tonumber=next,type,tonumber +local match,gmatch,lower,gsub,strip,find=string.match,string.gmatch,string.lower,string.gsub,string.strip,string.find +local char,byte,sub=string.char,string.byte,string.sub +local abs=math.abs +local bxor,rshift=bit32.bxor,bit32.rshift +local P,S,R,Cmt,C,Ct,Cs,lpegmatch,patterns=lpeg.P,lpeg.S,lpeg.R,lpeg.Cmt,lpeg.C,lpeg.Ct,lpeg.Cs,lpeg.match,lpeg.patterns +local derivetable=table.derive +local trace_features=false trackers.register("afm.features",function(v) trace_features=v end) +local trace_indexing=false trackers.register("afm.indexing",function(v) trace_indexing=v end) +local trace_loading=false trackers.register("afm.loading",function(v) trace_loading=v end) +local trace_defining=false trackers.register("fonts.defining",function(v) trace_defining=v end) +local report_afm=logs.reporter("fonts","afm loading") +local setmetatableindex=table.setmetatableindex +local findbinfile=resolvers.findbinfile +local definers=fonts.definers +local readers=fonts.readers +local constructors=fonts.constructors +local afm=constructors.newhandler("afm") +local pfb=constructors.newhandler("pfb") +local otf=fonts.handlers.otf +local otfreaders=otf.readers +local otfenhancers=otf.enhancers +local afmfeatures=constructors.newfeatures("afm") +local registerafmfeature=afmfeatures.register +afm.version=1.505 +afm.cache=containers.define("fonts","afm",afm.version,true) +afm.autoprefixed=true +afm.helpdata={} +afm.syncspace=true +local overloads=fonts.mappings.overloads +local applyruntimefixes=fonts.treatments and fonts.treatments.applyfixes +local comment=P("Comment") +local spacing=patterns.spacer +local lineend=patterns.newline +local 
words=C((1-lineend)^1) +local number=C((R("09")+S("."))^1)/tonumber*spacing^0 +local data=lpeg.Carg(1) +local pattern=( + comment*spacing*( + data*( + ("CODINGSCHEME"*spacing*words )/function(fd,a) end+("DESIGNSIZE"*spacing*number*words )/function(fd,a) fd[ 1]=a end+("CHECKSUM"*spacing*number*words )/function(fd,a) fd[ 2]=a end+("SPACE"*spacing*number*"plus"*number*"minus"*number)/function(fd,a,b,c) fd[ 3],fd[ 4],fd[ 5]=a,b,c end+("QUAD"*spacing*number )/function(fd,a) fd[ 6]=a end+("EXTRASPACE"*spacing*number )/function(fd,a) fd[ 7]=a end+("NUM"*spacing*number*number*number )/function(fd,a,b,c) fd[ 8],fd[ 9],fd[10]=a,b,c end+("DENOM"*spacing*number*number )/function(fd,a,b ) fd[11],fd[12]=a,b end+("SUP"*spacing*number*number*number )/function(fd,a,b,c) fd[13],fd[14],fd[15]=a,b,c end+("SUB"*spacing*number*number )/function(fd,a,b) fd[16],fd[17]=a,b end+("SUPDROP"*spacing*number )/function(fd,a) fd[18]=a end+("SUBDROP"*spacing*number )/function(fd,a) fd[19]=a end+("DELIM"*spacing*number*number )/function(fd,a,b) fd[20],fd[21]=a,b end+("AXISHEIGHT"*spacing*number )/function(fd,a) fd[22]=a end + )+(1-lineend)^0 + )+(1-comment)^1 +)^0 +local function scan_comment(str) + local fd={} + lpegmatch(pattern,str,1,fd) + return fd +end +local keys={} +function keys.FontName (data,line) data.metadata.fontname=strip (line) + data.metadata.fullname=strip (line) end +function keys.ItalicAngle (data,line) data.metadata.italicangle=tonumber (line) end +function keys.IsFixedPitch(data,line) data.metadata.monospaced=toboolean(line,true) end +function keys.CharWidth (data,line) data.metadata.charwidth=tonumber (line) end +function keys.XHeight (data,line) data.metadata.xheight=tonumber (line) end +function keys.Descender (data,line) data.metadata.descender=tonumber (line) end +function keys.Ascender (data,line) data.metadata.ascender=tonumber (line) end +function keys.Comment (data,line) + line=lower(line) + local designsize=match(line,"designsize[^%d]*(%d+)") + if designsize then data.metadata.designsize=tonumber(designsize) end +end +local function get_charmetrics(data,charmetrics,vector) + local characters=data.characters + local chr,ind={},0 + for k,v in gmatch(charmetrics,"([%a]+) +(.-) *;") do + if k=='C' then + v=tonumber(v) + if v<0 then + ind=ind+1 + else + ind=v + end + chr={ + index=ind + } + elseif k=='WX' then + chr.width=tonumber(v) + elseif k=='N' then + characters[v]=chr + elseif k=='B' then + local llx,lly,urx,ury=match(v,"^ *(.-) +(.-) +(.-) +(.-)$") + chr.boundingbox={ tonumber(llx),tonumber(lly),tonumber(urx),tonumber(ury) } + elseif k=='L' then + local plus,becomes=match(v,"^(.-) +(.-)$") + local ligatures=chr.ligatures + if ligatures then + ligatures[plus]=becomes + else + chr.ligatures={ [plus]=becomes } + end + end + end +end +local function get_kernpairs(data,kernpairs) + local characters=data.characters + for one,two,value in gmatch(kernpairs,"KPX +(.-) +(.-) +(.-)\n") do + local chr=characters[one] + if chr then + local kerns=chr.kerns + if kerns then + kerns[two]=tonumber(value) + else + chr.kerns={ [two]=tonumber(value) } + end + end + end +end +local function get_variables(data,fontmetrics) + for key,rest in gmatch(fontmetrics,"(%a+) *(.-)[\n\r]") do + local keyhandler=keys[key] + if keyhandler then + keyhandler(data,rest) + end + end +end +local get_indexes +do + local n,m + local progress=function(str,position,name,size) + local forward=position+tonumber(size)+3+2 + n=n+1 + if n>=m then + return #str,name + elseif forward<#str then + return forward,name + else + return #str,name 
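
Note: scan_comment above feeds an accumulator table into the lpeg grammar as an extra match argument. A reduced and slightly restructured sketch of that lpeg.Carg(1) technique, parsing only two of the keys from made-up input.

-- Extra arguments given to lpeg.match travel into the grammar as a capture,
-- so every key handler can fill the same result table.
local lpeg = require("lpeg")
local P, R, S, C, Carg = lpeg.P, lpeg.R, lpeg.S, lpeg.C, lpeg.Carg

local spacing = S(" \t")^1
local newline = P("\r\n") + S("\r\n")
local number  = C((R("09") + S("."))^1) / tonumber
local rest    = (1 - S("\r\n"))^0

local keyline =
      (Carg(1) * P("Comment") * spacing * P("DESIGNSIZE") * spacing * number)
          / function(fd, a) fd.designsize = a end
    + (Carg(1) * P("Comment") * spacing * P("QUAD") * spacing * number)
          / function(fd, a) fd.quad = a end

local pattern = (keyline^-1 * rest * newline)^0

local function scan(str)
    local fd = { }
    lpeg.match(pattern, str, 1, fd)   -- fd reaches the handlers via Carg(1)
    return fd
end

local fd = scan("Comment DESIGNSIZE 12\nComment QUAD 1000\nComment other stuff\n")
print(fd.designsize, fd.quad)         --> 12    1000
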
+ end + end + local initialize=function(str,position,size) + n=0 + m=tonumber(size) + return position+1 + end + local charstrings=P("/CharStrings") + local name=P("/")*C((R("az")+R("AZ")+R("09")+S("-_."))^1) + local size=C(R("09")^1) + local spaces=P(" ")^1 + local p_filternames=Ct ( + (1-charstrings)^0*charstrings*spaces*Cmt(size,initialize)*(Cmt(name*P(" ")^1*C(R("09")^1),progress)+P(1))^1 + ) + local decrypt + do + local r,c1,c2,n=0,0,0,0 + local function step(c) + local cipher=byte(c) + local plain=bxor(cipher,rshift(r,8)) + r=((cipher+r)*c1+c2)%65536 + return char(plain) + end + decrypt=function(binary) + r,c1,c2,n=55665,52845,22719,4 + binary=gsub(binary,".",step) + return sub(binary,n+1) + end + end + local function loadpfbvector(filename) + local data=io.loaddata(resolvers.findfile(filename)) + if not find(data,"!PS%-AdobeFont%-") then + print("no font",filename) + return + end + if not data then + print("no data",filename) + return + end + local ascii,binary=match(data,"(.*)eexec%s+......(.*)") + if not binary then + print("no binary",filename) + return + end + binary=decrypt(binary,4) + local vector=lpegmatch(p_filternames,binary) + vector[0]=table.remove(vector,1) + if not vector then + print("no vector",filename) + return + end + return vector + end + get_indexes=function(data,pfbname) + local vector=loadpfbvector(pfbname) + if vector then + local characters=data.characters + if trace_loading then + report_afm("getting index data from %a",pfbname) + end + for index=1,#vector do + local name=vector[index] + local char=characters[name] + if char then + if trace_indexing then + report_afm("glyph %a has index %a",name,index) + end + char.index=index + end + end + end + end +end +local function readafm(filename) + local ok,afmblob,size=resolvers.loadbinfile(filename) + if ok and afmblob then + local data={ + resources={ + filename=resolvers.unresolve(filename), + version=afm.version, + creator="context mkiv", + }, + properties={ + hasitalics=false, + }, + goodies={}, + metadata={ + filename=file.removesuffix(file.basename(filename)) + }, + characters={ + }, + descriptions={ + }, + } + afmblob=gsub(afmblob,"StartCharMetrics(.-)EndCharMetrics",function(charmetrics) + if trace_loading then + report_afm("loading char metrics") + end + get_charmetrics(data,charmetrics,vector) + return "" + end) + afmblob=gsub(afmblob,"StartKernPairs(.-)EndKernPairs",function(kernpairs) + if trace_loading then + report_afm("loading kern pairs") + end + get_kernpairs(data,kernpairs) + return "" + end) + afmblob=gsub(afmblob,"StartFontMetrics%s+([%d%.]+)(.-)EndFontMetrics",function(version,fontmetrics) + if trace_loading then + report_afm("loading variables") + end + data.afmversion=version + get_variables(data,fontmetrics) + data.fontdimens=scan_comment(fontmetrics) + return "" + end) + return data + else + if trace_loading then + report_afm("no valid afm file %a",filename) + end + return nil + end +end +local addkerns,unify,normalize,fixnames,addligatures,addtexligatures +function afm.load(filename) + filename=resolvers.findfile(filename,'afm') or "" + if filename~="" and not fonts.names.ignoredfile(filename) then + local name=file.removesuffix(file.basename(filename)) + local data=containers.read(afm.cache,name) + local attr=lfs.attributes(filename) + local size,time=attr.size or 0,attr.modification or 0 + local pfbfile=file.replacesuffix(name,"pfb") + local pfbname=resolvers.findfile(pfbfile,"pfb") or "" + if pfbname=="" then + pfbname=resolvers.findfile(file.basename(pfbfile),"pfb") or "" + end + local 
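
Note: the decrypt closure above is the Adobe Type 1 eexec stream cipher (initial key 55665, constants 52845/22719, four lead bytes). A standalone round-trip sketch, assuming the Lua 5.2 bit32 library that LuaTeX also provides; the encrypt side is added here only for illustration and is not part of the patch.

local byte, char, gsub, sub = string.byte, string.char, string.gsub, string.sub
local bxor, rshift = bit32.bxor, bit32.rshift

local function encrypt(plainbytes)
    local r = 55665
    local padded = "\0\0\0\0" .. plainbytes          -- 4 arbitrary lead bytes
    return (gsub(padded, ".", function(c)
        local plain  = byte(c)
        local cipher = bxor(plain, rshift(r, 8))
        r = ((cipher + r) * 52845 + 22719) % 65536
        return char(cipher)
    end))
end

local function decrypt(binary)                       -- same shape as the code above
    local r = 55665
    binary = gsub(binary, ".", function(c)
        local cipher = byte(c)
        local plain  = bxor(cipher, rshift(r, 8))
        r = ((cipher + r) * 52845 + 22719) % 65536
        return char(plain)
    end)
    return sub(binary, 5)                            -- drop the 4 lead bytes
end

print(decrypt(encrypt("/CharStrings 12 dict dup begin"))) --> the original string
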
pfbsize,pfbtime=0,0 + if pfbname~="" then + local attr=lfs.attributes(pfbname) + pfbsize=attr.size or 0 + pfbtime=attr.modification or 0 + end + if not data or data.size~=size or data.time~=time or data.pfbsize~=pfbsize or data.pfbtime~=pfbtime then + report_afm("reading %a",filename) + data=readafm(filename) + if data then + if pfbname~="" then + data.resources.filename=resolvers.unresolve(pfbname) + get_indexes(data,pfbname) + elseif trace_loading then + report_afm("no pfb file for %a",filename) + end + if trace_loading then + report_afm("unifying %a",filename) + end + unify(data,filename) + if trace_loading then + report_afm("add ligatures") + end + addligatures(data) + if trace_loading then + report_afm("add extra kerns") + end + addkerns(data) + if trace_loading then + report_afm("normalizing") + end + normalize(data) + if trace_loading then + report_afm("fixing names") + end + fixnames(data) + if trace_loading then + report_afm("add tounicode data") + end + fonts.mappings.addtounicode(data,filename) + otfreaders.pack(data) + data.size=size + data.time=time + data.pfbsize=pfbsize + data.pfbtime=pfbtime + report_afm("saving %a in cache",name) + data=containers.write(afm.cache,name,data) + data=containers.read(afm.cache,name) + end + end + if data then + otfreaders.unpack(data) + otfreaders.expand(data) + otfreaders.addunicodetable(data) + otfenhancers.apply(data,filename,data) + if applyruntimefixes then + applyruntimefixes(filename,data) + end + end + return data + else + return nil + end +end +local uparser=fonts.mappings.makenameparser() +unify=function(data,filename) + local unicodevector=fonts.encodings.agl.unicodes + local unicodes={} + local names={} + local private=constructors.privateoffset + local descriptions=data.descriptions + for name,blob in next,data.characters do + local code=unicodevector[name] + if not code then + code=lpegmatch(uparser,name) + if not code then + code=private + private=private+1 + report_afm("assigning private slot %U for unknown glyph name %a",code,name) + end + end + local index=blob.index + unicodes[name]=code + names[name]=index + blob.name=name + descriptions[code]={ + boundingbox=blob.boundingbox, + width=blob.width, + kerns=blob.kerns, + index=index, + name=name, + } + end + for unicode,description in next,descriptions do + local kerns=description.kerns + if kerns then + local krn={} + for name,kern in next,kerns do + local unicode=unicodes[name] + if unicode then + krn[unicode]=kern + else + end + end + description.kerns=krn + end + end + data.characters=nil + local resources=data.resources + local filename=resources.filename or file.removesuffix(file.basename(filename)) + resources.filename=resolvers.unresolve(filename) + resources.unicodes=unicodes + resources.marks={} + resources.private=private +end +local everywhere={ ["*"]={ ["*"]=true } } +local noflags={ false,false,false,false } +normalize=function(data) + local ligatures=setmetatableindex("table") + local kerns=setmetatableindex("table") + local extrakerns=setmetatableindex("table") + for u,c in next,data.descriptions do + local l=c.ligatures + local k=c.kerns + local e=c.extrakerns + if l then + ligatures[u]=l + for u,v in next,l do + l[u]={ ligature=v } + end + c.ligatures=nil + end + if k then + kerns[u]=k + for u,v in next,k do + k[u]=v + end + c.kerns=nil + end + if e then + extrakerns[u]=e + for u,v in next,e do + e[u]=v + end + c.extrakerns=nil + end + end + local features={ + gpos={}, + gsub={}, + } + local sequences={ + } + if next(ligatures) then + 
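
Note: afm.load above only reuses a cached entry when the size and modification time of both the .afm file and its companion .pfb still match the values recorded at cache time. A standalone sketch of that stamp check, assuming LuaFileSystem (bundled with LuaTeX); loadcached and parseandcache are hypothetical stand-ins for containers.read/containers.write.

local lfs = require("lfs")

local function stamp(filename)
    if filename and filename ~= "" then
        local attr = lfs.attributes(filename)
        if attr then
            return attr.size or 0, attr.modification or 0
        end
    end
    return 0, 0
end

local function cachedload(afmname, pfbname, loadcached, parseandcache)
    local size,    time    = stamp(afmname)
    local pfbsize, pfbtime = stamp(pfbname)
    local data = loadcached(afmname)
    if data and data.size == size and data.time == time
            and data.pfbsize == pfbsize and data.pfbtime == pfbtime then
        return data                               -- cache is still valid
    end
    data = parseandcache(afmname, pfbname)        -- reparse, then record the stamps
    if data then
        data.size,    data.time    = size,    time
        data.pfbsize, data.pfbtime = pfbsize, pfbtime
    end
    return data
end
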
features.gsub.liga=everywhere + data.properties.hasligatures=true + sequences[#sequences+1]={ + features={ + liga=everywhere, + }, + flags=noflags, + name="s_s_0", + nofsteps=1, + order={ "liga" }, + type="gsub_ligature", + steps={ + { + coverage=ligatures, + }, + }, + } + end + if next(kerns) then + features.gpos.kern=everywhere + data.properties.haskerns=true + sequences[#sequences+1]={ + features={ + kern=everywhere, + }, + flags=noflags, + name="p_s_0", + nofsteps=1, + order={ "kern" }, + type="gpos_pair", + steps={ + { + format="kern", + coverage=kerns, + }, + }, + } + end + if next(extrakerns) then + features.gpos.extrakerns=everywhere + data.properties.haskerns=true + sequences[#sequences+1]={ + features={ + extrakerns=everywhere, + }, + flags=noflags, + name="p_s_1", + nofsteps=1, + order={ "extrakerns" }, + type="gpos_pair", + steps={ + { + format="kern", + coverage=extrakerns, + }, + }, + } + end + data.resources.features=features + data.resources.sequences=sequences +end +fixnames=function(data) + for k,v in next,data.descriptions do + local n=v.name + local r=overloads[n] + if r then + local name=r.name + if trace_indexing then + report_afm("renaming characters %a to %a",n,name) + end + v.name=name + v.unicode=r.unicode + end + end +end +local addthem=function(rawdata,ligatures) + if ligatures then + local descriptions=rawdata.descriptions + local resources=rawdata.resources + local unicodes=resources.unicodes + for ligname,ligdata in next,ligatures do + local one=descriptions[unicodes[ligname]] + if one then + for _,pair in next,ligdata do + local two,three=unicodes[pair[1]],unicodes[pair[2]] + if two and three then + local ol=one.ligatures + if ol then + if not ol[two] then + ol[two]=three + end + else + one.ligatures={ [two]=three } + end + end + end + end + end + end +end +addligatures=function(rawdata) addthem(rawdata,afm.helpdata.ligatures ) end +addtexligatures=function(rawdata) addthem(rawdata,afm.helpdata.texligatures) end +addkerns=function(rawdata) + local descriptions=rawdata.descriptions + local resources=rawdata.resources + local unicodes=resources.unicodes + local function do_it_left(what) + if what then + for unicode,description in next,descriptions do + local kerns=description.kerns + if kerns then + local extrakerns + for complex,simple in next,what do + complex=unicodes[complex] + simple=unicodes[simple] + if complex and simple then + local ks=kerns[simple] + if ks and not kerns[complex] then + if extrakerns then + extrakerns[complex]=ks + else + extrakerns={ [complex]=ks } + end + end + end + end + if extrakerns then + description.extrakerns=extrakerns + end + end + end + end + end + local function do_it_copy(what) + if what then + for complex,simple in next,what do + complex=unicodes[complex] + simple=unicodes[simple] + if complex and simple then + local complexdescription=descriptions[complex] + if complexdescription then + local simpledescription=descriptions[complex] + if simpledescription then + local extrakerns + local kerns=simpledescription.kerns + if kerns then + for unicode,kern in next,kerns do + if extrakerns then + extrakerns[unicode]=kern + else + extrakerns={ [unicode]=kern } + end + end + end + local extrakerns=simpledescription.extrakerns + if extrakerns then + for unicode,kern in next,extrakerns do + if extrakerns then + extrakerns[unicode]=kern + else + extrakerns={ [unicode]=kern } + end + end + end + if extrakerns then + complexdescription.extrakerns=extrakerns + end + end + end + end + end + end + end + 
do_it_left(afm.helpdata.leftkerned) + do_it_left(afm.helpdata.bothkerned) + do_it_copy(afm.helpdata.bothkerned) + do_it_copy(afm.helpdata.rightkerned) +end +local function adddimensions(data) + if data then + for unicode,description in next,data.descriptions do + local bb=description.boundingbox + if bb then + local ht,dp=bb[4],-bb[2] + if ht==0 or ht<0 then + else + description.height=ht + end + if dp==0 or dp<0 then + else + description.depth=dp + end + end + end + end +end +local function copytotfm(data) + if data and data.descriptions then + local metadata=data.metadata + local resources=data.resources + local properties=derivetable(data.properties) + local descriptions=derivetable(data.descriptions) + local goodies=derivetable(data.goodies) + local characters={} + local parameters={} + local unicodes=resources.unicodes + for unicode,description in next,data.descriptions do + characters[unicode]={} + end + local filename=constructors.checkedfilename(resources) + local fontname=metadata.fontname or metadata.fullname + local fullname=metadata.fullname or metadata.fontname + local endash=0x0020 + local emdash=0x2014 + local spacer="space" + local spaceunits=500 + local monospaced=metadata.monospaced + local charwidth=metadata.charwidth + local italicangle=metadata.italicangle + local charxheight=metadata.xheight and metadata.xheight>0 and metadata.xheight + properties.monospaced=monospaced + parameters.italicangle=italicangle + parameters.charwidth=charwidth + parameters.charxheight=charxheight + if properties.monospaced then + if descriptions[endash] then + spaceunits,spacer=descriptions[endash].width,"space" + end + if not spaceunits and descriptions[emdash] then + spaceunits,spacer=descriptions[emdash].width,"emdash" + end + if not spaceunits and charwidth then + spaceunits,spacer=charwidth,"charwidth" + end + else + if descriptions[endash] then + spaceunits,spacer=descriptions[endash].width,"space" + end + if not spaceunits and charwidth then + spaceunits,spacer=charwidth,"charwidth" + end + end + spaceunits=tonumber(spaceunits) + if spaceunits<200 then + end + parameters.slant=0 + parameters.space=spaceunits + parameters.space_stretch=500 + parameters.space_shrink=333 + parameters.x_height=400 + parameters.quad=1000 + if italicangle and italicangle~=0 then + parameters.italicangle=italicangle + parameters.italicfactor=math.cos(math.rad(90+italicangle)) + parameters.slant=- math.tan(italicangle*math.pi/180) + end + if monospaced then + parameters.space_stretch=0 + parameters.space_shrink=0 + elseif afm.syncspace then + parameters.space_stretch=spaceunits/2 + parameters.space_shrink=spaceunits/3 + end + parameters.extra_space=parameters.space_shrink + if charxheight then + parameters.x_height=charxheight + else + local x=0x0078 + if x then + local x=descriptions[x] + if x then + parameters.x_height=x.height + end + end + end + local fd=data.fontdimens + if fd and fd[8] and fd[9] and fd[10] then + for k,v in next,fd do + parameters[k]=v + end + end + parameters.designsize=(metadata.designsize or 10)*65536 + parameters.ascender=abs(metadata.ascender or 0) + parameters.descender=abs(metadata.descender or 0) + parameters.units=1000 + properties.spacer=spacer + properties.encodingbytes=2 + properties.format=fonts.formats[filename] or "type1" + properties.filename=filename + properties.fontname=fontname + properties.fullname=fullname + properties.psname=fullname + properties.name=filename or fullname or fontname + if next(characters) then + return { + characters=characters, + 
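
Note: copytotfm above derives slant and italicfactor from the AFM italic angle. A quick numeric check for an assumed angle of -12 degrees (the value is made up; negative angles lean right, as in most Type 1 italics).

local italicangle = -12

local slant        = - math.tan(italicangle * math.pi / 180)
local italicfactor = math.cos(math.rad(90 + italicangle))

print(string.format("slant = %.5f, italicfactor = %.5f", slant, italicfactor))
--> slant = 0.21256, italicfactor = 0.20791
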
descriptions=descriptions, + parameters=parameters, + resources=resources, + properties=properties, + goodies=goodies, + } + end + end + return nil +end +function afm.setfeatures(tfmdata,features) + local okay=constructors.initializefeatures("afm",tfmdata,features,trace_features,report_afm) + if okay then + return constructors.collectprocessors("afm",tfmdata,features,trace_features,report_afm) + else + return {} + end +end +local function addtables(data) + local resources=data.resources + local lookuptags=resources.lookuptags + local unicodes=resources.unicodes + if not lookuptags then + lookuptags={} + resources.lookuptags=lookuptags + end + setmetatableindex(lookuptags,function(t,k) + local v=type(k)=="number" and ("lookup "..k) or k + t[k]=v + return v + end) + if not unicodes then + unicodes={} + resources.unicodes=unicodes + setmetatableindex(unicodes,function(t,k) + setmetatableindex(unicodes,nil) + for u,d in next,data.descriptions do + local n=d.name + if n then + t[n]=u + end + end + return rawget(t,k) + end) + end + constructors.addcoreunicodes(unicodes) +end +local function afmtotfm(specification) + local afmname=specification.filename or specification.name + if specification.forced=="afm" or specification.format=="afm" then + if trace_loading then + report_afm("forcing afm format for %a",afmname) + end + else + local tfmname=findbinfile(afmname,"ofm") or "" + if tfmname~="" then + if trace_loading then + report_afm("fallback from afm to tfm for %a",afmname) + end + return + end + end + if afmname~="" then + local features=constructors.checkedfeatures("afm",specification.features.normal) + specification.features.normal=features + constructors.hashinstance(specification,true) + specification=definers.resolve(specification) + local cache_id=specification.hash + local tfmdata=containers.read(constructors.cache,cache_id) + if not tfmdata then + local rawdata=afm.load(afmname) + if rawdata and next(rawdata) then + addtables(rawdata) + adddimensions(rawdata) + tfmdata=copytotfm(rawdata) + if tfmdata and next(tfmdata) then + local shared=tfmdata.shared + if not shared then + shared={} + tfmdata.shared=shared + end + shared.rawdata=rawdata + shared.dynamics={} + tfmdata.changed={} + shared.features=features + shared.processes=afm.setfeatures(tfmdata,features) + end + elseif trace_loading then + report_afm("no (valid) afm file found with name %a",afmname) + end + tfmdata=containers.write(constructors.cache,cache_id,tfmdata) + end + return tfmdata + end +end +local function read_from_afm(specification) + local tfmdata=afmtotfm(specification) + if tfmdata then + tfmdata.properties.name=specification.name + tfmdata=constructors.scale(tfmdata,specification) + local allfeatures=tfmdata.shared.features or specification.features.normal + constructors.applymanipulators("afm",tfmdata,allfeatures,trace_features,report_afm) + fonts.loggers.register(tfmdata,'afm',specification) + end + return tfmdata +end +local function prepareligatures(tfmdata,ligatures,value) + if value then + local descriptions=tfmdata.descriptions + local hasligatures=false + for unicode,character in next,tfmdata.characters do + local description=descriptions[unicode] + local dligatures=description.ligatures + if dligatures then + local cligatures=character.ligatures + if not cligatures then + cligatures={} + character.ligatures=cligatures + end + for unicode,ligature in next,dligatures do + cligatures[unicode]={ + char=ligature, + type=0 + } + end + hasligatures=true + end + end + tfmdata.properties.hasligatures=hasligatures + 
end +end +local function preparekerns(tfmdata,kerns,value) + if value then + local rawdata=tfmdata.shared.rawdata + local resources=rawdata.resources + local unicodes=resources.unicodes + local descriptions=tfmdata.descriptions + local haskerns=false + for u,chr in next,tfmdata.characters do + local d=descriptions[u] + local newkerns=d[kerns] + if newkerns then + local kerns=chr.kerns + if not kerns then + kerns={} + chr.kerns=kerns + end + for k,v in next,newkerns do + local uk=unicodes[k] + if uk then + kerns[uk]=v + end + end + haskerns=true + end + end + tfmdata.properties.haskerns=haskerns + end +end +local list={ + [0x0027]=0x2019, +} +local function texreplacements(tfmdata,value) + local descriptions=tfmdata.descriptions + local characters=tfmdata.characters + for k,v in next,list do + characters [k]=characters [v] + descriptions[k]=descriptions[v] + end +end +local function extrakerns (tfmdata,value) preparekerns (tfmdata,'extrakerns',value) end +local function setmode(tfmdata,value) + if value then + tfmdata.properties.mode=lower(value) + end +end +registerafmfeature { + name="mode", + description="mode", + initializers={ + base=setmode, + node=setmode, + } +} +registerafmfeature { + name="features", + description="features", + default=true, + initializers={ + node=otf.nodemodeinitializer, + base=otf.basemodeinitializer, + }, + processors={ + node=otf.featuresprocessor, + } +} +local check_tfm=readers.check_tfm +fonts.formats.afm="type1" +fonts.formats.pfb="type1" +local function check_afm(specification,fullname) + local foundname=findbinfile(fullname,'afm') or "" + if foundname=="" then + foundname=fonts.names.getfilename(fullname,"afm") or "" + end + if foundname=="" and afm.autoprefixed then + local encoding,shortname=match(fullname,"^(.-)%-(.*)$") + if encoding and shortname and fonts.encodings.known[encoding] then + shortname=findbinfile(shortname,'afm') or "" + if shortname~="" then + foundname=shortname + if trace_defining then + report_afm("stripping encoding prefix from filename %a",afmname) + end + end + end + end + if foundname~="" then + specification.filename=foundname + specification.format="afm" + return read_from_afm(specification) + end +end +function readers.afm(specification,method) + local fullname,tfmdata=specification.filename or "",nil + if fullname=="" then + local forced=specification.forced or "" + if forced~="" then + tfmdata=check_afm(specification,specification.name.."."..forced) + end + if not tfmdata then + method=method or definers.method or "afm or tfm" + if method=="tfm" then + tfmdata=check_tfm(specification,specification.name) + elseif method=="afm" then + tfmdata=check_afm(specification,specification.name) + elseif method=="tfm or afm" then + tfmdata=check_tfm(specification,specification.name) or check_afm(specification,specification.name) + else + tfmdata=check_afm(specification,specification.name) or check_tfm(specification,specification.name) + end + end + else + tfmdata=check_afm(specification,fullname) + end + return tfmdata +end +function readers.pfb(specification,method) + local original=specification.specification + if trace_defining then + report_afm("using afm reader for %a",original) + end + specification.specification=gsub(original,"%.pfb",".afm") + specification.forced="afm" + return readers.afm(specification,method) +end + +end -- closure + +do -- begin closure to overcome local limits and interference + +if not modules then modules={} end modules ['font-afk']={ + version=1.001, + comment="companion to font-afm.lua", + 
author="Hans Hagen, PRAGMA-ADE, Hasselt NL", + copyright="PRAGMA ADE / ConTeXt Development Team", + license="see context related readme files", + dataonly=true, +} +local allocate=utilities.storage.allocate +fonts.handlers.afm.helpdata={ + ligatures=allocate { + ['f']={ + { 'f','ff' }, + { 'i','fi' }, + { 'l','fl' }, + }, + ['ff']={ + { 'i','ffi' } + }, + ['fi']={ + { 'i','fii' } + }, + ['fl']={ + { 'i','fli' } + }, + ['s']={ + { 't','st' } + }, + ['i']={ + { 'j','ij' } + }, + }, + texligatures=allocate { + ['quoteleft']={ + { 'quoteleft','quotedblleft' } + }, + ['quoteright']={ + { 'quoteright','quotedblright' } + }, + ['hyphen']={ + { 'hyphen','endash' } + }, + ['endash']={ + { 'hyphen','emdash' } + } + }, + leftkerned=allocate { + AEligature="A",aeligature="a", + OEligature="O",oeligature="o", + IJligature="I",ijligature="i", + AE="A",ae="a", + OE="O",oe="o", + IJ="I",ij="i", + Ssharp="S",ssharp="s", + }, + rightkerned=allocate { + AEligature="E",aeligature="e", + OEligature="E",oeligature="e", + IJligature="J",ijligature="j", + AE="E",ae="e", + OE="E",oe="e", + IJ="J",ij="j", + Ssharp="S",ssharp="s", + }, + bothkerned=allocate { + Acircumflex="A",acircumflex="a", + Ccircumflex="C",ccircumflex="c", + Ecircumflex="E",ecircumflex="e", + Gcircumflex="G",gcircumflex="g", + Hcircumflex="H",hcircumflex="h", + Icircumflex="I",icircumflex="i", + Jcircumflex="J",jcircumflex="j", + Ocircumflex="O",ocircumflex="o", + Scircumflex="S",scircumflex="s", + Ucircumflex="U",ucircumflex="u", + Wcircumflex="W",wcircumflex="w", + Ycircumflex="Y",ycircumflex="y", + Agrave="A",agrave="a", + Egrave="E",egrave="e", + Igrave="I",igrave="i", + Ograve="O",ograve="o", + Ugrave="U",ugrave="u", + Ygrave="Y",ygrave="y", + Atilde="A",atilde="a", + Itilde="I",itilde="i", + Otilde="O",otilde="o", + Utilde="U",utilde="u", + Ntilde="N",ntilde="n", + Adiaeresis="A",adiaeresis="a",Adieresis="A",adieresis="a", + Ediaeresis="E",ediaeresis="e",Edieresis="E",edieresis="e", + Idiaeresis="I",idiaeresis="i",Idieresis="I",idieresis="i", + Odiaeresis="O",odiaeresis="o",Odieresis="O",odieresis="o", + Udiaeresis="U",udiaeresis="u",Udieresis="U",udieresis="u", + Ydiaeresis="Y",ydiaeresis="y",Ydieresis="Y",ydieresis="y", + Aacute="A",aacute="a", + Cacute="C",cacute="c", + Eacute="E",eacute="e", + Iacute="I",iacute="i", + Lacute="L",lacute="l", + Nacute="N",nacute="n", + Oacute="O",oacute="o", + Racute="R",racute="r", + Sacute="S",sacute="s", + Uacute="U",uacute="u", + Yacute="Y",yacute="y", + Zacute="Z",zacute="z", + Dstroke="D",dstroke="d", + Hstroke="H",hstroke="h", + Tstroke="T",tstroke="t", + Cdotaccent="C",cdotaccent="c", + Edotaccent="E",edotaccent="e", + Gdotaccent="G",gdotaccent="g", + Idotaccent="I",idotaccent="i", + Zdotaccent="Z",zdotaccent="z", + Amacron="A",amacron="a", + Emacron="E",emacron="e", + Imacron="I",imacron="i", + Omacron="O",omacron="o", + Umacron="U",umacron="u", + Ccedilla="C",ccedilla="c", + Kcedilla="K",kcedilla="k", + Lcedilla="L",lcedilla="l", + Ncedilla="N",ncedilla="n", + Rcedilla="R",rcedilla="r", + Scedilla="S",scedilla="s", + Tcedilla="T",tcedilla="t", + Ohungarumlaut="O",ohungarumlaut="o", + Uhungarumlaut="U",uhungarumlaut="u", + Aogonek="A",aogonek="a", + Eogonek="E",eogonek="e", + Iogonek="I",iogonek="i", + Uogonek="U",uogonek="u", + Aring="A",aring="a", + Uring="U",uring="u", + Abreve="A",abreve="a", + Ebreve="E",ebreve="e", + Gbreve="G",gbreve="g", + Ibreve="I",ibreve="i", + Obreve="O",obreve="o", + Ubreve="U",ubreve="u", + Ccaron="C",ccaron="c", + Dcaron="D",dcaron="d", + Ecaron="E",ecaron="e", + 
+ Lcaron="L",lcaron="l",
+ Ncaron="N",ncaron="n",
+ Rcaron="R",rcaron="r",
+ Scaron="S",scaron="s",
+ Tcaron="T",tcaron="t",
+ Zcaron="Z",zcaron="z",
+ dotlessI="I",dotlessi="i",
+ dotlessJ="J",dotlessj="j",
+ AEligature="AE",aeligature="ae",AE="AE",ae="ae",
+ OEligature="OE",oeligature="oe",OE="OE",oe="oe",
+ IJligature="IJ",ijligature="ij",IJ="IJ",ij="ij",
+ Lstroke="L",lstroke="l",Lslash="L",lslash="l",
+ Ostroke="O",ostroke="o",Oslash="O",oslash="o",
+ Ssharp="SS",ssharp="ss",
+ Aumlaut="A",aumlaut="a",
+ Eumlaut="E",eumlaut="e",
+ Iumlaut="I",iumlaut="i",
+ Oumlaut="O",oumlaut="o",
+ Uumlaut="U",uumlaut="u",
+ }
+}
+
+end -- closure
+
+do -- begin closure to overcome local limits and interference
+
 if not modules then modules={} end modules ['font-lua']={
 version=1.001,
 comment="companion to font-ini.mkiv",
@@ -24164,17 +24189,18 @@ function nodes.handlers.nodepass(head)
 local variant=hash[getchar(p)]
 if variant then
 setchar(p,variant)
- if not redundant then
- redundant={ n }
- else
- redundant[#redundant+1]=n
- end
 end
 end
 end
+ if not redundant then
+ redundant={ n }
+ else
+ redundant[#redundant+1]=n
+ end
 end
 end
 end
+ local nofbasefonts=#basefonts
 if redundant then
 for i=1,#redundant do
 local r=redundant[i]
@@ -24185,8 +24211,8 @@ function nodes.handlers.nodepass(head)
 else
 setlink(p,n)
 end
- if b>0 then
- for i=1,b do
+ if nofbasefonts>0 then
+ for i=1,nofbasefonts do
 local bi=basefonts[i]
 if r==bi[1] then
 bi[1]=n
@@ -24230,8 +24256,8 @@
 end
 end
 end
- if basemodepass and #basefonts>0 then
- for i=1,#basefonts do
+ if basemodepass and nofbasefonts>0 then
+ for i=1,nofbasefonts do
 local range=basefonts[i]
 local start=range[1]
 local stop=range[2]
diff --git a/tex/generic/context/luatex/luatex-fonts.lua b/tex/generic/context/luatex/luatex-fonts.lua
index e1ec3764e..1d2f2037f 100644
--- a/tex/generic/context/luatex/luatex-fonts.lua
+++ b/tex/generic/context/luatex/luatex-fonts.lua
@@ -230,8 +230,6 @@ if non_generic_context.luatex_fonts.skip_loading ~= true then
 loadmodule('luatex-fonts-syn.lua')

 loadmodule('font-tfm.lua')
- loadmodule('font-afm.lua')
- loadmodule('font-afk.lua')
 loadmodule('font-oti.lua')

 -- These are the old loader and processing modules. These use the built-in font loader and
@@ -260,6 +258,11 @@ if non_generic_context.luatex_fonts.skip_loading ~= true then
 loadmodule('font-ots.lua')
 loadmodule('font-osd.lua')

+ -- type one code
+
+ loadmodule('font-one.lua') -- was font-afm.lua
+ loadmodule('font-afk.lua')
+
 -- common code

 loadmodule('font-lua.lua')
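The helpdata added in font-afk.lua above is plain data: its ligatures table maps a starting glyph name to { next-glyph, ligature-glyph } name pairs, and addthem in font-one.lua folds those pairs into each glyph description's ligatures table, keyed and valued by Unicode. A minimal standalone sketch of that folding step, using toy unicodes and descriptions tables rather than the loader's real data:

-- standalone sketch, not part of this commit: mimics how addthem() in font-one.lua
-- folds afm.helpdata.ligatures (font-afk.lua) into per-glyph 'ligatures' tables;
-- unicodes/descriptions below are hypothetical stand-ins for the loader's data
local unicodes={ f=0x66, i=0x69, fi=0xFB01 }
local descriptions={
 [0x66]={ name="f" },
 [0x69]={ name="i" },
 [0xFB01]={ name="fi" },
}
local helpligatures={ ['f']={ { 'i','fi' } } } -- same shape as the font-afk.lua table
for ligname,ligdata in next,helpligatures do
 local one=descriptions[unicodes[ligname]]
 if one then
  for _,pair in next,ligdata do
   local two,three=unicodes[pair[1]],unicodes[pair[2]]
   if two and three then
    local ol=one.ligatures
    if ol then
     if not ol[two] then
      ol[two]=three
     end
    else
     one.ligatures={ [two]=three }
    end
   end
  end
 end
end
print(descriptions[0x66].ligatures[0x69]==0xFB01) -- true: f + i -> fi (U+FB01)

The prepareligatures helper in the same file later copies these description-level tables into character.ligatures entries of the form { char=..., type=0 }, which is the shape the engine-side ligaturing uses in base mode.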