From 0da1a7a94f55a5dc0d318f399eb843303d5b62f6 Mon Sep 17 00:00:00 2001
From: Hans Hagen
The code defined here may move to the big character table.
--ldx]]-- -characters.basedigits = { +characters.basedigits = allocate { ['zero'] = 48, ['one'] = 49, ['two'] = 50, ['three'] = 51, ['four'] = 52, ['five'] = 53, @@ -50,7 +52,7 @@ Of course they may come in handy elsewhere too -- => shcode == { ub('a') } -- => reduction = "a" -uncomposed.left = { +uncomposed.left = allocate { AEligature = "A", aeligature = "a", OEligature = "O", oeligature = "o", IJligature = "I", ijligature = "i", @@ -60,7 +62,7 @@ uncomposed.left = { Ssharp = "S", ssharp = "s", } -uncomposed.right = { +uncomposed.right = allocate { AEligature = "E", aeligature = "e", OEligature = "E", oeligature = "e", IJligature = "J", ijligature = "j", @@ -70,7 +72,7 @@ uncomposed.right = { Ssharp = "S", ssharp = "s", } -uncomposed.both = { +uncomposed.both = allocate { Acircumflex = "A", acircumflex = "a", Ccircumflex = "C", ccircumflex = "c", Ecircumflex = "E", ecircumflex = "e", @@ -222,7 +224,7 @@ is that a character can be in an encoding twice but is hashed once. --ldx]]-- -characters.ligatures = { +characters.ligatures = allocate { ['f'] = { { 'f', 'ff' }, { 'i', 'fi' }, @@ -245,7 +247,7 @@ characters.ligatures = { }, } -characters.texligatures = { +characters.texligatures = allocate { -- ['space'] = { -- { 'L', 'Lslash' }, -- { 'l', 'lslash' } diff --git a/tex/context/base/char-enc.lua b/tex/context/base/char-enc.lua index 8addff0e8..bdca9582c 100644 --- a/tex/context/base/char-enc.lua +++ b/tex/context/base/char-enc.lua @@ -8,10 +8,12 @@ if not modules then modules = { } end modules ['char-syn'] = { -- thanks to tex4ht for these mappings +local allocate = utilities.storage.allocate + characters = characters or { } local characters = characters -characters.synonyms = { +characters.synonyms = allocate { angle = 0x2220, anticlockwise = 0x21BA, arrowaxisleft = 0x2190, diff --git a/tex/context/base/char-ini.lua b/tex/context/base/char-ini.lua index a24de6e23..4893875c3 100644 --- a/tex/context/base/char-ini.lua +++ b/tex/context/base/char-ini.lua @@ -14,10 +14,17 @@ local concat = table.concat local next, tonumber = next, tonumber local texsprint, texprint = tex.sprint, tex.print local format, lower, gsub, match, gmatch = string.format, string.lower, string.gsub, string.match, string.match, string.gmatch +local texsetlccode, texsetuccode, texsetsfcode, texsetcatcode = tex.setlccode, tex.setuccode, tex.setsfcode, tex.setcatcode + +local allocate, mark = utilities.storage.allocate, utilities.storage.mark local ctxcatcodes = tex.ctxcatcodes local texcatcodes = tex.texcatcodes +local trace_defining = false trackers.register("characters.defining", function(v) characters_defining = v end) + +local report_defining = logs.new("characters") + --[[ldx--This module implements some methods and creates additional datastructured from the big character table that we use for all kind of purposes: @@ -27,15 +34,23 @@ from the big character table that we use for all kind of purposes: loaded!
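The ranges mechanism set up right below deserves a small illustration. The following is only a usage sketch, not part of the patch: it assumes that blocks such as the CJK ideographs are stored in char-def.lua as one entry carrying a range field, so that a lookup for an individual code point gets redirected by the __index handler to the first entry of its range.

```lua
-- usage sketch (not part of the patch): 0x4E2D sits inside the CJK range,
-- so the __index handler installed below maps it onto the first entry of
-- that range instead of failing
local chardata = characters.data
local info     = chardata[0x4E2D]
if info then
    print(info.category)                         -- "lo", shared by the whole range
    print(info.range and "range entry" or "own entry")
end
```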
--ldx]]-- -characters = characters or { } +characters = characters or { } local characters = characters -characters.data = characters.data or { } + local data = characters.data +if data then + mark(data) -- why does this fail +else + report_defining("fatal error: 'char-def.lua' is not loaded") + os.exit() +end + if not characters.ranges then - characters.ranges = { } + local ranges = allocate { } + characters.ranges = ranges for k, v in next, data do - characters.ranges[#characters.ranges+1] = k + ranges[#ranges+1] = k end end @@ -43,20 +58,18 @@ storage.register("characters/ranges",characters.ranges,"characters.ranges") local ranges = characters.ranges -setmetatable(data, { - __index = function(t,k) - for r=1,#ranges do - local rr = ranges[r] -- first in range - if k > rr and k <= data[rr].range then - t[k] = t[rr] - return t[k] - end +setmetatablekey(data, "__index", function(t,k) + for r=1,#ranges do + local rr = ranges[r] -- first in range + if k > rr and k <= data[rr].range then + t[k] = t[rr] + return t[k] end - return nil end -}) + return nil +end ) -characters.blocks = { +characters.blocks = allocate { ["aegeannumbers"] = { 0x10100, 0x1013F, "Aegean Numbers" }, ["alphabeticpresentationforms"] = { 0x0FB00, 0x0FB4F, "Alphabetic Presentation Forms" }, ["ancientgreekmusicalnotation"] = { 0x1D200, 0x1D24F, "Ancient Greek Musical Notation" }, @@ -249,7 +262,7 @@ function characters.getrange(name) return slot, slot, nil end -characters.categories = { +characters.categories = allocate { lu = "Letter Uppercase", ll = "Letter Lowercase", lt = "Letter Titlecase", @@ -285,22 +298,26 @@ characters.categories = { --~ special : cf (softhyphen) zs (emspace) --~ characters: ll lm lo lt lu mn nl no pc pd pe pf pi po ps sc sk sm so -characters.is_character = table.tohash { +local is_character = allocate ( table.tohash { "lu","ll","lt","lm","lo", "nd","nl","no", "mn", "nl","no", "pc","pd","ps","pe","pi","pf","po", "sm","sc","sk","so" -} +} ) -characters.is_letter = table.tohash { +local is_letter = allocate ( table.tohash { "ll","lm","lo","lt","lu" -} +} ) -characters.is_command = table.tohash { +local is_command = allocate ( table.tohash { "cf","zs" -} +} ) + +characters.is_character = is_character +characters.is_letter = is_letter +characters.is_command = is_command -- linebreak: todo: hash -- @@ -311,7 +328,7 @@ characters.is_command = table.tohash { -- -- N A H W F Na -characters.bidi = { +characters.bidi = allocate { l = "Left-to-Right", lre = "Left-to-Right Embedding", lro = "Left-to-Right Override", @@ -360,8 +377,8 @@ if not characters.fallbacks then end -storage.register("characters.fallbacks", characters.fallbacks, "characters.fallbacks") -storage.register("characters.directions", characters.directions, "characters.directions") +storage.register("characters/fallbacks", characters.fallbacks, "characters.fallbacks") +storage.register("characters/directions", characters.directions, "characters.directions") --[[ldx--The
Setting the lccodes is also done in a loop over the data table.
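A condensed sketch of what that loop amounts to with the LuaTeX setters the patch switches to (tex.setcatcode, tex.setlccode and tex.setsfcode; the optional third argument of setlccode sets the uccode in the same call). It is illustrative only and leaves out the range handling that the real function performs.

```lua
-- illustrative sketch of the per-letter code assignment
local texsetcatcode, texsetlccode, texsetsfcode = tex.setcatcode, tex.setlccode, tex.setsfcode

for code, chr in next, characters.data do
    local cc = chr.category
    if cc == "ll" or cc == "lu" or cc == "lt" then
        texsetcatcode(code,11)                                    -- letter
        texsetlccode(code,chr.lccode or code,chr.uccode or code)  -- lc plus optional uc
        if cc == "lu" then
            texsetsfcode(code,999)                                -- uppercase: space factor 999
        end
    end
end
```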
--ldx]]-- +--~ function tex.setsfcode (index,sf) ... end +--~ function tex.setlccode (index,lc,[uc]) ... end -- optional third value, safes call +--~ function tex.setuccode (index,uc,[lc]) ... end +--~ function tex.setcatcode(index,cc) ... end + -- we need a function ... -function characters.setcodes() - for code, chr in next, data do - local cc = chr.category - if cc == 'll' or cc == 'lu' or cc == 'lt' then - local lc, uc = chr.lccode, chr.uccode - if not lc then chr.lccode, lc = code, code end - if not uc then chr.uccode, uc = code, code end - texsprint(ctxcatcodes,format("\\setcclcuc{%i}{%i}{%i}",code,lc,uc)) +--~ tex.lccode +--~ tex.uccode +--~ tex.sfcode +--~ tex.catcode + +if texsetcatcode then + + function characters.setcodes() + if trace_defining then + report_defining("defining lc and uc codes") end - if cc == "lu" then - texprint(ctxcatcodes,"\\sfcode ",code,"999 ") + for code, chr in next, data do + local cc = chr.category + if cc == 'll' or cc == 'lu' or cc == 'lt' then + local lc, uc = chr.lccode, chr.uccode + if not lc then chr.lccode, lc = code, code end + if not uc then chr.uccode, uc = code, code end + texsetcatcode(code,11) -- letter + texsetlccode(code,lc,uc) + if cc == "lu" then + texsetsfcode(code,999) + end + elseif cc == "lo" and chr.range then + for i=code,chr.range do + texsetcatcode(code,11) -- letter + texsetlccode(code,code,code) -- self self + end + end end - if cc == "lo" and chr.range then - texsprint(ctxcatcodes,format('\\dofastrecurse{"%05X}{"%05X}{1}{\\setcclcucself\\fastrecursecounter}',code,chr.range)) + end + +else -- keep this one + + function characters.setcodes() + for code, chr in next, data do + local cc = chr.category + if cc == 'll' or cc == 'lu' or cc == 'lt' then + local lc, uc = chr.lccode, chr.uccode + if not lc then chr.lccode, lc = code, code end + if not uc then chr.uccode, uc = code, code end + texsprint(ctxcatcodes,format("\\setcclcuc{%i}{%i}{%i}",code,lc,uc)) + end + if cc == "lu" then + texprint(ctxcatcodes,"\\sfcode ",code,"999 ") + end + if cc == "lo" and chr.range then + texsprint(ctxcatcodes,format('\\dofastrecurse{"%05X}{"%05X}{1}{\\setcclcucself\\fastrecursecounter}',code,chr.range)) + end end end + end --[[ldx-- @@ -649,6 +787,18 @@ function characters.upper(str) return concat(new) end +function characters.lettered(str) + local new = { } + for u in utfvalues(str) do + local d = data[u] + if is_letter[d.category] then + new[#new+1] = utfchar(d.lccode or u) + end + end + return concat(new) +end + + -- -- some day we might go this route, but it does not really save that much -- -- so not now (we can generate a lot using mtx-unicode that operates on the -- -- database) diff --git a/tex/context/base/char-ini.mkiv b/tex/context/base/char-ini.mkiv index 0d5e16bb0..a89c448be 100644 --- a/tex/context/base/char-ini.mkiv +++ b/tex/context/base/char-ini.mkiv @@ -56,21 +56,22 @@ \number\mthcatcodes, \number\vrbcatcodes, \number\prtcatcodes, - \number\xmlcatcodesn, - \number\xmlcatcodese, - \number\xmlcatcodesr, +% \number\xmlcatcodesn, +% \number\xmlcatcodese, +% \number\xmlcatcodesr, \number\typcatcodesa, \number\typcatcodesb, + \number\txtcatcodes, }, { % activate catcodes \number\ctxcatcodes, \number\notcatcodes, - \number\xmlcatcodesn, - \number\xmlcatcodese, - \number\xmlcatcodesr, +% \number\xmlcatcodesn, +% \number\xmlcatcodese, +% \number\xmlcatcodesr, } ) - catcodes.register("xmlcatcodes",\number\xmlcatcodes) +% catcodes.register("xmlcatcodes",\number\xmlcatcodes) } \protect \endinput diff --git a/tex/context/base/char-tex.lua 
b/tex/context/base/char-tex.lua index 5a1edb42e..6e57a860a 100644 --- a/tex/context/base/char-tex.lua +++ b/tex/context/base/char-tex.lua @@ -12,11 +12,13 @@ local lpeg = lpeg local P, C, R, S, Cs, Cc = lpeg.P, lpeg.C, lpeg.R, lpeg.S, lpeg.Cs, lpeg.Cc local U, lpegmatch = lpeg.patterns.utf8, lpeg.match +local allocate, mark = utilities.storage.allocate, utilities.storage.mark + characters = characters or { } local characters = characters characters.tex = characters.tex or { } -local accent_map = { +local accent_map = allocate { ['~'] = "̃" , -- ̃ Ẽ ['"'] = "̈" , -- ̈ Ë ["`"] = "̀" , -- ̀ È @@ -49,7 +51,7 @@ local function remap_accents(a,c,braced) end end -local command_map = { +local command_map = allocate { ["i"] = "ı" } diff --git a/tex/context/base/char-utf.lua b/tex/context/base/char-utf.lua index 680d426b0..a0a611e9a 100644 --- a/tex/context/base/char-utf.lua +++ b/tex/context/base/char-utf.lua @@ -25,23 +25,24 @@ local concat, gmatch, gsub = table.concat, string.gmatch, string.gsub local utfcharacters, utfvalues = string.utfcharacters, string.utfvalues local ctxcatcodes = tex.ctxcatcodes local texsprint = tex.sprint +local allocate = utilities.storage.allocate -- todo: trackers -characters = characters or { } -local characters = characters +characters = characters or { } +local characters = characters -characters.graphemes = characters.graphemes or { } -local graphemes = characters.graphemes +characters.graphemes = allocate() +local graphemes = characters.graphemes -characters.filters = characters.filters or { } -local filters = characters.filters +characters.filters = allocate() +local filters = characters.filters -filters.utf = filters.utf or { } -local utffilters = characters.filters.utf +filters.utf = filters.utf or { } +local utffilters = characters.filters.utf -utffilters.collapsing = true -utffilters.expanding = true +utffilters.collapsing = true +utffilters.expanding = true --[[ldx--It only makes sense to collapse at runtime, since we don't expect @@ -123,9 +124,9 @@ to their right glyph there.
0x100000. --ldx]]-- -local low = { } -local high = { } -local escapes = { } +local low = allocate({ }) +local high = allocate({ }) +local escapes = allocate({ }) local special = "~#$%^&_{}\\|" local private = { diff --git a/tex/context/base/char-utf.mkiv b/tex/context/base/char-utf.mkiv index 16b4029d8..b96aec38d 100644 --- a/tex/context/base/char-utf.mkiv +++ b/tex/context/base/char-utf.mkiv @@ -31,7 +31,7 @@ \appendtoks \ctxlua { characters.filters.utf.collapsing = true - resolvers.install_text_filter('utf',characters.filters.utf.collapse) + resolvers.filters.install('utf',characters.filters.utf.collapse) }% \to \everyjob diff --git a/tex/context/base/colo-icc.lua b/tex/context/base/colo-icc.lua index 5df4b9663..fd30b63e4 100644 --- a/tex/context/base/colo-icc.lua +++ b/tex/context/base/colo-icc.lua @@ -17,7 +17,7 @@ local invalid = R(char(0)..char(31)) local cleaned = invalid^0 * Cs((1-invalid)^0) function colors.iccprofile(filename,verbose) - local fullname = resolvers.find_file(filename,"icc") or "" + local fullname = resolvers.findfile(filename,"icc") or "" if fullname == "" then local locate = resolvers.finders.loc -- not in mtxrun if locate then diff --git a/tex/context/base/cont-new.tex b/tex/context/base/cont-new.tex index 8c0852955..0fdce6595 100644 --- a/tex/context/base/cont-new.tex +++ b/tex/context/base/cont-new.tex @@ -11,7 +11,7 @@ %C therefore copyrighted by \PRAGMA. See mreadme.pdf for %C details. -\newcontextversion{2010.08.20 00:00} +\newcontextversion{2010.09.03 11:05} %D This file is loaded at runtime, thereby providing an %D excellent place for hacks, patches, extensions and new diff --git a/tex/context/base/context.mkii b/tex/context/base/context.mkii index 4be60cbfc..36b2cd9e9 100644 --- a/tex/context/base/context.mkii +++ b/tex/context/base/context.mkii @@ -29,6 +29,7 @@ \loadcorefile{catc-def} \loadcorefile{catc-ctx} \loadcorefile{catc-sym} +\loadcorefile{catc-xml} \loadmarkfile{syst-gen} \loadmarkfile{syst-ext} diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv index c8271fcb1..371d363b4 100644 --- a/tex/context/base/context.mkiv +++ b/tex/context/base/context.mkiv @@ -169,15 +169,15 @@ \loadmarkfile{strc-ini} \loadmarkfile{strc-tag} \loadmarkfile{strc-doc} +\loadmarkfile{strc-num} \loadmarkfile{strc-mar} \loadmarkfile{strc-prc} \loadmarkfile{strc-sbe} \loadmarkfile{strc-lst} \loadmarkfile{strc-sec} -\loadmarkfile{strc-num} +\loadmarkfile{strc-pag} % hm, depends on core-num \loadmarkfile{strc-ren} \loadmarkfile{strc-xml} -\loadmarkfile{strc-pag} % hm, depends on core-num \loadmarkfile{strc-def} % might happen later \loadmarkfile{strc-ref} \loadmarkfile{strc-reg} @@ -339,7 +339,7 @@ \loadmarkfile{bibl-bib} \loadmarkfile{bibl-tra} -\loadmarkfile{x-xtag} % at some point this will not be preloaded +%loadmarkfile{x-xtag} % no longer preloaded \loadcorefile{meta-xml} diff --git a/tex/context/base/context.tex b/tex/context/base/context.tex index 351667d9d..494462f6a 100644 --- a/tex/context/base/context.tex +++ b/tex/context/base/context.tex @@ -20,7 +20,7 @@ %D your styles an modules. 
\edef\contextformat {\jobname} -\edef\contextversion{2010.08.20 00:00} +\edef\contextversion{2010.09.03 11:05} %D For those who want to use this: diff --git a/tex/context/base/core-con.lua b/tex/context/base/core-con.lua index e83f9a487..7c1bb01f9 100644 --- a/tex/context/base/core-con.lua +++ b/tex/context/base/core-con.lua @@ -18,11 +18,12 @@ local utf = unicode.utf8 local floor, date, time, concat = math.floor, os.date, os.time, table.concat local lower, format, rep = string.lower, string.format, string.rep -local texsprint, utfchar = tex.sprint, utf.char +local utfchar, utfbyte = utf.char, utf.byte local tonumber, tostring = tonumber, tostring -local settings_to_array = utilities.parsers.settings_to_array -local ctxcatcodes = tex.ctxcatcodes +local settings_to_array = utilities.parsers.settings_to_array +local texsprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes +local allocate = utilities.storage.allocate converters = converters or { } local converters = converters @@ -56,7 +57,9 @@ end --~ 0x06F5, 0x06F6, 0x06F7, 0x06F8, 0x06F9 --~ }, -languages.counters = { +-- to be reconsidered ... languages namespace here, might become local plus a register command + +languages.counters = allocate { ['**'] = { 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006A, @@ -134,13 +137,13 @@ languages.counters = { local counters = languages.counters -counters['ar'] = counters['arabic'] -counters['gr'] = counters['greek'] -counters['g'] = counters['greek'] -counters['sl'] = counters['slovenian'] -counters['kr'] = counters['korean'] -counters['kr-p'] = counters['korean-parent'] -counters['kr-c'] = counters['korean-circle'] +counters['ar'] = counters['arabic'] +counters['gr'] = counters['greek'] +counters['g'] = counters['greek'] +counters['sl'] = counters['slovenian'] +counters['kr'] = counters['korean'] +counters['kr-p'] = counters['korean-parent'] +counters['kr-c'] = counters['korean-circle'] local fallback = utf.byte('0') diff --git a/tex/context/base/core-ini.mkiv b/tex/context/base/core-ini.mkiv index d6a72bb26..cd2d2e25e 100644 --- a/tex/context/base/core-ini.mkiv +++ b/tex/context/base/core-ini.mkiv @@ -56,11 +56,4 @@ %appendtoks \setlastlinewidth \to \everyendofpar % gone, will be done in lua \appendtoks \endgraf \to \everyendofpar -% Todo: verbatim, xml, tex, move code to here - -\ifx\normalcompound\undefined \let\normalcompound=| \fi - -\appendtoks \catcode`|=\@@active \let|\normalcompound \to \everyTEXinputmode -\appendtoks \catcode`|=\@@letter \to \everyXMLinputmode - \protect \endinput diff --git a/tex/context/base/core-job.lua b/tex/context/base/core-job.lua index f58344f93..474eb86bf 100644 --- a/tex/context/base/core-job.lua +++ b/tex/context/base/core-job.lua @@ -49,7 +49,7 @@ function resolvers.findctxfile(name,maxreadlevel) end end end - return resolvers.find_file(name) or "" + return resolvers.findfile(name) or "" end end @@ -73,12 +73,12 @@ function commands.locatefilepath(name,maxreadlevel) end function commands.usepath(paths,maxreadlevel) - resolvers.register_extra_path(paths) + resolvers.registerextrapath(paths) texsprint(texcatcodes,concat(resolvers.instance.extra_paths or {}, "")) end function commands.usesubpath(subpaths,maxreadlevel) - resolvers.register_extra_path(nil,subpaths) + resolvers.registerextrapath(nil,subpaths) texsprint(texcatcodes,concat(resolvers.instance.extra_paths or {}, "")) end @@ -113,13 +113,13 @@ local function convertexamodes(str) end end --- we need a system file option: ,. .. 
etc + paths but no tex lookup so resolvers.find_file is wrong here +-- we need a system file option: ,. .. etc + paths but no tex lookup so resolvers.findfile is wrong here function commands.loadexamodes(filename) if not filename or filename == "" then filename = file.removesuffix(tex.jobname) end - filename = resolvers.find_file(file.addsuffix(filename,'ctm')) or "" + filename = resolvers.findfile(file.addsuffix(filename,'ctm')) or "" if filename ~= "" then commands.writestatus("examodes","loading %s",filename) -- todo: message system convertexamodes(io.loaddata(filename)) diff --git a/tex/context/base/core-job.mkiv b/tex/context/base/core-job.mkiv index e52aeeaac..85f20db12 100644 --- a/tex/context/base/core-job.mkiv +++ b/tex/context/base/core-job.mkiv @@ -65,7 +65,7 @@ \def\registerfileinfo[#1#2]#3% geen \showmessage ? {\writestatus\m!systems{#1#2 file #3 at line \the\inputlineno}} -\ifx\preloadfonts \undefined \let\preloadfonts \relax \fi +\ifdefined\preloadfonts\else \let\preloadfonts\relax \fi \def\loadallsystemfiles#1#2% {\ifx\@@svdirectory\empty @@ -76,8 +76,6 @@ \processcommacommand[\@@svdirectory]\doloadsystemfile \fi} -\ifx\disableXML\undefined \let\disableXML\relax \fi - \def\loadsystemfiles {\reportprotectionstate \readsysfile\f!newfilename{\showmessage\m!systems2\f!newfilename}\donothing @@ -86,13 +84,11 @@ \donothing \loadallsystemfiles\f!sysfilename {\loadallsystemfiles{\f!sysfilename.rme}\donothing % new, fall back - \doglobal\appendtoks % brrr better \setcatcodetable\ctxcatcodes % % test - \bgroup\disableXML\loadallsystemfiles\f!errfilename\donothing\egroup - \to\everygoodbye}} + \loadallsystemfiles \f!errfilename \donothing}} %D We don't want multiple jobfiles to interfere. -\def\loadoptionfile +\def\loadoptionfile % todo : mark document.* tables as storage {\readjobfile{\jobname.\f!optionextension} {\showmessage\m!systems2{\jobname.\f!optionextension}% \ctxlua{commands.logoptionfile("\jobname.\f!optionextension")}}% diff --git a/tex/context/base/core-mis.mkiv b/tex/context/base/core-mis.mkiv index 6e159532f..88da7fb5e 100644 --- a/tex/context/base/core-mis.mkiv +++ b/tex/context/base/core-mis.mkiv @@ -745,23 +745,6 @@ \unexpanded\def\stopsubsentence {\endofsubsentencespacing\prewordbreak\endofsubsentence} \unexpanded\def\subsentence {\groupedcommand\startsubsentence\stopsubsentence} -%D \defineXMLenvironment [subsentence] -%D {|<|} -%D {|>|} -%D \defineXMLenvironment [subsentence] -%D {\directdiscretionary{<}} -%D {\directdiscretionary{>}} -%D \defineXMLenvironment [subsentence] -%D {\startsubsentence} -%D {\stopsubsentence} -%D -%D \startbuffer -%D testWe save multi-pass information in the main utility table. This is a @@ -16,19 +18,18 @@ bit of a mess because we support old and new methods.
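Reduced to its core, the save/restore idiom these modules are moved to looks roughly as follows: allocate creates the initial (storage-tagged) tables, and mark presumably re-tags the tables that come back when the utility file is loaded, so the local aliases stay valid across the reload. The module name and register key below are made up for illustration.

```lua
-- illustrative reduction of the two-pass data idiom used in this patch
local allocate, mark = utilities.storage.allocate, utilities.storage.mark

local collected, tobesaved = allocate(), allocate()

local mydata = { collected = collected, tobesaved = tobesaved }
job.mydata   = mydata                        -- hypothetical namespace

local function initializer()
    -- rebind the locals to whatever the loaded utility file put in place
    collected = mark(mydata.collected)
    tobesaved = mark(mydata.tobesaved)
end

job.register('job.mydata.collected', tobesaved, initializer, nil)
```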
--ldx]]-- local jobpasses = { - collected = { }, - tobesaved = { }, + collected = collected, + tobesaved = tobesaved, } job.passes = jobpasses -local collected, tobesaved = jobpasses.collected, jobpasses.tobesaved - local function initializer() - collected, tobesaved = jobpasses.collected, jobpasses.tobesaved + collected = mark(jobpasses.collected) + tobesaved = mark(jobpasses.tobesaved) end -job.register('job.passes.collected', jobpasses.tobesaved, initializer, nil) +job.register('job.passes.collected', tobesaved, initializer, nil) local function allocate(id) local p = tobesaved[id] diff --git a/tex/context/base/core-uti.lua b/tex/context/base/core-uti.lua index c8dc0f73d..1681646df 100644 --- a/tex/context/base/core-uti.lua +++ b/tex/context/base/core-uti.lua @@ -22,15 +22,18 @@ local next, type, tostring = next, type, tostring local texsprint, ctxcatcodes = tex.sprint, tex.ctxcatcodes local definetable, accesstable = utilities.tables.definetable, utilities.tables.accesstable local serialize = table.serialize +local packers = utilities.packers +local allocate, mark = utilities.storage.allocate, utilities.storage.mark local report_jobcontrol = logs.new("jobcontrol") -if not jobs then jobs = { } end -if not job then jobs['main'] = { } end job = jobs['main'] +job = job or { } +local job = job -local packers = utilities.packers +job.version = 1.14 -jobs.version = 1.14 +-- some day we will implement loading of other jobs and then we need +-- job.jobs --[[ldx--Variables are saved using in the previously defined table and passed @@ -44,7 +47,7 @@ function job.comment(str) comment[#comment+1] = str end -job.comment(format("version: %1.2f",jobs.version)) +job.comment(format("version: %1.2f",job.version)) function job.initialize(loadname,savename) job.load(loadname) -- has to come after structure is defined ! 
@@ -61,21 +64,26 @@ end -- as an example we implement variables +local tobesaved, collected, checksums = allocate(), allocate(), allocate() + local jobvariables = { - collected = { }, - tobesaved = { }, - checksums = { }, + collected = collected, + tobesaved = tobesaved, + checksums = checksums, } job.variables = jobvariables -if not jobvariables.checksums.old then jobvariables.checksums.old = md5.HEX("old") end -- used in experiment -if not jobvariables.checksums.new then jobvariables.checksums.new = md5.HEX("new") end -- used in experiment +if not checksums.old then checksums.old = md5.HEX("old") end -- used in experiment +if not checksums.new then checksums.new = md5.HEX("new") end -- used in experiment -job.register('job.variables.checksums', jobvariables.checksums) +job.register('job.variables.checksums', checksums) local function initializer() - local r = jobvariables.collected.randomseed + tobesaved = mark(jobvariables.tobesaved) + collected = mark(jobvariables.collected) + checksums = mark(jobvariables.checksums) + local r = collected.randomseed if not r then r = math.random() math.setrandomseedi(r,"initialize") @@ -84,16 +92,16 @@ local function initializer() math.setrandomseedi(r,"previous run") report_jobcontrol("resuming randomizer with %s",r) end - jobvariables.tobesaved.randomseed = r - for cs, value in next, jobvariables.collected do + tobesaved.randomseed = r + for cs, value in next, collected do texsprint(ctxcatcodes,format("\\xdef\\%s{%s}",cs,value)) end end -job.register('job.variables.collected', jobvariables.tobesaved, initializer) +job.register('job.variables.collected', tobesaved, initializer) function jobvariables.save(cs,value) - jobvariables.tobesaved[cs] = value + tobesaved[cs] = value end local packlist = { @@ -149,8 +157,8 @@ function job.load(filename) local data = io.loaddata(filename) if data and data ~= "" then local version = tonumber(match(data,"^-- version: ([%d%.]+)")) - if version ~= jobs.version then - report_jobcontrol("version mismatch with jobfile: %s <> %s", version or "?", jobs.version) + if version ~= job.version then + report_jobcontrol("version mismatch with jobfile: %s <> %s", version or "?", job.version) else local data = loadstring(data) if data then diff --git a/tex/context/base/core-var.mkiv b/tex/context/base/core-var.mkiv index 62cc9fc50..60b15437e 100644 --- a/tex/context/base/core-var.mkiv +++ b/tex/context/base/core-var.mkiv @@ -138,21 +138,6 @@ \unexpanded\def\starttextproperties{\the\everystarttextproperties} \unexpanded\def\stoptextproperties {\the\everystoptextproperties} -%D \macros -%D {defineinputmode,setinputmode} -%D -%D New. Some work needs to be done. - -% not in mkiv - -\unexpanded\def\defineinputmode[#1]{\@EA\newtoks\csname every#1inputmode\endcsname} -\def\setinputmode [#1]{\the\executeifdefined{every#1inputmode}\emptytoks} - -\defineinputmode [TEX] -\defineinputmode [XML] - -\setinputmode [TEX] - %D \macros %D {trialtypesetting} %D @@ -179,7 +164,7 @@ %D %D We need this one even if no \XML\ is supported. 
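As a usage note for the job variables code changed above (a sketch, not taken from the patch): a module stores a value with jobvariables.save, and on the next run the initializer replays every collected pair as an \xdef, so the value is available as a macro. The macro name below is hypothetical.

```lua
-- hypothetical example: remember a value for the next run
job.variables.save("MyStoredValue", "42")

-- on the next run the initializer effectively executes, per saved pair:
--   \xdef\MyStoredValue{42}
-- after which TeX code can simply expand \MyStoredValue
```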
-\newif\ifprocessingXML % old way %D \macros %D {ifproductionrun} diff --git a/tex/context/base/data-aux.lua b/tex/context/base/data-aux.lua index 943bf0a52..0a80e04ce 100644 --- a/tex/context/base/data-aux.lua +++ b/tex/context/base/data-aux.lua @@ -15,21 +15,21 @@ local resolvers = resolvers local report_resolvers = logs.new("resolvers") -function resolvers.update_script(oldname,newname) -- oldname -> own.name, not per se a suffix +function resolvers.updatescript(oldname,newname) -- oldname -> own.name, not per se a suffix local scriptpath = "scripts/context/lua" newname = file.addsuffix(newname,"lua") - local oldscript = resolvers.clean_path(oldname) + local oldscript = resolvers.cleanpath(oldname) if trace_locating then report_resolvers("to be replaced old script %s", oldscript) end - local newscripts = resolvers.find_files(newname) or { } + local newscripts = resolvers.findfiles(newname) or { } if #newscripts == 0 then if trace_locating then report_resolvers("unable to locate new script") end else for i=1,#newscripts do - local newscript = resolvers.clean_path(newscripts[i]) + local newscript = resolvers.cleanpath(newscripts[i]) if trace_locating then report_resolvers("checking new script %s", newscript) end diff --git a/tex/context/base/data-con.lua b/tex/context/base/data-con.lua index 05f1b07de..5d9650f8e 100644 --- a/tex/context/base/data-con.lua +++ b/tex/context/base/data-con.lua @@ -50,7 +50,8 @@ local mt = { t.readables = readables return readables end - end + end, + __storage__ = true } function containers.define(category, subcategory, version, enabled) diff --git a/tex/context/base/data-ctx.lua b/tex/context/base/data-ctx.lua index 30f974131..1bb3f9e71 100644 --- a/tex/context/base/data-ctx.lua +++ b/tex/context/base/data-ctx.lua @@ -12,7 +12,7 @@ local report_resolvers = logs.new("resolvers") local resolvers = resolvers -function resolvers.save_used_files_in_trees() +local function saveusedfilesintrees() local jobname = environment.jobname if not jobname or jobname == "" then jobname = "luatex" end local filename = file.replacesuffix(jobname,'jlg') @@ -40,4 +40,4 @@ function resolvers.save_used_files_in_trees() end end -directives.register("system.dumpfiles", function() resolvers.save_used_files_in_trees() end) +directives.register("system.dumpfiles", function() saveusedfilesintrees() end) diff --git a/tex/context/base/data-env.lua b/tex/context/base/data-env.lua index d1c110e80..be596f3bf 100644 --- a/tex/context/base/data-env.lua +++ b/tex/context/base/data-env.lua @@ -6,13 +6,15 @@ if not modules then modules = { } end modules ['data-env'] = { license = "see context related readme files", } +local allocate = utilities.storage.allocate + local resolvers = resolvers -local formats = { } resolvers.formats = formats -local suffixes = { } resolvers.suffixes = suffixes -local dangerous = { } resolvers.dangerous = dangerous -local suffixmap = { } resolvers.suffixmap = suffixmap -local alternatives = { } resolvers.alternatives = alternatives +local formats = allocate() resolvers.formats = formats +local suffixes = allocate() resolvers.suffixes = suffixes +local dangerous = allocate() resolvers.dangerous = dangerous +local suffixmap = allocate() resolvers.suffixmap = suffixmap +local alternatives = allocate() resolvers.alternatives = alternatives formats['afm'] = 'AFMFONTS' suffixes['afm'] = { 'afm' } formats['enc'] = 'ENCFONTS' suffixes['enc'] = { 'enc' } @@ -95,12 +97,12 @@ alternatives['subfont definition files'] = 'sfd' -- A few accessors,
mostly for command line tool. -function resolvers.suffix_of_format(str) +function resolvers.suffixofformat(str) local s = suffixes[str] return s and s[1] or "" end -function resolvers.suffixes_of_format(str) +function resolvers.suffixesofformat(str) return suffixes[str] or { } end @@ -113,13 +115,15 @@ for name, suffixlist in next, suffixes do end end -setmetatable(suffixes, { __newindex = function(suffixes,name,suffixlist) +local mt = getmetatable(suffixes) + +mt.__newindex = function(suffixes,name,suffixlist) rawset(suffixes,name,suffixlist) suffixes[name] = suffixlist for i=1,#suffixlist do suffixmap[suffixlist[i]] = name end -end } ) +end for name, format in next, formats do dangerous[name] = true @@ -135,19 +139,19 @@ dangerous.tex = nil -- more helpers -function resolvers.format_of_var(str) +function resolvers.formatofvariable(str) return formats[str] or formats[alternatives[str]] or '' end -function resolvers.format_of_suffix(str) -- of file +function resolvers.formatofsuffix(str) -- of file return suffixmap[file.extname(str)] or 'tex' end -function resolvers.variable_of_format(str) +function resolvers.variableofformat(str) return formats[str] or formats[alternatives[str]] or '' end -function resolvers.var_of_format_or_suffix(str) +function resolvers.variableofformatorsuffix(str) local v = formats[str] if v then return v diff --git a/tex/context/base/data-exp.lua b/tex/context/base/data-exp.lua index fb7e48efd..6d15a1cd7 100644 --- a/tex/context/base/data-exp.lua +++ b/tex/context/base/data-exp.lua @@ -135,9 +135,9 @@ local function validate(s) return s ~= "" and not find(s,dummy_path_expr) and s end -resolvers.validated_path = validate -- keeps the trailing // +resolvers.validatedpath = validate -- keeps the trailing // -function resolvers.expanded_path_from_list(pathlist) -- maybe not a list, just a path +function resolvers.expandedpathfromlist(pathlist) -- maybe not a list, just a path -- a previous version fed back into pathlist local newlist, ok = { }, false for k=1,#pathlist do @@ -172,7 +172,7 @@ cleanup = lpeg.replacer { { "~" , function() return lpegmatch(cleanup,environment.homedir) end }, } -function resolvers.clean_path(str) +function resolvers.cleanpath(str) return str and lpegmatch(cleanup,str) end @@ -193,7 +193,7 @@ local stripper = lpegCs( lpegpatterns.unspacer * (dosingle + dodouble + dostring) * lpegpatterns.unspacer ) -function resolvers.checked_variable(str) -- assumes str is a string +function resolvers.checkedvariable(str) -- assumes str is a string return lpegmatch(stripper,str) or str end @@ -209,7 +209,7 @@ local cache = { } local splitter = lpegCt(lpeg.splitat(lpegS(ostype == "windows" and ";" or ":;"))) -- maybe add , -local function split_configuration_path(str) -- beware, this can be either a path or a { specification } +local function splitconfigurationpath(str) -- beware, this can be either a path or a { specification } if str then local found = cache[str] if not found then @@ -238,19 +238,19 @@ local function split_configuration_path(str) -- beware, this can be either a pat end end -resolvers.split_configuration_path = split_configuration_path +resolvers.splitconfigurationpath = splitconfigurationpath -function resolvers.split_path(str) +function resolvers.splitpath(str) if type(str) == 'table' then return str else - return split_configuration_path(str) + return splitconfigurationpath(str) end end -function resolvers.join_path(str) +function resolvers.joinpath(str) if type(str) == 'table' then - return file.join_path(str) + return file.joinpath(str)
else return str end @@ -280,7 +280,7 @@ end local weird = lpegP(".")^1 + lpeg.anywhere(lpegS("~`!#$%^&*()={}[]:;\"\'||<>,?\n\r\t")) -function resolvers.scan_files(specification) +function resolvers.scanfiles(specification) if trace_locating then report_resolvers("scanning path '%s'",specification) end @@ -335,4 +335,4 @@ function resolvers.scan_files(specification) return files end ---~ print(table.serialize(resolvers.scan_files("t:/sources"))) +--~ print(table.serialize(resolvers.scanfiles("t:/sources"))) diff --git a/tex/context/base/data-ini.lua b/tex/context/base/data-ini.lua index 63329b6e2..9550c1e78 100644 --- a/tex/context/base/data-ini.lua +++ b/tex/context/base/data-ini.lua @@ -37,14 +37,8 @@ kpse = { original = kpse } setmetatable(kpse, { __index = function(kp,name) - local r = resolvers[name] - if not r then - r = function (...) - report_resolvers("not supported: %s(%s)",name,concat(...)) - end - rawset(kp,name,r) - end - return r + report_resolvers("fatal error: kpse library is accessed (key: %s)",name) + os.exit() end } ) diff --git a/tex/context/base/data-inp.lua b/tex/context/base/data-inp.lua index 45a348b7d..b3e30a6c6 100644 --- a/tex/context/base/data-inp.lua +++ b/tex/context/base/data-inp.lua @@ -6,12 +6,10 @@ if not modules then modules = { } end modules ['data-inp'] = { license = "see context related readme files" } -local resolvers = resolvers +local allocate = utilities.storage.allocate -resolvers.finders = resolvers.finders or { } -resolvers.openers = resolvers.openers or { } -resolvers.loaders = resolvers.loaders or { } +local resolvers = resolvers -resolvers.finders.notfound = { nil } -resolvers.openers.notfound = { nil } -resolvers.loaders.notfound = { false, nil, 0 } +resolvers.finders = allocate { notfound = { nil } } +resolvers.openers = allocate { notfound = { nil } } +resolvers.loaders = allocate { notfound = { false, nil, 0 } } diff --git a/tex/context/base/data-lua.lua b/tex/context/base/data-lua.lua index f163361fe..fc44e5508 100644 --- a/tex/context/base/data-lua.lua +++ b/tex/context/base/data-lua.lua @@ -27,7 +27,7 @@ local _path_, libpaths, _cpath_, clibpaths function package.libpaths() if not _path_ or package.path ~= _path_ then _path_ = package.path - libpaths = file.split_path(_path_,";") + libpaths = file.splitpath(_path_,";") end return libpaths end @@ -35,7 +35,7 @@ end function package.clibpaths() if not _cpath_ or package.cpath ~= _cpath_ then _cpath_ = package.cpath - clibpaths = file.split_path(_cpath_,";") + clibpaths = file.splitpath(_cpath_,";") end return clibpaths end @@ -84,7 +84,7 @@ package.loaders[2] = function(name) -- was [#package.loaders+1] end for i=1,#libformats do local format = libformats[i] - local resolved = resolvers.find_file(name,format) or "" + local resolved = resolvers.findfile(name,format) or "" if trace_locating then -- mode detail report_resolvers("! checking for '%s' using 'libformat path': '%s'",name,format) end @@ -108,7 +108,7 @@ package.loaders[2] = function(name) -- was [#package.loaders+1] for i=1,#clibformats do -- better have a dedicated loop local format = clibformats[i] - local paths = resolvers.expanded_path_list_from_var(format) + local paths = resolvers.expandedpathlistfromvariable(format) for p=1,#paths do local path = paths[p] local resolved = file.join(path,libname) @@ -140,7 +140,7 @@ package.loaders[2] = function(name) -- was [#package.loaders+1] if trace_loading then -- more detail report_resolvers("! 
checking for '%s' using 'luatexlibs': '%s'",name) end - local resolved = resolvers.find_file(file.basename(name),'luatexlibs') or "" + local resolved = resolvers.findfile(file.basename(name),'luatexlibs') or "" if resolved ~= "" then if trace_locating then report_resolvers("! lib '%s' located by basename via environment: '%s'",name,resolved) diff --git a/tex/context/base/data-met.lua b/tex/context/base/data-met.lua index fb9b4d923..06c810fc4 100644 --- a/tex/context/base/data-met.lua +++ b/tex/context/base/data-met.lua @@ -12,11 +12,14 @@ local trace_locating = false trackers.register("resolvers.locating", functi local report_resolvers = logs.new("resolvers") +local allocate = utilities.storage.allocate + local resolvers = resolvers -resolvers.locators = { notfound = { nil } } -- locate databases -resolvers.hashers = { notfound = { nil } } -- load databases -resolvers.generators = { notfound = { nil } } -- generate databases +resolvers.concatinators = allocate () +resolvers.locators = allocate { notfound = { nil } } -- locate databases +resolvers.hashers = allocate { notfound = { nil } } -- load databases +resolvers.generators = allocate { notfound = { nil } } -- generate databases function resolvers.splitmethod(filename) if not filename then diff --git a/tex/context/base/data-pre.lua b/tex/context/base/data-pre.lua index 94992c102..fdf304b73 100644 --- a/tex/context/base/data-pre.lua +++ b/tex/context/base/data-pre.lua @@ -17,7 +17,7 @@ local prefixes = { } local getenv = resolvers.getenv prefixes.environment = function(str) -- getenv is case insensitive anyway - return resolvers.clean_path(getenv(str) or getenv(upper(str)) or getenv(lower(str)) or "") + return resolvers.cleanpath(getenv(str) or getenv(upper(str)) or getenv(lower(str)) or "") end prefixes.relative = function(str,n) @@ -36,7 +36,7 @@ prefixes.relative = function(str,n) end end end - return resolvers.clean_path(str) + return resolvers.cleanpath(str) end prefixes.auto = function(str) @@ -48,18 +48,18 @@ prefixes.auto = function(str) end prefixes.locate = function(str) - local fullname = resolvers.find_given_file(str) or "" - return resolvers.clean_path((fullname ~= "" and fullname) or str) + local fullname = resolvers.findgivenfile(str) or "" + return resolvers.cleanpath((fullname ~= "" and fullname) or str) end prefixes.filename = function(str) - local fullname = resolvers.find_given_file(str) or "" - return resolvers.clean_path(file.basename((fullname ~= "" and fullname) or str)) + local fullname = resolvers.findgivenfile(str) or "" + return resolvers.cleanpath(file.basename((fullname ~= "" and fullname) or str)) end prefixes.pathname = function(str) - local fullname = resolvers.find_given_file(str) or "" - return resolvers.clean_path(file.dirname((fullname ~= "" and fullname) or str)) + local fullname = resolvers.findgivenfile(str) or "" + return resolvers.cleanpath(file.dirname((fullname ~= "" and fullname) or str)) end prefixes.env = prefixes.environment diff --git a/tex/context/base/data-res.lua b/tex/context/base/data-res.lua index a9582262f..f7dd5cd2e 100644 --- a/tex/context/base/data-res.lua +++ b/tex/context/base/data-res.lua @@ -24,6 +24,7 @@ local lpegmatch, lpegpatterns = lpeg.match, lpeg.patterns local filedirname, filebasename, fileextname, filejoin = file.dirname, file.basename, file.extname, file.join local collapse_path = file.collapse_path +local allocate = utilities.storage.allocate local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) local trace_detail 
= false trackers.register("resolvers.details", function(v) trace_detail = v end) @@ -33,9 +34,9 @@ local report_resolvers = logs.new("resolvers") local resolvers = resolvers -local expanded_path_from_list = resolvers.expanded_path_from_list -local checked_variable = resolvers.checked_variable -local split_configuration_path = resolvers.split_configuration_path +local expandedpathfromlist = resolvers.expandedpathfromlist +local checkedvariable = resolvers.checkedvariable +local splitconfigurationpath = resolvers.splitconfigurationpath local initializesetter = utilities.setters.initialize @@ -44,7 +45,7 @@ local ostype, osname, osenv, ossetenv, osgetenv = os.type, os.name, os.env, os.s resolvers.cacheversion = '1.0.1' resolvers.configbanner = '' resolvers.homedir = environment.homedir -resolvers.criticalvars = { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF", "TEXMF", "TEXOS" } +resolvers.criticalvars = allocate { "SELFAUTOLOC", "SELFAUTODIR", "SELFAUTOPARENT", "TEXMFCNF", "TEXMF", "TEXOS" } resolvers.luacnfspec = '{$SELFAUTODIR,$SELFAUTOPARENT}{,{/share,}/texmf{-local,}/web2c}' -- rubish path resolvers.luacnfname = 'texmfcnf.lua' resolvers.luacnfstate = "unknown" @@ -66,18 +67,20 @@ function resolvers.newinstance() progname = 'context', engine = 'luatex', format = '', - environment = { }, - variables = { }, - expansions = { }, - files = { }, - setups = { }, - order = { }, - found = { }, - foundintrees = { }, - origins = { }, - hashes = { }, - specification = { }, - lists = { }, + environment = allocate(), + variables = allocate(), + expansions = allocate(), + files = allocate(), + setups = allocate(), + order = allocate(), + found = allocate(), + foundintrees = allocate(), + origins = allocate(), + hashes = allocate(), + specification = allocate(), + lists = allocate(), + data = allocate(), -- only for loading + fakepaths = allocate(), remember = true, diskcache = true, renewcache = false, @@ -85,15 +88,13 @@ function resolvers.newinstance() savelists = true, allresults = false, pattern = nil, -- lists - data = { }, -- only for loading force_suffixes = true, - fakepaths = { }, } local ne = newinstance.environment for k, v in next, osenv do - ne[upper(k)] = checked_variable(v) + ne[upper(k)] = checkedvariable(v) end return newinstance @@ -128,13 +129,13 @@ function resolvers.getenv(key) return value else local e = osgetenv(key) - return e ~= nil and e ~= "" and checked_variable(e) or "" + return e ~= nil and e ~= "" and checkedvariable(e) or "" end end resolvers.env = resolvers.getenv -local function expand_vars(lst) -- simple vars +local function expandvars(lst) -- simple vars local variables, getenv = instance.variables, resolvers.getenv local function resolve(a) local va = variables[a] or "" @@ -160,10 +161,10 @@ local function resolve(key) return value end local e = osgetenv(key) - return e ~= nil and e ~= "" and checked_variable(e) or "" + return e ~= nil and e ~= "" and checkedvariable(e) or "" end -local function expanded_var(var) -- simple vars +local function expandedvariable(var) -- simple vars var = gsub(var,"%$([%a%d%_%-]+)",resolve) var = gsub(var,";+",";") var = gsub(var,";[!{}/\\]+;",";") @@ -181,7 +182,7 @@ local function entry(entries,name) result = resolvers.getenv(name) if result then instance.variables[name] = result - resolvers.expand_variables() + resolvers.expandvariables() return instance.expansions[name] or "" end end @@ -198,7 +199,7 @@ local function is_entry(entries,name) end end -function resolvers.report_critical_variables() +local function 
reportcriticalvariables() if trace_locating then for i=1,#resolvers.criticalvars do local v = resolvers.criticalvars[i] @@ -206,7 +207,7 @@ function resolvers.report_critical_variables() end report_resolvers() end - resolvers.report_critical_variables = function() end + reportcriticalvariables = function() end end local function identify_configuration_files() @@ -219,10 +220,10 @@ local function identify_configuration_files() else resolvers.luacnfstate = "environment" end - resolvers.report_critical_variables() - resolvers.expand_variables() - local cnfpaths = expanded_path_from_list(resolvers.split_path(cnfspec)) - expand_vars(cnfpaths) --- hm + reportcriticalvariables() + resolvers.expandvariables() + local cnfpaths = expandedpathfromlist(resolvers.splitpath(cnfspec)) + expandvars(cnfpaths) --- hm local luacnfname = resolvers.luacnfname for i=1,#cnfpaths do local filename = collapse_path(filejoin(cnfpaths[i],luacnfname)) @@ -327,7 +328,7 @@ local function collapse_configuration_data() -- potential optimization: pass sta if ek and ek ~= "" then variables[k], origins[k] = ek, "env" else - local bv = checked_variable(v) + local bv = checkedvariable(v) variables[k], origins[k] = bv, "cnf" end end @@ -348,7 +349,7 @@ function resolvers.locators.tex(specification) if trace_locating then report_resolvers("tex locator '%s' found",specification) end - resolvers.append_hash('file',specification,filename,true) -- cache + resolvers.appendhash('file',specification,filename,true) -- cache elseif trace_locating then report_resolvers("tex locator '%s' not found",specification) end @@ -361,7 +362,7 @@ function resolvers.hashdatabase(tag,name) end local function load_file_databases() - instance.loaderror, instance.files = false, { } + instance.loaderror, instance.files = false, allocate() if not instance.renewcache then local hashes = instance.hashes for k=1,#hashes do @@ -384,12 +385,12 @@ end local function locate_file_databases() -- todo: cache:// and tree:// (runtime) - local texmfpaths = resolvers.expanded_path_list('TEXMF') + local texmfpaths = resolvers.expandedpathlist('TEXMF') for i=1,#texmfpaths do local path = collapse_path(texmfpaths[i]) local stripped = gsub(path,"^!!","") local runtime = stripped == path - path = resolvers.clean_path(path) + path = resolvers.cleanpath(path) if stripped ~= "" then if lfs.isdir(path) then local spec = resolvers.splitmethod(stripped) @@ -462,23 +463,23 @@ local function load_databases() end end -function resolvers.append_hash(type,tag,name,cache) +function resolvers.appendhash(type,tag,name,cache) if trace_locating then report_resolvers("hash '%s' appended",tag) end insert(instance.hashes, { type = type, tag = tag, name = name, cache = cache } ) end -function resolvers.prepend_hash(type,tag,name,cache) +function resolvers.prependhash(type,tag,name,cache) if trace_locating then report_resolvers("hash '%s' prepended",tag) end insert(instance.hashes, 1, { type = type, tag = tag, name = name, cache = cache } ) end -function resolvers.extend_texmf_var(specification) -- crap, we could better prepend the hash --- local t = resolvers.expanded_path_list('TEXMF') -- full expansion - local t = resolvers.split_path(resolvers.getenv('TEXMF')) +function resolvers.extendtexmfvariable(specification) -- crap, we could better prepend the hash +-- local t = resolvers.expandedpathlist('TEXMF') -- full expansion + local t = resolvers.splitpath(resolvers.getenv('TEXMF')) insert(t,1,specification) local newspec = concat(t,";") if instance.environment["TEXMF"] then @@ -488,18 +489,18 
@@ function resolvers.extend_texmf_var(specification) -- crap, we could better prep else -- weird end - resolvers.expand_variables() + resolvers.expandvariables() reset_hashes() end function resolvers.generators.tex(specification,tag) - instance.files[tag or specification] = resolvers.scan_files(specification) + instance.files[tag or specification] = resolvers.scanfiles(specification) end function resolvers.splitexpansions() local ie = instance.expansions for k,v in next, ie do - local t, h, p = { }, { }, split_configuration_path(v) + local t, h, p = { }, { }, splitconfigurationpath(v) for kk=1,#p do local vv = p[kk] if vv ~= "" and not h[vv] then @@ -520,12 +521,12 @@ end -- we used to have 'files' and 'configurations' so therefore the following -- shared function -function resolvers.data_state() +function resolvers.datastate() return caches.contentstate() end -function resolvers.expand_variables() - local expansions, environment, variables = { }, instance.environment, instance.variables +function resolvers.expandvariables() + local expansions, environment, variables = allocate(), instance.environment, instance.variables local getenv = resolvers.getenv instance.expansions = expansions local engine, progname = instance.engine, instance.progname @@ -586,19 +587,19 @@ function resolvers.is_expansion(name) return is_entry(instance.expansions,name) end -function resolvers.unexpanded_path_list(str) +function resolvers.unexpandedpathlist(str) local pth = resolvers.variable(str) - local lst = resolvers.split_path(pth) - return expanded_path_from_list(lst) + local lst = resolvers.splitpath(pth) + return expandedpathfromlist(lst) end -function resolvers.unexpanded_path(str) - return file.join_path(resolvers.unexpanded_path_list(str)) +function resolvers.unexpandedpath(str) + return file.joinpath(resolvers.unexpandedpathlist(str)) end local done = { } -function resolvers.reset_extra_path() +function resolvers.resetextrapath() local ep = instance.extra_paths if not ep then ep, done = { }, { } @@ -608,7 +609,7 @@ end end -function resolvers.register_extra_path(paths,subpaths) +function resolvers.registerextrapath(paths,subpaths) local ep = instance.extra_paths or { } local n = #ep if paths and paths ~= "" then @@ -618,7 +619,7 @@ for s in gmatch(subpaths,"[^,]+") do local ps = p .. "/" .. s if not done[ps] then - ep[#ep+1] = resolvers.clean_path(ps) + ep[#ep+1] = resolvers.cleanpath(ps) done[ps] = true end end @@ -626,7 +627,7 @@ else for p in gmatch(paths,"[^,]+") do if not done[p] then - ep[#ep+1] = resolvers.clean_path(p) + ep[#ep+1] = resolvers.cleanpath(p) done[p] = true end end @@ -637,7 +638,7 @@ for s in gmatch(subpaths,"[^,]+") do local ps = ep[i] .. "/" ..
s if not done[ps] then - ep[#ep+1] = resolvers.clean_path(ps) + ep[#ep+1] = resolvers.cleanpath(ps) done[ps] = true end end @@ -689,54 +690,54 @@ local function made_list(instance,list) end end -function resolvers.clean_path_list(str) - local t = resolvers.expanded_path_list(str) +function resolvers.cleanpathlist(str) + local t = resolvers.expandedpathlist(str) if t then for i=1,#t do - t[i] = collapse_path(resolvers.clean_path(t[i])) + t[i] = collapse_path(resolvers.cleanpath(t[i])) end end return t end -function resolvers.expand_path(str) - return file.join_path(resolvers.expanded_path_list(str)) +function resolvers.expandpath(str) + return file.joinpath(resolvers.expandedpathlist(str)) end -function resolvers.expanded_path_list(str) +function resolvers.expandedpathlist(str) if not str then return ep or { } -- ep ? elseif instance.savelists then -- engine+progname hash str = gsub(str,"%$","") if not instance.lists[str] then -- cached - local lst = made_list(instance,resolvers.split_path(resolvers.expansion(str))) - instance.lists[str] = expanded_path_from_list(lst) + local lst = made_list(instance,resolvers.splitpath(resolvers.expansion(str))) + instance.lists[str] = expandedpathfromlist(lst) end return instance.lists[str] else - local lst = resolvers.split_path(resolvers.expansion(str)) - return made_list(instance,expanded_path_from_list(lst)) + local lst = resolvers.splitpath(resolvers.expansion(str)) + return made_list(instance,expandedpathfromlist(lst)) end end -function resolvers.expanded_path_list_from_var(str) -- brrr - local tmp = resolvers.var_of_format_or_suffix(gsub(str,"%$","")) +function resolvers.expandedpathlistfromvariable(str) -- brrr + local tmp = resolvers.variableofformatorsuffix(gsub(str,"%$","")) if tmp ~= "" then - return resolvers.expanded_path_list(tmp) + return resolvers.expandedpathlist(tmp) else - return resolvers.expanded_path_list(str) + return resolvers.expandedpathlist(str) end end -function resolvers.expand_path_from_var(str) - return file.join_path(resolvers.expanded_path_list_from_var(str)) +function resolvers.expandpathfromvariable(str) + return file.joinpath(resolvers.expandedpathlistfromvariable(str)) end -function resolvers.expand_braces(str) -- output variable and brace expansion of STRING +function resolvers.expandbraces(str) -- output variable and brace expansion of STRING local ori = resolvers.variable(str) - local pth = expanded_path_from_list(resolvers.split_path(ori)) - return file.join_path(pth) + local pth = expandedpathfromlist(resolvers.splitpath(ori)) + return file.joinpath(pth) end resolvers.isreadable = { } @@ -825,7 +826,7 @@ local function collect_files(names) return #filelist > 0 and filelist or nil end -function resolvers.register_in_trees(name) +function resolvers.registerintrees(name) if not find(name,"^%.") then instance.foundintrees[name] = (instance.foundintrees[name] or 0) + 1 -- maybe only one end @@ -856,7 +857,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan if trace_locating then report_resolvers("remembering file '%s'",filename) end - resolvers.register_in_trees(filename) -- for tracing used files + resolvers.registerintrees(filename) -- for tracing used files return instance.found[stamp] end end @@ -873,7 +874,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan if trace_locating then report_resolvers("checking wildcard '%s'", filename) end - result = resolvers.find_wildcard_files(filename) + result = resolvers.findwildcardfiles(filename) elseif 
file.is_qualified_path(filename) then if resolvers.isreadable.file(filename) then if trace_locating then @@ -916,7 +917,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan local savedformat = instance.format local format = savedformat or "" if format == "" then - instance.format = resolvers.format_of_suffix(suffix) + instance.format = resolvers.formatofsuffix(suffix) end if not format then instance.format = "othertextfiles" -- kind of everything, maybe texinput is better @@ -973,12 +974,12 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan if ext == "" or not suffixmap[ext] then local forcedname = filename .. '.tex' wantedfiles[#wantedfiles+1] = forcedname - filetype = resolvers.format_of_suffix(forcedname) + filetype = resolvers.formatofsuffix(forcedname) if trace_locating then report_resolvers("forcing filetype '%s'",filetype) end else - filetype = resolvers.format_of_suffix(filename) + filetype = resolvers.formatofsuffix(filename) if trace_locating then report_resolvers("using suffix based filetype '%s'",filetype) end @@ -997,8 +998,8 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan report_resolvers("using given filetype '%s'",filetype) end end - local typespec = resolvers.variable_of_format(filetype) - local pathlist = resolvers.expanded_path_list(typespec) + local typespec = resolvers.variableofformat(filetype) + local pathlist = resolvers.expandedpathlist(typespec) if not pathlist or #pathlist == 0 then -- no pathlist, access check only / todo == wildcard if trace_detail then @@ -1113,7 +1114,7 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan for k=1,#result do local rk = collapse_path(result[k]) result[k] = rk - resolvers.register_in_trees(rk) -- for tracing used files + resolvers.registerintrees(rk) -- for tracing used files end if instance.remember then instance.found[stamp] = result @@ -1121,12 +1122,10 @@ local function collect_instance_files(filename,collected) -- todo : plugin (scan return result end -if not resolvers.concatinators then resolvers.concatinators = { } end - resolvers.concatinators.tex = filejoin resolvers.concatinators.file = resolvers.concatinators.tex -function resolvers.find_files(filename,filetype,mustexist) +function resolvers.findfiles(filename,filetype,mustexist) if type(mustexist) == boolean then -- all set elseif type(filetype) == 'boolean' then @@ -1146,17 +1145,17 @@ function resolvers.find_files(filename,filetype,mustexist) return result end -function resolvers.find_file(filename,filetype,mustexist) - return (resolvers.find_files(filename,filetype,mustexist)[1] or "") +function resolvers.findfile(filename,filetype,mustexist) + return (resolvers.findfiles(filename,filetype,mustexist)[1] or "") end -function resolvers.find_path(filename,filetype) - local path = resolvers.find_files(filename,filetype)[1] or "" +function resolvers.findpath(filename,filetype) + local path = resolvers.findfiles(filename,filetype)[1] or "" -- todo return current path return file.dirname(path) end -function resolvers.find_given_files(filename) +function resolvers.findgivenfiles(filename) local bname, result = filebasename(filename), { } local hashes = instance.hashes for k=1,#hashes do @@ -1187,8 +1186,8 @@ function resolvers.find_given_files(filename) return result end -function resolvers.find_given_file(filename) - return (resolvers.find_given_files(filename)[1] or "") +function resolvers.findgivenfile(filename) + return 
(resolvers.findgivenfiles(filename)[1] or "") end local function doit(path,blist,bname,tag,kind,result,allresults) @@ -1214,7 +1213,7 @@ local function doit(path,blist,bname,tag,kind,result,allresults) return done end -function resolvers.find_wildcard_files(filename) -- todo: remap: and lpeg +function resolvers.findwildcardfiles(filename) -- todo: remap: and lpeg local result = { } local bname, dname = filebasename(filename), filedirname(filename) local path = gsub(dname,"^*/","") @@ -1257,8 +1256,8 @@ function resolvers.find_wildcard_files(filename) -- todo: remap: and lpeg return result end -function resolvers.find_wildcard_file(filename) - return (resolvers.find_wildcard_files(filename)[1] or "") +function resolvers.findwildcardfile(filename) + return (resolvers.findwildcardfiles(filename)[1] or "") end -- main user functions @@ -1272,7 +1271,7 @@ function resolvers.load(option) identify_configuration_files() load_configuration_files() collapse_configuration_data() - resolvers.expand_variables() + resolvers.expandvariables() if option ~= "nofiles" then load_databases() resolvers.automount() @@ -1282,15 +1281,16 @@ function resolvers.load(option) return files and next(files) and true end -function resolvers.for_files(command, files, filetype, mustexist) +local function report(str) + if trace_locating then + report_resolvers(str) -- has already verbose + else + print(str) + end +end + +function resolvers.dowithfilesandreport(command, files, filetype, mustexist) if files and #files > 0 then - local function report(str) - if trace_locating then - report_resolvers(str) -- has already verbose - else - print(str) - end - end if trace_locating then report('') -- ? end @@ -1308,21 +1308,21 @@ function resolvers.for_files(command, files, filetype, mustexist) end end --- strtab +-- obsolete -resolvers.var_value = resolvers.variable -- output the value of variable $STRING. -resolvers.expand_var = resolvers.expansion -- output variable expansion of STRING. +-- resolvers.varvalue = resolvers.variable -- output the value of variable $STRING. +-- resolvers.expandvar = resolvers.expansion -- output variable expansion of STRING. 
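Aside, not part of the patch: after this rename the concatenated names are the public entry points; a couple of widely used underscored names (find_file, find_files) are kept as aliases and recorded in the resolvers.obsolete table at the end of this file's changes, the rest are simply renamed. Typical call sites would then look as follows; the file name and variable are placeholders only:

local fullname = resolvers.findfile("context.mkiv")       -- was resolvers.find_file
local allfound = resolvers.findfiles("context.mkiv")      -- was resolvers.find_files
local texmfs   = resolvers.expandedpathlist("$TEXMF")     -- was resolvers.expanded_path_list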
-function resolvers.show_path(str) -- output search path for file type NAME - return file.join_path(resolvers.expanded_path_list(resolvers.format_of_var(str))) +function resolvers.showpath(str) -- output search path for file type NAME + return file.joinpath(resolvers.expandedpathlist(resolvers.formatofvariable(str))) end --- resolvers.find_file(filename) --- resolvers.find_file(filename, filetype, mustexist) --- resolvers.find_file(filename, mustexist) --- resolvers.find_file(filename, filetype) +-- resolvers.findfile(filename) +-- resolvers.findfile(filename, filetype, mustexist) +-- resolvers.findfile(filename, mustexist) +-- resolvers.findfile(filename, filetype) -function resolvers.register_file(files, name, path) +function resolvers.registerfile(files, name, path) if files[name] then if type(files[name]) == 'string' then files[name] = { files[name], path } @@ -1334,23 +1334,23 @@ function resolvers.register_file(files, name, path) end end -function resolvers.do_with_path(name,func) - local pathlist = resolvers.expanded_path_list(name) +function resolvers.dowithpath(name,func) + local pathlist = resolvers.expandedpathlist(name) for i=1,#pathlist do - func("^"..resolvers.clean_path(pathlist[i])) + func("^"..resolvers.cleanpath(pathlist[i])) end end -function resolvers.do_with_var(name,func) - func(expanded_var(name)) +function resolvers.dowithvariable(name,func) + func(expandedvariable(name)) end -function resolvers.locate_format(name) +function resolvers.locateformat(name) local barename = gsub(name,"%.%a+$","") local fmtname = caches.getfirstreadablefile(barename..".fmt","formats") or "" if fmtname == "" then - fmtname = resolvers.find_files(barename..".fmt")[1] or "" - fmtname = resolvers.clean_path(fmtname) + fmtname = resolvers.findfiles(barename..".fmt")[1] or "" + fmtname = resolvers.cleanpath(fmtname) end if fmtname ~= "" then local barename = file.removesuffix(fmtname) @@ -1366,7 +1366,7 @@ function resolvers.locate_format(name) return nil, nil end -function resolvers.boolean_variable(str,default) +function resolvers.booleanvariable(str,default) local b = resolvers.expansion(str) if b == "" then return default @@ -1376,7 +1376,7 @@ function resolvers.boolean_variable(str,default) end end -function resolvers.with_files(pattern,handle,before,after) -- can be a nice iterator instead +function resolvers.dowithfilesintree(pattern,handle,before,after) -- can be a nice iterator instead local instance = resolvers.instance local hashes = instance.hashes for i=1,#hashes do @@ -1419,3 +1419,9 @@ function resolvers.with_files(pattern,handle,before,after) -- can be a nice iter end end end + +resolvers.obsolete = resolvers.obsolete or { } +local obsolete = resolvers.obsolete + +resolvers.find_file = resolvers.findfile obsolete.find_file = resolvers.findfile +resolvers.find_files = resolvers.findfiles obsolete.find_files = resolvers.findfiles diff --git a/tex/context/base/data-tex.lua b/tex/context/base/data-tex.lua index d597b26a2..cf66913c1 100644 --- a/tex/context/base/data-tex.lua +++ b/tex/context/base/data-tex.lua @@ -22,7 +22,7 @@ local finders, openers, loaders = resolvers.finders, resolvers.openers, resolver local checkgarbage = utilities.garbagecollector and utilities.garbagecollector.check function finders.generic(tag,filename,filetype) - local foundname = resolvers.find_file(filename,filetype) + local foundname = resolvers.findfile(filename,filetype) if foundname and foundname ~= "" then if trace_locating then report_resolvers("%s finder: file '%s' found",tag,filename) @@ -39,15 
+39,17 @@ end --~ local lpegmatch = lpeg.match --~ local getlines = lpeg.Ct(lpeg.patterns.textline) +resolvers.filters = resolvers.filters or { } + local input_translator, utf_translator, user_translator = nil, nil, nil -function resolvers.install_text_filter(name,func) +function resolvers.filters.install(name,func) if name == "input" then input_translator = func elseif name == "utf" then utf_translator = func elseif name == "user" then user_translator = func end end -function openers.text_opener(filename,file_handle,tag) +function openers.textopener(filename,file_handle,tag) local u = unicode.utftype(file_handle) local t = { } if u > 0 then @@ -161,7 +163,7 @@ function openers.generic(tag,filename) if trace_locating then report_resolvers("%s opener, file '%s' opened",tag,filename) end - return openers.text_opener(filename,f,tag) + return openers.textopener(filename,f,tag) end end if trace_locating then diff --git a/tex/context/base/data-tmp.lua b/tex/context/base/data-tmp.lua index aeca105a0..e4bef66d8 100644 --- a/tex/context/base/data-tmp.lua +++ b/tex/context/base/data-tmp.lua @@ -28,7 +28,7 @@ local mkdirs, isdir = dir.mkdirs, lfs.isdir local trace_locating = false trackers.register("resolvers.locating", function(v) trace_locating = v end) local trace_cache = false trackers.register("resolvers.cache", function(v) trace_cache = v end) -local report_cache = logs.new("cache") +local report_cache = logs.new("cache") local report_resolvers = logs.new("resolvers") local resolvers = resolvers @@ -51,12 +51,12 @@ local writable, readables, usedreadables = nil, { }, { } local function identify() -- Combining the loops makes it messy. First we check the format cache path -- and when the last component is not present we try to create it. - local texmfcaches = resolvers.clean_path_list("TEXMFCACHE") + local texmfcaches = resolvers.cleanpathlist("TEXMFCACHE") if texmfcaches then for k=1,#texmfcaches do local cachepath = texmfcaches[k] if cachepath ~= "" then - cachepath = resolvers.clean_path(cachepath) + cachepath = resolvers.cleanpath(cachepath) cachepath = file.collapse_path(cachepath) local valid = isdir(cachepath) if valid then @@ -90,7 +90,7 @@ local function identify() local cachepath = texmfcaches[k] cachepath = resolvers.getenv(cachepath) if cachepath ~= "" then - cachepath = resolvers.clean_path(cachepath) + cachepath = resolvers.cleanpath(cachepath) local valid = isdir(cachepath) if valid and file.is_readable(cachepath) then if not writable and file.is_writable(cachepath) then @@ -112,7 +112,7 @@ local function identify() os.exit() end -- why here - writable = dir.expandname(resolvers.clean_path(writable)) -- just in case + writable = dir.expandname(resolvers.cleanpath(writable)) -- just in case -- moved here local base, more, tree = caches.base, caches.more, caches.tree or caches.treehash() -- we have only one writable tree if tree then @@ -277,8 +277,8 @@ function caches.savedata(filepath,filename,data,raw) else table.tofile(tmaname, data,'return',false,true,false) -- maybe not the last true end - local cleanup = resolvers.boolean_variable("PURGECACHE", false) - local strip = resolvers.boolean_variable("LUACSTRIP", true) + local cleanup = resolvers.booleanvariable("PURGECACHE", false) + local strip = resolvers.booleanvariable("LUACSTRIP", true) utilities.lua.compile(tmaname, tmcname, cleanup, strip) end @@ -356,5 +356,3 @@ function caches.savecontent(cachename,dataname,content) report_resolvers("unable to save '%s' in '%s' (access error)",dataname,luaname) end end - - diff --git 
a/tex/context/base/data-tre.lua b/tex/context/base/data-tre.lua index cfa8e3c5d..f119e52e7 100644 --- a/tex/context/base/data-tre.lua +++ b/tex/context/base/data-tre.lua @@ -52,7 +52,7 @@ function resolvers.locators.tree(specification) if trace_locating then report_resolvers("tree locator '%s' found (%s)",path,specification) end - resolvers.append_hash('tree',specification,path,false) -- don't cache + resolvers.appendhash('tree',specification,path,false) -- don't cache elseif trace_locating then report_resolvers("tree locator '%s' not found",path) end diff --git a/tex/context/base/data-use.lua b/tex/context/base/data-use.lua index 75f2ebff2..d4e9b53fe 100644 --- a/tex/context/base/data-use.lua +++ b/tex/context/base/data-use.lua @@ -19,7 +19,7 @@ local resolvers = resolvers resolvers.automounted = resolvers.automounted or { } function resolvers.automount(usecache) - local mountpaths = resolvers.clean_path_list(resolvers.expansion('TEXMFMOUNT')) + local mountpaths = resolvers.cleanpathlist(resolvers.expansion('TEXMFMOUNT')) if (not mountpaths or #mountpaths == 0) and usecache then mountpaths = caches.getreadablepaths("mount") end @@ -63,7 +63,7 @@ function statistics.savefmtstatus(texname,formatbanner,sourcefile) -- texname == local luvdata = { enginebanner = enginebanner, formatbanner = formatbanner, - sourcehash = md5.hex(io.loaddata(resolvers.find_file(sourcefile)) or "unknown"), + sourcehash = md5.hex(io.loaddata(resolvers.findfile(sourcefile)) or "unknown"), sourcefile = sourcefile, } io.savedata(luvname,table.serialize(luvdata,true)) @@ -77,7 +77,7 @@ function statistics.checkfmtstatus(texname) if lfs.isfile(luvname) then local luv = dofile(luvname) if luv and luv.sourcefile then - local sourcehash = md5.hex(io.loaddata(resolvers.find_file(luv.sourcefile)) or "unknown") + local sourcehash = md5.hex(io.loaddata(resolvers.findfile(luv.sourcefile)) or "unknown") local luvbanner = luv.enginebanner or "?" 
if luvbanner ~= enginebanner then return format("engine mismatch (luv: %s <> bin: %s)",luvbanner,enginebanner) diff --git a/tex/context/base/data-zip.lua b/tex/context/base/data-zip.lua index 1b261c45e..784ecb6b7 100644 --- a/tex/context/base/data-zip.lua +++ b/tex/context/base/data-zip.lua @@ -47,7 +47,7 @@ function zip.openarchive(name) else local arch = archives[name] if not arch then - local full = resolvers.find_file(name) or "" + local full = resolvers.findfile(name) or "" arch = (full ~= "" and zip.open(full)) or false archives[name] = arch end @@ -141,7 +141,7 @@ function openers.zip(specification) if trace_locating then report_resolvers("zip opener, file '%s' found",q.name) end - return openers.text_opener(specification,dfile,'zip') + return openers.textopener(specification,dfile,'zip') elseif trace_locating then report_resolvers("zip opener, file '%s' not found",q.name) end @@ -205,10 +205,10 @@ function resolvers.usezipfile(zipname) report_resolvers("zip registering, registering archive '%s'",zipname) end statistics.starttiming(instance) - resolvers.prepend_hash('zip',zipname,zipfile) - resolvers.extend_texmf_var(zipname) -- resets hashes too + resolvers.prependhash('zip',zipname,zipfile) + resolvers.extendtexmfvariable(zipname) -- resets hashes too registeredfiles[zipname] = z - instance.files[zipname] = resolvers.register_zip_file(z,tree or "") + instance.files[zipname] = resolvers.registerzipfile(z,tree or "") statistics.stoptiming(instance) elseif trace_locating then report_resolvers("zip registering, unknown archive '%s'",zipname) @@ -218,7 +218,7 @@ function resolvers.usezipfile(zipname) end end -function resolvers.register_zip_file(z,tree) +function resolvers.registerzipfile(z,tree) local files, filter = { }, "" if tree == "" then filter = "^(.+)/(.-)$" @@ -228,7 +228,7 @@ function resolvers.register_zip_file(z,tree) if trace_locating then report_resolvers("zip registering, using filter '%s'",filter) end - local register, n = resolvers.register_file, 0 + local register, n = resolvers.registerfile, 0 for i in z:files() do local path, name = match(i.filename,filter) if path then diff --git a/tex/context/base/font-afm.lua b/tex/context/base/font-afm.lua index 8a9fabed1..f0c440e9c 100644 --- a/tex/context/base/font-afm.lua +++ b/tex/context/base/font-afm.lua @@ -34,10 +34,14 @@ fonts.afm = fonts.afm or { } local afm = fonts.afm local tfm = fonts.tfm -afm.version = 1.402 -- incrementing this number one up will force a re-cache -afm.syncspace = true -- when true, nicer stretch values -afm.enhance_data = true -- best leave this set to true -afm.cache = containers.define("fonts", "afm", afm.version, true) +afm.version = 1.402 -- incrementing this number one up will force a re-cache +afm.syncspace = true -- when true, nicer stretch values +afm.addligatures = true -- best leave this set to true +afm.addtexligatures = true -- best leave this set to true +afm.addkerns = true -- best leave this set to true +afm.cache = containers.define("fonts", "afm", afm.version, true) + +local definers = fonts.definers local afmfeatures = { aux = { }, @@ -260,11 +264,11 @@ by adding ligatures and kern information to the afm derived data. That way we can set them faster when defining a font.
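Aside, not part of the patch: the single afm.enhance_data switch is split above into three independent flags, so each enhancement pass in afm.load can be toggled on its own. A hypothetical override, for illustration only:

fonts.afm.addligatures    = true   -- keep the "add ligatures" pass
fonts.afm.addtexligatures = false  -- skip the "add tex-ligatures" pass
fonts.afm.addkerns        = true   -- keep the "add extra kerns" pass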
--ldx]]-- -local add_kerns, add_ligatures, unify -- we will implement these later +local addkerns, addligatures, unify -- we will implement these later function afm.load(filename) -- hm, for some reasons not resolved yet - filename = resolvers.find_file(filename,'afm') or "" + filename = resolvers.findfile(filename,'afm') or "" if filename ~= "" then local name = file.removesuffix(file.basename(filename)) local data = containers.read(afm.cache,name) @@ -272,9 +276,9 @@ function afm.load(filename) local size, time = attr.size or 0, attr.modification or 0 -- local pfbfile = file.replacesuffix(name,"pfb") - local pfbname = resolvers.find_file(pfbfile,"pfb") or "" + local pfbname = resolvers.findfile(pfbfile,"pfb") or "" if pfbname == "" then - pfbname = resolvers.find_file(file.basename(pfbfile),"pfb") or "" + pfbname = resolvers.findfile(file.basename(pfbfile),"pfb") or "" end local pfbsize, pfbtime = 0, 0 if pfbname ~= "" then @@ -294,16 +298,20 @@ function afm.load(filename) end report_afm( "unifying %s",filename) unify(data,filename) - if afm.enhance_data then + if afm.addligatures then report_afm( "add ligatures") - add_ligatures(data,'ligatures') -- easier this way + addligatures(data,'ligatures') -- easier this way + end + if afm.addtexligatures then report_afm( "add tex-ligatures") - add_ligatures(data,'texligatures') -- easier this way + addligatures(data,'texligatures') -- easier this way + end + if afm.addkerns then report_afm( "add extra kerns") - add_kerns(data) -- faster this way + addkerns(data) -- faster this way end report_afm( "add tounicode data") - fonts.map.add_to_unicode(data,filename) + fonts.map.addtounicode(data,filename) data.size = size data.time = time data.pfbsize = pfbsize @@ -323,7 +331,7 @@ end unify = function(data, filename) local unicodevector = fonts.enc.load('unicode').hash local glyphs, indices, unicodes, names = { }, { }, { }, { } - local verbose, private = fonts.verbose, fonts.private + local verbose, private = fonts.verbose, fonts.privateoffset for name, blob in next, data.characters do local code = unicodevector[name] -- or characters.name_to_unicode[name] if not code then @@ -370,7 +378,7 @@ end and extra kerns. This saves quite some lookups later. --ldx]]-- -add_ligatures = function(afmdata,ligatures) +addligatures = function(afmdata,ligatures) local glyphs, luatex = afmdata.glyphs, afmdata.luatex local indices, unicodes, names = luatex.indices, luatex.unicodes, luatex.names for k,v in next, characters[ligatures] do -- main characters table @@ -398,7 +406,7 @@ end them selectively. --ldx]]-- -add_kerns = function(afmdata) +addkerns = function(afmdata) local glyphs = afmdata.glyphs local names = afmdata.luatex.names local uncomposed = characters.uncomposed @@ -458,7 +466,7 @@ end -- once we have otf sorted out (new format) we can try to make the afm -- cache similar to it (similar tables) -local function add_dimensions(data) -- we need to normalize afm to otf i.e. indexed table instead of name +local function adddimensions(data) -- we need to normalize afm to otf i.e. 
indexed table instead of name if data then for index, glyph in next, data.glyphs do local bb = glyph.boundingbox @@ -496,7 +504,7 @@ local function copytotfm(data) characters[u] = { } descriptions[u] = d end - local filename = fonts.tfm.checked_filename(luatex) -- was metadata.filename + local filename = fonts.tfm.checkedfilename(luatex) -- was metadata.filename local fontname = metadata.fontname or metadata.fullname local fullname = metadata.fullname or metadata.fontname local endash, emdash, spacer, spaceunits = unicodes['space'], unicodes['emdash'], "space", 500 @@ -671,10 +679,10 @@ local function setfeatures(tfmdata) end local function checkfeatures(specification) - local features, done = fonts.define.check(specification.features.normal,afmfeatures.default) + local features, done = definers.check(specification.features.normal,afmfeatures.default) if done then specification.features.normal = features - tfm.hash_instance(specification,true) + tfm.hashinstance(specification,true) end end @@ -697,14 +705,14 @@ local function afmtotfm(specification) return nil else checkfeatures(specification) - specification = fonts.define.resolve(specification) -- new, was forgotten + specification = definers.resolve(specification) -- new, was forgotten local features = specification.features.normal local cache_id = specification.hash local tfmdata = containers.read(tfm.cache, cache_id) -- cache with features applied if not tfmdata then local afmdata = afm.load(afmname) if afmdata and next(afmdata) then - add_dimensions(afmdata) + adddimensions(afmdata) tfmdata = copytotfm(afmdata) if tfmdata and next(tfmdata) then local shared = tfmdata.shared @@ -731,23 +739,23 @@ those cases, but now that we can handleWe hardly gain anything when we cache the final (pre scaled) @@ -79,7 +86,7 @@ and prepares a table that will move along as we proceed.
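For illustration only (not in the patch): the kind of table the analyzer defined below is meant to hand back, assuming the usual "*" specifier has been registered and using a placeholder font name:

local spec = definers.analyze("file:lmroman10-regular*default",655360)
-- spec now roughly holds: lookup = "file", name = "lmroman10-regular",
-- sub = "", method = "*", detail = "default", size = 655360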
-- name name(sub) name(sub)*spec name*spec -- name@spec*oeps -local splitter, specifiers = nil, "" +local splitter, splitspecifiers = nil, "" local P, C, S, Cc = lpeg.P, lpeg.C, lpeg.S, lpeg.Cc @@ -88,13 +95,13 @@ local right = P(")") local colon = P(":") local space = P(" ") -define.defaultlookup = "file" +definers.defaultlookup = "file" local prefixpattern = P(false) -function define.add_specifier(symbol) - specifiers = specifiers .. symbol - local method = S(specifiers) +local function addspecifier(symbol) + splitspecifiers = splitspecifiers .. symbol + local method = S(splitspecifiers) local lookup = C(prefixpattern) * colon local sub = left * C(P(1-left-right-method)^1) * right local specification = C(method) * C(P(1)^1) @@ -102,24 +109,28 @@ function define.add_specifier(symbol) splitter = P((lookup + Cc("")) * name * (sub + Cc("")) * (specification + Cc(""))) end -function define.add_lookup(str,default) +local function addlookup(str,default) prefixpattern = prefixpattern + P(str) end -define.add_lookup("file") -define.add_lookup("name") -define.add_lookup("spec") +definers.addlookup = addlookup + +addlookup("file") +addlookup("name") +addlookup("spec") -function define.get_specification(str) +local function getspecification(str) return lpegmatch(splitter,str) end -function define.register_split(symbol,action) - define.add_specifier(symbol) - define.specify[symbol] = action +definers.getspecification = getspecification + +function definers.registersplit(symbol,action) + addspecifier(symbol) + variants[symbol] = action end -function define.makespecification(specification, lookup, name, sub, method, detail, size) +function definers.makespecification(specification, lookup, name, sub, method, detail, size) size = size or 655360 if trace_defining then report_define("%s -> lookup: %s, name: %s, sub: %s, method: %s, detail: %s", @@ -127,7 +138,7 @@ function define.makespecification(specification, lookup, name, sub, method, deta (sub ~= "" and sub) or "-", (method ~= "" and method) or "-", (detail ~= "" and detail) or "-") end if not lookup or lookup == "" then - lookup = define.defaultlookup + lookup = definers.defaultlookup end local t = { lookup = lookup, -- forced type @@ -144,10 +155,10 @@ function define.makespecification(specification, lookup, name, sub, method, deta return t end -function define.analyze(specification, size) +function definers.analyze(specification, size) -- can be optimized with locals - local lookup, name, sub, method, detail = define.get_specification(specification or "") - return define.makespecification(specification, lookup, name, sub, method, detail, size) + local lookup, name, sub, method, detail = getspecification(specification or "") + return definers.makespecification(specification, lookup, name, sub, method, detail, size) end --[[ldx-- @@ -156,7 +167,7 @@ end local sortedhashkeys = table.sortedhashkeys -function tfm.hash_features(specification) +function tfm.hashfeatures(specification) local features = specification.features if features then local t = { } @@ -188,7 +199,7 @@ function tfm.hash_features(specification) return "unknown" end -fonts.designsizes = { } +fonts.designsizes = allocate() --[[ldx--In principle we can share tfm tables when we are in node for a font, but then
@@ -198,10 +209,10 @@ when we get rid of base mode we can optimize even further by sharing, but then w
lose our testcases for
We can resolve the filename using the next function:
--ldx]]-- -define.resolvers = define.resolvers or { } -local resolvers = define.resolvers +definers.resolvers = definers.resolvers or { } +local resolvers = definers.resolvers -- todo: reporter @@ -274,7 +285,7 @@ function resolvers.spec(specification) end end -function define.resolve(specification) +function definers.resolve(specification) if not specification.resolved or specification.resolved == "" then -- resolved itself not per se in mapping hash local r = resolvers[specification.lookup] if r then @@ -295,7 +306,7 @@ function define.resolve(specification) end end -- - specification.hash = lower(specification.name .. ' @ ' .. tfm.hash_features(specification)) + specification.hash = lower(specification.name .. ' @ ' .. tfm.hashfeatures(specification)) if specification.sub and specification.sub ~= "" then specification.hash = specification.sub .. ' @ ' .. specification.hash end @@ -319,7 +330,7 @@ specification yet. --ldx]]-- function tfm.read(specification) - local hash = tfm.hash_instance(specification) + local hash = tfm.hashinstance(specification) local tfmtable = tfm.fonts[hash] -- hashes by size ! if not tfmtable then local forced = specification.forced or "" @@ -367,22 +378,22 @@ endFor virtual fonts we need a slightly different approach:
--ldx]]-- -function tfm.read_and_define(name,size) -- no id - local specification = define.analyze(name,size) +function tfm.readanddefine(name,size) -- no id + local specification = definers.analyze(name,size) local method = specification.method - if method and define.specify[method] then - specification = define.specify[method](specification) + if method and variants[method] then + specification = variants[method](specification) end - specification = define.resolve(specification) - local hash = tfm.hash_instance(specification) - local id = define.registered(hash) + specification = definers.resolve(specification) + local hash = tfm.hashinstance(specification) + local id = definers.registered(hash) if not id then local fontdata = tfm.read(specification) if fontdata then fontdata.hash = hash id = font.define(fontdata) - define.register(fontdata,id) - tfm.cleanup_table(fontdata) + definers.register(fontdata,id) + tfm.cleanuptable(fontdata) else id = 0 -- signal end @@ -402,6 +413,9 @@ local function check_tfm(specification,fullname) if foundname == "" then foundname = findbinfile(fullname, 'ofm') or "" -- bonus for usage outside context end + if foundname == "" then + foundname = fonts.names.getfilename(fullname,"tfm") + end if foundname ~= "" then specification.filename, specification.format = foundname, "ofm" return tfm.read_from_tfm(specification) @@ -410,13 +424,15 @@ end local function check_afm(specification,fullname) local foundname = findbinfile(fullname, 'afm') or "" -- just to be sure - if foundname == "" and tfm.auto_afm then + if foundname == "" then + foundname = fonts.names.getfilename(fullname,"afm") + end + if foundname == "" and tfm.autoprefixedafm then local encoding, shortname = match(fullname,"^(.-)%-(.*)$") -- context: encoding-name.* if encoding and shortname and fonts.enc.known[encoding] then shortname = findbinfile(shortname,'afm') or "" -- just to be sure if shortname ~= "" then foundname = shortname - -- tfm.set_normal_feature(specification,'encoding',encoding) -- will go away if trace_loading then report_afm("stripping encoding prefix from filename %s",afmname) end @@ -453,7 +469,7 @@ function readers.afm(specification,method) tfmtable = check_afm(specification,specification.name .. "." .. 
forced) end if not tfmtable then - method = method or define.method or "afm or tfm" + method = method or definers.method or "afm or tfm" if method == "tfm" then tfmtable = check_tfm(specification,specification.name) elseif method == "afm" then @@ -478,21 +494,26 @@ local function check_otf(forced,specification,suffix,what) name = file.addsuffix(name,suffix,true) end local fullname, tfmtable = findbinfile(name,suffix) or "", nil -- one shot + -- if false then -- can be enabled again when needed + -- if fullname == "" then + -- local fb = fonts.names.old_to_new[name] + -- if fb then + -- fullname = findbinfile(fb,suffix) or "" + -- end + -- end + -- if fullname == "" then + -- local fb = fonts.names.new_to_old[name] + -- if fb then + -- fullname = findbinfile(fb,suffix) or "" + -- end + -- end + -- end if fullname == "" then - local fb = fonts.names.old_to_new[name] - if fb then - fullname = findbinfile(fb,suffix) or "" - end - end - if fullname == "" then - local fb = fonts.names.new_to_old[name] - if fb then - fullname = findbinfile(fb,suffix) or "" - end + fullname = fonts.names.getfilename(name,suffix) end if fullname ~= "" then specification.filename, specification.format = fullname, what -- hm, so we do set the filename, then - tfmtable = tfm.read_from_open_type(specification) -- we need to do it for all matches / todo + tfmtable = tfm.read_from_otf(specification) -- we need to do it for all matches / todo end return tfmtable end @@ -518,7 +539,7 @@ function readers.dfont(specification) return readers.opentype(specification,"ttf a helper function. --ldx]]-- -function define.check(features,defaults) -- nb adapts features ! +function definers.check(features,defaults) -- nb adapts features ! local done = false if features and next(features) then for k,v in next, defaults do @@ -533,7 +554,7 @@ function define.check(features,defaults) -- nb adapts features ! end --[[ldx-- -So far the specifyers. Now comes the real definer. Here we cache +
So far the specifiers. Now comes the real definer. Here we cache based on id's. Here we also intercept the virtual font handler. Since it evolved stepwise I may rewrite this bit (combine code).
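Sketch only, not part of the patch: the caching contract implemented by definers.register and definers.registered below is that a hashed specification maps to a font id, so defining the same instance twice reuses the first id instead of rebuilding the font. The helper name here is hypothetical:

local function definedonce(specification) -- illustration only
    local hash = tfm.hashinstance(specification)
    local id = definers.registered(hash)
    if not id then
        local fontdata = tfm.read(specification)
        if fontdata then
            fontdata.hash = hash
            id = font.define(fontdata)        -- allocate a real font id at the tex end
            definers.register(fontdata,id)    -- remember hash -> id for the next call
        else
            id = 0                            -- signal failure, as tfm.readanddefine does
        end
    end
    return id
end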
@@ -544,9 +565,13 @@ not gain much. By the way, passing id's back to in the callback was introduced later in the development. --ldx]]-- -define.last = nil +local lastdefined = nil -- we don't want this one to end up in s-tra-02 + +function definers.current() -- or maybe current + return lastdefined +end -function define.register(fontdata,id) +function definers.register(fontdata,id) if fontdata and id then local hash = fontdata.hash if not tfm.internalized[hash] then @@ -562,7 +587,7 @@ function define.register(fontdata,id) end end -function define.registered(hash) +function definers.registered(hash) local id = tfm.internalized[hash] return id, id and fonts.ids[id] end @@ -577,7 +602,7 @@ function tfm.make(specification) -- however, when virtual tricks are used as feature (makes more -- sense) we scale the commands in fonts.tfm.scale (and set the -- factor there) - local fvm = define.methods[specification.features.vtf.preset] + local fvm = definers.methods.variants[specification.features.vtf.preset] if fvm then return fvm(specification) else @@ -585,28 +610,28 @@ function tfm.make(specification) end end -function define.read(specification,size,id) -- id can be optional, name can already be table +function definers.read(specification,size,id) -- id can be optional, name can already be table statistics.starttiming(fonts) if type(specification) == "string" then - specification = define.analyze(specification,size) + specification = definers.analyze(specification,size) end local method = specification.method - if method and define.specify[method] then - specification = define.specify[method](specification) + if method and variants[method] then + specification = variants[method](specification) end - specification = define.resolve(specification) - local hash = tfm.hash_instance(specification) + specification = definers.resolve(specification) + local hash = tfm.hashinstance(specification) if cache_them then local fontdata = containers.read(fonts.cache,hash) -- for tracing purposes end - local fontdata = define.registered(hash) -- id + local fontdata = definers.registered(hash) -- id if not fontdata then if specification.features.vtf and specification.features.vtf.preset then fontdata = tfm.make(specification) else fontdata = tfm.read(specification) if fontdata then - tfm.check_virtual_id(fontdata) + tfm.checkvirtualid(fontdata) end end if cache_them then @@ -616,11 +641,11 @@ function define.read(specification,size,id) -- id can be optional, name can alre fontdata.hash = hash fontdata.cache = "no" if id then - define.register(fontdata,id) + definers.register(fontdata,id) end end end - define.last = fontdata or id -- todo ! ! ! ! ! + lastdefined = fontdata or id -- todo ! ! ! ! ! if not fontdata then report_define( "unknown font %s, loading aborted",specification.name) elseif trace_defining and type(fontdata) == "table" then @@ -640,7 +665,7 @@ end function vf.find(name) name = file.removesuffix(file.basename(name)) - if tfm.resolve_vf then + if tfm.resolvevirtualtoo then local format = fonts.logger.format(name) if format == 'tfm' or format == 'ofm' then if trace_defining then @@ -665,5 +690,5 @@ endWe overload both the
This is very experimental code!
--ldx]]-- @@ -18,7 +20,7 @@ local fonts = fonts local vf = fonts.vf local tfm = fonts.tfm -fonts.fallbacks = fonts.fallbacks or { } +fonts.fallbacks = allocate() local fallbacks = fonts.fallbacks local commands = vf.aux.combine.commands @@ -327,7 +329,7 @@ commands["disable-force"] = function(g,v) force_fallback = false end -local install = fonts.define.methods.install +local install = fonts.definers.methods.install install("fallback", { -- todo: auto-fallback with loop over data.characters { "fake-character", 0x00A2, 'textcent' }, diff --git a/tex/context/base/font-gds.lua b/tex/context/base/font-gds.lua index 80dc0ca55..ce1184292 100644 --- a/tex/context/base/font-gds.lua +++ b/tex/context/base/font-gds.lua @@ -13,6 +13,8 @@ local trace_goodies = false trackers.register("fonts.goodies", function(v) trac local report_fonts = logs.new("fonts") +local allocate = utilities.storage.allocate + -- goodies=name,colorscheme=,featureset= -- -- goodies=auto @@ -23,10 +25,10 @@ local node = node fonts.goodies = fonts.goodies or { } local fontgoodies = fonts.goodies -fontgoodies.data = fontgoodies.data or { } +fontgoodies.data = allocate() -- fontgoodies.data or { } local data = fontgoodies.data -fontgoodies.list = fontgoodies.list or { } +fontgoodies.list = fontgoodies.list or { } -- no allocate as we want to see what is there local list = fontgoodies.list function fontgoodies.report(what,trace,goodies) @@ -43,9 +45,9 @@ local function getgoodies(filename) -- maybe a merge is better if goodies ~= nil then -- found or tagged unfound elseif type(filename) == "string" then - local fullname = resolvers.find_file(file.addsuffix(filename,"lfg")) or "" -- prefered suffix + local fullname = resolvers.findfile(file.addsuffix(filename,"lfg")) or "" -- prefered suffix if fullname == "" then - fullname = resolvers.find_file(file.addsuffix(filename,"lua")) or "" -- fallback suffix + fullname = resolvers.findfile(file.addsuffix(filename,"lua")) or "" -- fallback suffix end if fullname == "" then report_fonts("goodie file '%s.lfg' is not found",filename) @@ -77,7 +79,7 @@ fontgoodies.get = getgoodies -- register goodies file -local preset_context = fonts.define.specify.preset_context +local presetcontext = fonts.definers.specifiers.presetcontext local function setgoodies(tfmdata,value) local goodies = tfmdata.goodies or { } -- future versions might store goodies in the cached instance @@ -124,7 +126,7 @@ function fontgoodies.prepare_features(goodies,name,set) if set then local ff = flattenedfeatures(set) local fullname = goodies.name .. "::" .. 
name - local n, s = preset_context(fullname,"",ff) + local n, s = presetcontext(fullname,"",ff) goodies.featuresets[name] = s -- set if trace_goodies then report_fonts("feature set '%s' gets number %s and name '%s'",name,n,fullname) @@ -209,7 +211,7 @@ local function set_colorscheme(tfmdata,scheme) end local fontdata = fonts.ids -local fcs = fonts.color.set +local fcs = fonts.colors.set local has_attribute = node.has_attribute local traverse_id = node.traverse_id local a_colorscheme = attributes.private('colorscheme') diff --git a/tex/context/base/font-ini.lua b/tex/context/base/font-ini.lua index 210edbbe8..e068dae55 100644 --- a/tex/context/base/font-ini.lua +++ b/tex/context/base/font-ini.lua @@ -14,6 +14,7 @@ local utf = unicode.utf8 local format, serialize = string.format, table.serialize local write_nl = texio.write_nl local lower = string.lower +local allocate, mark = utilities.storage.allocate, utilities.storage.mark local report_define = logs.new("define fonts") @@ -26,9 +27,11 @@ fonts = fonts or { } -- we will also have des and fam hashes -fonts.ids = fonts.ids or { } fonts.identifiers = fonts.ids -- aka fontdata -fonts.chr = fonts.chr or { } fonts.characters = fonts.chr -- aka chardata -fonts.qua = fonts.qua or { } fonts.quads = fonts.qua -- aka quaddata +-- beware, soem alreadyu defined + +fonts.ids = mark(fonts.ids or { }) fonts.identifiers = fonts.ids -- aka fontdata +fonts.chr = mark(fonts.chr or { }) fonts.characters = fonts.chr -- aka chardata +fonts.qua = mark(fonts.qua or { }) fonts.quads = fonts.qua -- aka quaddata fonts.tfm = fonts.tfm or { } fonts.vf = fonts.vf or { } @@ -36,7 +39,7 @@ fonts.afm = fonts.afm or { } fonts.pfb = fonts.pfb or { } fonts.otf = fonts.otf or { } -fonts.private = 0xF0000 -- 0x10FFFF +fonts.privateoffset = 0xF0000 -- 0x10FFFF fonts.verbose = false -- more verbose cache tables fonts.ids[0] = { -- nullfont @@ -70,15 +73,15 @@ fonts.processors = fonts.processors or { fonts.manipulators = fonts.manipulators or { } -fonts.define = fonts.define or { } -fonts.define.specify = fonts.define.specify or { } -fonts.define.specify.synonyms = fonts.define.specify.synonyms or { } +fonts.definers = fonts.definers or { } +fonts.definers.specifiers = fonts.definers.specifiers or { } +fonts.definers.specifiers.synonyms = fonts.definers.specifiers.synonyms or { } -- tracing if not fonts.color then - fonts.color = { + fonts.color = allocate { set = function() end, reset = function() end, } @@ -87,7 +90,7 @@ end -- format identification -fonts.formats = { } +fonts.formats = allocate() function fonts.fontformat(filename,default) local extname = lower(file.extname(filename)) diff --git a/tex/context/base/font-ini.mkiv b/tex/context/base/font-ini.mkiv index 4b813d7f6..2987bd36d 100644 --- a/tex/context/base/font-ini.mkiv +++ b/tex/context/base/font-ini.mkiv @@ -59,6 +59,7 @@ \registerctxluafile{font-clr}{1.001} \registerctxluafile{node-fnt}{1.001} % here \registerctxluafile{font-enc}{1.001} +\registerctxluafile{font-agl}{1.001} % uses enc, needed in map \registerctxluafile{font-map}{1.001} \registerctxluafile{font-syn}{1.001} \registerctxluafile{font-log}{1.001} @@ -72,7 +73,7 @@ \registerctxluafile{font-oti}{1.001} % otf initialization \registerctxluafile{font-otb}{1.001} % otf main base \registerctxluafile{font-otn}{1.001} % otf main node -\registerctxluafile{font-ota}{1.001} % otf analyzers +\registerctxluafile{font-ota}{1.001} % otf analyzers (needs dynamics) \registerctxluafile{font-otp}{1.001} % otf pack \registerctxluafile{font-otc}{1.001} % otf context 
\registerctxluafile{font-oth}{1.001} % otf helpers @@ -85,7 +86,6 @@ \registerctxluafile{font-ext}{1.001} \registerctxluafile{font-pat}{1.001} \registerctxluafile{font-chk}{1.001} -\registerctxluafile{font-agl}{1.001} \unprotect @@ -699,7 +699,7 @@ \def\lowleveldefinefont#1#2% #2 = cs {% - \ctxlua{fonts.define.command_1("\luaescapestring{#1}")}% the escapestring catches at \somedimen + \ctxlua{fonts.definers.stage_one("\luaescapestring{#1}")}% the escapestring catches at \somedimen % sets \scaledfontmode and \somefontname and \somefontsize \ifcase\scaledfontmode\relax % none, avoid the designsize if possible @@ -730,7 +730,7 @@ \fi \updatefontparameters \updatefontclassparameters - \ctxlua{fonts.define.command_2( + \ctxlua{fonts.definers.stage_two( \ifx\fontclass\empty false\else true\fi, "#2", % cs, trailing % is gone "\somefontfile", @@ -2713,7 +2713,7 @@ \def\dodefinefontfeature[#1][#2][#3]% {\global\expandafter\chardef\csname\??fq=#1\endcsname % beware () needed as we get two values returned - \ctxlua{tex.write((fonts.define.specify.preset_context("#1","#2","#3")))}\relax} + \ctxlua{tex.write((fonts.definers.specifiers.presetcontext("#1","#2","#3")))}\relax} \definefontfeature [default] @@ -2798,7 +2798,7 @@ {\dodoubleargument\dofontfeatureslist} \def\dofontfeatureslist[#1][#2]% todo: arg voor type - {\ctxlua{tex.sprint(tex.ctxcatcodes,fonts.define.specify.context_tostring("#1","otf","\luaescapestring{#2}","yes","no",true,{"number"}))}} + {\ctxlua{tex.sprint(tex.ctxcatcodes,fonts.definers.specifiers.contexttostring("#1","otf","\luaescapestring{#2}","yes","no",true,{"number"}))}} \attribute\zerocount\zerocount % first in list, so fast match @@ -2819,7 +2819,7 @@ % % \typebuffer \getbuffer -\def\featureattribute#1{\ctxlua{tex.sprint(fonts.define.specify.context_number("#1"))}} +\def\featureattribute#1{\ctxlua{tex.sprint(fonts.definers.specifiers.contextnumber("#1"))}} \def\setfontfeature #1{\edef\currentfeature{#1}\attribute\zerocount\featureattribute{#1}\relax} \def\resetfontfeature#1{\let\currentfeature\empty\attribute\zerocount\zerocount} % initial value diff --git a/tex/context/base/font-map.lua b/tex/context/base/font-map.lua index 6230ee326..5fa4170d7 100644 --- a/tex/context/base/font-map.lua +++ b/tex/context/base/font-map.lua @@ -27,9 +27,9 @@ of obsolete. Some code may move to runtime or auxiliary modules. 
local fonts = fonts fonts.map = fonts.map or { } -local function load_lum_table(filename) -- will move to font goodies +local function loadlumtable(filename) -- will move to font goodies local lumname = file.replacesuffix(file.basename(filename),"lum") - local lumfile = resolvers.find_file(lumname,"map") or "" + local lumfile = resolvers.findfile(lumname,"map") or "" if lumfile ~= "" and lfs.isfile(lumfile) then if trace_loading or trace_unimapping then report_otf("enhance: loading %s ",lumfile) @@ -54,7 +54,7 @@ local parser = unicode + ucode + index local parsers = { } -local function make_name_parser(str) +local function makenameparser(str) if not str or str == "" then return parser else @@ -67,8 +67,8 @@ local function make_name_parser(str) end end ---~ local parser = fonts.map.make_name_parser("Japan1") ---~ local parser = fonts.map.make_name_parser() +--~ local parser = fonts.map.makenameparser("Japan1") +--~ local parser = fonts.map.makenameparser() --~ local function test(str) --~ local b, a = lpegmatch(parser,str) --~ print((a and table.serialize(b)) or b) @@ -122,8 +122,8 @@ end --~ return s --~ end -fonts.map.load_lum_table = load_lum_table -fonts.map.make_name_parser = make_name_parser +fonts.map.loadlumtable = loadlumtable +fonts.map.makenameparser = makenameparser fonts.map.tounicode16 = tounicode16 fonts.map.tounicode16sequence = tounicode16sequence @@ -137,7 +137,7 @@ local ligsplitter = Ct(other * (separator * other)^0) --~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more"))) --~ print(table.serialize(lpegmatch(ligsplitter,"such_so_more.that"))) -fonts.map.add_to_unicode = function(data,filename) +fonts.map.addtounicode = function(data,filename) local unicodes = data.luatex and data.luatex.unicodes if not unicodes then return @@ -148,11 +148,11 @@ fonts.map.add_to_unicode = function(data,filename) unicodes['zwj'] = unicodes['zwj'] or 0x200D unicodes['zwnj'] = unicodes['zwnj'] or 0x200C -- the tounicode mapping is sparse and only needed for alternatives - local tounicode, originals, ns, nl, private, unknown = { }, { }, 0, 0, fonts.private, format("%04X",utfbyte("?")) + local tounicode, originals, ns, nl, private, unknown = { }, { }, 0, 0, fonts.privateoffset, format("%04X",utfbyte("?")) data.luatex.tounicode, data.luatex.originals = tounicode, originals local lumunic, uparser, oparser if false then -- will become an option - lumunic = load_lum_table(filename) + lumunic = loadlumtable(filename) lumunic = lumunic and lumunic.tounicode end local cidinfo, cidnames, cidcodes = data.cidinfo @@ -160,12 +160,12 @@ fonts.map.add_to_unicode = function(data,filename) usedmap = usedmap and lower(usedmap) usedmap = usedmap and fonts.cid.map[usedmap] if usedmap then - oparser = usedmap and make_name_parser(cidinfo.ordering) + oparser = usedmap and makenameparser(cidinfo.ordering) cidnames = usedmap.names cidcodes = usedmap.unicodes end - uparser = make_name_parser() - local aglmap = fonts.map and fonts.map.agl_to_unicode + uparser = makenameparser() + local aglmap = fonts.enc.agl and fonts.enc.unicodes -- to name for index, glyph in next, data.glyphs do local name, unic = glyph.name, glyph.unicode or -1 -- play safe if unic == -1 or unic >= private or (unic >= 0xE000 and unic <= 0xF8FF) or unic == 0xFFFE or unic == 0xFFFF then diff --git a/tex/context/base/font-mis.lua b/tex/context/base/font-mis.lua index 7a2653856..645278837 100644 --- a/tex/context/base/font-mis.lua +++ b/tex/context/base/font-mis.lua @@ -38,7 +38,7 @@ local featuregroups = { "gsub", "gpos" } function 
fonts.get_features(name,t,script,language) local t = lower(t or (name and file.extname(name)) or "") if t == "otf" or t == "ttf" or t == "ttc" or t == "dfont" then - local filename = resolvers.find_file(name,t) or "" + local filename = resolvers.findfile(name,t) or "" if filename ~= "" then local data = fonts.otf.loadcached(filename) if data and data.luatex and data.luatex.features then diff --git a/tex/context/base/font-ota.lua b/tex/context/base/font-ota.lua index 0ec7bac73..73e2c578b 100644 --- a/tex/context/base/font-ota.lua +++ b/tex/context/base/font-ota.lua @@ -43,23 +43,18 @@ local traverse_node_list = node.traverse local fontdata = fonts.ids local state = attributes.private('state') -local fcs = (fonts.color and fonts.color.set) or function() end -local fcr = (fonts.color and fonts.color.reset) or function() end - -local a_to_script = otf.a_to_script -local a_to_language = otf.a_to_language +local fontcolors = fonts.colors +local fcs = (fontscolors and fontscolors.set) or function() end +local fcr = (fontscolors and fontscolors.reset) or function() end -- in the future we will use language/script attributes instead of the -- font related value, but then we also need dynamic features which is -- somewhat slower; and .. we need a chain of them +local scriptandlanguage = otf.scriptandlanguage + function fonts.initializers.node.otf.analyze(tfmdata,value,attr) - local script, language - if attr and attr > 0 then - script, language = a_to_script[attr], a_to_language[attr] - else - script, language = tfmdata.script, tfmdata.language - end + local script, language = otf.scriptandlanguage(tfmdata,attr) local action = initializers[script] if action then if type(action) == "function" then @@ -76,12 +71,7 @@ end function fonts.methods.node.otf.analyze(head,font,attr) local tfmdata = fontdata[font] - local script, language - if attr and attr > 0 then - script, language = a_to_script[attr], a_to_language[attr] - else - script, language = tfmdata.script, tfmdata.language - end + local script, language = otf.scriptandlanguage(tfmdata,attr) local action = methods[script] if action then if type(action) == "function" then diff --git a/tex/context/base/font-otb.lua b/tex/context/base/font-otb.lua index ea46ebdbc..e4d694ea9 100644 --- a/tex/context/base/font-otb.lua +++ b/tex/context/base/font-otb.lua @@ -154,7 +154,7 @@ local splitter = lpeg.splitat(" ") local function prepare_base_substitutions(tfmdata,kind,value) -- we can share some code with the node features if value then local otfdata = tfmdata.shared.otfdata - local validlookups, lookuplist = otf.collect_lookups(otfdata,kind,tfmdata.script,tfmdata.language) + local validlookups, lookuplist = otf.collectlookups(otfdata,kind,tfmdata.script,tfmdata.language) if validlookups then local ligatures = { } local unicodes = tfmdata.unicodes -- names to unicodes @@ -266,7 +266,7 @@ end local function preparebasekerns(tfmdata,kind,value) -- todo what kind of kerns, currently all if value then local otfdata = tfmdata.shared.otfdata - local validlookups, lookuplist = otf.collect_lookups(otfdata,kind,tfmdata.script,tfmdata.language) + local validlookups, lookuplist = otf.collectlookups(otfdata,kind,tfmdata.script,tfmdata.language) if validlookups then local unicodes = tfmdata.unicodes -- names to unicodes local indices = tfmdata.indices diff --git a/tex/context/base/font-otd.lua b/tex/context/base/font-otd.lua index f23ef8eb4..910725a9f 100644 --- a/tex/context/base/font-otd.lua +++ b/tex/context/base/font-otd.lua @@ -10,23 +10,24 @@ local trace_dynamics = 
false trackers.register("otf.dynamics", function(v) trac local report_otf = logs.new("load otf") -local fonts = fonts -local otf = fonts.otf -local fontdata = fonts.ids +local fonts = fonts +local otf = fonts.otf +local fontdata = fonts.ids otf.features = otf.features or { } otf.features.default = otf.features.default or { } -local context_setups = fonts.define.specify.context_setups -local context_numbers = fonts.define.specify.context_numbers +local definers = fonts.definers +local contextsetups = definers.specifiers.contextsetups +local contextnumbers = definers.specifiers.contextnumbers -- todo: dynamics namespace -local a_to_script = { } otf.a_to_script = a_to_script -local a_to_language = { } otf.a_to_language = a_to_language +local a_to_script = { } +local a_to_language = { } function otf.setdynamics(font,dynamics,attribute) - local features = context_setups[context_numbers[attribute]] -- can be moved to caller + local features = contextsetups[contextnumbers[attribute]] -- can be moved to caller if features then local script = features.script or 'dflt' local language = features.language or 'dflt' @@ -43,7 +44,7 @@ function otf.setdynamics(font,dynamics,attribute) local dsla = dsl[attribute] if dsla then -- if trace_dynamics then - -- report_otf("using dynamics %s: attribute %s, script %s, language %s",context_numbers[attribute],attribute,script,language) + -- report_otf("using dynamics %s: attribute %s, script %s, language %s",contextnumbers[attribute],attribute,script,language) -- end return dsla else @@ -63,10 +64,10 @@ function otf.setdynamics(font,dynamics,attribute) tfmdata.script = script tfmdata.shared.features = { } -- end of save - local set = fonts.define.check(features,otf.features.default) + local set = definers.check(features,otf.features.default) dsla = otf.setfeatures(tfmdata,set) if trace_dynamics then - report_otf("setting dynamics %s: attribute %s, script %s, language %s, set: %s",context_numbers[attribute],attribute,script,language,table.sequenced(set)) + report_otf("setting dynamics %s: attribute %s, script %s, language %s, set: %s",contextnumbers[attribute],attribute,script,language,table.sequenced(set)) end -- we need to restore some values tfmdata.script = saved.script @@ -80,3 +81,11 @@ function otf.setdynamics(font,dynamics,attribute) end return nil -- { } end + +function otf.scriptandlanguage(tfmdata,attr) + if attr and attr > 0 then + return a_to_script[attr] or tfmdata.script, a_to_language[attr] or tfmdata.language + else + return tfmdata.script, tfmdata.language + end +end diff --git a/tex/context/base/font-otf.lua b/tex/context/base/font-otf.lua index cda1cfdd2..1da295eb0 100644 --- a/tex/context/base/font-otf.lua +++ b/tex/context/base/font-otf.lua @@ -15,6 +15,8 @@ local abs = math.abs local getn = table.getn local lpegmatch = lpeg.match +local allocate = utilities.storage.allocate + local trace_private = false trackers.register("otf.private", function(v) trace_private = v end) local trace_loading = false trackers.register("otf.loading", function(v) trace_loading = v end) local trace_features = false trackers.register("otf.features", function(v) trace_features = v end) @@ -79,9 +81,11 @@ otf.features = otf.features or { } otf.features.list = otf.features.list or { } otf.features.default = otf.features.default or { } -otf.enhancers = otf.enhancers or { } +otf.enhancers = allocate() local enhancers = otf.enhancers +local definers = fonts.definers + otf.glists = { "gsub", "gpos" } otf.version = 2.653 -- beware: also sync font-mis.lua @@ -89,7 +93,6 @@ 
otf.pack = true -- beware: also sync font-mis.lua otf.syncspace = true otf.notdef = false otf.cache = containers.define("fonts", "otf", otf.version, true) -otf.cleanup_aat = false -- only context local wildcard = "*" local default = "dflt" @@ -100,7 +103,7 @@ local default = "dflt" -- we can have more local functions -otf.tables.global_fields = table.tohash { +otf.tables.global_fields = allocate( table.tohash { "lookups", "glyphs", "subfonts", @@ -116,9 +119,9 @@ otf.tables.global_fields = table.tohash { "kern_classes", "gpos", "gsub" -} +} ) -otf.tables.valid_fields = { +otf.tables.valid_fields = allocate( { "anchor_classes", "ascent", "cache_version", @@ -176,21 +179,18 @@ otf.tables.valid_fields = { "weight", "weight_width_slope_only", "xuid", -} +} ) --[[ldx--Here we go.
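Aside, not part of the patch text itself: the otf.load changes just below start fingerprinting feature files by name, size and modification time, and force a cache reload as soon as any of those differ from what was stored with the cached font data. A minimal sketch of that staleness test, with a hypothetical helper name:

local function featurefileschanged(stored,current) -- illustration only
    if not stored then
        return current ~= nil
    elseif not current or #stored ~= #current then
        return true
    end
    for i=1,#current do
        local s, c = stored[i], current[i]
        if s.name ~= c.name or s.size ~= c.size or s.time ~= c.time then
            return true
        end
    end
    return false
end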
--ldx]]-- local function load_featurefile(ff,featurefile) - if featurefile then - featurefile = resolvers.find_file(file.addsuffix(featurefile,'fea'),'fea') - if featurefile and featurefile ~= "" then - if trace_loading then - report_otf("featurefile: %s", featurefile) - end - fontloader.apply_featurefile(ff, featurefile) + if featurefile and featurefile ~= "" then + if trace_loading then + report_otf("featurefile: %s", featurefile) end + fontloader.apply_featurefile(ff, featurefile) end end @@ -215,8 +215,8 @@ local ordered_enhancers = { -- implemented later "flatten glyph lookups", "flatten anchor tables", "flatten feature tables", "simplify glyph lookups", -- some saving "prepare luatex tables", - "analyse features", "rehash features", - "analyse anchors", "analyse marks", "analyse unicodes", "analyse subtables", + "analyze features", "rehash features", + "analyze anchors", "analyze marks", "analyze unicodes", "analyze subtables", "check italic correction","check math", "share widths", "strip not needed data", @@ -224,7 +224,7 @@ local ordered_enhancers = { -- implemented later "check math parameters", } -local add_dimensions, show_feature_order -- implemented later +local adddimensions, showfeatureorder -- implemented later function otf.load(filename,format,sub,featurefile) local name = file.basename(file.removesuffix(filename)) @@ -239,8 +239,50 @@ function otf.load(filename,format,sub,featurefile) hash = hash .. "-" .. sub end hash = containers.cleanname(hash) + local featurefiles + if featurefile then + featurefiles = { } + for s in gmatch(featurefile,"[^,]+") do + local name = resolvers.findfile(file.addsuffix(s,'fea'),'fea') or "" + if name == "" then + report_otf("loading: no featurefile '%s'",s) + else + local attr = lfs.attributes(name) + featurefiles[#featurefiles+1] = { + name = name, + size = attr.size or 0, + time = attr.modification or 0, + } + end + end + if #featurefiles == 0 then + featurefiles = nil + end + end local data = containers.read(otf.cache,hash) - if not data or data.verbose ~= fonts.verbose or data.size ~= size or data.time ~= time then + local reload = not data or data.verbose ~= fonts.verbose or data.size ~= size or data.time ~= time + if not reload then + local featuredata = data.featuredata + if featurefiles then + if not featuredata or #featuredata ~= #featurefiles then + reload = true + else + for i=1,#featurefiles do + local fi, fd = featurefiles[i], featuredata[i] + if fi.name ~= fd.name or fi.size ~= fd.size or fi.time ~= fd.time then + reload = true + break + end + end + end + elseif featuredata then + reload = true + end + if reload then + report_otf("loading: forced reload due to changed featurefile specification: %s",featurefile or "--") + end + end + if reload then report_otf("loading: %s (hash: %s)",filename,hash) local ff, messages if sub then @@ -260,7 +302,11 @@ function otf.load(filename,format,sub,featurefile) report_otf("font loaded okay") end if ff then - load_featurefile(ff,featurefile) + if featurefiles then + for i=1,#featurefiles do + load_featurefile(ff,featurefiles[i].name) + end + end data = fontloader.to_table(ff) fontloader.close(ff) if data then @@ -275,6 +321,9 @@ function otf.load(filename,format,sub,featurefile) end data.size = size data.time = time + if featurefiles then + data.featuredata = featurefiles + end data.verbose = fonts.verbose report_otf("saving in cache: %s",filename) data = containers.write(otf.cache, hash, data) @@ -293,15 +342,15 @@ function otf.load(filename,format,sub,featurefile) report_otf("loading from 
cache: %s",hash) end enhance("unpack",data,filename,false) -- no message here - add_dimensions(data) + adddimensions(data) if trace_sequences then - show_feature_order(data,filename) + showfeatureorder(data,filename) end end return data end -add_dimensions = function(data) +adddimensions = function(data) -- todo: forget about the width if it's the defaultwidth (saves mem) -- we could also build the marks hash here (instead of storing it) if data then @@ -337,7 +386,7 @@ add_dimensions = function(data) end end -local function show_feature_order(otfdata,filename) +local function showfeatureorder(otfdata,filename) local sequences = otfdata.luatex.sequences if sequences and #sequences > 0 then if trace_loading then @@ -410,11 +459,6 @@ enhancers["prepare luatex tables"] = function(data,filename) luatex.creator = "context mkiv" end -enhancers["cleanup aat"] = function(data,filename) - if otf.cleanup_aat then - end -end - local function analyze_features(g, features) if g then local t, done = { }, { } @@ -438,7 +482,7 @@ local function analyze_features(g, features) return nil end -enhancers["analyse features"] = function(data,filename) +enhancers["analyze features"] = function(data,filename) -- local luatex = data.luatex -- luatex.gposfeatures = analyze_features(data.gpos) -- luatex.gsubfeatures = analyze_features(data.gsub) @@ -475,7 +519,7 @@ enhancers["rehash features"] = function(data,filename) end end -enhancers["analyse anchors"] = function(data,filename) +enhancers["analyze anchors"] = function(data,filename) local classes = data.anchor_classes local luatex = data.luatex local anchor_to_lookup, lookup_to_anchor = { }, { } @@ -501,7 +545,7 @@ enhancers["analyse anchors"] = function(data,filename) end end -enhancers["analyse marks"] = function(data,filename) +enhancers["analyze marks"] = function(data,filename) local glyphs = data.glyphs local marks = { } data.luatex.marks = marks @@ -513,9 +557,9 @@ enhancers["analyse marks"] = function(data,filename) end end -enhancers["analyse unicodes"] = fonts.map.add_to_unicode +enhancers["analyze unicodes"] = fonts.map.addtounicode -enhancers["analyse subtables"] = function(data,filename) +enhancers["analyze subtables"] = function(data,filename) data.luatex = data.luatex or { } local luatex = data.luatex local sequences = { } @@ -654,8 +698,8 @@ enhancers["prepare unicode"] = function(data,filename) else mapmap = mapmap.map end - local criterium = fonts.private - local private = fonts.private + local criterium = fonts.privateoffset + local private = criterium for index, glyph in next, glyphs do if index > 0 then local name = glyph.name @@ -1360,7 +1404,7 @@ enhancers["flatten feature tables"] = function(data,filename) end end -enhancers.patches = enhancers.patches or { } +enhancers.patches = allocate() enhancers["patch bugs"] = function(data,filename) local basename = file.basename(lower(filename)) @@ -1575,7 +1619,7 @@ local function copytotfm(data,cache_id) -- we can save a copy when we reorder th end spaceunits = tonumber(spaceunits) or tfm.units/2 -- 500 -- brrr -- we need a runtime lookup because of running from cdrom or zip, brrr (shouldn't we use the basename then?) 
- local filename = fonts.tfm.checked_filename(luatex) + local filename = fonts.tfm.checkedfilename(luatex) local fontname = metadata.fontname local fullname = metadata.fullname or fontname local cidinfo = data.cidinfo @@ -1687,7 +1731,7 @@ local function otftotfm(specification) tfmdata.has_italic = otfdata.metadata.has_italic if not tfmdata.language then tfmdata.language = 'dflt' end if not tfmdata.script then tfmdata.script = 'dflt' end - shared.processes, shared.features = otf.setfeatures(tfmdata,fonts.define.check(features,otf.features.default)) + shared.processes, shared.features = otf.setfeatures(tfmdata,definers.check(features,otf.features.default)) end end containers.write(tfm.cache,cache_id,tfmdata) @@ -1697,7 +1741,7 @@ end otf.features.register('mathsize') -function tfm.read_from_open_type(specification) -- wrong namespace +function tfm.read_from_otf(specification) -- wrong namespace local tfmtable = otftotfm(specification) if tfmtable then local otfdata = tfmtable.shared.otfdata @@ -1735,7 +1779,7 @@ function tfm.read_from_open_type(specification) -- wrong namespace end end tfmtable = tfm.scale(tfmtable,s,specification.relativeid) - if tfm.fontname_mode == "specification" then + if tfm.fontnamemode == "specification" then -- not to be used in context ! local specname = specification.specification if specname then @@ -1753,7 +1797,7 @@ end -- helpers -function otf.collect_lookups(otfdata,kind,script,language) +function otf.collectlookups(otfdata,kind,script,language) -- maybe store this in the font local sequences = otfdata.luatex.sequences if sequences then diff --git a/tex/context/base/font-oth.lua b/tex/context/base/font-oth.lua index 448d54b90..d1a68d809 100644 --- a/tex/context/base/font-oth.lua +++ b/tex/context/base/font-oth.lua @@ -9,7 +9,7 @@ if not modules then modules = { } end modules ['font-oth'] = { local lpegmatch = lpeg.match local splitter = lpeg.Ct(lpeg.splitat(" ")) -local collect_lookups = fonts.otf.collect_lookups +local collectlookups = fonts.otf.collectlookups -- For the moment there is no need to cache this but this might -- happen when I get the feeling that there is a performance @@ -20,7 +20,7 @@ function fonts.otf.getalternate(tfmdata,k,kind,value) local shared = tfmdata.shared local otfdata = shared and shared.otfdata if otfdata then - local validlookups, lookuplist = collect_lookups(otfdata,kind,tfmdata.script,tfmdata.language) + local validlookups, lookuplist = collectlookups(otfdata,kind,tfmdata.script,tfmdata.language) if validlookups then local lookups = tfmdata.descriptions[k].slookups -- we assume only slookups (we can always extend) if lookups then diff --git a/tex/context/base/font-oti.lua b/tex/context/base/font-oti.lua index a5bbabf5c..e531ba8b2 100644 --- a/tex/context/base/font-oti.lua +++ b/tex/context/base/font-oti.lua @@ -13,11 +13,8 @@ local fonts = fonts local otf = fonts.otf local initializers = fonts.initializers -otf.default_language = 'latn' -otf.default_script = 'dflt' - -local languages = otf.tables.languages -local scripts = otf.tables.scripts +local languages = otf.tables.languages +local scripts = otf.tables.scripts local function set_language(tfmdata,value) if value then diff --git a/tex/context/base/font-otn.lua b/tex/context/base/font-otn.lua index a04d13758..4109ca2d0 100644 --- a/tex/context/base/font-otn.lua +++ b/tex/context/base/font-otn.lua @@ -200,10 +200,11 @@ local curscurs = attributes.private('curscurs') local cursdone = attributes.private('cursdone') local kernpair = attributes.private('kernpair') -local 
set_mark = nodes.set_mark -local set_cursive = nodes.set_cursive -local set_kern = nodes.set_kern -local set_pair = nodes.set_pair +local injections = nodes.injections +local setmark = injections.setmark +local setcursive = injections.setcursive +local setkern = injections.setkern +local setpair = injections.setpair local markonce = true local cursonce = true @@ -232,9 +233,10 @@ local featurevalue = false -- we cheat a bit and assume that a font,attr combination are kind of ranged -local context_setups = fonts.define.specify.context_setups -local context_numbers = fonts.define.specify.context_numbers -local context_merged = fonts.define.specify.context_merged +local specifiers = fonts.definers.specifiers +local contextsetups = specifiers.contextsetups +local contextnumbers = specifiers.contextnumbers +local contextmerged = specifiers.contextmerged -- we cannot optimize with "start = first_character(head)" because then we don't -- know which rlmode we're in which messes up cursive handling later on @@ -591,7 +593,7 @@ function handlers.gpos_mark2base(start,kind,lookupname,markanchors,sequence) if al[anchor] then local ma = markanchors[anchor] if ma then - local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma) + local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma) if trace_marks then logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%s,%s)", pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) @@ -606,7 +608,7 @@ function handlers.gpos_mark2base(start,kind,lookupname,markanchors,sequence) end else -- if trace_bugs then -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar)) - fonts.register_message(currentfont,basechar,"no base anchors") + fonts.registermessage(currentfont,basechar,"no base anchors") end elseif trace_bugs then logwarning("%s: prev node is no char",pref(kind,lookupname)) @@ -659,7 +661,7 @@ function handlers.gpos_mark2ligature(start,kind,lookupname,markanchors,sequence) if ma then ba = ba[index] if ba then - local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma,index) + local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma,index) if trace_marks then logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%s,%s)", pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy) @@ -676,7 +678,7 @@ function handlers.gpos_mark2ligature(start,kind,lookupname,markanchors,sequence) end else -- if trace_bugs then -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar)) - fonts.register_message(currentfont,basechar,"no base anchors") + fonts.registermessage(currentfont,basechar,"no base anchors") end elseif trace_bugs then logwarning("%s: prev node is no char",pref(kind,lookupname)) @@ -706,7 +708,7 @@ function handlers.gpos_mark2mark(start,kind,lookupname,markanchors,sequence) if al[anchor] then local ma = markanchors[anchor] if ma then - local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma) + local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma) if trace_marks then logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%s,%s)", pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) @@ -722,7 +724,7 @@ function handlers.gpos_mark2mark(start,kind,lookupname,markanchors,sequence) end else -- if trace_bugs then -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar)) - 
fonts.register_message(currentfont,basechar,"no base anchors") + fonts.registermessage(currentfont,basechar,"no base anchors") end elseif trace_bugs then logwarning("%s: prev node is no mark",pref(kind,lookupname)) @@ -764,7 +766,7 @@ function handlers.gpos_cursive(start,kind,lookupname,exitanchors,sequence) -- to if al[anchor] then local exit = exitanchors[anchor] if exit then - local dx, dy, bound = set_cursive(start,nxt,tfmdata.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) + local dx, dy, bound = setcursive(start,nxt,tfmdata.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) if trace_cursive then logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) end @@ -777,7 +779,7 @@ function handlers.gpos_cursive(start,kind,lookupname,exitanchors,sequence) -- to end else -- if trace_bugs then -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar)) - fonts.register_message(currentfont,startchar,"no entry anchors") + fonts.registermessage(currentfont,startchar,"no entry anchors") end break end @@ -794,7 +796,7 @@ end function handlers.gpos_single(start,kind,lookupname,kerns,sequence) local startchar = start.char - local dx, dy, w, h = set_pair(start,tfmdata.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) + local dx, dy, w, h = setpair(start,tfmdata.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) if trace_kerns then logprocess("%s: shifting single %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),dx,dy,w,h) end @@ -825,14 +827,14 @@ function handlers.gpos_pair(start,kind,lookupname,kerns,sequence) local a, b = krn[3], krn[4] if a and #a > 0 then local startchar = start.char - local x, y, w, h = set_pair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) + local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) if trace_kerns then logprocess("%s: shifting first of pair %s and %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) end end if b and #b > 0 then local startchar = start.char - local x, y, w, h = set_pair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) + local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) if trace_kerns then logprocess("%s: shifting second of pair %s and %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) end @@ -841,7 +843,7 @@ function handlers.gpos_pair(start,kind,lookupname,kerns,sequence) report_process("%s: check this out (old kern stuff)",pref(kind,lookupname)) local a, b = krn[3], krn[7] if a and a ~= 0 then - local k = set_kern(snext,factor,rlmode,a) + local k = setkern(snext,factor,rlmode,a) if trace_kerns then logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar)) end @@ -852,7 +854,7 @@ function handlers.gpos_pair(start,kind,lookupname,kerns,sequence) end done = true elseif krn ~= 0 then - local k = set_kern(snext,factor,rlmode,krn) + local k = setkern(snext,factor,rlmode,krn) if trace_kerns then logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar)) end @@ -1223,7 +1225,7 @@ function chainprocs.gpos_mark2base(start,stop,kind,chainname,currentcontext,cach if al[anchor] then local ma = markanchors[anchor] if ma then - local dx, 
dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma) + local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma) if trace_marks then logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%s,%s)", cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) @@ -1296,7 +1298,7 @@ function chainprocs.gpos_mark2ligature(start,stop,kind,chainname,currentcontext, if ma then ba = ba[index] if ba then - local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma,index) + local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma,index) if trace_marks then logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%s,%s)", cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy) @@ -1348,7 +1350,7 @@ function chainprocs.gpos_mark2mark(start,stop,kind,chainname,currentcontext,cach if al[anchor] then local ma = markanchors[anchor] if ma then - local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma) + local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma) if trace_marks then logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%s,%s)", cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) @@ -1414,7 +1416,7 @@ function chainprocs.gpos_cursive(start,stop,kind,chainname,currentcontext,cache, if al[anchor] then local exit = exitanchors[anchor] if exit then - local dx, dy, bound = set_cursive(start,nxt,tfmdata.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) + local dx, dy, bound = setcursive(start,nxt,tfmdata.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) if trace_cursive then logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) end @@ -1427,7 +1429,7 @@ function chainprocs.gpos_cursive(start,stop,kind,chainname,currentcontext,cache, end else -- if trace_bugs then -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar)) - fonts.register_message(currentfont,startchar,"no entry anchors") + fonts.registermessage(currentfont,startchar,"no entry anchors") end break end @@ -1453,7 +1455,7 @@ function chainprocs.gpos_single(start,stop,kind,chainname,currentcontext,cache,c if kerns then kerns = kerns[startchar] if kerns then - local dx, dy, w, h = set_pair(start,tfmdata.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) + local dx, dy, w, h = setpair(start,tfmdata.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) if trace_kerns then logprocess("%s: shifting single %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h) end @@ -1491,14 +1493,14 @@ function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,cache,cur local a, b = krn[3], krn[4] if a and #a > 0 then local startchar = start.char - local x, y, w, h = set_pair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) + local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) if trace_kerns then logprocess("%s: shifting first of pair %s and %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) end end if b and #b > 0 then local startchar = start.char - local x, y, w, h = 
set_pair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) + local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) if trace_kerns then logprocess("%s: shifting second of pair %s and %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) end @@ -1507,7 +1509,7 @@ function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,cache,cur report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname)) local a, b = krn[3], krn[7] if a and a ~= 0 then - local k = set_kern(snext,factor,rlmode,a) + local k = setkern(snext,factor,rlmode,a) if trace_kerns then logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar)) end @@ -1518,7 +1520,7 @@ function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,cache,cur end done = true elseif krn ~= 0 then - local k = set_kern(snext,factor,rlmode,krn) + local k = setkern(snext,factor,rlmode,krn) if trace_kerns then logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar)) end @@ -1922,8 +1924,8 @@ function fonts.methods.node.otf.features(head,font,attr) local script, language, s_enabled, a_enabled, dyn local attribute_driven = attr and attr ~= 0 if attribute_driven then - local features = context_setups[context_numbers[attr]] -- could be a direct list - dyn = context_merged[attr] or 0 + local features = contextsetups[contextnumbers[attr]] -- could be a direct list + dyn = contextmerged[attr] or 0 language, script = features.language or "dflt", features.script or "dflt" a_enabled = features -- shared.features -- can be made local to the resolver if dyn == 2 or dyn == -2 then diff --git a/tex/context/base/font-ott.lua b/tex/context/base/font-ott.lua index 379032f18..3c3ecdee0 100644 --- a/tex/context/base/font-ott.lua +++ b/tex/context/base/font-ott.lua @@ -10,6 +10,8 @@ local type, next, tonumber, tostring = type, next, tonumber, tostring local gsub, lower, format = string.gsub, string.lower, string.format local is_boolean = string.is_boolean +local allocate = utilities.storage.allocate + local fonts = fonts fonts.otf = fonts.otf or { } local otf = fonts.otf @@ -20,7 +22,7 @@ local tables = otf.tables otf.meanings = otf.meanings or { } local meanings = otf.meanings -local scripts = { +local scripts = allocate { ['dflt'] = 'Default', ['arab'] = 'Arabic', @@ -93,7 +95,7 @@ local scripts = { ['yi' ] = 'Yi', } -local languages = { +local languages = allocate { ['dflt'] = 'Default', ['aba'] = 'Abaza', @@ -487,7 +489,7 @@ local languages = { ['zul'] = 'Zulu' } -local features = { +local features = allocate { ['aalt'] = 'Access All Alternates', ['abvf'] = 'Above-Base Forms', ['abvm'] = 'Above-Base Mark Positioning', @@ -625,7 +627,7 @@ local features = { ['tlig'] = 'Traditional TeX Ligatures', } -local baselines = { +local baselines = allocate { ['hang'] = 'Hanging baseline', ['icfb'] = 'Ideographic character face bottom edge baseline', ['icft'] = 'Ideographic character face tope edige baseline', @@ -635,32 +637,32 @@ local baselines = { ['romn'] = 'Roman baseline' } -local to_scripts = table.swaphash(scripts ) -local to_languages = table.swaphash(languages) -local to_features = table.swaphash(features ) +local verbosescripts = allocate(table.swaphash(scripts )) +local verboselanguages = allocate(table.swaphash(languages)) +local verbosefeatures = allocate(table.swaphash(features 
)) -tables.scripts = scripts -tables.languages = languages -tables.features = features -tables.baselines = baselines +tables.scripts = scripts +tables.languages = languages +tables.features = features +tables.baselines = baselines -tables.to_scripts = to_scripts -tables.to_languages = to_languages -tables.to_features = to_features +tables.verbosescripts = verbosescripts +tables.verboselanguages = verboselanguages +tables.verbosefeatures = verbosefeatures -for k, v in next, to_features do +for k, v in next, verbosefeatures do local stripped = gsub(k,"%-"," ") - to_features[stripped] = v + verbosefeatures[stripped] = v local stripped = gsub(k,"[^a-zA-Z0-9]","") - to_features[stripped] = v + verbosefeatures[stripped] = v end -for k, v in next, to_features do - to_features[lower(k)] = v +for k, v in next, verbosefeatures do + verbosefeatures[lower(k)] = v end -- can be sped up by local tables -function tables.to_tag(id) +function tables.totag(id) -- not used return format("%4s",lower(id)) end @@ -694,14 +696,14 @@ function meanings.normalize(features) if k == "language" or k == "lang" then v = gsub(lower(v),"[^a-z0-9%-]","") if not languages[v] then - h.language = to_languages[v] or "dflt" + h.language = verboselanguages[v] or "dflt" else h.language = v end elseif k == "script" then v = gsub(lower(v),"[^a-z0-9%-]","") if not scripts[v] then - h.script = to_scripts[v] or "dflt" + h.script = verbosescripts[v] or "dflt" else h.script = v end @@ -714,7 +716,7 @@ function meanings.normalize(features) v = b end end - k = to_features[k] or k + k = verbosefeatures[k] or k local c = checkers[k] h[k] = c and c(v) or v end diff --git a/tex/context/base/font-syn.lua b/tex/context/base/font-syn.lua index 39e3df5ea..03aa528be 100644 --- a/tex/context/base/font-syn.lua +++ b/tex/context/base/font-syn.lua @@ -17,6 +17,8 @@ local lpegmatch = lpeg.match local utfgsub, utflower = utf.gsub, utf.lower local unpack = unpack or table.unpack +local allocate = utilities.storage.allocate + local trace_names = false trackers.register("fonts.names", function(v) trace_names = v end) local trace_warnings = false trackers.register("fonts.warnings", function(v) trace_warnings = v end) @@ -39,7 +41,7 @@ local filters = names.filters names.data = names.data or { } -names.version = 1.103 +names.version = 1.110 names.basename = "names" names.saved = false names.loaded = false @@ -103,14 +105,14 @@ local variants = Cs( -- fax casual local any = P(1) -local analysed_table +local analyzed_table -local analyser = Cs ( +local analyzer = Cs ( ( - weights / function(s) analysed_table[1] = s return "" end - + styles / function(s) analysed_table[2] = s return "" end - + widths / function(s) analysed_table[3] = s return "" end - + variants / function(s) analysed_table[4] = s return "" end + weights / function(s) analyzed_table[1] = s return "" end + + styles / function(s) analyzed_table[2] = s return "" end + + widths / function(s) analyzed_table[3] = s return "" end + + variants / function(s) analyzed_table[4] = s return "" end + any )^0 ) @@ -137,11 +139,11 @@ function names.splitspec(askedname) return name or askedname, weight, style, width, variant end -local function analysespec(somename) +local function analyzespec(somename) if somename then - analysed_table = { } - local name = lpegmatch(analyser,somename) - return name, analysed_table[1], analysed_table[2], analysed_table[3], analysed_table[4] + analyzed_table = { } + local name = lpegmatch(analyzer,somename) + return name, analyzed_table[1], analyzed_table[2], analyzed_table[3], 
analyzed_table[4] end end @@ -172,9 +174,9 @@ filters.otf = fontloader.fullinfo function filters.afm(name) -- we could parse the afm file as well, and then report an error but -- it's not worth the trouble - local pfbname = resolvers.find_file(file.removesuffix(name)..".pfb","pfb") or "" + local pfbname = resolvers.findfile(file.removesuffix(name)..".pfb","pfb") or "" if pfbname == "" then - pfbname = resolvers.find_file(file.removesuffix(file.basename(name))..".pfb","pfb") or "" + pfbname = resolvers.findfile(file.removesuffix(file.basename(name))..".pfb","pfb") or "" end if pfbname ~= "" then local f = io.open(name) @@ -211,8 +213,8 @@ filters.list = { --~ "ttc", "otf", "ttf", "dfont", "afm", } -names.xml_configuration_file = "fonts.conf" -- a bit weird format, bonus feature -names.environment_path_variable = "OSFONTDIR" -- the official way, in minimals etc +names.fontconfigfile = "fonts.conf" -- a bit weird format, bonus feature +names.osfontdirvariable = "OSFONTDIR" -- the official way, in minimals etc filters.paths = { } filters.names = { } @@ -221,7 +223,7 @@ function names.getpaths(trace) local hash, result = { }, { } local function collect(t,where) for i=1, #t do - local v = resolvers.clean_path(t[i]) + local v = resolvers.cleanpath(t[i]) v = gsub(v,"/+$","") -- not needed any more local key = lower(v) report_names("adding path from %s: %s",where,v) @@ -230,18 +232,18 @@ function names.getpaths(trace) end end end - local path = names.environment_path_variable or "" + local path = names.osfontdirvariable or "" if path ~= "" then - collect(resolvers.expanded_path_list(path),path) + collect(resolvers.expandedpathlist(path),path) end if xml then local confname = resolvers.getenv("FONTCONFIG_FILE") or "" if confname == "" then - confname = names.xml_configuration_file or "" + confname = names.fontconfigfile or "" end if confname ~= "" then -- first look in the tex tree - local name = resolvers.find_file(confname,"fontconfig files") or "" + local name = resolvers.findfile(confname,"fontconfig files") or "" if name == "" then -- after all, fontconfig is a unix thing name = file.join("/etc",confname) @@ -292,7 +294,20 @@ local function cleanname(name) -- return (utfgsub(utfgsub(lower(str),"[^%a%A%d]",""),"%s","")) end -names.cleanname = cleanname +local function cleanfilename(fullname,defaultsuffix) + local _, _, name, suffix = file.splitname(fullname) + name = gsub(lower(name),"[^%a%d]","") + if suffix and suffix ~= "" then + return name .. ".".. suffix + elseif defaultsuffix and defaultsuffix ~= "" then + return name .. ".".. defaultsuffix + else + return name + end +end + +names.cleanname = cleanname +names.cleanfilename = cleanfilename local function check_names(result) local names = result.names @@ -310,7 +325,7 @@ local function walk_tree(pathlist,suffix,identify) if pathlist then for i=1,#pathlist do local path = pathlist[i] - path = resolvers.clean_path(path .. "/") + path = resolvers.cleanpath(path .. "/") path = gsub(path,"/+","/") local pattern = path .. "**." .. 
suffix -- ** forces recurse report_names( "globbing path %s",pattern) @@ -348,8 +363,8 @@ local function check_name(data,result,filename,suffix,subfont) modifiers = modifiers and cleanname(modifiers) weight = weight and cleanname(weight) italicangle = (italicangle == 0) and nil - -- analyse - local a_name, a_weight, a_style, a_width, a_variant = analysespec(fullname or fontname or familyname) + -- analyze + local a_name, a_weight, a_style, a_width, a_variant = analyzespec(fullname or fontname or familyname) -- check local width = a_width local variant = a_variant @@ -400,11 +415,11 @@ local function cleanupkeywords() for i=1,#specifications do local s = specifications[i] -- fix (sofar styles are taken from the name, and widths from the specification) - local _, b_weight, b_style, b_width, b_variant = analysespec(s.weight) - local _, c_weight, c_style, c_width, c_variant = analysespec(s.style) - local _, d_weight, d_style, d_width, d_variant = analysespec(s.width) - local _, e_weight, e_style, e_width, e_variant = analysespec(s.variant) - local _, f_weight, f_style, f_width, f_variant = analysespec(s.fullname or "") + local _, b_weight, b_style, b_width, b_variant = analyzespec(s.weight) + local _, c_weight, c_style, c_width, c_variant = analyzespec(s.style) + local _, d_weight, d_style, d_width, d_variant = analyzespec(s.width) + local _, e_weight, e_style, e_width, e_variant = analyzespec(s.variant) + local _, f_weight, f_style, f_width, f_variant = analyzespec(s.fullname or "") local weight = b_weight or c_weight or d_weight or e_weight or f_weight or "normal" local style = b_style or c_style or d_style or e_style or f_style or "normal" local width = b_width or c_width or d_width or e_width or f_width or "normal" @@ -593,7 +608,7 @@ local function unpackreferences() end end -local function analysefiles() +local function analyzefiles() local data = names.data local done, totalnofread, totalnofskipped, totalnofduplicates, nofread, nofskipped, nofduplicates = { }, 0, 0, 0, 0, 0, 0 local skip_paths, skip_names = filters.paths, filters.names @@ -616,7 +631,7 @@ local function analysefiles() logs.push() end nofskipped = nofskipped + 1 - elseif not file.is_qualified_path(completename) and resolvers.find_file(completename,suffix) == "" then + elseif not file.is_qualified_path(completename) and resolvers.findfile(completename,suffix) == "" then -- not locateble by backend anyway if trace_names then report_names("%s font %s cannot be found by backend",suffix,completename) @@ -702,7 +717,7 @@ local function analysefiles() report_names( "warnings are disabled (tracker 'fonts.warnings')") end traverse("tree", function(suffix) -- TEXTREE only - resolvers.with_files(".*%." .. suffix .. "$", function(method,root,path,name) + resolvers.dowithfilesintree(".*%." .. suffix .. "$", function(method,root,path,name) if method == "file" or method == "tree" then local completename = root .."/" .. path .. "/" .. 
name identify(completename,name,suffix,name) @@ -718,7 +733,7 @@ local function analysefiles() -- we do this only for a stupid names run, not used for context itself, -- using the vars is to clumsy so we just stick to a full scan instead traverse("lsr", function(suffix) -- all trees - local pathlist = resolvers.split_path(resolvers.show_path("ls-R") or "") + local pathlist = resolvers.splitpath(resolvers.showpath("ls-R") or "") walk_tree(pathlist,suffix,identify) end) else @@ -729,6 +744,17 @@ local function analysefiles() data.statistics.readfiles, data.statistics.skippedfiles, data.statistics.duplicatefiles = totalnofread, totalnofskipped, totalnofduplicates end +local function addfilenames() + local data = names.data + local specifications = data.specifications + local files = { } + for i=1,#specifications do + local fullname = specifications[i].filename + files[cleanfilename(fullname)] = fullname + end + data.files = files +end + local function rejectclashes() -- just to be sure, so no explicit afm will be found then local specifications, used, okay = names.data.specifications, { }, { } for i=1,#specifications do @@ -766,19 +792,20 @@ local function resetdata() specifications = { }, families = { }, statistics = { }, - data_state = resolvers.data_state(), + datastate = resolvers.datastate(), } end function names.identify() resetdata() - analysefiles() + analyzefiles() rejectclashes() collectfamilies() collectstatistics() cleanupkeywords() collecthashes() checkduplicates() + addfilenames() -- sorthashes() -- will be resorted when saved end @@ -838,7 +865,7 @@ local function list_them(mapping,sorted,pattern,t,all) end function names.list(pattern,reload,all) -- here? - names.load(reload) + names.load() -- todo reload if names.loaded then local t = { } local data = names.data @@ -868,8 +895,8 @@ local function is_reloaded() if not reloaded then local data = names.data if names.autoreload then - local c_status = table.serialize(resolvers.data_state()) - local f_status = table.serialize(data.data_state) + local c_status = table.serialize(resolvers.datastate()) + local f_status = table.serialize(data.datastate) if c_status == f_status then -- report_names("font database matches configuration and file hashes") return @@ -974,6 +1001,17 @@ function names.resolve(askedname,sub) end end +function names.getfilename(askedname,suffix) -- last resort, strip funny chars + names.load() + local files = names.data.files + askedname = files and files[cleanfilename(askedname,suffix)] or "" + if askedname == "" then + return "" + else + return resolvers.findbinfile(askedname,suffix) or "" + end +end + -- specified search local function s_collect_weight_style_width_variant(found,done,all,weight,style,width,variant,family) @@ -1284,7 +1322,8 @@ end function names.specification(askedname,weight,style,width,variant,reload,all) if askedname and askedname ~= "" and names.enabled then - askedname = lower(askedname) -- or cleanname +--~ askedname = lower(askedname) -- or cleanname + askedname = cleanname(askedname) -- or cleanname names.load(reload) local found = heuristic(askedname,weight,style,width,variant,all) if not found and is_reloaded() then @@ -1299,7 +1338,8 @@ end function names.collect(askedname,weight,style,width,variant,reload,all) if askedname and askedname ~= "" and names.enabled then - askedname = lower(askedname) -- or cleanname +--~ askedname = lower(askedname) -- or cleanname + askedname = cleanname(askedname) -- or cleanname names.load(reload) local list = 
heuristic(askedname,weight,style,width,variant,true) if not list or #list == 0 and is_reloaded() then @@ -1323,14 +1363,16 @@ end function names.collectfiles(askedname,reload) -- no all if askedname and askedname ~= "" and names.enabled then - askedname = lower(askedname) -- or cleanname +--~ askedname = lower(askedname) -- or cleanname + askedname = cleanname(askedname) -- or cleanname names.load(reload) local list = { } local basename = file.basename local specifications = names.data.specifications for i=1,#specifications do local s = specifications[i] - if find(lower(basename(s.filename)),askedname) then +--~ if find(lower(basename(s.filename)),askedname) then + if find(cleanname(basename(s.filename)),askedname) then list[#list+1] = s end end @@ -1338,65 +1380,65 @@ function names.collectfiles(askedname,reload) -- no all end end ---[[ldx-- -Fallbacks, not permanent but a transition thing.
---ldx]]-- - -names.new_to_old = { - ["lmroman10-capsregular"] = "lmromancaps10-oblique", - ["lmroman10-capsoblique"] = "lmromancaps10-regular", - ["lmroman10-demi"] = "lmromandemi10-oblique", - ["lmroman10-demioblique"] = "lmromandemi10-regular", - ["lmroman8-oblique"] = "lmromanslant8-regular", - ["lmroman9-oblique"] = "lmromanslant9-regular", - ["lmroman10-oblique"] = "lmromanslant10-regular", - ["lmroman12-oblique"] = "lmromanslant12-regular", - ["lmroman17-oblique"] = "lmromanslant17-regular", - ["lmroman10-boldoblique"] = "lmromanslant10-bold", - ["lmroman10-dunhill"] = "lmromandunh10-oblique", - ["lmroman10-dunhilloblique"] = "lmromandunh10-regular", - ["lmroman10-unslanted"] = "lmromanunsl10-regular", - ["lmsans10-demicondensed"] = "lmsansdemicond10-regular", - ["lmsans10-demicondensedoblique"] = "lmsansdemicond10-oblique", - ["lmsansquotation8-bold"] = "lmsansquot8-bold", - ["lmsansquotation8-boldoblique"] = "lmsansquot8-boldoblique", - ["lmsansquotation8-oblique"] = "lmsansquot8-oblique", - ["lmsansquotation8-regular"] = "lmsansquot8-regular", - ["lmtypewriter8-regular"] = "lmmono8-regular", - ["lmtypewriter9-regular"] = "lmmono9-regular", - ["lmtypewriter10-regular"] = "lmmono10-regular", - ["lmtypewriter12-regular"] = "lmmono12-regular", - ["lmtypewriter10-italic"] = "lmmono10-italic", - ["lmtypewriter10-oblique"] = "lmmonoslant10-regular", - ["lmtypewriter10-capsoblique"] = "lmmonocaps10-oblique", - ["lmtypewriter10-capsregular"] = "lmmonocaps10-regular", - ["lmtypewriter10-light"] = "lmmonolt10-regular", - ["lmtypewriter10-lightoblique"] = "lmmonolt10-oblique", - ["lmtypewriter10-lightcondensed"] = "lmmonoltcond10-regular", - ["lmtypewriter10-lightcondensedoblique"] = "lmmonoltcond10-oblique", - ["lmtypewriter10-dark"] = "lmmonolt10-bold", - ["lmtypewriter10-darkoblique"] = "lmmonolt10-boldoblique", - ["lmtypewritervarwd10-regular"] = "lmmonoproplt10-regular", - ["lmtypewritervarwd10-oblique"] = "lmmonoproplt10-oblique", - ["lmtypewritervarwd10-light"] = "lmmonoprop10-regular", - ["lmtypewritervarwd10-lightoblique"] = "lmmonoprop10-oblique", - ["lmtypewritervarwd10-dark"] = "lmmonoproplt10-bold", - ["lmtypewritervarwd10-darkoblique"] = "lmmonoproplt10-boldoblique", -} - -names.old_to_new = table.swapped(names.new_to_old) +--~ --[[ldx-- +--~Fallbacks, not permanent but a transition thing.
+--~ --ldx]]-- +--~ +--~ names.new_to_old = allocate { +--~ ["lmroman10-capsregular"] = "lmromancaps10-oblique", +--~ ["lmroman10-capsoblique"] = "lmromancaps10-regular", +--~ ["lmroman10-demi"] = "lmromandemi10-oblique", +--~ ["lmroman10-demioblique"] = "lmromandemi10-regular", +--~ ["lmroman8-oblique"] = "lmromanslant8-regular", +--~ ["lmroman9-oblique"] = "lmromanslant9-regular", +--~ ["lmroman10-oblique"] = "lmromanslant10-regular", +--~ ["lmroman12-oblique"] = "lmromanslant12-regular", +--~ ["lmroman17-oblique"] = "lmromanslant17-regular", +--~ ["lmroman10-boldoblique"] = "lmromanslant10-bold", +--~ ["lmroman10-dunhill"] = "lmromandunh10-oblique", +--~ ["lmroman10-dunhilloblique"] = "lmromandunh10-regular", +--~ ["lmroman10-unslanted"] = "lmromanunsl10-regular", +--~ ["lmsans10-demicondensed"] = "lmsansdemicond10-regular", +--~ ["lmsans10-demicondensedoblique"] = "lmsansdemicond10-oblique", +--~ ["lmsansquotation8-bold"] = "lmsansquot8-bold", +--~ ["lmsansquotation8-boldoblique"] = "lmsansquot8-boldoblique", +--~ ["lmsansquotation8-oblique"] = "lmsansquot8-oblique", +--~ ["lmsansquotation8-regular"] = "lmsansquot8-regular", +--~ ["lmtypewriter8-regular"] = "lmmono8-regular", +--~ ["lmtypewriter9-regular"] = "lmmono9-regular", +--~ ["lmtypewriter10-regular"] = "lmmono10-regular", +--~ ["lmtypewriter12-regular"] = "lmmono12-regular", +--~ ["lmtypewriter10-italic"] = "lmmono10-italic", +--~ ["lmtypewriter10-oblique"] = "lmmonoslant10-regular", +--~ ["lmtypewriter10-capsoblique"] = "lmmonocaps10-oblique", +--~ ["lmtypewriter10-capsregular"] = "lmmonocaps10-regular", +--~ ["lmtypewriter10-light"] = "lmmonolt10-regular", +--~ ["lmtypewriter10-lightoblique"] = "lmmonolt10-oblique", +--~ ["lmtypewriter10-lightcondensed"] = "lmmonoltcond10-regular", +--~ ["lmtypewriter10-lightcondensedoblique"] = "lmmonoltcond10-oblique", +--~ ["lmtypewriter10-dark"] = "lmmonolt10-bold", +--~ ["lmtypewriter10-darkoblique"] = "lmmonolt10-boldoblique", +--~ ["lmtypewritervarwd10-regular"] = "lmmonoproplt10-regular", +--~ ["lmtypewritervarwd10-oblique"] = "lmmonoproplt10-oblique", +--~ ["lmtypewritervarwd10-light"] = "lmmonoprop10-regular", +--~ ["lmtypewritervarwd10-lightoblique"] = "lmmonoprop10-oblique", +--~ ["lmtypewritervarwd10-dark"] = "lmmonoproplt10-bold", +--~ ["lmtypewritervarwd10-darkoblique"] = "lmmonoproplt10-boldoblique", +--~ } +--~ +--~ names.old_to_new = allocate(table.swapped(names.new_to_old)) function names.exists(name) local found = false local list = filters.list for k=1,#list do local v = list[k] - found = (resolvers.find_file(name,v) or "") ~= "" + found = (resolvers.findfile(name,v) or "") ~= "" if found then return found end end - return ((resolvers.find_file(name,"tfm") or "") ~= "") or ((names.resolve(name) or "") ~= "") + return ((resolvers.findfile(name,"tfm") or "") ~= "") or ((names.resolve(name) or "") ~= "") end -- for i=1,fonts.names.lookup(pattern) do diff --git a/tex/context/base/font-tfm.lua b/tex/context/base/font-tfm.lua index d51bcc3b8..a48d3c3f4 100644 --- a/tex/context/base/font-tfm.lua +++ b/tex/context/base/font-tfm.lua @@ -11,6 +11,8 @@ local utf = unicode.utf8 local next, format, match, lower, gsub = next, string.format, string.match, string.lower, string.gsub local concat, sortedkeys, utfbyte, serialize = table.concat, table.sortedkeys, utf.byte, table.serialize +local allocate = utilities.storage.allocate + local trace_defining = false trackers.register("fonts.defining", function(v) trace_defining = v end) local trace_scaling = false 
trackers.register("fonts.scaling" , function(v) trace_scaling = v end) @@ -28,8 +30,8 @@ local report_define = logs.new("define fonts") local fonts = fonts local tfm = fonts.tfm -fonts.loaded = fonts.loaded or { } -fonts.dontembed = fonts.dontembed or { } +fonts.loaded = allocate() +fonts.dontembed = allocate() fonts.triggers = fonts.triggers or { } -- brrr fonts.initializers = fonts.initializers or { } fonts.initializers.common = fonts.initializers.common or { } @@ -47,10 +49,10 @@ local glyph_code = nodecodes.glyph supplied byThe reason why the scaler was originally split, is that for a while we experimented
+with a helper function. However, in practice the
The reason why the scaler is split, is that for a while we experimented
-with a helper function. However, in practice the
We cannot load anything yet. However, what we will do is reserve a few tables. These can be used for runtime user data or third party modules and will not be @@ -31,8 +33,8 @@ document = document or { }
These can be used/set by the caller program;
Please create a namespace within these tables before using them!
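The storage helpers that this patch threads through all of these files, utilities.storage.allocate, utilities.storage.mark and the storage.register calls, are meant to be used along the following lines. This is only an illustrative sketch, not part of the patch: the thirddata namespace and its fields are invented names, and storage.register is assumed to keep the (tag, table, global name) signature used elsewhere in the diff.

local allocate = utilities.storage.allocate
local mark     = utilities.storage.mark

thirddata              = thirddata or { }           -- hypothetical namespace for user or module data
thirddata.demo         = thirddata.demo or { }
thirddata.demo.mapping = allocate {                  -- a new table, created through the allocator
    ["alpha"] = 1,
    ["beta"]  = 2,
}
mark(thirddata.demo)                                 -- an already existing table is just marked

-- register the table with the storage subsystem: tag, table, global name
storage.register("thirddata/demo mapping", thirddata.demo.mapping, "thirddata.demo.mapping")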
diff --git a/tex/context/base/luat-iop.lua b/tex/context/base/luat-iop.lua index e6f8a7433..6642a2383 100644 --- a/tex/context/base/luat-iop.lua +++ b/tex/context/base/luat-iop.lua @@ -13,10 +13,12 @@ if not modules then modules = { } end modules ['luat-iop'] = { local lower, find, sub = string.lower, string.find, string.sub +local allocate = utilities.storage.allocate + local ioinp = io.inp if not ioinp then ioinp = { } io.inp = ioinp end local ioout = io.out if not ioout then ioout = { } io.out = ioout end -ioinp.modes, ioout.modes = { }, { } -- functions +ioinp.modes, ioout.modes = allocate(), allocate() local inp_blocked, inp_permitted = { }, { } local out_blocked, out_permitted = { }, { } @@ -104,12 +106,12 @@ function ioinp.modes.paranoid() i_inhibit('%.%.') i_permit('^%./') i_permit('[^/]') - resolvers.do_with_path('TEXMF',i_permit) + resolvers.dowithpath('TEXMF',i_permit) end function ioout.modes.paranoid() o_inhibit('.*') - resolvers.do_with_path('TEXMFOUTPUT',o_permit) + resolvers.dowithpath('TEXMFOUTPUT',o_permit) end -- handy diff --git a/tex/context/base/luat-lib.mkiv b/tex/context/base/luat-lib.mkiv index 366578efb..02bafa4cc 100644 --- a/tex/context/base/luat-lib.mkiv +++ b/tex/context/base/luat-lib.mkiv @@ -21,6 +21,7 @@ \registerctxluafile{util-tab}{1.001} \registerctxluafile{util-fmt}{1.001} \registerctxluafile{util-deb}{1.001} % could also be done in trac-deb.mkiv +\registerctxluafile{util-sto}{1.001} % could also be done in trac-deb.mkiv \registerctxluafile{trac-inf}{1.001} \registerctxluafile{trac-set}{1.001} diff --git a/tex/context/base/luat-sto.lua b/tex/context/base/luat-sto.lua index 0c810021d..2ead0253d 100644 --- a/tex/context/base/luat-sto.lua +++ b/tex/context/base/luat-sto.lua @@ -6,7 +6,7 @@ if not modules then modules = { } end modules ['luat-sto'] = { license = "see context related readme files" } -local type, next = type, next +local type, next, setmetatable, getmetatable = type, next, setmetatable, getmetatable local gmatch, format, write_nl = string.gmatch, string.format, texio.write_nl local report_storage = logs.new("storage") @@ -25,8 +25,21 @@ storage.max = storage.min - 1 storage.noftables = storage.noftables or 0 storage.nofmodules = storage.nofmodules or 0 +storage.mark = utilities.storage.mark +storage.allocate = utilities.storage.allocate +storage.marked = utilities.storage.marked + function storage.register(...) - data[#data+1] = { ... } + local t = { ... } + local d = t[2] + if d then + storage.mark(d) + else + report_storage("fatal error: invalid storage '%s'",t[1]) + os.exit() + end + data[#data+1] = t + return t end -- evaluators .. messy .. to be redone @@ -140,3 +153,11 @@ storage.shared = storage.shared or { } -- (non table) values. 
storage.register("storage/shared", storage.shared, "storage.shared") + +local mark = storage.mark + +if string.patterns then mark(string.patterns) end +if lpeg.patterns then mark(lpeg.patterns) end +if os.env then mark(os.env) end +if number.dimenfactors then mark(number.dimenfactors) end +if libraries then for k,v in next, libraries do mark(v) end end diff --git a/tex/context/base/lxml-aux.lua b/tex/context/base/lxml-aux.lua index f2e3ee61e..f01478dbe 100644 --- a/tex/context/base/lxml-aux.lua +++ b/tex/context/base/lxml-aux.lua @@ -79,7 +79,7 @@ function xml.withelement(e,n,handle) -- slow end function xml.each(root,pattern,handle,reverse) - local collected = xmlapplylpath({ root },pattern) + local collected = xmlapplylpath(root,pattern) if collected then if reverse then for c=#collected,1,-1 do @@ -95,7 +95,7 @@ function xml.each(root,pattern,handle,reverse) end function xml.processattributes(root,pattern,handle) - local collected = xmlapplylpath({ root },pattern) + local collected = xmlapplylpath(root,pattern) if collected and handle then for c=1,#collected do handle(collected[c].at) @@ -111,11 +111,11 @@ end -- are these still needed -> lxml-cmp.lua function xml.collect(root, pattern) - return xmlapplylpath({ root },pattern) + return xmlapplylpath(root,pattern) end function xml.collecttexts(root, pattern, flatten) -- todo: variant with handle - local collected = xmlapplylpath({ root },pattern) + local collected = xmlapplylpath(root,pattern) if collected and flatten then local xmltostring = xml.tostring for c=1,#collected do @@ -126,7 +126,7 @@ function xml.collecttexts(root, pattern, flatten) -- todo: variant with handle end function xml.collect_tags(root, pattern, nonamespace) - local collected = xmlapplylpath({ root },pattern) + local collected = xmlapplylpath(root,pattern) if collected then local t = { } for c=1,#collected do @@ -197,7 +197,7 @@ local function copiedelement(element,newparent) end function xml.delete(root,pattern) - local collected = xmlapplylpath({ root },pattern) + local collected = xmlapplylpath(root,pattern) if collected then for c=1,#collected do local e = collected[c] @@ -216,7 +216,7 @@ end function xml.replace(root,pattern,whatever) local element = root and xmltoelement(whatever,root) - local collected = element and xmlapplylpath({ root },pattern) + local collected = element and xmlapplylpath(root,pattern) if collected then for c=1,#collected do local e = collected[c] @@ -235,7 +235,7 @@ end local function inject_element(root,pattern,whatever,prepend) local element = root and xmltoelement(whatever,root) - local collected = element and xmlapplylpath({ root },pattern) + local collected = element and xmlapplylpath(root,pattern) if collected then for c=1,#collected do local e = collected[c] @@ -266,7 +266,7 @@ end local function insert_element(root,pattern,whatever,before) -- todo: element als functie local element = root and xmltoelement(whatever,root) - local collected = element and xmlapplylpath({ root },pattern) + local collected = element and xmlapplylpath(root,pattern) if collected then for c=1,#collected do local e = collected[c] @@ -292,7 +292,7 @@ local function include(xmldata,pattern,attribute,recursive,loaddata) -- attribute = attribute or 'href' pattern = pattern or 'include' loaddata = loaddata or io.loaddata - local collected = xmlapplylpath({ xmldata },pattern) + local collected = xmlapplylpath(xmldata,pattern) if collected then for c=1,#collected do local ek = collected[c] @@ -335,74 +335,80 @@ end xml.include = include -function xml.strip(root, 
pattern, nolines, anywhere) -- strips all leading and trailing spacing - local collected = xmlapplylpath({ root },pattern) -- beware, indices no longer are valid now - if collected then - for i=1,#collected do - local e = collected[i] - local edt = e.dt - if edt then - if anywhere then - local t = { } - for e=1,#edt do - local str = edt[e] - if type(str) ~= "string" then - t[#t+1] = str - elseif str ~= "" then - -- todo: lpeg for each case - if nolines then - str = gsub(str,"%s+"," ") - end - str = gsub(str,"^%s*(.-)%s*$","%1") - if str ~= "" then - t[#t+1] = str - end - end +local function stripelement(e,nolines,anywhere) + local edt = e.dt + if edt then + if anywhere then + local t = { } + for e=1,#edt do + local str = edt[e] + if type(str) ~= "string" then + t[#t+1] = str + elseif str ~= "" then + -- todo: lpeg for each case + if nolines then + str = gsub(str,"%s+"," ") + end + str = gsub(str,"^%s*(.-)%s*$","%1") + if str ~= "" then + t[#t+1] = str end - e.dt = t + end + end + e.dt = t + else + -- we can assume a regular sparse xml table with no successive strings + -- otherwise we should use a while loop + if #edt > 0 then + -- strip front + local str = edt[1] + if type(str) ~= "string" then + -- nothing + elseif str == "" then + remove(edt,1) else - -- we can assume a regular sparse xml table with no successive strings - -- otherwise we should use a while loop - if #edt > 0 then - -- strip front - local str = edt[1] - if type(str) ~= "string" then - -- nothing - elseif str == "" then - remove(edt,1) - else - if nolines then - str = gsub(str,"%s+"," ") - end - str = gsub(str,"^%s+","") - if str == "" then - remove(edt,1) - else - edt[1] = str - end - end + if nolines then + str = gsub(str,"%s+"," ") end - if #edt > 1 then - -- strip end - local str = edt[#edt] - if type(str) ~= "string" then - -- nothing - elseif str == "" then - remove(edt) - else - if nolines then - str = gsub(str,"%s+"," ") - end - str = gsub(str,"%s+$","") - if str == "" then - remove(edt) - else - edt[#edt] = str - end - end + str = gsub(str,"^%s+","") + if str == "" then + remove(edt,1) + else + edt[1] = str end end end + if #edt > 1 then + -- strip end + local str = edt[#edt] + if type(str) ~= "string" then + -- nothing + elseif str == "" then + remove(edt) + else + if nolines then + str = gsub(str,"%s+"," ") + end + str = gsub(str,"%s+$","") + if str == "" then + remove(edt) + else + edt[#edt] = str + end + end + end + end + end + return e -- convenient +end + +xml.stripelement = stripelement + +function xml.strip(root,pattern,nolines,anywhere) -- strips all leading and trailing spacing + local collected = xmlapplylpath(root,pattern) -- beware, indices no longer are valid now + if collected then + for i=1,#collected do + stripelement(collected[i],nolines,anywhere) end end end @@ -429,7 +435,7 @@ end xml.renamespace = renamespace function xml.remaptag(root, pattern, newtg) - local collected = xmlapplylpath({ root },pattern) + local collected = xmlapplylpath(root,pattern) if collected then for c=1,#collected do collected[c].tg = newtg @@ -438,7 +444,7 @@ function xml.remaptag(root, pattern, newtg) end function xml.remapnamespace(root, pattern, newns) - local collected = xmlapplylpath({ root },pattern) + local collected = xmlapplylpath(root,pattern) if collected then for c=1,#collected do collected[c].ns = newns @@ -447,7 +453,7 @@ function xml.remapnamespace(root, pattern, newns) end function xml.checknamespace(root, pattern, newns) - local collected = xmlapplylpath({ root },pattern) + local collected = 
xmlapplylpath(root,pattern) if collected then for c=1,#collected do local e = collected[c] @@ -459,7 +465,7 @@ function xml.checknamespace(root, pattern, newns) end function xml.remapname(root, pattern, newtg, newns, newrn) - local collected = xmlapplylpath({ root },pattern) + local collected = xmlapplylpath(root,pattern) if collected then for c=1,#collected do local e = collected[c] diff --git a/tex/context/base/lxml-ini.mkiv b/tex/context/base/lxml-ini.mkiv index 8bc154df4..83a9825bf 100644 --- a/tex/context/base/lxml-ini.mkiv +++ b/tex/context/base/lxml-ini.mkiv @@ -314,24 +314,6 @@ \c!compress=\v!no, % strip comment \c!entities=\v!yes] % replace entities -% \defineXMLenvironment[y]{(}{)} -% -% \startxmlsetups x -% /\xmlflush{#1}/ -% \stopxmlsetups -% -% \startxmlsetups xx -% \xmlsetsetup{main}{x}{*} -% \stopxmlsetups -% -% \xmlregistersetup{xx} -% -% \startbuffer -%This is the main filter function. It returns whatever is asked for.
+--ldx]]-- + +function xml.filter(root,pattern) -- no longer funny attribute handling here + return applylpath(root,pattern) +end + -- internal (parsed) expressions.child = function(e,pattern) - return applylpath({ e },pattern) -- todo: cache + return applylpath(e,pattern) -- todo: cache end expressions.count = function(e,pattern) - local collected = applylpath({ e },pattern) -- todo: cache + local collected = applylpath(e,pattern) -- todo: cache return (collected and #collected) or 0 end @@ -1077,7 +1115,7 @@ expressions.boolean = toboolean local function traverse(root,pattern,handle) report_lpath("use 'xml.selection' instead for '%s'",pattern) - local collected = applylpath({ root },pattern) + local collected = applylpath(root,pattern) if collected then for c=1,#collected do local e = collected[c] @@ -1088,7 +1126,7 @@ local function traverse(root,pattern,handle) end local function selection(root,pattern,handle) - local collected = applylpath({ root },pattern) + local collected = applylpath(root,pattern) if collected then if handle then for c=1,#collected do @@ -1215,14 +1253,6 @@ expressions.tag = function(e,n) -- only tg end end ---[[ldx-- -This is the main filter function. It returns whatever is asked for.
---ldx]]-- - -function xml.filter(root,pattern) -- no longer funny attribute handling here - return applylpath({ root },pattern) -end - --[[ldx--Often using an iterators looks nicer in the code than passing handler
functions. The
The reason why the scaler was originally split, is that for a while we experimented
+with a helper function. However, in practice the
The reason why the scaler is split, is that for a while we experimented
-with a helper function. However, in practice the
Here we go.
--ldx]]-- local function load_featurefile(ff,featurefile) - if featurefile then - featurefile = resolvers.find_file(file.addsuffix(featurefile,'fea'),'fea') - if featurefile and featurefile ~= "" then - if trace_loading then - report_otf("featurefile: %s", featurefile) - end - fontloader.apply_featurefile(ff, featurefile) + if featurefile and featurefile ~= "" then + if trace_loading then + report_otf("featurefile: %s", featurefile) end + fontloader.apply_featurefile(ff, featurefile) end end @@ -5635,8 +5643,8 @@ local ordered_enhancers = { -- implemented later "flatten glyph lookups", "flatten anchor tables", "flatten feature tables", "simplify glyph lookups", -- some saving "prepare luatex tables", - "analyse features", "rehash features", - "analyse anchors", "analyse marks", "analyse unicodes", "analyse subtables", + "analyze features", "rehash features", + "analyze anchors", "analyze marks", "analyze unicodes", "analyze subtables", "check italic correction","check math", "share widths", "strip not needed data", @@ -5644,7 +5652,7 @@ local ordered_enhancers = { -- implemented later "check math parameters", } -local add_dimensions, show_feature_order -- implemented later +local adddimensions, showfeatureorder -- implemented later function otf.load(filename,format,sub,featurefile) local name = file.basename(file.removesuffix(filename)) @@ -5659,8 +5667,50 @@ function otf.load(filename,format,sub,featurefile) hash = hash .. "-" .. sub end hash = containers.cleanname(hash) + local featurefiles + if featurefile then + featurefiles = { } + for s in gmatch(featurefile,"[^,]+") do + local name = resolvers.findfile(file.addsuffix(s,'fea'),'fea') or "" + if name == "" then + report_otf("loading: no featurefile '%s'",s) + else + local attr = lfs.attributes(name) + featurefiles[#featurefiles+1] = { + name = name, + size = attr.size or 0, + time = attr.modification or 0, + } + end + end + if #featurefiles == 0 then + featurefiles = nil + end + end local data = containers.read(otf.cache,hash) - if not data or data.verbose ~= fonts.verbose or data.size ~= size or data.time ~= time then + local reload = not data or data.verbose ~= fonts.verbose or data.size ~= size or data.time ~= time + if not reload then + local featuredata = data.featuredata + if featurefiles then + if not featuredata or #featuredata ~= #featurefiles then + reload = true + else + for i=1,#featurefiles do + local fi, fd = featurefiles[i], featuredata[i] + if fi.name ~= fd.name or fi.size ~= fd.size or fi.time ~= fd.time then + reload = true + break + end + end + end + elseif featuredata then + reload = true + end + if reload then + report_otf("loading: forced reload due to changed featurefile specification: %s",featurefile or "--") + end + end + if reload then report_otf("loading: %s (hash: %s)",filename,hash) local ff, messages if sub then @@ -5680,7 +5730,11 @@ function otf.load(filename,format,sub,featurefile) report_otf("font loaded okay") end if ff then - load_featurefile(ff,featurefile) + if featurefiles then + for i=1,#featurefiles do + load_featurefile(ff,featurefiles[i].name) + end + end data = fontloader.to_table(ff) fontloader.close(ff) if data then @@ -5695,6 +5749,9 @@ function otf.load(filename,format,sub,featurefile) end data.size = size data.time = time + if featurefiles then + data.featuredata = featurefiles + end data.verbose = fonts.verbose report_otf("saving in cache: %s",filename) data = containers.write(otf.cache, hash, data) @@ -5713,15 +5770,15 @@ function otf.load(filename,format,sub,featurefile) 
report_otf("loading from cache: %s",hash) end enhance("unpack",data,filename,false) -- no message here - add_dimensions(data) + adddimensions(data) if trace_sequences then - show_feature_order(data,filename) + showfeatureorder(data,filename) end end return data end -add_dimensions = function(data) +adddimensions = function(data) -- todo: forget about the width if it's the defaultwidth (saves mem) -- we could also build the marks hash here (instead of storing it) if data then @@ -5757,7 +5814,7 @@ add_dimensions = function(data) end end -local function show_feature_order(otfdata,filename) +local function showfeatureorder(otfdata,filename) local sequences = otfdata.luatex.sequences if sequences and #sequences > 0 then if trace_loading then @@ -5830,11 +5887,6 @@ enhancers["prepare luatex tables"] = function(data,filename) luatex.creator = "context mkiv" end -enhancers["cleanup aat"] = function(data,filename) - if otf.cleanup_aat then - end -end - local function analyze_features(g, features) if g then local t, done = { }, { } @@ -5858,7 +5910,7 @@ local function analyze_features(g, features) return nil end -enhancers["analyse features"] = function(data,filename) +enhancers["analyze features"] = function(data,filename) -- local luatex = data.luatex -- luatex.gposfeatures = analyze_features(data.gpos) -- luatex.gsubfeatures = analyze_features(data.gsub) @@ -5895,7 +5947,7 @@ enhancers["rehash features"] = function(data,filename) end end -enhancers["analyse anchors"] = function(data,filename) +enhancers["analyze anchors"] = function(data,filename) local classes = data.anchor_classes local luatex = data.luatex local anchor_to_lookup, lookup_to_anchor = { }, { } @@ -5921,7 +5973,7 @@ enhancers["analyse anchors"] = function(data,filename) end end -enhancers["analyse marks"] = function(data,filename) +enhancers["analyze marks"] = function(data,filename) local glyphs = data.glyphs local marks = { } data.luatex.marks = marks @@ -5933,9 +5985,9 @@ enhancers["analyse marks"] = function(data,filename) end end -enhancers["analyse unicodes"] = fonts.map.add_to_unicode +enhancers["analyze unicodes"] = fonts.map.addtounicode -enhancers["analyse subtables"] = function(data,filename) +enhancers["analyze subtables"] = function(data,filename) data.luatex = data.luatex or { } local luatex = data.luatex local sequences = { } @@ -6074,8 +6126,8 @@ enhancers["prepare unicode"] = function(data,filename) else mapmap = mapmap.map end - local criterium = fonts.private - local private = fonts.private + local criterium = fonts.privateoffset + local private = criterium for index, glyph in next, glyphs do if index > 0 then local name = glyph.name @@ -6780,7 +6832,7 @@ enhancers["flatten feature tables"] = function(data,filename) end end -enhancers.patches = enhancers.patches or { } +enhancers.patches = allocate() enhancers["patch bugs"] = function(data,filename) local basename = file.basename(lower(filename)) @@ -6995,7 +7047,7 @@ local function copytotfm(data,cache_id) -- we can save a copy when we reorder th end spaceunits = tonumber(spaceunits) or tfm.units/2 -- 500 -- brrr -- we need a runtime lookup because of running from cdrom or zip, brrr (shouldn't we use the basename then?) 
- local filename = fonts.tfm.checked_filename(luatex) + local filename = fonts.tfm.checkedfilename(luatex) local fontname = metadata.fontname local fullname = metadata.fullname or fontname local cidinfo = data.cidinfo @@ -7107,7 +7159,7 @@ local function otftotfm(specification) tfmdata.has_italic = otfdata.metadata.has_italic if not tfmdata.language then tfmdata.language = 'dflt' end if not tfmdata.script then tfmdata.script = 'dflt' end - shared.processes, shared.features = otf.setfeatures(tfmdata,fonts.define.check(features,otf.features.default)) + shared.processes, shared.features = otf.setfeatures(tfmdata,definers.check(features,otf.features.default)) end end containers.write(tfm.cache,cache_id,tfmdata) @@ -7117,7 +7169,7 @@ end otf.features.register('mathsize') -function tfm.read_from_open_type(specification) -- wrong namespace +function tfm.read_from_otf(specification) -- wrong namespace local tfmtable = otftotfm(specification) if tfmtable then local otfdata = tfmtable.shared.otfdata @@ -7155,7 +7207,7 @@ function tfm.read_from_open_type(specification) -- wrong namespace end end tfmtable = tfm.scale(tfmtable,s,specification.relativeid) - if tfm.fontname_mode == "specification" then + if tfm.fontnamemode == "specification" then -- not to be used in context ! local specname = specification.specification if specname then @@ -7173,7 +7225,7 @@ end -- helpers -function otf.collect_lookups(otfdata,kind,script,language) +function otf.collectlookups(otfdata,kind,script,language) -- maybe store this in the font local sequences = otfdata.luatex.sequences if sequences then @@ -7220,23 +7272,24 @@ local trace_dynamics = false trackers.register("otf.dynamics", function(v) trac local report_otf = logs.new("load otf") -local fonts = fonts -local otf = fonts.otf -local fontdata = fonts.ids +local fonts = fonts +local otf = fonts.otf +local fontdata = fonts.ids otf.features = otf.features or { } otf.features.default = otf.features.default or { } -local context_setups = fonts.define.specify.context_setups -local context_numbers = fonts.define.specify.context_numbers +local definers = fonts.definers +local contextsetups = definers.specifiers.contextsetups +local contextnumbers = definers.specifiers.contextnumbers -- todo: dynamics namespace -local a_to_script = { } otf.a_to_script = a_to_script -local a_to_language = { } otf.a_to_language = a_to_language +local a_to_script = { } +local a_to_language = { } function otf.setdynamics(font,dynamics,attribute) - local features = context_setups[context_numbers[attribute]] -- can be moved to caller + local features = contextsetups[contextnumbers[attribute]] -- can be moved to caller if features then local script = features.script or 'dflt' local language = features.language or 'dflt' @@ -7253,7 +7306,7 @@ function otf.setdynamics(font,dynamics,attribute) local dsla = dsl[attribute] if dsla then -- if trace_dynamics then - -- report_otf("using dynamics %s: attribute %s, script %s, language %s",context_numbers[attribute],attribute,script,language) + -- report_otf("using dynamics %s: attribute %s, script %s, language %s",contextnumbers[attribute],attribute,script,language) -- end return dsla else @@ -7273,10 +7326,10 @@ function otf.setdynamics(font,dynamics,attribute) tfmdata.script = script tfmdata.shared.features = { } -- end of save - local set = fonts.define.check(features,otf.features.default) + local set = definers.check(features,otf.features.default) dsla = otf.setfeatures(tfmdata,set) if trace_dynamics then - report_otf("setting dynamics %s: attribute 
%s, script %s, language %s, set: %s",context_numbers[attribute],attribute,script,language,table.sequenced(set)) + report_otf("setting dynamics %s: attribute %s, script %s, language %s, set: %s",contextnumbers[attribute],attribute,script,language,table.sequenced(set)) end -- we need to restore some values tfmdata.script = saved.script @@ -7291,6 +7344,14 @@ function otf.setdynamics(font,dynamics,attribute) return nil -- { } end +function otf.scriptandlanguage(tfmdata,attr) + if attr and attr > 0 then + return a_to_script[attr] or tfmdata.script, a_to_language[attr] or tfmdata.language + else + return tfmdata.script, tfmdata.language + end +end + end -- closure do -- begin closure to overcome local limits and interference @@ -7310,11 +7371,8 @@ local fonts = fonts local otf = fonts.otf local initializers = fonts.initializers -otf.default_language = 'latn' -otf.default_script = 'dflt' - -local languages = otf.tables.languages -local scripts = otf.tables.scripts +local languages = otf.tables.languages +local scripts = otf.tables.scripts local function set_language(tfmdata,value) if value then @@ -7517,7 +7575,7 @@ local splitter = lpeg.splitat(" ") local function prepare_base_substitutions(tfmdata,kind,value) -- we can share some code with the node features if value then local otfdata = tfmdata.shared.otfdata - local validlookups, lookuplist = otf.collect_lookups(otfdata,kind,tfmdata.script,tfmdata.language) + local validlookups, lookuplist = otf.collectlookups(otfdata,kind,tfmdata.script,tfmdata.language) if validlookups then local ligatures = { } local unicodes = tfmdata.unicodes -- names to unicodes @@ -7629,7 +7687,7 @@ end local function preparebasekerns(tfmdata,kind,value) -- todo what kind of kerns, currently all if value then local otfdata = tfmdata.shared.otfdata - local validlookups, lookuplist = otf.collect_lookups(otfdata,kind,tfmdata.script,tfmdata.language) + local validlookups, lookuplist = otf.collectlookups(otfdata,kind,tfmdata.script,tfmdata.language) if validlookups then local unicodes = tfmdata.unicodes -- names to unicodes local indices = tfmdata.indices @@ -7956,10 +8014,11 @@ local curscurs = attributes.private('curscurs') local cursdone = attributes.private('cursdone') local kernpair = attributes.private('kernpair') -local set_mark = nodes.set_mark -local set_cursive = nodes.set_cursive -local set_kern = nodes.set_kern -local set_pair = nodes.set_pair +local injections = nodes.injections +local setmark = injections.setmark +local setcursive = injections.setcursive +local setkern = injections.setkern +local setpair = injections.setpair local markonce = true local cursonce = true @@ -7988,9 +8047,10 @@ local featurevalue = false -- we cheat a bit and assume that a font,attr combination are kind of ranged -local context_setups = fonts.define.specify.context_setups -local context_numbers = fonts.define.specify.context_numbers -local context_merged = fonts.define.specify.context_merged +local specifiers = fonts.definers.specifiers +local contextsetups = specifiers.contextsetups +local contextnumbers = specifiers.contextnumbers +local contextmerged = specifiers.contextmerged -- we cannot optimize with "start = first_character(head)" because then we don't -- know which rlmode we're in which messes up cursive handling later on @@ -8347,7 +8407,7 @@ function handlers.gpos_mark2base(start,kind,lookupname,markanchors,sequence) if al[anchor] then local ma = markanchors[anchor] if ma then - local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma) + local dx, dy, bound 
= setmark(start,base,tfmdata.factor,rlmode,ba,ma) if trace_marks then logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%s,%s)", pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) @@ -8362,7 +8422,7 @@ function handlers.gpos_mark2base(start,kind,lookupname,markanchors,sequence) end else -- if trace_bugs then -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar)) - fonts.register_message(currentfont,basechar,"no base anchors") + fonts.registermessage(currentfont,basechar,"no base anchors") end elseif trace_bugs then logwarning("%s: prev node is no char",pref(kind,lookupname)) @@ -8415,7 +8475,7 @@ function handlers.gpos_mark2ligature(start,kind,lookupname,markanchors,sequence) if ma then ba = ba[index] if ba then - local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma,index) + local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma,index) if trace_marks then logprocess("%s, anchor %s, index %s, bound %s: anchoring mark %s to baselig %s at index %s => (%s,%s)", pref(kind,lookupname),anchor,index,bound,gref(markchar),gref(basechar),index,dx,dy) @@ -8432,7 +8492,7 @@ function handlers.gpos_mark2ligature(start,kind,lookupname,markanchors,sequence) end else -- if trace_bugs then -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar)) - fonts.register_message(currentfont,basechar,"no base anchors") + fonts.registermessage(currentfont,basechar,"no base anchors") end elseif trace_bugs then logwarning("%s: prev node is no char",pref(kind,lookupname)) @@ -8462,7 +8522,7 @@ function handlers.gpos_mark2mark(start,kind,lookupname,markanchors,sequence) if al[anchor] then local ma = markanchors[anchor] if ma then - local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma) + local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma) if trace_marks then logprocess("%s, anchor %s, bound %s: anchoring mark %s to basemark %s => (%s,%s)", pref(kind,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) @@ -8478,7 +8538,7 @@ function handlers.gpos_mark2mark(start,kind,lookupname,markanchors,sequence) end else -- if trace_bugs then -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(basechar)) - fonts.register_message(currentfont,basechar,"no base anchors") + fonts.registermessage(currentfont,basechar,"no base anchors") end elseif trace_bugs then logwarning("%s: prev node is no mark",pref(kind,lookupname)) @@ -8520,7 +8580,7 @@ function handlers.gpos_cursive(start,kind,lookupname,exitanchors,sequence) -- to if al[anchor] then local exit = exitanchors[anchor] if exit then - local dx, dy, bound = set_cursive(start,nxt,tfmdata.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) + local dx, dy, bound = setcursive(start,nxt,tfmdata.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) if trace_cursive then logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) end @@ -8533,7 +8593,7 @@ function handlers.gpos_cursive(start,kind,lookupname,exitanchors,sequence) -- to end else -- if trace_bugs then -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar)) - fonts.register_message(currentfont,startchar,"no entry anchors") + fonts.registermessage(currentfont,startchar,"no entry anchors") end break end @@ -8550,7 +8610,7 @@ end function 
handlers.gpos_single(start,kind,lookupname,kerns,sequence) local startchar = start.char - local dx, dy, w, h = set_pair(start,tfmdata.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) + local dx, dy, w, h = setpair(start,tfmdata.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) if trace_kerns then logprocess("%s: shifting single %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),dx,dy,w,h) end @@ -8581,14 +8641,14 @@ function handlers.gpos_pair(start,kind,lookupname,kerns,sequence) local a, b = krn[3], krn[4] if a and #a > 0 then local startchar = start.char - local x, y, w, h = set_pair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) + local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) if trace_kerns then logprocess("%s: shifting first of pair %s and %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) end end if b and #b > 0 then local startchar = start.char - local x, y, w, h = set_pair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) + local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) if trace_kerns then logprocess("%s: shifting second of pair %s and %s by (%s,%s) and correction (%s,%s)",pref(kind,lookupname),gref(startchar),gref(nextchar),x,y,w,h) end @@ -8597,7 +8657,7 @@ function handlers.gpos_pair(start,kind,lookupname,kerns,sequence) report_process("%s: check this out (old kern stuff)",pref(kind,lookupname)) local a, b = krn[3], krn[7] if a and a ~= 0 then - local k = set_kern(snext,factor,rlmode,a) + local k = setkern(snext,factor,rlmode,a) if trace_kerns then logprocess("%s: inserting first kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar)) end @@ -8608,7 +8668,7 @@ function handlers.gpos_pair(start,kind,lookupname,kerns,sequence) end done = true elseif krn ~= 0 then - local k = set_kern(snext,factor,rlmode,krn) + local k = setkern(snext,factor,rlmode,krn) if trace_kerns then logprocess("%s: inserting kern %s between %s and %s",pref(kind,lookupname),k,gref(prev.char),gref(nextchar)) end @@ -8979,7 +9039,7 @@ function chainprocs.gpos_mark2base(start,stop,kind,chainname,currentcontext,cach if al[anchor] then local ma = markanchors[anchor] if ma then - local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma) + local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma) if trace_marks then logprocess("%s, anchor %s, bound %s: anchoring mark %s to basechar %s => (%s,%s)", cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) @@ -9052,7 +9112,7 @@ function chainprocs.gpos_mark2ligature(start,stop,kind,chainname,currentcontext, if ma then ba = ba[index] if ba then - local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma,index) + local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma,index) if trace_marks then logprocess("%s, anchor %s, bound %s: anchoring mark %s to baselig %s at index %s => (%s,%s)", cref(kind,chainname,chainlookupname,lookupname),anchor,a or bound,gref(markchar),gref(basechar),index,dx,dy) @@ -9104,7 +9164,7 @@ function chainprocs.gpos_mark2mark(start,stop,kind,chainname,currentcontext,cach if al[anchor] then local ma = markanchors[anchor] if ma then - local dx, dy, bound = set_mark(start,base,tfmdata.factor,rlmode,ba,ma) + local dx, dy, bound = setmark(start,base,tfmdata.factor,rlmode,ba,ma) if trace_marks then logprocess("%s, anchor %s, bound 
%s: anchoring mark %s to basemark %s => (%s,%s)", cref(kind,chainname,chainlookupname,lookupname),anchor,bound,gref(markchar),gref(basechar),dx,dy) @@ -9170,7 +9230,7 @@ function chainprocs.gpos_cursive(start,stop,kind,chainname,currentcontext,cache, if al[anchor] then local exit = exitanchors[anchor] if exit then - local dx, dy, bound = set_cursive(start,nxt,tfmdata.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) + local dx, dy, bound = setcursive(start,nxt,tfmdata.factor,rlmode,exit,entry,characters[startchar],characters[nextchar]) if trace_cursive then logprocess("%s: moving %s to %s cursive (%s,%s) using anchor %s and bound %s in rlmode %s",pref(kind,lookupname),gref(startchar),gref(nextchar),dx,dy,anchor,bound,rlmode) end @@ -9183,7 +9243,7 @@ function chainprocs.gpos_cursive(start,stop,kind,chainname,currentcontext,cache, end else -- if trace_bugs then -- logwarning("%s: char %s is missing in font",pref(kind,lookupname),gref(startchar)) - fonts.register_message(currentfont,startchar,"no entry anchors") + fonts.registermessage(currentfont,startchar,"no entry anchors") end break end @@ -9209,7 +9269,7 @@ function chainprocs.gpos_single(start,stop,kind,chainname,currentcontext,cache,c if kerns then kerns = kerns[startchar] if kerns then - local dx, dy, w, h = set_pair(start,tfmdata.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) + local dx, dy, w, h = setpair(start,tfmdata.factor,rlmode,sequence.flags[4],kerns,characters[startchar]) if trace_kerns then logprocess("%s: shifting single %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),dx,dy,w,h) end @@ -9247,14 +9307,14 @@ function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,cache,cur local a, b = krn[3], krn[4] if a and #a > 0 then local startchar = start.char - local x, y, w, h = set_pair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) + local x, y, w, h = setpair(start,factor,rlmode,sequence.flags[4],a,characters[startchar]) if trace_kerns then logprocess("%s: shifting first of pair %s and %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) end end if b and #b > 0 then local startchar = start.char - local x, y, w, h = set_pair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) + local x, y, w, h = setpair(snext,factor,rlmode,sequence.flags[4],b,characters[nextchar]) if trace_kerns then logprocess("%s: shifting second of pair %s and %s by (%s,%s) and correction (%s,%s)",cref(kind,chainname,chainlookupname),gref(startchar),gref(nextchar),x,y,w,h) end @@ -9263,7 +9323,7 @@ function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,cache,cur report_process("%s: check this out (old kern stuff)",cref(kind,chainname,chainlookupname)) local a, b = krn[3], krn[7] if a and a ~= 0 then - local k = set_kern(snext,factor,rlmode,a) + local k = setkern(snext,factor,rlmode,a) if trace_kerns then logprocess("%s: inserting first kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar)) end @@ -9274,7 +9334,7 @@ function chainprocs.gpos_pair(start,stop,kind,chainname,currentcontext,cache,cur end done = true elseif krn ~= 0 then - local k = set_kern(snext,factor,rlmode,krn) + local k = setkern(snext,factor,rlmode,krn) if trace_kerns then logprocess("%s: inserting kern %s between %s and %s",cref(kind,chainname,chainlookupname),k,gref(prev.char),gref(nextchar)) end @@ -9678,8 +9738,8 @@ function 
fonts.methods.node.otf.features(head,font,attr) local script, language, s_enabled, a_enabled, dyn local attribute_driven = attr and attr ~= 0 if attribute_driven then - local features = context_setups[context_numbers[attr]] -- could be a direct list - dyn = context_merged[attr] or 0 + local features = contextsetups[contextnumbers[attr]] -- could be a direct list + dyn = contextmerged[attr] or 0 language, script = features.language or "dflt", features.script or "dflt" a_enabled = features -- shared.features -- can be made local to the resolver if dyn == 2 or dyn == -2 then @@ -10505,23 +10565,18 @@ local traverse_node_list = node.traverse local fontdata = fonts.ids local state = attributes.private('state') -local fcs = (fonts.color and fonts.color.set) or function() end -local fcr = (fonts.color and fonts.color.reset) or function() end - -local a_to_script = otf.a_to_script -local a_to_language = otf.a_to_language +local fontscolors = fonts.colors +local fcs = (fontscolors and fontscolors.set) or function() end +local fcr = (fontscolors and fontscolors.reset) or function() end -- in the future we will use language/script attributes instead of the -- font related value, but then we also need dynamic features which is -- somewhat slower; and .. we need a chain of them +local scriptandlanguage = otf.scriptandlanguage + function fonts.initializers.node.otf.analyze(tfmdata,value,attr) - local script, language - if attr and attr > 0 then - script, language = a_to_script[attr], a_to_language[attr] - else - script, language = tfmdata.script, tfmdata.language - end + local script, language = otf.scriptandlanguage(tfmdata,attr) local action = initializers[script] if action then if type(action) == "function" then @@ -10538,12 +10593,7 @@ end function fonts.methods.node.otf.analyze(head,font,attr) local tfmdata = fontdata[font] - local script, language - if attr and attr > 0 then - script, language = a_to_script[attr], a_to_language[attr] - else - script, language = tfmdata.script, tfmdata.language - end + local script, language = otf.scriptandlanguage(tfmdata,attr) local action = methods[script] if action then if type(action) == "function" then @@ -10996,6 +11046,8 @@ local format, concat, gmatch, match, find, lower = string.format, table.concat, local tostring, next = tostring, next local lpegmatch = lpeg.match +local allocate = utilities.storage.allocate + local trace_defining = false trackers .register("fonts.defining", function(v) trace_defining = v end) local directive_embedall = false directives.register("fonts.embedall", function(v) directive_embedall = v end) @@ -11010,33 +11062,38 @@ local report_afm = logs.new("load afm") default loader that only handles We hardly gain anything when we cache the final (pre scaled) @@ -11065,7 +11122,7 @@ and prepares a table that will move along as we proceed.
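The comment below lists the accepted specification shapes (name, name(sub), name*spec and so on). A minimal standalone sketch of how such a string is split into lookup, name, sub, method and detail; the name capture and the set of specifier symbols used here are assumptions, since in the real code they are built up incrementally by addlookup and addspecifier:

local lpeg = require("lpeg") -- built into luatex; required here so the sketch runs standalone
local P, C, S, Cc = lpeg.P, lpeg.C, lpeg.S, lpeg.Cc

local left, right, colon = P("("), P(")"), P(":")
local method   = S("*@")                                  -- assumed specifier symbols
local lookup   = C(P("file") + P("name") + P("spec")) * colon
local name     = C((1 - left - method)^1)                 -- assumed name capture
local sub      = left * C((1 - left - right - method)^1) * right
local detail   = C(method) * C(P(1)^1)
local splitter = (lookup + Cc("")) * name * (sub + Cc("")) * (detail + Cc(""))

print(lpeg.match(splitter,"file:lmroman10-regular(example)*default"))
-- prints the five captures: file, lmroman10-regular, example, *, default

The five captures are exactly what definers.makespecification receives further down.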
-- name name(sub) name(sub)*spec name*spec -- name@spec*oeps -local splitter, specifiers = nil, "" +local splitter, splitspecifiers = nil, "" local P, C, S, Cc = lpeg.P, lpeg.C, lpeg.S, lpeg.Cc @@ -11074,13 +11131,13 @@ local right = P(")") local colon = P(":") local space = P(" ") -define.defaultlookup = "file" +definers.defaultlookup = "file" local prefixpattern = P(false) -function define.add_specifier(symbol) - specifiers = specifiers .. symbol - local method = S(specifiers) +local function addspecifier(symbol) + splitspecifiers = splitspecifiers .. symbol + local method = S(splitspecifiers) local lookup = C(prefixpattern) * colon local sub = left * C(P(1-left-right-method)^1) * right local specification = C(method) * C(P(1)^1) @@ -11088,24 +11145,28 @@ function define.add_specifier(symbol) splitter = P((lookup + Cc("")) * name * (sub + Cc("")) * (specification + Cc(""))) end -function define.add_lookup(str,default) +local function addlookup(str,default) prefixpattern = prefixpattern + P(str) end -define.add_lookup("file") -define.add_lookup("name") -define.add_lookup("spec") +definers.addlookup = addlookup + +addlookup("file") +addlookup("name") +addlookup("spec") -function define.get_specification(str) +local function getspecification(str) return lpegmatch(splitter,str) end -function define.register_split(symbol,action) - define.add_specifier(symbol) - define.specify[symbol] = action +definers.getspecification = getspecification + +function definers.registersplit(symbol,action) + addspecifier(symbol) + variants[symbol] = action end -function define.makespecification(specification, lookup, name, sub, method, detail, size) +function definers.makespecification(specification, lookup, name, sub, method, detail, size) size = size or 655360 if trace_defining then report_define("%s -> lookup: %s, name: %s, sub: %s, method: %s, detail: %s", @@ -11113,7 +11174,7 @@ function define.makespecification(specification, lookup, name, sub, method, deta (sub ~= "" and sub) or "-", (method ~= "" and method) or "-", (detail ~= "" and detail) or "-") end if not lookup or lookup == "" then - lookup = define.defaultlookup + lookup = definers.defaultlookup end local t = { lookup = lookup, -- forced type @@ -11130,10 +11191,10 @@ function define.makespecification(specification, lookup, name, sub, method, deta return t end -function define.analyze(specification, size) +function definers.analyze(specification, size) -- can be optimized with locals - local lookup, name, sub, method, detail = define.get_specification(specification or "") - return define.makespecification(specification, lookup, name, sub, method, detail, size) + local lookup, name, sub, method, detail = getspecification(specification or "") + return definers.makespecification(specification, lookup, name, sub, method, detail, size) end --[[ldx-- @@ -11142,7 +11203,7 @@ end local sortedhashkeys = table.sortedhashkeys -function tfm.hash_features(specification) +function tfm.hashfeatures(specification) local features = specification.features if features then local t = { } @@ -11174,7 +11235,7 @@ function tfm.hash_features(specification) return "unknown" end -fonts.designsizes = { } +fonts.designsizes = allocate() --[[ldx--In principle we can share tfm tables when we are in node for a font, but then
@@ -11184,10 +11245,10 @@ when we get rid of base mode we can optimize even further by sharing, but then we
lose our testcases for
We can resolve the filename using the next function:
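Before the patched function itself, a toy version of the dispatch it performs; the three resolver bodies are placeholders, only the selection by lookup kind and the way the cache hash is composed follow the code below:

-- dummy resolvers: only the dispatch and the hash composition mirror the real code
local resolvers = {
    file = function(specification) end, -- would split off a forced suffix
    name = function(specification) end, -- would consult the names database
    spec = function(specification) end, -- would parse an explicit file specification
}

local function resolve(specification)
    local r = resolvers[specification.lookup]
    if r then
        r(specification)
    end
    local featurehash = specification.detail ~= "" and specification.detail or "unknown"
    specification.hash = string.lower(specification.name .. " @ " .. featurehash)
    if specification.sub and specification.sub ~= "" then
        specification.hash = specification.sub .. " @ " .. specification.hash
    end
    return specification
end

local s = resolve { lookup = "file", name = "LMRoman10-Regular", sub = "", detail = "" }
print(s.hash) -- lmroman10-regular @ unknown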
--ldx]]-- -define.resolvers = define.resolvers or { } -local resolvers = define.resolvers +definers.resolvers = definers.resolvers or { } +local resolvers = definers.resolvers -- todo: reporter @@ -11260,7 +11321,7 @@ function resolvers.spec(specification) end end -function define.resolve(specification) +function definers.resolve(specification) if not specification.resolved or specification.resolved == "" then -- resolved itself not per se in mapping hash local r = resolvers[specification.lookup] if r then @@ -11281,7 +11342,7 @@ function define.resolve(specification) end end -- - specification.hash = lower(specification.name .. ' @ ' .. tfm.hash_features(specification)) + specification.hash = lower(specification.name .. ' @ ' .. tfm.hashfeatures(specification)) if specification.sub and specification.sub ~= "" then specification.hash = specification.sub .. ' @ ' .. specification.hash end @@ -11305,7 +11366,7 @@ specification yet. --ldx]]-- function tfm.read(specification) - local hash = tfm.hash_instance(specification) + local hash = tfm.hashinstance(specification) local tfmtable = tfm.fonts[hash] -- hashes by size ! if not tfmtable then local forced = specification.forced or "" @@ -11353,22 +11414,22 @@ endFor virtual fonts we need a slightly different approach:
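As a hedged usage sketch before the patched function itself: the calling context is hypothetical, and the two return values (the scaled font table and its id) are an assumption about what the function hands back.

-- hypothetical caller: a virtual font builder fetching a base font to wrap;
-- 10*65536 is 10pt expressed in scaled points
local fontdata, id = tfm.readanddefine("lmroman10-regular",10*65536)
if id and id > 0 then -- id 0 signals that loading failed
    -- fontdata.characters etc. can now be referenced from the virtual font
end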
--ldx]]-- -function tfm.read_and_define(name,size) -- no id - local specification = define.analyze(name,size) +function tfm.readanddefine(name,size) -- no id + local specification = definers.analyze(name,size) local method = specification.method - if method and define.specify[method] then - specification = define.specify[method](specification) + if method and variants[method] then + specification = variants[method](specification) end - specification = define.resolve(specification) - local hash = tfm.hash_instance(specification) - local id = define.registered(hash) + specification = definers.resolve(specification) + local hash = tfm.hashinstance(specification) + local id = definers.registered(hash) if not id then local fontdata = tfm.read(specification) if fontdata then fontdata.hash = hash id = font.define(fontdata) - define.register(fontdata,id) - tfm.cleanup_table(fontdata) + definers.register(fontdata,id) + tfm.cleanuptable(fontdata) else id = 0 -- signal end @@ -11388,6 +11449,9 @@ local function check_tfm(specification,fullname) if foundname == "" then foundname = findbinfile(fullname, 'ofm') or "" -- bonus for usage outside context end + if foundname == "" then + foundname = fonts.names.getfilename(fullname,"tfm") + end if foundname ~= "" then specification.filename, specification.format = foundname, "ofm" return tfm.read_from_tfm(specification) @@ -11396,13 +11460,15 @@ end local function check_afm(specification,fullname) local foundname = findbinfile(fullname, 'afm') or "" -- just to be sure - if foundname == "" and tfm.auto_afm then + if foundname == "" then + foundname = fonts.names.getfilename(fullname,"afm") + end + if foundname == "" and tfm.autoprefixedafm then local encoding, shortname = match(fullname,"^(.-)%-(.*)$") -- context: encoding-name.* if encoding and shortname and fonts.enc.known[encoding] then shortname = findbinfile(shortname,'afm') or "" -- just to be sure if shortname ~= "" then foundname = shortname - -- tfm.set_normal_feature(specification,'encoding',encoding) -- will go away if trace_loading then report_afm("stripping encoding prefix from filename %s",afmname) end @@ -11439,7 +11505,7 @@ function readers.afm(specification,method) tfmtable = check_afm(specification,specification.name .. "." .. 
forced) end if not tfmtable then - method = method or define.method or "afm or tfm" + method = method or definers.method or "afm or tfm" if method == "tfm" then tfmtable = check_tfm(specification,specification.name) elseif method == "afm" then @@ -11464,21 +11530,26 @@ local function check_otf(forced,specification,suffix,what) name = file.addsuffix(name,suffix,true) end local fullname, tfmtable = findbinfile(name,suffix) or "", nil -- one shot + -- if false then -- can be enabled again when needed + -- if fullname == "" then + -- local fb = fonts.names.old_to_new[name] + -- if fb then + -- fullname = findbinfile(fb,suffix) or "" + -- end + -- end + -- if fullname == "" then + -- local fb = fonts.names.new_to_old[name] + -- if fb then + -- fullname = findbinfile(fb,suffix) or "" + -- end + -- end + -- end if fullname == "" then - local fb = fonts.names.old_to_new[name] - if fb then - fullname = findbinfile(fb,suffix) or "" - end - end - if fullname == "" then - local fb = fonts.names.new_to_old[name] - if fb then - fullname = findbinfile(fb,suffix) or "" - end + fullname = fonts.names.getfilename(name,suffix) end if fullname ~= "" then specification.filename, specification.format = fullname, what -- hm, so we do set the filename, then - tfmtable = tfm.read_from_open_type(specification) -- we need to do it for all matches / todo + tfmtable = tfm.read_from_otf(specification) -- we need to do it for all matches / todo end return tfmtable end @@ -11504,7 +11575,7 @@ function readers.dfont(specification) return readers.opentype(specification,"ttf a helper function. --ldx]]-- -function define.check(features,defaults) -- nb adapts features ! +function definers.check(features,defaults) -- nb adapts features ! local done = false if features and next(features) then for k,v in next, defaults do @@ -11519,7 +11590,7 @@ function define.check(features,defaults) -- nb adapts features ! end --[[ldx-- -So far the specifyers. Now comes the real definer. Here we cache +
So far the specifiers. Now comes the real definer. Here we cache based on id's. Here we also intercept the virtual font handler. Since it evolved stepwise I may rewrite this bit (combine code).
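The id based caching that follows boils down to a small hash-to-id map; a minimal sketch with the surrounding bookkeeping left out, where internalized stands in for tfm.internalized and identifiers for fonts.ids:

-- once a font with a given hash has been passed on, it keeps its id
local internalized = { } -- hash -> id
local identifiers  = { } -- id -> fontdata

local function register(fontdata,id)
    if fontdata and id then
        local hash = fontdata.hash
        if not internalized[hash] then
            internalized[hash] = id
            identifiers[id] = fontdata
        end
    end
end

local function registered(hash)
    local id = internalized[hash]
    return id, id and identifiers[id]
end

register({ hash = "lmroman10-regular @ unknown" }, 42)
print(registered("lmroman10-regular @ unknown")) -- 42 and the stored table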
@@ -11530,9 +11601,13 @@ not gain much. By the way, passing id's back to in the callback was introduced later in the development. --ldx]]-- -define.last = nil +local lastdefined = nil -- we don't want this one to end up in s-tra-02 -function define.register(fontdata,id) +function definers.current() -- or maybe current + return lastdefined +end + +function definers.register(fontdata,id) if fontdata and id then local hash = fontdata.hash if not tfm.internalized[hash] then @@ -11548,7 +11623,7 @@ function define.register(fontdata,id) end end -function define.registered(hash) +function definers.registered(hash) local id = tfm.internalized[hash] return id, id and fonts.ids[id] end @@ -11563,7 +11638,7 @@ function tfm.make(specification) -- however, when virtual tricks are used as feature (makes more -- sense) we scale the commands in fonts.tfm.scale (and set the -- factor there) - local fvm = define.methods[specification.features.vtf.preset] + local fvm = definers.methods.variants[specification.features.vtf.preset] if fvm then return fvm(specification) else @@ -11571,28 +11646,28 @@ function tfm.make(specification) end end -function define.read(specification,size,id) -- id can be optional, name can already be table +function definers.read(specification,size,id) -- id can be optional, name can already be table statistics.starttiming(fonts) if type(specification) == "string" then - specification = define.analyze(specification,size) + specification = definers.analyze(specification,size) end local method = specification.method - if method and define.specify[method] then - specification = define.specify[method](specification) + if method and variants[method] then + specification = variants[method](specification) end - specification = define.resolve(specification) - local hash = tfm.hash_instance(specification) + specification = definers.resolve(specification) + local hash = tfm.hashinstance(specification) if cache_them then local fontdata = containers.read(fonts.cache,hash) -- for tracing purposes end - local fontdata = define.registered(hash) -- id + local fontdata = definers.registered(hash) -- id if not fontdata then if specification.features.vtf and specification.features.vtf.preset then fontdata = tfm.make(specification) else fontdata = tfm.read(specification) if fontdata then - tfm.check_virtual_id(fontdata) + tfm.checkvirtualid(fontdata) end end if cache_them then @@ -11602,11 +11677,11 @@ function define.read(specification,size,id) -- id can be optional, name can alre fontdata.hash = hash fontdata.cache = "no" if id then - define.register(fontdata,id) + definers.register(fontdata,id) end end end - define.last = fontdata or id -- todo ! ! ! ! ! + lastdefined = fontdata or id -- todo ! ! ! ! ! if not fontdata then report_define( "unknown font %s, loading aborted",specification.name) elseif trace_defining and type(fontdata) == "table" then @@ -11626,7 +11701,7 @@ end function vf.find(name) name = file.removesuffix(file.basename(name)) - if tfm.resolve_vf then + if tfm.resolvevirtualtoo then local format = fonts.logger.format(name) if format == 'tfm' or format == 'ofm' then if trace_defining then @@ -11651,7 +11726,7 @@ endWe overload both the