author     Marius <mariausol@gmail.com>    2012-09-04 19:20:28 +0300
committer  Marius <mariausol@gmail.com>    2012-09-04 19:20:28 +0300
commit     edd2b22c9d92fd77d47b5fb6517a230470ec7038 (patch)
tree       c4d5386d55a53d423cfd6cb4157b6bda2813d8c7
parent     e0f42793d14b7571ae6221a15e691d28b3f76d8f (diff)
download   context-edd2b22c9d92fd77d47b5fb6517a230470ec7038.tar.gz
beta 2012.09.04 18:08
-rw-r--r--  scripts/context/lua/mtxrun.lua | 397
-rw-r--r--  scripts/context/stubs/mswin/mtxrun.lua | 397
-rw-r--r--  scripts/context/stubs/unix/mtxrun | 397
-rw-r--r--  tex/context/base/back-exp.lua | 8
-rw-r--r--  tex/context/base/char-cjk.lua | 5
-rw-r--r--  tex/context/base/char-utf.lua | 12
-rw-r--r--  tex/context/base/colo-ini.lua | 18
-rw-r--r--  tex/context/base/cont-new.mkii | 2
-rw-r--r--  tex/context/base/cont-new.mkiv | 2
-rw-r--r--  tex/context/base/context.mkii | 2
-rw-r--r--  tex/context/base/context.mkiv | 2
-rw-r--r--  tex/context/base/data-pre.lua | 2
-rw-r--r--  tex/context/base/font-afk.lua | 11
-rw-r--r--  tex/context/base/font-gds.lua | 4
-rw-r--r--  tex/context/base/font-syn.lua | 7
-rw-r--r--  tex/context/base/l-dir.lua | 18
-rw-r--r--  tex/context/base/l-table.lua | 28
-rw-r--r--  tex/context/base/l-unicode.lua | 320
-rw-r--r--  tex/context/base/lang-url.lua | 4
-rw-r--r--  tex/context/base/lpdf-col.lua | 6
-rw-r--r--  tex/context/base/lxml-tab.lua | 18
-rw-r--r--  tex/context/base/node-res.lua | 4
-rw-r--r--  tex/context/base/page-ins.lua | 28
-rw-r--r--  tex/context/base/regi-ini.lua | 59
-rw-r--r--  tex/context/base/s-inf-03.mkiv | 121
-rw-r--r--  tex/context/base/status-files.pdf | bin 24380 -> 24596 bytes
-rw-r--r--  tex/context/base/status-lua.pdf | bin 191341 -> 6242 bytes
-rw-r--r--  tex/context/base/tabl-ntb.mkiv | 14
-rw-r--r--  tex/context/base/trac-lmx.lua | 2
-rw-r--r--  tex/context/base/util-jsn.lua | 6
-rw-r--r--  tex/context/base/util-sql.lua | 85
-rw-r--r--  tex/context/base/util-str.lua | 7
-rw-r--r--  tex/context/base/util-tab.lua | 53
-rw-r--r--  tex/context/base/util-tpl.lua | 2
-rw-r--r--  tex/context/base/x-mathml.lua | 13
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-merged.lua | 30
36 files changed, 1377 insertions(+), 707 deletions(-)
diff --git a/scripts/context/lua/mtxrun.lua b/scripts/context/lua/mtxrun.lua
index b2d3fec54..40fe71c0e 100644
--- a/scripts/context/lua/mtxrun.lua
+++ b/scripts/context/lua/mtxrun.lua
@@ -8,6 +8,11 @@ if not modules then modules = { } end modules ['mtxrun'] = {
license = "see context related readme files"
}
+-- if not lpeg then require("lpeg") end
+-- if not md5 then require("md5") end
+-- if not lfs then require("lfs") end
+-- if not texconfig then texconfig = { } end
+
-- one can make a stub:
--
-- #!/bin/sh
@@ -177,7 +182,8 @@ if not modules then modules = { } end modules ['l-table'] = {
license = "see context related readme files"
}
-local type, next, tostring, tonumber, ipairs, table, string = type, next, tostring, tonumber, ipairs, table, string
+local type, next, tostring, tonumber, ipairs = type, next, tostring, tonumber, ipairs
+local table, string = table, string
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
@@ -188,6 +194,8 @@ local getinfo = debug.getinfo
-- impact on ConTeXt was not that large; the remaining ipairs already
-- have been replaced. In a similar fashion we also hardly used pairs.
--
+-- Hm, actually ipairs was retained, but we no longer use it anyway.
+--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -571,12 +579,25 @@ local function do_serialize(root,name,depth,level,indexed)
end
-- we could check for k (index) being number (cardinal)
if root and next(root) then
- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
+ -- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
+ -- if compact then
+ -- -- NOT: for k=1,#root do (we need to quit at nil)
+ -- for k,v in ipairs(root) do -- can we use next?
+ -- if not first then first = k end
+ -- last = last + 1
+ -- end
+ -- end
+ local first, last = nil, 0
if compact then
- -- NOT: for k=1,#root do (we need to quit at nil)
- for k,v in ipairs(root) do -- can we use next?
- if not first then first = k end
- last = last + 1
+ last = #root
+ for k=1,last do
+ if not root[k] then
+ last = k - 1
+ break
+ end
+ end
+ if last > 0 then
+ first = 1
end
end
local sk = sortedkeys(root)
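A standalone sketch of the new compact-range scan above (illustrative table, not part of the patch): #root is taken as an upper bound and trimmed back at the first hole, so serialization still stops where the old ipairs loop would have.

    local root = { "a", "b", nil, "d" }   -- #root may report 4 here
    local first, last = nil, #root
    for k = 1, last do
        if not root[k] then
            last = k - 1                  -- quit at the first nil, as in do_serialize
            break
        end
    end
    if last > 0 then first = 1 end
    print(first, last)                    -- 1   2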
@@ -3662,6 +3683,24 @@ local isdir = lfs.isdir
local isfile = lfs.isfile
local currentdir = lfs.currentdir
+-- in case we load outside luatex
+
+if not isdir then
+ function isdir(name)
+ local a = attributes(name)
+ return a and a.mode == "directory"
+ end
+ lfs.isdir = isdir
+end
+
+if not isfile then
+ function isfile(name)
+ local a = attributes(name)
+ return a and a.mode == "file"
+ end
+ lfs.isfile = isfile
+end
+
-- handy
function dir.current()
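The fallbacks above simply route through lfs.attributes; a minimal check outside LuaTeX (stock LuaFileSystem lacks isdir/isfile, which is exactly why the fallback is needed) could look like this:

    local lfs = require("lfs")            -- plain Lua; LuaTeX has lfs built in
    local a = lfs.attributes(".")
    print(a and a.mode == "directory")    -- true for the current directory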
@@ -4083,6 +4122,18 @@ if not modules then modules = { } end modules ['l-unicode'] = {
license = "see context related readme files"
}
+-- this module will be reorganized
+
+-- todo: utf.sub replacement (used in syst-aux)
+
+local concat = table.concat
+local type = type
+local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
+local utftype = patterns.utftype
+local char, byte, find, bytepairs, utfvalues, format = string.char, string.byte, string.find, string.bytepairs, string.utfvalues, string.format
+local utfsplitlines = string.utfsplitlines
+
if not unicode then
unicode = { }
@@ -4140,12 +4191,156 @@ if not utf.char then
end
-local concat = table.concat
-local utfchar, utfbyte, utfgsub = utf.char, utf.byte, utf.gsub
-local char, byte, find, bytepairs, utfvalues, format = string.char, string.byte, string.find, string.bytepairs, string.utfvalues, string.format
-local type = type
+if not utf.byte then
-local utfsplitlines = string.utfsplitlines
+ local utf8byte = patterns.utf8byte
+
+ function utf.byte(c)
+ return lpegmatch(utf8byte,c)
+ end
+
+end
+
+local utfchar, utfbyte = utf.char, utf.byte
+
+-- As we want to get rid of the (unmaintained) utf library we implement our own
+-- variants (in due time an independent module):
+
+function unicode.filetype(data)
+ return data and lpegmatch(utftype,data) or "unknown"
+end
+
+local toentities = Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
+
+
+
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+-- actually one of them is already utf ... sort of useless this one
+
+-- function utf.char(n)
+-- if n < 0x80 then
+-- return char(n)
+-- elseif n < 0x800 then
+-- return char(
+-- 0xC0 + floor(n/0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- elseif n < 0x10000 then
+-- return char(
+-- 0xE0 + floor(n/0x1000),
+-- 0x80 + (floor(n/0x40) % 0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- elseif n < 0x40000 then
+-- return char(
+-- 0xF0 + floor(n/0x40000),
+-- 0x80 + floor(n/0x1000),
+-- 0x80 + (floor(n/0x40) % 0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- else
+-- -- return char(
+-- -- 0xF1 + floor(n/0x1000000),
+-- -- 0x80 + floor(n/0x40000),
+-- -- 0x80 + floor(n/0x1000),
+-- -- 0x80 + (floor(n/0x40) % 0x40),
+-- -- 0x80 + (n % 0x40)
+-- -- )
+-- return "?"
+-- end
+-- end
+--
+-- merge into:
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
+
+local validatedutf = Cs (
+ (
+ patterns.utf8one
+ + patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ + P(1) / "�"
+ )^0
+)
+
+patterns.validatedutf = validatedutf
+
+function string.validutf(str)
+ return lpegmatch(validatedutf,str)
+end
+
+
+utf.length = string.utflength
+utf.split = string.utfsplit
+utf.splitines = string.utfsplitlines
+utf.valid = string.validutf
+
+if not utf.len then
+ utf.len = utf.length
+end
+
+-- a replacement for simple gsubs:
+
+local utf8char = patterns.utf8char
+
+function utf.remapper(mapping)
+ local pattern = Cs((utf8char/mapping)^0)
+ return function(str)
+ if not str or str == "" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+ end, pattern
+end
+
+-- local remap = utf.remapper { a = 'd', b = "c", c = "b", d = "a" }
+-- print(remap("abcd 1234 abcd"))
-- 0 EF BB BF UTF-8
-- 1 FF FE UTF-16-little-endian
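A short usage sketch of the relocated helpers (expected output in comments, assuming the patched module is loaded):

    print(utf.toentities("π ≈ 3.14"))     -- "&#3C0; &#2248; 3.14"
    print(string.validutf("ok\255oops"))  -- invalid byte replaced by U+FFFD: "ok�oops"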
@@ -4338,11 +4533,22 @@ local function big(c)
end
end
+-- function unicode.utf8_to_utf16(str,littleendian)
+-- if littleendian then
+-- return char(255,254) .. utfgsub(str,".",little)
+-- else
+-- return char(254,255) .. utfgsub(str,".",big)
+-- end
+-- end
+
+local _, l_remap = utf.remapper(little)
+local _, b_remap = utf.remapper(big)
+
function unicode.utf8_to_utf16(str,littleendian)
if littleendian then
- return char(255,254) .. utfgsub(str,".",little)
+ return char(255,254) .. lpegmatch(l_remap,str)
else
- return char(254,255) .. utfgsub(str,".",big)
+ return char(254,255) .. lpegmatch(b_remap,str)
end
end
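Sketch of the remapper-based conversion above (assumes the little/big helpers keep their old byte order; string shown with escapes):

    local le = unicode.utf8_to_utf16("Ab", true)
    -- le == "\255\254A\0b\0": FF FE BOM followed by little-endian code units
    print(#le)                            -- 6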
@@ -4364,96 +4570,6 @@ function unicode.xstring(s)
end
-local lpegmatch = lpeg.match
-local patterns = lpeg.patterns
-local utftype = patterns.utftype
-
-function unicode.filetype(data)
- return data and lpegmatch(utftype,data) or "unknown"
-end
-
-local toentities = lpeg.Cs (
- (
- patterns.utf8one
- + (
- patterns.utf8two
- + patterns.utf8three
- + patterns.utf8four
- ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
- )^0
-)
-
-patterns.toentities = toentities
-
-function utf.toentities(str)
- return lpegmatch(toentities,str)
-end
-
-
-
-
-local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
-
-local one = P(1)
-local two = C(1) * C(1)
-local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
-
--- actually one of them is already utf ... sort of useless this one
-
-local pattern = P("\254\255") * Cs( (
- four / function(a,b,c,d)
- local ab = 0xFF * byte(a) + byte(b)
- local cd = 0xFF * byte(c) + byte(d)
- return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
- end
- + two / function(a,b)
- return utfchar(byte(a)*256 + byte(b))
- end
- + one
- )^1 )
- + P("\255\254") * Cs( (
- four / function(b,a,d,c)
- local ab = 0xFF * byte(a) + byte(b)
- local cd = 0xFF * byte(c) + byte(d)
- return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
- end
- + two / function(b,a)
- return utfchar(byte(a)*256 + byte(b))
- end
- + one
- )^1 )
-
-function string.toutf(s)
- return lpegmatch(pattern,s) or s -- todo: utf32
-end
-
-local validatedutf = Cs (
- (
- patterns.utf8one
- + patterns.utf8two
- + patterns.utf8three
- + patterns.utf8four
- + P(1) / "�"
- )^0
-)
-
-patterns.validatedutf = validatedutf
-
-function string.validutf(str)
- return lpegmatch(validatedutf,str)
-end
-
-
-utf.length = string.utflength
-utf.split = string.utfsplit
-utf.splitines = string.utfsplitlines
-utf.valid = string.validutf
-
-if not utf.len then
- utf.len = utf.length
-end
-
-
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -4513,8 +4629,8 @@ local tables = utilities.tables
local format, gmatch, rep, gsub = string.format, string.gmatch, string.rep, string.gsub
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
-local type, next, rawset, tonumber = type, next, rawset, tonumber
-local lpegmatch = lpeg.match
+local type, next, rawset, tonumber, loadstring = type, next, rawset, tonumber, loadstring
+local lpegmatch, P, Cs = lpeg.match, lpeg.P, lpeg.Cs
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -4668,19 +4784,27 @@ function tables.encapsulate(core,capsule,protect)
end
end
-local function serialize(t,r) -- no mixes
+local escaped = Cs ( (
+ P('\\' ) +
+ P('"' )/'\\"' +
+ P('\n')/'\\n' +
+ P('\r')/'\\r' +
+ 1
+)^0 )
+
+local function serialize(t,r,outer) -- no mixes
r[#r+1] = "{"
local n = #t
if n > 0 then
for i=1,n do
local v = t[i]
local tv = type(v)
- if tv == "table" then
- serialize(v,r)
- elseif tv == "string" then
+ if tv == "string" then
r[#r+1] = format("%q,",v)
elseif tv == "number" then
r[#r+1] = format("%s,",v)
+ elseif tv == "table" then
+ serialize(v,r)
elseif tv == "boolean" then
r[#r+1] = format("%s,",tostring(v))
end
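A quick standalone check of the escaped pattern defined above (plain Lua plus lpeg; behavior sketched from the definition, not from the patch's own tests):

    local lpeg = require("lpeg")
    local P, Cs = lpeg.P, lpeg.Cs
    local escaped = Cs((P('\\') + P('"')/'\\"' + P('\n')/'\\n' + P('\r')/'\\r' + 1)^0)
    print(lpeg.match(escaped, 'say "hi"'))  -- say \"hi\"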
@@ -4688,27 +4812,46 @@ local function serialize(t,r) -- no mixes
else
for k, v in next, t do
local tv = type(v)
- if tv == "table" then
- r[#r+1] = format("[%q]=",k)
- serialize(v,r)
- elseif tv == "string" then
+ if tv == "string" then
r[#r+1] = format("[%q]=%q,",k,v)
elseif tv == "number" then
r[#r+1] = format("[%q]=%s,",k,v)
+ elseif tv == "table" then
+ r[#r+1] = format("[%q]=",k)
+ serialize(v,r)
elseif tv == "boolean" then
r[#r+1] = format("[%q]=%s,",k,tostring(v))
end
end
end
- r[#r+1] = "}"
+ if outer then
+ r[#r+1] = "}"
+ else
+ r[#r+1] = "},"
+ end
return r
end
function table.fastserialize(t,prefix)
- return concat(serialize(t,{ prefix }))
+ return concat(serialize(t,{ prefix },true))
end
--- inspect(table.fastserialize { a = 1, b = { 4, { 5, 6 } }, c = { d = 7 } })
+-- inspect(table.fastserialize { a = 1, b = { 4, { 5, 6 } }, c = { d = 7, e = 'f"g\nh' } })
+
+function table.load(filename)
+ if filename then
+ local t = io.loaddata(filename)
+ if t and t ~= "" then
+ t = loadstring(t)
+ if type(t) == "function" then
+ t = t()
+ if type(t) == "table" then
+ return t
+ end
+ end
+ end
+ end
+end
end -- of closure
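Round-trip sketch for the new fastserialize/table.load pair (demo.lua is just an illustrative name; io.savedata and inspect are the usual ConTeXt helpers):

    local s = table.fastserialize({ a = 1, b = { 2, 3 } }, "return ")
    io.savedata("demo.lua", s)            -- counterpart of the io.loaddata call in table.load
    inspect(table.load("demo.lua"))       -- prints the reconstructed table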
@@ -7502,7 +7645,7 @@ local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar, utfgsub = utf.char, utf.gsub
+local utfchar = utf.char
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -7752,13 +7895,7 @@ local privates_n = {
-- keeps track of defined ones
}
-local function escaped(s)
- if s == "" then
- return ""
- else
- return (utfgsub(s,".",privates_u))
- end
-end
+local escaped = utf.remapper(privates_u)
local function unescaped(s)
local p = privates_n[s]
@@ -7773,13 +7910,7 @@ local function unescaped(s)
return p
end
-local function unprivatized(s,resolve)
- if s == "" then
- return ""
- else
- return (utfgsub(s,".",privates_p))
- end
-end
+local unprivatized = utf.remapper(privates_p)
xml.privatetoken = unescaped
xml.unprivatized = unprivatized
@@ -14612,7 +14743,7 @@ end
resolvers.resolve = resolve
resolvers.unresolve = unresolve
-if os.uname then
+if type(os.uname) == "function" then
for k, v in next, os.uname() do
if not prefixes[k] then
@@ -16369,7 +16500,7 @@ function templates.resolve(t,mapping)
end
-- inspect(utilities.templates.replace("test %one% test", { one = "%two%", two = "two" }))
--- inspect(utilities.templates.resolve({ one = "%two%", two = "two" }))
+-- inspect(utilities.templates.resolve({ one = "%two%", two = "two", three = "%three%" }))
diff --git a/scripts/context/stubs/mswin/mtxrun.lua b/scripts/context/stubs/mswin/mtxrun.lua
index b2d3fec54..40fe71c0e 100644
--- a/scripts/context/stubs/mswin/mtxrun.lua
+++ b/scripts/context/stubs/mswin/mtxrun.lua
@@ -8,6 +8,11 @@ if not modules then modules = { } end modules ['mtxrun'] = {
license = "see context related readme files"
}
+-- if not lpeg then require("lpeg") end
+-- if not md5 then require("md5") end
+-- if not lfs then require("lfs") end
+-- if not texconfig then texconfig = { } end
+
-- one can make a stub:
--
-- #!/bin/sh
@@ -177,7 +182,8 @@ if not modules then modules = { } end modules ['l-table'] = {
license = "see context related readme files"
}
-local type, next, tostring, tonumber, ipairs, table, string = type, next, tostring, tonumber, ipairs, table, string
+local type, next, tostring, tonumber, ipairs = type, next, tostring, tonumber, ipairs
+local table, string = table, string
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
@@ -188,6 +194,8 @@ local getinfo = debug.getinfo
-- impact on ConTeXt was not that large; the remaining ipairs already
-- have been replaced. In a similar fashion we also hardly used pairs.
--
+-- Hm, actually ipairs was retained, but we no longer use it anyway.
+--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -571,12 +579,25 @@ local function do_serialize(root,name,depth,level,indexed)
end
-- we could check for k (index) being number (cardinal)
if root and next(root) then
- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
+ -- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
+ -- if compact then
+ -- -- NOT: for k=1,#root do (we need to quit at nil)
+ -- for k,v in ipairs(root) do -- can we use next?
+ -- if not first then first = k end
+ -- last = last + 1
+ -- end
+ -- end
+ local first, last = nil, 0
if compact then
- -- NOT: for k=1,#root do (we need to quit at nil)
- for k,v in ipairs(root) do -- can we use next?
- if not first then first = k end
- last = last + 1
+ last = #root
+ for k=1,last do
+ if not root[k] then
+ last = k - 1
+ break
+ end
+ end
+ if last > 0 then
+ first = 1
end
end
local sk = sortedkeys(root)
@@ -3662,6 +3683,24 @@ local isdir = lfs.isdir
local isfile = lfs.isfile
local currentdir = lfs.currentdir
+-- in case we load outside luatex
+
+if not isdir then
+ function isdir(name)
+ local a = attributes(name)
+ return a and a.mode == "directory"
+ end
+ lfs.isdir = isdir
+end
+
+if not isfile then
+ function isfile(name)
+ local a = attributes(name)
+ return a and a.mode == "file"
+ end
+ lfs.isfile = isfile
+end
+
-- handy
function dir.current()
@@ -4083,6 +4122,18 @@ if not modules then modules = { } end modules ['l-unicode'] = {
license = "see context related readme files"
}
+-- this module will be reorganized
+
+-- todo: utf.sub replacement (used in syst-aux)
+
+local concat = table.concat
+local type = type
+local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
+local utftype = patterns.utftype
+local char, byte, find, bytepairs, utfvalues, format = string.char, string.byte, string.find, string.bytepairs, string.utfvalues, string.format
+local utfsplitlines = string.utfsplitlines
+
if not unicode then
unicode = { }
@@ -4140,12 +4191,156 @@ if not utf.char then
end
-local concat = table.concat
-local utfchar, utfbyte, utfgsub = utf.char, utf.byte, utf.gsub
-local char, byte, find, bytepairs, utfvalues, format = string.char, string.byte, string.find, string.bytepairs, string.utfvalues, string.format
-local type = type
+if not utf.byte then
-local utfsplitlines = string.utfsplitlines
+ local utf8byte = patterns.utf8byte
+
+ function utf.byte(c)
+ return lpegmatch(utf8byte,c)
+ end
+
+end
+
+local utfchar, utfbyte = utf.char, utf.byte
+
+-- As we want to get rid of the (unmaintained) utf library we implement our own
+-- variants (in due time an independent module):
+
+function unicode.filetype(data)
+ return data and lpegmatch(utftype,data) or "unknown"
+end
+
+local toentities = Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
+
+
+
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+-- actually one of them is already utf ... sort of useless this one
+
+-- function utf.char(n)
+-- if n < 0x80 then
+-- return char(n)
+-- elseif n < 0x800 then
+-- return char(
+-- 0xC0 + floor(n/0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- elseif n < 0x10000 then
+-- return char(
+-- 0xE0 + floor(n/0x1000),
+-- 0x80 + (floor(n/0x40) % 0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- elseif n < 0x40000 then
+-- return char(
+-- 0xF0 + floor(n/0x40000),
+-- 0x80 + floor(n/0x1000),
+-- 0x80 + (floor(n/0x40) % 0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- else
+-- -- return char(
+-- -- 0xF1 + floor(n/0x1000000),
+-- -- 0x80 + floor(n/0x40000),
+-- -- 0x80 + floor(n/0x1000),
+-- -- 0x80 + (floor(n/0x40) % 0x40),
+-- -- 0x80 + (n % 0x40)
+-- -- )
+-- return "?"
+-- end
+-- end
+--
+-- merge into:
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
+
+local validatedutf = Cs (
+ (
+ patterns.utf8one
+ + patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ + P(1) / "�"
+ )^0
+)
+
+patterns.validatedutf = validatedutf
+
+function string.validutf(str)
+ return lpegmatch(validatedutf,str)
+end
+
+
+utf.length = string.utflength
+utf.split = string.utfsplit
+utf.splitines = string.utfsplitlines
+utf.valid = string.validutf
+
+if not utf.len then
+ utf.len = utf.length
+end
+
+-- a replacement for simple gsubs:
+
+local utf8char = patterns.utf8char
+
+function utf.remapper(mapping)
+ local pattern = Cs((utf8char/mapping)^0)
+ return function(str)
+ if not str or str == "" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+ end, pattern
+end
+
+-- local remap = utf.remapper { a = 'd', b = "c", c = "b", d = "a" }
+-- print(remap("abcd 1234 abcd"))
-- 0 EF BB BF UTF-8
-- 1 FF FE UTF-16-little-endian
@@ -4338,11 +4533,22 @@ local function big(c)
end
end
+-- function unicode.utf8_to_utf16(str,littleendian)
+-- if littleendian then
+-- return char(255,254) .. utfgsub(str,".",little)
+-- else
+-- return char(254,255) .. utfgsub(str,".",big)
+-- end
+-- end
+
+local _, l_remap = utf.remapper(little)
+local _, b_remap = utf.remapper(big)
+
function unicode.utf8_to_utf16(str,littleendian)
if littleendian then
- return char(255,254) .. utfgsub(str,".",little)
+ return char(255,254) .. lpegmatch(l_remap,str)
else
- return char(254,255) .. utfgsub(str,".",big)
+ return char(254,255) .. lpegmatch(b_remap,str)
end
end
@@ -4364,96 +4570,6 @@ function unicode.xstring(s)
end
-local lpegmatch = lpeg.match
-local patterns = lpeg.patterns
-local utftype = patterns.utftype
-
-function unicode.filetype(data)
- return data and lpegmatch(utftype,data) or "unknown"
-end
-
-local toentities = lpeg.Cs (
- (
- patterns.utf8one
- + (
- patterns.utf8two
- + patterns.utf8three
- + patterns.utf8four
- ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
- )^0
-)
-
-patterns.toentities = toentities
-
-function utf.toentities(str)
- return lpegmatch(toentities,str)
-end
-
-
-
-
-local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
-
-local one = P(1)
-local two = C(1) * C(1)
-local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
-
--- actually one of them is already utf ... sort of useless this one
-
-local pattern = P("\254\255") * Cs( (
- four / function(a,b,c,d)
- local ab = 0xFF * byte(a) + byte(b)
- local cd = 0xFF * byte(c) + byte(d)
- return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
- end
- + two / function(a,b)
- return utfchar(byte(a)*256 + byte(b))
- end
- + one
- )^1 )
- + P("\255\254") * Cs( (
- four / function(b,a,d,c)
- local ab = 0xFF * byte(a) + byte(b)
- local cd = 0xFF * byte(c) + byte(d)
- return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
- end
- + two / function(b,a)
- return utfchar(byte(a)*256 + byte(b))
- end
- + one
- )^1 )
-
-function string.toutf(s)
- return lpegmatch(pattern,s) or s -- todo: utf32
-end
-
-local validatedutf = Cs (
- (
- patterns.utf8one
- + patterns.utf8two
- + patterns.utf8three
- + patterns.utf8four
- + P(1) / "�"
- )^0
-)
-
-patterns.validatedutf = validatedutf
-
-function string.validutf(str)
- return lpegmatch(validatedutf,str)
-end
-
-
-utf.length = string.utflength
-utf.split = string.utfsplit
-utf.splitines = string.utfsplitlines
-utf.valid = string.validutf
-
-if not utf.len then
- utf.len = utf.length
-end
-
-
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -4513,8 +4629,8 @@ local tables = utilities.tables
local format, gmatch, rep, gsub = string.format, string.gmatch, string.rep, string.gsub
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
-local type, next, rawset, tonumber = type, next, rawset, tonumber
-local lpegmatch = lpeg.match
+local type, next, rawset, tonumber, loadstring = type, next, rawset, tonumber, loadstring
+local lpegmatch, P, Cs = lpeg.match, lpeg.P, lpeg.Cs
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -4668,19 +4784,27 @@ function tables.encapsulate(core,capsule,protect)
end
end
-local function serialize(t,r) -- no mixes
+local escaped = Cs ( (
+ P('\\' ) +
+ P('"' )/'\\"' +
+ P('\n')/'\\n' +
+ P('\r')/'\\r' +
+ 1
+)^0 )
+
+local function serialize(t,r,outer) -- no mixes
r[#r+1] = "{"
local n = #t
if n > 0 then
for i=1,n do
local v = t[i]
local tv = type(v)
- if tv == "table" then
- serialize(v,r)
- elseif tv == "string" then
+ if tv == "string" then
r[#r+1] = format("%q,",v)
elseif tv == "number" then
r[#r+1] = format("%s,",v)
+ elseif tv == "table" then
+ serialize(v,r)
elseif tv == "boolean" then
r[#r+1] = format("%s,",tostring(v))
end
@@ -4688,27 +4812,46 @@ local function serialize(t,r) -- no mixes
else
for k, v in next, t do
local tv = type(v)
- if tv == "table" then
- r[#r+1] = format("[%q]=",k)
- serialize(v,r)
- elseif tv == "string" then
+ if tv == "string" then
r[#r+1] = format("[%q]=%q,",k,v)
elseif tv == "number" then
r[#r+1] = format("[%q]=%s,",k,v)
+ elseif tv == "table" then
+ r[#r+1] = format("[%q]=",k)
+ serialize(v,r)
elseif tv == "boolean" then
r[#r+1] = format("[%q]=%s,",k,tostring(v))
end
end
end
- r[#r+1] = "}"
+ if outer then
+ r[#r+1] = "}"
+ else
+ r[#r+1] = "},"
+ end
return r
end
function table.fastserialize(t,prefix)
- return concat(serialize(t,{ prefix }))
+ return concat(serialize(t,{ prefix },true))
end
--- inspect(table.fastserialize { a = 1, b = { 4, { 5, 6 } }, c = { d = 7 } })
+-- inspect(table.fastserialize { a = 1, b = { 4, { 5, 6 } }, c = { d = 7, e = 'f"g\nh' } })
+
+function table.load(filename)
+ if filename then
+ local t = io.loaddata(filename)
+ if t and t ~= "" then
+ t = loadstring(t)
+ if type(t) == "function" then
+ t = t()
+ if type(t) == "table" then
+ return t
+ end
+ end
+ end
+ end
+end
end -- of closure
@@ -7502,7 +7645,7 @@ local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar, utfgsub = utf.char, utf.gsub
+local utfchar = utf.char
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -7752,13 +7895,7 @@ local privates_n = {
-- keeps track of defined ones
}
-local function escaped(s)
- if s == "" then
- return ""
- else
- return (utfgsub(s,".",privates_u))
- end
-end
+local escaped = utf.remapper(privates_u)
local function unescaped(s)
local p = privates_n[s]
@@ -7773,13 +7910,7 @@ local function unescaped(s)
return p
end
-local function unprivatized(s,resolve)
- if s == "" then
- return ""
- else
- return (utfgsub(s,".",privates_p))
- end
-end
+local unprivatized = utf.remapper(privates_p)
xml.privatetoken = unescaped
xml.unprivatized = unprivatized
@@ -14612,7 +14743,7 @@ end
resolvers.resolve = resolve
resolvers.unresolve = unresolve
-if os.uname then
+if type(os.uname) == "function" then
for k, v in next, os.uname() do
if not prefixes[k] then
@@ -16369,7 +16500,7 @@ function templates.resolve(t,mapping)
end
-- inspect(utilities.templates.replace("test %one% test", { one = "%two%", two = "two" }))
--- inspect(utilities.templates.resolve({ one = "%two%", two = "two" }))
+-- inspect(utilities.templates.resolve({ one = "%two%", two = "two", three = "%three%" }))
diff --git a/scripts/context/stubs/unix/mtxrun b/scripts/context/stubs/unix/mtxrun
index b2d3fec54..40fe71c0e 100644
--- a/scripts/context/stubs/unix/mtxrun
+++ b/scripts/context/stubs/unix/mtxrun
@@ -8,6 +8,11 @@ if not modules then modules = { } end modules ['mtxrun'] = {
license = "see context related readme files"
}
+-- if not lpeg then require("lpeg") end
+-- if not md5 then require("md5") end
+-- if not lfs then require("lfs") end
+-- if not texconfig then texconfig = { } end
+
-- one can make a stub:
--
-- #!/bin/sh
@@ -177,7 +182,8 @@ if not modules then modules = { } end modules ['l-table'] = {
license = "see context related readme files"
}
-local type, next, tostring, tonumber, ipairs, table, string = type, next, tostring, tonumber, ipairs, table, string
+local type, next, tostring, tonumber, ipairs = type, next, tostring, tonumber, ipairs
+local table, string = table, string
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
@@ -188,6 +194,8 @@ local getinfo = debug.getinfo
-- impact on ConTeXt was not that large; the remaining ipairs already
-- have been replaced. In a similar fashion we also hardly used pairs.
--
+-- Hm, actually ipairs was retained, but we no longer use it anyway.
+--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -571,12 +579,25 @@ local function do_serialize(root,name,depth,level,indexed)
end
-- we could check for k (index) being number (cardinal)
if root and next(root) then
- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
+ -- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
+ -- if compact then
+ -- -- NOT: for k=1,#root do (we need to quit at nil)
+ -- for k,v in ipairs(root) do -- can we use next?
+ -- if not first then first = k end
+ -- last = last + 1
+ -- end
+ -- end
+ local first, last = nil, 0
if compact then
- -- NOT: for k=1,#root do (we need to quit at nil)
- for k,v in ipairs(root) do -- can we use next?
- if not first then first = k end
- last = last + 1
+ last = #root
+ for k=1,last do
+ if not root[k] then
+ last = k - 1
+ break
+ end
+ end
+ if last > 0 then
+ first = 1
end
end
local sk = sortedkeys(root)
@@ -3662,6 +3683,24 @@ local isdir = lfs.isdir
local isfile = lfs.isfile
local currentdir = lfs.currentdir
+-- in case we load outside luatex
+
+if not isdir then
+ function isdir(name)
+ local a = attributes(name)
+ return a and a.mode == "directory"
+ end
+ lfs.isdir = isdir
+end
+
+if not isfile then
+ function isfile(name)
+ local a = attributes(name)
+ return a and a.mode == "file"
+ end
+ lfs.isfile = isfile
+end
+
-- handy
function dir.current()
@@ -4083,6 +4122,18 @@ if not modules then modules = { } end modules ['l-unicode'] = {
license = "see context related readme files"
}
+-- this module will be reorganized
+
+-- todo: utf.sub replacement (used in syst-aux)
+
+local concat = table.concat
+local type = type
+local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
+local utftype = patterns.utftype
+local char, byte, find, bytepairs, utfvalues, format = string.char, string.byte, string.find, string.bytepairs, string.utfvalues, string.format
+local utfsplitlines = string.utfsplitlines
+
if not unicode then
unicode = { }
@@ -4140,12 +4191,156 @@ if not utf.char then
end
-local concat = table.concat
-local utfchar, utfbyte, utfgsub = utf.char, utf.byte, utf.gsub
-local char, byte, find, bytepairs, utfvalues, format = string.char, string.byte, string.find, string.bytepairs, string.utfvalues, string.format
-local type = type
+if not utf.byte then
-local utfsplitlines = string.utfsplitlines
+ local utf8byte = patterns.utf8byte
+
+ function utf.byte(c)
+ return lpegmatch(utf8byte,c)
+ end
+
+end
+
+local utfchar, utfbyte = utf.char, utf.byte
+
+-- As we want to get rid of the (unmaintained) utf library we implement our own
+-- variants (in due time an independent module):
+
+function unicode.filetype(data)
+ return data and lpegmatch(utftype,data) or "unknown"
+end
+
+local toentities = Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
+
+
+
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+-- actually one of them is already utf ... sort of useless this one
+
+-- function utf.char(n)
+-- if n < 0x80 then
+-- return char(n)
+-- elseif n < 0x800 then
+-- return char(
+-- 0xC0 + floor(n/0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- elseif n < 0x10000 then
+-- return char(
+-- 0xE0 + floor(n/0x1000),
+-- 0x80 + (floor(n/0x40) % 0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- elseif n < 0x40000 then
+-- return char(
+-- 0xF0 + floor(n/0x40000),
+-- 0x80 + floor(n/0x1000),
+-- 0x80 + (floor(n/0x40) % 0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- else
+-- -- return char(
+-- -- 0xF1 + floor(n/0x1000000),
+-- -- 0x80 + floor(n/0x40000),
+-- -- 0x80 + floor(n/0x1000),
+-- -- 0x80 + (floor(n/0x40) % 0x40),
+-- -- 0x80 + (n % 0x40)
+-- -- )
+-- return "?"
+-- end
+-- end
+--
+-- merge into:
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
+
+local validatedutf = Cs (
+ (
+ patterns.utf8one
+ + patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ + P(1) / "�"
+ )^0
+)
+
+patterns.validatedutf = validatedutf
+
+function string.validutf(str)
+ return lpegmatch(validatedutf,str)
+end
+
+
+utf.length = string.utflength
+utf.split = string.utfsplit
+utf.splitines = string.utfsplitlines
+utf.valid = string.validutf
+
+if not utf.len then
+ utf.len = utf.length
+end
+
+-- a replacement for simple gsubs:
+
+local utf8char = patterns.utf8char
+
+function utf.remapper(mapping)
+ local pattern = Cs((utf8char/mapping)^0)
+ return function(str)
+ if not str or str == "" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+ end, pattern
+end
+
+-- local remap = utf.remapper { a = 'd', b = "c", c = "b", d = "a" }
+-- print(remap("abcd 1234 abcd"))
-- 0 EF BB BF UTF-8
-- 1 FF FE UTF-16-little-endian
@@ -4338,11 +4533,22 @@ local function big(c)
end
end
+-- function unicode.utf8_to_utf16(str,littleendian)
+-- if littleendian then
+-- return char(255,254) .. utfgsub(str,".",little)
+-- else
+-- return char(254,255) .. utfgsub(str,".",big)
+-- end
+-- end
+
+local _, l_remap = utf.remapper(little)
+local _, b_remap = utf.remapper(big)
+
function unicode.utf8_to_utf16(str,littleendian)
if littleendian then
- return char(255,254) .. utfgsub(str,".",little)
+ return char(255,254) .. lpegmatch(l_remap,str)
else
- return char(254,255) .. utfgsub(str,".",big)
+ return char(254,255) .. lpegmatch(b_remap,str)
end
end
@@ -4364,96 +4570,6 @@ function unicode.xstring(s)
end
-local lpegmatch = lpeg.match
-local patterns = lpeg.patterns
-local utftype = patterns.utftype
-
-function unicode.filetype(data)
- return data and lpegmatch(utftype,data) or "unknown"
-end
-
-local toentities = lpeg.Cs (
- (
- patterns.utf8one
- + (
- patterns.utf8two
- + patterns.utf8three
- + patterns.utf8four
- ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
- )^0
-)
-
-patterns.toentities = toentities
-
-function utf.toentities(str)
- return lpegmatch(toentities,str)
-end
-
-
-
-
-local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
-
-local one = P(1)
-local two = C(1) * C(1)
-local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
-
--- actually one of them is already utf ... sort of useless this one
-
-local pattern = P("\254\255") * Cs( (
- four / function(a,b,c,d)
- local ab = 0xFF * byte(a) + byte(b)
- local cd = 0xFF * byte(c) + byte(d)
- return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
- end
- + two / function(a,b)
- return utfchar(byte(a)*256 + byte(b))
- end
- + one
- )^1 )
- + P("\255\254") * Cs( (
- four / function(b,a,d,c)
- local ab = 0xFF * byte(a) + byte(b)
- local cd = 0xFF * byte(c) + byte(d)
- return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
- end
- + two / function(b,a)
- return utfchar(byte(a)*256 + byte(b))
- end
- + one
- )^1 )
-
-function string.toutf(s)
- return lpegmatch(pattern,s) or s -- todo: utf32
-end
-
-local validatedutf = Cs (
- (
- patterns.utf8one
- + patterns.utf8two
- + patterns.utf8three
- + patterns.utf8four
- + P(1) / "�"
- )^0
-)
-
-patterns.validatedutf = validatedutf
-
-function string.validutf(str)
- return lpegmatch(validatedutf,str)
-end
-
-
-utf.length = string.utflength
-utf.split = string.utfsplit
-utf.splitines = string.utfsplitlines
-utf.valid = string.validutf
-
-if not utf.len then
- utf.len = utf.length
-end
-
-
end -- of closure
do -- create closure to overcome 200 locals limit
@@ -4513,8 +4629,8 @@ local tables = utilities.tables
local format, gmatch, rep, gsub = string.format, string.gmatch, string.rep, string.gsub
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
-local type, next, rawset, tonumber = type, next, rawset, tonumber
-local lpegmatch = lpeg.match
+local type, next, rawset, tonumber, loadstring = type, next, rawset, tonumber, loadstring
+local lpegmatch, P, Cs = lpeg.match, lpeg.P, lpeg.Cs
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -4668,19 +4784,27 @@ function tables.encapsulate(core,capsule,protect)
end
end
-local function serialize(t,r) -- no mixes
+local escaped = Cs ( (
+ P('\\' ) +
+ P('"' )/'\\"' +
+ P('\n')/'\\n' +
+ P('\r')/'\\r' +
+ 1
+)^0 )
+
+local function serialize(t,r,outer) -- no mixes
r[#r+1] = "{"
local n = #t
if n > 0 then
for i=1,n do
local v = t[i]
local tv = type(v)
- if tv == "table" then
- serialize(v,r)
- elseif tv == "string" then
+ if tv == "string" then
r[#r+1] = format("%q,",v)
elseif tv == "number" then
r[#r+1] = format("%s,",v)
+ elseif tv == "table" then
+ serialize(v,r)
elseif tv == "boolean" then
r[#r+1] = format("%s,",tostring(v))
end
@@ -4688,27 +4812,46 @@ local function serialize(t,r) -- no mixes
else
for k, v in next, t do
local tv = type(v)
- if tv == "table" then
- r[#r+1] = format("[%q]=",k)
- serialize(v,r)
- elseif tv == "string" then
+ if tv == "string" then
r[#r+1] = format("[%q]=%q,",k,v)
elseif tv == "number" then
r[#r+1] = format("[%q]=%s,",k,v)
+ elseif tv == "table" then
+ r[#r+1] = format("[%q]=",k)
+ serialize(v,r)
elseif tv == "boolean" then
r[#r+1] = format("[%q]=%s,",k,tostring(v))
end
end
end
- r[#r+1] = "}"
+ if outer then
+ r[#r+1] = "}"
+ else
+ r[#r+1] = "},"
+ end
return r
end
function table.fastserialize(t,prefix)
- return concat(serialize(t,{ prefix }))
+ return concat(serialize(t,{ prefix },true))
end
--- inspect(table.fastserialize { a = 1, b = { 4, { 5, 6 } }, c = { d = 7 } })
+-- inspect(table.fastserialize { a = 1, b = { 4, { 5, 6 } }, c = { d = 7, e = 'f"g\nh' } })
+
+function table.load(filename)
+ if filename then
+ local t = io.loaddata(filename)
+ if t and t ~= "" then
+ t = loadstring(t)
+ if type(t) == "function" then
+ t = t()
+ if type(t) == "table" then
+ return t
+ end
+ end
+ end
+ end
+end
end -- of closure
@@ -7502,7 +7645,7 @@ local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar, utfgsub = utf.char, utf.gsub
+local utfchar = utf.char
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -7752,13 +7895,7 @@ local privates_n = {
-- keeps track of defined ones
}
-local function escaped(s)
- if s == "" then
- return ""
- else
- return (utfgsub(s,".",privates_u))
- end
-end
+local escaped = utf.remapper(privates_u)
local function unescaped(s)
local p = privates_n[s]
@@ -7773,13 +7910,7 @@ local function unescaped(s)
return p
end
-local function unprivatized(s,resolve)
- if s == "" then
- return ""
- else
- return (utfgsub(s,".",privates_p))
- end
-end
+local unprivatized = utf.remapper(privates_p)
xml.privatetoken = unescaped
xml.unprivatized = unprivatized
@@ -14612,7 +14743,7 @@ end
resolvers.resolve = resolve
resolvers.unresolve = unresolve
-if os.uname then
+if type(os.uname) == "function" then
for k, v in next, os.uname() do
if not prefixes[k] then
@@ -16369,7 +16500,7 @@ function templates.resolve(t,mapping)
end
-- inspect(utilities.templates.replace("test %one% test", { one = "%two%", two = "two" }))
--- inspect(utilities.templates.resolve({ one = "%two%", two = "two" }))
+-- inspect(utilities.templates.resolve({ one = "%two%", two = "two", three = "%three%" }))
diff --git a/tex/context/base/back-exp.lua b/tex/context/base/back-exp.lua
index f73ee2d55..46ce4f96c 100644
--- a/tex/context/base/back-exp.lua
+++ b/tex/context/base/back-exp.lua
@@ -22,7 +22,7 @@ local next, type = next, type
local format, match, concat, rep, sub, gsub, gmatch, find = string.format, string.match, table.concat, string.rep, string.sub, string.gsub, string.gmatch, string.find
local validstring = string.valid
local lpegmatch = lpeg.match
-local utfchar, utfbyte, utfsub, utfgsub = utf.char, utf.byte, utf.sub, utf.gsub
+local utfchar, utfbyte = utf.char, utf.byte
local insert, remove = table.insert, table.remove
local topoints = number.topoints
local utfvalues = string.utfvalues
@@ -155,6 +155,8 @@ local somespace = { [0x20] = true, [" "] = true } -- for testing
local entities = { ["&"] = "&amp;", [">"] = "&gt;", ["<"] = "&lt;" }
local attribentities = { ["&"] = "&amp;", [">"] = "&gt;", ["<"] = "&lt;", ['"'] = "quot;" }
+local entityremapper = utf.remapper(entities)
+
local alignmapping = {
flushright = "right",
middle = "center",
@@ -1352,7 +1354,7 @@ local function begintag(result,element,nature,depth,di,skip)
end
result[#result+1] = format("%s<metadata>\n",spaces[depth])
for k, v in table.sortedpairs(metadata) do
- v = utfgsub(v,".",entities)
+ v = entityremapper(v)
result[#result+1] = format("%s<metavariable name=%q>%s</metavariable>\n",spaces[depth+1],k,v)
end
result[#result+1] = format("%s</metadata>\n",spaces[depth])
@@ -1410,7 +1412,7 @@ local function flushtree(result,data,nature,depth)
-- whatever
elseif di.content then
-- already has breaks
- local content = utfgsub(di.content,".",entities)
+ local content = entityremapper(di.content)
if i == nofdata and sub(content,-1) == "\n" then -- move check
-- can be an end of line in par but can also be the last line
if trace_spacing then
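The remapper built from the entities table behaves like the old per-character gsub; a quick sketch (characters not in the mapping pass through unchanged):

    local entities = { ["&"] = "&amp;", [">"] = "&gt;", ["<"] = "&lt;" }
    local entityremapper = utf.remapper(entities)
    print(entityremapper("a < b & c"))    -- a &lt; b &amp; c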
diff --git a/tex/context/base/char-cjk.lua b/tex/context/base/char-cjk.lua
index 6232ebcad..3d7de1423 100644
--- a/tex/context/base/char-cjk.lua
+++ b/tex/context/base/char-cjk.lua
@@ -12,7 +12,8 @@ local floor = math.floor
local format = string.format
local utfchar = utf.char
-local ranges = characters.ranges
+local ranges = characters.ranges
+local allocate = utilities.storage.allocate
-- Hangul Syllable
@@ -209,7 +210,7 @@ local remapped = { -- this might be merged into char-def.lua
[0x11C2] = 0x314E, -- H
}
-characters.hangul = {
+characters.hangul = allocate {
decomposed = decomposed,
description = description,
leadconsonant = leadconsonant,
diff --git a/tex/context/base/char-utf.lua b/tex/context/base/char-utf.lua
index cefaca4e3..bd7463522 100644
--- a/tex/context/base/char-utf.lua
+++ b/tex/context/base/char-utf.lua
@@ -19,7 +19,7 @@ in special kinds of output (for instance <l n='pdf'/>).</p>
over a string.</p>
--ldx]]--
-local utfchar, utfbyte, utfgsub = utf.char, utf.byte, utf.gsub
+local utfchar, utfbyte = utf.char, utf.byte
local concat, gmatch, gsub, find = table.concat, string.gmatch, string.gsub, string.find
local utfcharacters, utfvalues = string.utfcharacters, string.utfvalues
local allocate = utilities.storage.allocate
@@ -210,9 +210,13 @@ end
private.set = set
-function private.escape (str) return gsub(str,"(.)", escapes) end
-function private.replace(str) return utfgsub(str,"(.)", low ) end
-function private.revert (str) return utfgsub(str,"(.)", high ) end
+-- function private.escape (str) return gsub(str,"(.)", escapes) end
+-- function private.replace(str) return utfgsub(str,"(.)", low ) end
+-- function private.revert (str) return utfgsub(str,"(.)", high ) end
+
+private.escape = utf.remapper(escapes)
+private.replace = utf.remapper(low)
+private.revert = utf.remapper(high)
for ch in gmatch(special,".") do set(ch) end
diff --git a/tex/context/base/colo-ini.lua b/tex/context/base/colo-ini.lua
index 253027238..f2437cd9d 100644
--- a/tex/context/base/colo-ini.lua
+++ b/tex/context/base/colo-ini.lua
@@ -846,3 +846,21 @@ end
-- context.popcatcodes()
-- end
+-- handy
+
+local models = storage.allocate { "gray", "rgb", "cmyk", "spot" }
+
+colors.models = models -- check for usage elsewhere
+
+function attributes.colors.spec(name)
+ local t = colorvalues[attributes_list[name]] or colorvalues[attributes_list.black]
+ return {
+ model = models[t[1]],
+ s = t[2],
+ r = t[3], g = t[4], b = t[5],
+ c = t[6], m = t[7], y = t[8], k = t[9],
+ }
+end
+
+-- inspect(attributes.colors.spec("red"))
+-- inspect(attributes.colors.spec("red socks"))
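For orientation, the returned spec is a flat table with the model name plus the gray, rgb and cmyk components; for a plain rgb red it would look roughly like this (component values illustrative):

    -- { model = "rgb", s = 0.3, r = 1, g = 0, b = 0, c = 0, m = 1, y = 1, k = 0 }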
diff --git a/tex/context/base/cont-new.mkii b/tex/context/base/cont-new.mkii
index 5bc49b4d0..8fbb41f87 100644
--- a/tex/context/base/cont-new.mkii
+++ b/tex/context/base/cont-new.mkii
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2012.08.28 20:00}
+\newcontextversion{2012.09.04 18:08}
%D This file is loaded at runtime, thereby providing an
%D excellent place for hacks, patches, extensions and new
diff --git a/tex/context/base/cont-new.mkiv b/tex/context/base/cont-new.mkiv
index 14fc10ce7..13e895931 100644
--- a/tex/context/base/cont-new.mkiv
+++ b/tex/context/base/cont-new.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2012.08.28 20:00}
+\newcontextversion{2012.09.04 18:08}
%D This file is loaded at runtime, thereby providing an excellent place for
%D hacks, patches, extensions and new features.
diff --git a/tex/context/base/context.mkii b/tex/context/base/context.mkii
index 0fd18a46a..efb1028ec 100644
--- a/tex/context/base/context.mkii
+++ b/tex/context/base/context.mkii
@@ -20,7 +20,7 @@
%D your styles an modules.
\edef\contextformat {\jobname}
-\edef\contextversion{2012.08.28 20:00}
+\edef\contextversion{2012.09.04 18:08}
%D For those who want to use this:
diff --git a/tex/context/base/context.mkiv b/tex/context/base/context.mkiv
index 1a587fcc4..3905a49c6 100644
--- a/tex/context/base/context.mkiv
+++ b/tex/context/base/context.mkiv
@@ -25,7 +25,7 @@
%D up and the dependencies are more consistent.
\edef\contextformat {\jobname}
-\edef\contextversion{2012.08.28 20:00}
+\edef\contextversion{2012.09.04 18:08}
%D For those who want to use this:
diff --git a/tex/context/base/data-pre.lua b/tex/context/base/data-pre.lua
index 5b25c5f87..4e82b6186 100644
--- a/tex/context/base/data-pre.lua
+++ b/tex/context/base/data-pre.lua
@@ -146,7 +146,7 @@ end
resolvers.resolve = resolve
resolvers.unresolve = unresolve
-if os.uname then
+if type(os.uname) == "function" then
for k, v in next, os.uname() do
if not prefixes[k] then
diff --git a/tex/context/base/font-afk.lua b/tex/context/base/font-afk.lua
index 2674cb233..8b65b0631 100644
--- a/tex/context/base/font-afk.lua
+++ b/tex/context/base/font-afk.lua
@@ -13,9 +13,10 @@ anything larger is encoding dependent. An interesting complication is that a
character can be in an encoding twice but is hashed once.</p>
--ldx]]--
+local allocate = utilities.storage.allocate
fonts.handlers.afm.helpdata = {
- ligatures = { -- okay, nowadays we could parse the name but type 1 fonts
+ ligatures = allocate { -- okay, nowadays we could parse the name but type 1 fonts
['f'] = { -- don't have that many ligatures anyway
{ 'f', 'ff' },
{ 'i', 'fi' },
@@ -37,7 +38,7 @@ fonts.handlers.afm.helpdata = {
{ 'j', 'ij' }
},
},
- texligatures = {
+ texligatures = allocate {
-- ['space'] = {
-- { 'L', 'Lslash' },
-- { 'l', 'lslash' }
@@ -61,7 +62,7 @@ fonts.handlers.afm.helpdata = {
{ 'hyphen', 'emdash' }
}
},
- leftkerned = {
+ leftkerned = allocate {
AEligature = "A", aeligature = "a",
OEligature = "O", oeligature = "o",
IJligature = "I", ijligature = "i",
@@ -70,7 +71,7 @@ fonts.handlers.afm.helpdata = {
IJ = "I", ij = "i",
Ssharp = "S", ssharp = "s",
},
- rightkerned = {
+ rightkerned = allocate {
AEligature = "E", aeligature = "e",
OEligature = "E", oeligature = "e",
IJligature = "J", ijligature = "j",
@@ -79,7 +80,7 @@ fonts.handlers.afm.helpdata = {
IJ = "J", ij = "j",
Ssharp = "S", ssharp = "s",
},
- bothkerned = {
+ bothkerned = allocate {
Acircumflex = "A", acircumflex = "a",
Ccircumflex = "C", ccircumflex = "c",
Ecircumflex = "E", ecircumflex = "e",
diff --git a/tex/context/base/font-gds.lua b/tex/context/base/font-gds.lua
index ce26c66c5..b4b073433 100644
--- a/tex/context/base/font-gds.lua
+++ b/tex/context/base/font-gds.lua
@@ -234,7 +234,7 @@ end
-- colorschemes
-local colorschemes = fontgoodies.colorschemes or { }
+local colorschemes = fontgoodies.colorschemes or allocate { }
fontgoodies.colorschemes = colorschemes
colorschemes.data = colorschemes.data or { }
@@ -603,7 +603,7 @@ end
fontgoodies.register("compositions", initialize)
-local designsizes = fontgoodies.designsizes or { }
+local designsizes = fontgoodies.designsizes or allocate()
fontgoodies.designsizes = designsizes
local designdata = designsizes.data or allocate()
diff --git a/tex/context/base/font-syn.lua b/tex/context/base/font-syn.lua
index 31af73d97..d99114998 100644
--- a/tex/context/base/font-syn.lua
+++ b/tex/context/base/font-syn.lua
@@ -15,7 +15,6 @@ local find, gmatch = string.find, string.gmatch
local concat, sort, format = table.concat, table.sort, string.format
local serialize = table.serialize
local lpegmatch = lpeg.match
-local utfgsub, utflower = utf.gsub, utf.lower
local unpack = unpack or table.unpack
local allocate = utilities.storage.allocate
@@ -34,13 +33,13 @@ using a table that has keys filtered from the font related files.</p>
fonts = fonts or { } -- also used elsewhere
-local names = font.names or { }
+local names = font.names or allocate { }
fonts.names = names
local filters = names.filters or { }
names.filters = filters
-names.data = names.data or { }
+names.data = names.data or allocate { }
names.version = 1.110
names.basename = "names"
@@ -310,8 +309,6 @@ end
local function cleanname(name)
return (gsub(lower(name),"[^%a%d]",""))
- -- once we can load files with utf names, we can play with the following:
- -- return (utfgsub(utfgsub(lower(str),"[^%a%A%d]",""),"%s",""))
end
local function cleanfilename(fullname,defaultsuffix)
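The simplified cleanname above just lowercases the name and strips everything that is not a letter or digit, for example:

    print((string.gsub(string.lower("Times New Roman"), "[^%a%d]", "")))  -- timesnewroman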
diff --git a/tex/context/base/l-dir.lua b/tex/context/base/l-dir.lua
index 71de3114e..3deb660ce 100644
--- a/tex/context/base/l-dir.lua
+++ b/tex/context/base/l-dir.lua
@@ -25,6 +25,24 @@ local isdir = lfs.isdir
local isfile = lfs.isfile
local currentdir = lfs.currentdir
+-- in case we load outside luatex
+
+if not isdir then
+ function isdir(name)
+ local a = attributes(name)
+ return a and a.mode == "directory"
+ end
+ lfs.isdir = isdir
+end
+
+if not isfile then
+ function isfile(name)
+ local a = attributes(name)
+ return a and a.mode == "file"
+ end
+ lfs.isfile = isfile
+end
+
-- handy
function dir.current()
diff --git a/tex/context/base/l-table.lua b/tex/context/base/l-table.lua
index cc69af0ef..413d31eb5 100644
--- a/tex/context/base/l-table.lua
+++ b/tex/context/base/l-table.lua
@@ -6,7 +6,8 @@ if not modules then modules = { } end modules ['l-table'] = {
license = "see context related readme files"
}
-local type, next, tostring, tonumber, ipairs, table, string = type, next, tostring, tonumber, ipairs, table, string
+local type, next, tostring, tonumber, ipairs = type, next, tostring, tonumber, ipairs
+local table, string = table, string
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
@@ -17,6 +18,8 @@ local getinfo = debug.getinfo
-- impact on ConTeXt was not that large; the remaining ipairs already
-- have been replaced. In a similar fashion we also hardly used pairs.
--
+-- Hm, actually ipairs was retained, but we no longer use it anyway.
+--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -400,12 +403,25 @@ local function do_serialize(root,name,depth,level,indexed)
end
-- we could check for k (index) being number (cardinal)
if root and next(root) then
- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
+ -- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
+ -- if compact then
+ -- -- NOT: for k=1,#root do (we need to quit at nil)
+ -- for k,v in ipairs(root) do -- can we use next?
+ -- if not first then first = k end
+ -- last = last + 1
+ -- end
+ -- end
+ local first, last = nil, 0
if compact then
- -- NOT: for k=1,#root do (we need to quit at nil)
- for k,v in ipairs(root) do -- can we use next?
- if not first then first = k end
- last = last + 1
+ last = #root
+ for k=1,last do
+ if not root[k] then
+ last = k - 1
+ break
+ end
+ end
+ if last > 0 then
+ first = 1
end
end
local sk = sortedkeys(root)
diff --git a/tex/context/base/l-unicode.lua b/tex/context/base/l-unicode.lua
index 4e68dc6e1..6447fb040 100644
--- a/tex/context/base/l-unicode.lua
+++ b/tex/context/base/l-unicode.lua
@@ -6,6 +6,18 @@ if not modules then modules = { } end modules ['l-unicode'] = {
license = "see context related readme files"
}
+-- this module will be reorganized
+
+-- todo: utf.sub replacement (used in syst-aux)
+
+local concat = table.concat
+local type = type
+local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
+local lpegmatch, patterns = lpeg.match, lpeg.patterns
+local utftype = patterns.utftype
+local char, byte, find, bytepairs, utfvalues, format = string.char, string.byte, string.find, string.bytepairs, string.utfvalues, string.format
+local utfsplitlines = string.utfsplitlines
+
if not unicode then
unicode = { }
@@ -63,12 +75,180 @@ if not utf.char then
end
-local concat = table.concat
-local utfchar, utfbyte, utfgsub = utf.char, utf.byte, utf.gsub
-local char, byte, find, bytepairs, utfvalues, format = string.char, string.byte, string.find, string.bytepairs, string.utfvalues, string.format
-local type = type
+if not utf.byte then
-local utfsplitlines = string.utfsplitlines
+ local utf8byte = patterns.utf8byte
+
+ function utf.byte(c)
+ return lpegmatch(utf8byte,c)
+ end
+
+end
+
+local utfchar, utfbyte = utf.char, utf.byte
+
+-- As we want to get rid of the (unmaintained) utf library we implement our own
+-- variants (in due time an independent module):
+
+function unicode.filetype(data)
+ return data and lpegmatch(utftype,data) or "unknown"
+end
+
+local toentities = Cs (
+ (
+ patterns.utf8one
+ + (
+ patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
+ )^0
+)
+
+patterns.toentities = toentities
+
+function utf.toentities(str)
+ return lpegmatch(toentities,str)
+end
+
+--~ local utfchr = { } -- 60K -> 2.638 M extra mem but currently not called that often (on latin)
+--~
+--~ setmetatable(utfchr, { __index = function(t,k) local v = utfchar(k) t[k] = v return v end } )
+--~
+--~ collectgarbage("collect")
+--~ local u = collectgarbage("count")*1024
+--~ local t = os.clock()
+--~ for i=1,1000 do
+--~ for i=1,600 do
+--~ local a = utfchr[i]
+--~ end
+--~ end
+--~ print(os.clock()-t,collectgarbage("count")*1024-u)
+
+--~ collectgarbage("collect")
+--~ local t = os.clock()
+--~ for i=1,1000 do
+--~ for i=1,600 do
+--~ local a = utfchar(i)
+--~ end
+--~ end
+--~ print(os.clock()-t,collectgarbage("count")*1024-u)
+
+--~ local byte = string.byte
+--~ local utfchar = utf.char
+--~ local lpegmatch = lpeg.match, lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
+
+local one = P(1)
+local two = C(1) * C(1)
+local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
+
+-- actually one of them is already utf ... sort of useless this one
+
+-- function utf.char(n)
+-- if n < 0x80 then
+-- return char(n)
+-- elseif n < 0x800 then
+-- return char(
+-- 0xC0 + floor(n/0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- elseif n < 0x10000 then
+-- return char(
+-- 0xE0 + floor(n/0x1000),
+-- 0x80 + (floor(n/0x40) % 0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- elseif n < 0x40000 then
+-- return char(
+-- 0xF0 + floor(n/0x40000),
+-- 0x80 + floor(n/0x1000),
+-- 0x80 + (floor(n/0x40) % 0x40),
+-- 0x80 + (n % 0x40)
+-- )
+-- else
+-- -- return char(
+-- -- 0xF1 + floor(n/0x1000000),
+-- -- 0x80 + floor(n/0x40000),
+-- -- 0x80 + floor(n/0x1000),
+-- -- 0x80 + (floor(n/0x40) % 0x40),
+-- -- 0x80 + (n % 0x40)
+-- -- )
+-- return "?"
+-- end
+-- end
+--
+-- merge into:
+
+local pattern = P("\254\255") * Cs( (
+ four / function(a,b,c,d)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(a,b)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+ + P("\255\254") * Cs( (
+ four / function(b,a,d,c)
+ local ab = 0xFF * byte(a) + byte(b)
+ local cd = 0xFF * byte(c) + byte(d)
+ return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
+ end
+ + two / function(b,a)
+ return utfchar(byte(a)*256 + byte(b))
+ end
+ + one
+ )^1 )
+
+function string.toutf(s)
+ return lpegmatch(pattern,s) or s -- todo: utf32
+end
+
+local validatedutf = Cs (
+ (
+ patterns.utf8one
+ + patterns.utf8two
+ + patterns.utf8three
+ + patterns.utf8four
+ + P(1) / "�"
+ )^0
+)
+
+patterns.validatedutf = validatedutf
+
+function string.validutf(str)
+ return lpegmatch(validatedutf,str)
+end
+
+
+utf.length = string.utflength
+utf.split = string.utfsplit
+utf.splitlines = string.utfsplitlines
+utf.valid = string.validutf
+
+if not utf.len then
+ utf.len = utf.length
+end
+
+-- a replacement for simple gsubs:
+
+local utf8char = patterns.utf8char
+
+function utf.remapper(mapping)
+ local pattern = Cs((utf8char/mapping)^0)
+ return function(str)
+ if not str or str == "" then
+ return ""
+ else
+ return lpegmatch(pattern,str)
+ end
+ end, pattern
+end
+
+-- local remap = utf.remapper { a = 'd', b = "c", c = "b", d = "a" }
+-- print(remap("abcd 1234 abcd"))
-- 0 EF BB BF UTF-8
-- 1 FF FE UTF-16-little-endian
@@ -364,11 +544,22 @@ local function big(c)
end
end
+-- function unicode.utf8_to_utf16(str,littleendian)
+-- if littleendian then
+-- return char(255,254) .. utfgsub(str,".",little)
+-- else
+-- return char(254,255) .. utfgsub(str,".",big)
+-- end
+-- end
+
+local _, l_remap = utf.remapper(little)
+local _, b_remap = utf.remapper(big)
+
function unicode.utf8_to_utf16(str,littleendian)
if littleendian then
- return char(255,254) .. utfgsub(str,".",little)
+ return char(255,254) .. lpegmatch(l_remap,str)
else
- return char(254,255) .. utfgsub(str,".",big)
+ return char(254,255) .. lpegmatch(b_remap,str)
end
end
@@ -388,118 +579,3 @@ end
function unicode.xstring(s)
return format("0x%05X",type(s) == "number" and s or utfbyte(s))
end
-
---~ print(unicode.utfcodes(str))
-
-local lpegmatch = lpeg.match
-local patterns = lpeg.patterns
-local utftype = patterns.utftype
-
-function unicode.filetype(data)
- return data and lpegmatch(utftype,data) or "unknown"
-end
-
-local toentities = lpeg.Cs (
- (
- patterns.utf8one
- + (
- patterns.utf8two
- + patterns.utf8three
- + patterns.utf8four
- ) / function(s) local b = utfbyte(s) if b < 127 then return s else return format("&#%X;",b) end end
- )^0
-)
-
-patterns.toentities = toentities
-
-function utf.toentities(str)
- return lpegmatch(toentities,str)
-end
-
---~ local utfchr = { } -- 60K -> 2.638 M extra mem but currently not called that often (on latin)
---~
---~ setmetatable(utfchr, { __index = function(t,k) local v = utfchar(k) t[k] = v return v end } )
---~
---~ collectgarbage("collect")
---~ local u = collectgarbage("count")*1024
---~ local t = os.clock()
---~ for i=1,1000 do
---~ for i=1,600 do
---~ local a = utfchr[i]
---~ end
---~ end
---~ print(os.clock()-t,collectgarbage("count")*1024-u)
-
---~ collectgarbage("collect")
---~ local t = os.clock()
---~ for i=1,1000 do
---~ for i=1,600 do
---~ local a = utfchar(i)
---~ end
---~ end
---~ print(os.clock()-t,collectgarbage("count")*1024-u)
-
---~ local byte = string.byte
---~ local utfchar = utf.char
---~ local lpegmatch = lpeg.match, lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
-
-local P, C, R, Cs = lpeg.P, lpeg.C, lpeg.R, lpeg.Cs
-
-local one = P(1)
-local two = C(1) * C(1)
-local four = C(R(utfchar(0xD8),utfchar(0xFF))) * C(1) * C(1) * C(1)
-
--- actually one of them is already utf ... sort of useless this one
-
-local pattern = P("\254\255") * Cs( (
- four / function(a,b,c,d)
- local ab = 0xFF * byte(a) + byte(b)
- local cd = 0xFF * byte(c) + byte(d)
- return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
- end
- + two / function(a,b)
- return utfchar(byte(a)*256 + byte(b))
- end
- + one
- )^1 )
- + P("\255\254") * Cs( (
- four / function(b,a,d,c)
- local ab = 0xFF * byte(a) + byte(b)
- local cd = 0xFF * byte(c) + byte(d)
- return utfchar((ab-0xD800)*0x400 + (cd-0xDC00) + 0x10000)
- end
- + two / function(b,a)
- return utfchar(byte(a)*256 + byte(b))
- end
- + one
- )^1 )
-
-function string.toutf(s)
- return lpegmatch(pattern,s) or s -- todo: utf32
-end
-
-local validatedutf = Cs (
- (
- patterns.utf8one
- + patterns.utf8two
- + patterns.utf8three
- + patterns.utf8four
- + P(1) / "�"
- )^0
-)
-
-patterns.validatedutf = validatedutf
-
-function string.validutf(str)
- return lpegmatch(validatedutf,str)
-end
-
-
-utf.length = string.utflength
-utf.split = string.utfsplit
-utf.splitines = string.utfsplitlines
-utf.valid = string.validutf
-
-if not utf.len then
- utf.len = utf.length
-end
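
Editor's note: utf.remapper above returns two values, a convenience function and the underlying lpeg pattern; unicode.utf8_to_utf16 keeps only the pattern so it can call lpeg.match on it directly. The sketch below rebuilds the same mechanism with stock lpeg, assuming a hand-rolled utf8 character matcher in place of ConTeXt's lpeg.patterns.utf8char:

-- sketch: a utf.remapper-style substitution built from an utf8 character pattern
local lpeg = require("lpeg")
local P, R, Cs = lpeg.P, lpeg.R, lpeg.Cs

local cont     = R("\128\191")
local utf8char = R("\0\127")
               + R("\194\223") * cont
               + R("\224\239") * cont * cont
               + R("\240\244") * cont * cont * cont

local function remapper(mapping)
    local pattern = Cs((utf8char / mapping)^0)   -- unmapped characters pass through
    return function(str)
        if not str or str == "" then
            return ""
        end
        return lpeg.match(pattern, str)
    end, pattern                                  -- second value: the raw pattern
end

local remap, pattern = remapper { a = "d", b = "c", c = "b", d = "a" }
print(remap("abcd 1234 abcd"))                    -- dcba 1234 dcba
print(lpeg.match(pattern, "abcd"))                -- dcba, pattern reused directly
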
diff --git a/tex/context/base/lang-url.lua b/tex/context/base/lang-url.lua
index 1817fc280..3b354216a 100644
--- a/tex/context/base/lang-url.lua
+++ b/tex/context/base/lang-url.lua
@@ -9,7 +9,7 @@ if not modules then modules = { } end modules ['lang-url'] = {
local utf = unicode.utf8
local utfcharacters, utfvalues = string.utfcharacters, string.utfvalues
-local utfbyte, utfchar, utfgsub = utf.byte, utf.char, utf.gsub
+local utfbyte, utfchar = utf.byte, utf.char
context = context
@@ -63,7 +63,7 @@ local characters = utilities.storage.allocate {
}
local mapping = utilities.storage.allocate {
---~ [utfchar(0xA0)] = "~", -- nbsp (catch)
+ -- [utfchar(0xA0)] = "~", -- nbsp (catch)
}
hyphenatedurl.characters = characters
diff --git a/tex/context/base/lpdf-col.lua b/tex/context/base/lpdf-col.lua
index 60d02c1ff..db9d3268b 100644
--- a/tex/context/base/lpdf-col.lua
+++ b/tex/context/base/lpdf-col.lua
@@ -13,6 +13,8 @@ local round = math.round
local backends, lpdf, nodes = backends, lpdf, nodes
+local allocate = utilities.storage.allocate
+
local nodeinjections = backends.pdf.nodeinjections
local codeinjections = backends.pdf.codeinjections
local registrations = backends.pdf.registrations
@@ -44,7 +46,7 @@ local report_color = logs.reporter("colors","backend")
-- page groups (might move to lpdf-ini.lua)
-local colorspaceconstants = { -- v_none is ignored
+local colorspaceconstants = allocate { -- v_none is ignored
gray = pdfconstant("DeviceGray"),
rgb = pdfconstant("DeviceRGB"),
cmyk = pdfconstant("DeviceCMYK"),
@@ -694,7 +696,7 @@ end
-- this will move to lpdf-spe.lua
-backends.pdf.tables.vfspecials = { -- todo: distinguish between glyph and rule color
+backends.pdf.tables.vfspecials = allocate { -- todo: distinguish between glyph and rule color
red = { "special", 'pdf: 1 0 0 rg 1 0 0 RG' },
green = { "special", 'pdf: 0 1 0 rg 0 1 0 RG' },
diff --git a/tex/context/base/lxml-tab.lua b/tex/context/base/lxml-tab.lua
index fecf42634..9a6ac9082 100644
--- a/tex/context/base/lxml-tab.lua
+++ b/tex/context/base/lxml-tab.lua
@@ -43,7 +43,7 @@ local utf = unicode.utf8
local concat, remove, insert = table.concat, table.remove, table.insert
local type, next, setmetatable, getmetatable, tonumber = type, next, setmetatable, getmetatable, tonumber
local format, lower, find, match, gsub = string.format, string.lower, string.find, string.match, string.gsub
-local utfchar, utfgsub = utf.char, utf.gsub
+local utfchar = utf.char
local lpegmatch = lpeg.match
local P, S, R, C, V, C, Cs = lpeg.P, lpeg.S, lpeg.R, lpeg.C, lpeg.V, lpeg.C, lpeg.Cs
@@ -365,13 +365,7 @@ local privates_n = {
-- keeps track of defined ones
}
-local function escaped(s)
- if s == "" then
- return ""
- else
- return (utfgsub(s,".",privates_u))
- end
-end
+local escaped = utf.remapper(privates_u)
local function unescaped(s)
local p = privates_n[s]
@@ -386,13 +380,7 @@ local function unescaped(s)
return p
end
-local function unprivatized(s,resolve)
- if s == "" then
- return ""
- else
- return (utfgsub(s,".",privates_p))
- end
-end
+local unprivatized = utf.remapper(privates_p)
xml.privatetoken = unescaped
xml.unprivatized = unprivatized
diff --git a/tex/context/base/node-res.lua b/tex/context/base/node-res.lua
index 4bf4610cd..2933fc86d 100644
--- a/tex/context/base/node-res.lua
+++ b/tex/context/base/node-res.lua
@@ -33,6 +33,8 @@ local nodecodes = nodes.nodecodes
local glyph_code = nodecodes.glyph
+local allocate = utilities.storage.allocate
+
local reserved, nofreserved = { }, 0
local function register_node(n)
@@ -325,7 +327,7 @@ end
-- local num = userids["my id"]
-- local str = userids[num]
-local userids = utilities.storage.allocate() pool.userids = userids
+local userids = allocate() pool.userids = userids
local lastid = 0
setmetatable(userids, {
diff --git a/tex/context/base/page-ins.lua b/tex/context/base/page-ins.lua
index fd9d9e024..7f870735d 100644
--- a/tex/context/base/page-ins.lua
+++ b/tex/context/base/page-ins.lua
@@ -18,26 +18,28 @@ if not modules then modules = { } end modules ['page-ins'] = {
-- Maybe we should only register in lua and forget about the tex end.
-structures = structures or { }
-structures.inserts = structures.inserts or { }
-local inserts = structures.inserts
+structures = structures or { }
+structures.inserts = structures.inserts or { }
+local inserts = structures.inserts
local report_inserts = logs.reporter("inserts")
-inserts.stored = inserts.stored or { } -- combining them in one is inefficient in the
-inserts.data = inserts.data or { } -- bytecode storage pool
+local allocate = utilities.storage.allocate
-local variables = interfaces.variables
-local v_page = variables.page
-local v_columns = variables.columns
-local v_firstcolumn = variables.firstcolumn
-local v_lastcolumn = variables.lastcolumn
-local v_text = variables.text
+inserts.stored = inserts.stored or allocate { } -- combining them in one is inefficient in the
+inserts.data = inserts.data or allocate { } -- bytecode storage pool
+
+local variables = interfaces.variables
+local v_page = variables.page
+local v_columns = variables.columns
+local v_firstcolumn = variables.firstcolumn
+local v_lastcolumn = variables.lastcolumn
+local v_text = variables.text
storage.register("structures/inserts/stored", inserts.stored, "structures.inserts.stored")
-local data = inserts.data
-local stored = inserts.stored
+local data = inserts.data
+local stored = inserts.stored
for name, specification in next, stored do
data[specification.number] = specification
diff --git a/tex/context/base/regi-ini.lua b/tex/context/base/regi-ini.lua
index 7076c3e7a..f9507bd0b 100644
--- a/tex/context/base/regi-ini.lua
+++ b/tex/context/base/regi-ini.lua
@@ -14,10 +14,11 @@ runtime.</p>
local commands, context = commands, context
-local utfchar, utfgsub = utf.char, utf.gsub
+local utfchar = utf.char
+local lpegmatch = lpeg.match
local char, gsub, format = string.char, string.gsub, string.format
local next = next
-local insert, remove = table.insert, table.remove
+local insert, remove, fastcopy = table.insert, table.remove, table.fastcopy
local allocate = utilities.storage.allocate
local sequencers = utilities.sequencers
@@ -139,17 +140,51 @@ local function translate(line,regime)
return line
end
+-- local remappers = { }
+--
+-- local function toregime(vector,str,default) -- toregime('8859-1',"abcde Ä","?")
+-- local t = backmapping[vector]
+-- local remapper = remappers[vector]
+-- if not remapper then
+-- remapper = utf.remapper(t)
+-- remappers[t] = remapper
+-- end
+-- local m = getmetatable(t)
+-- setmetatableindex(t, function(t,k)
+-- local v = default or "?"
+-- t[k] = v
+-- return v
+-- end)
+-- str = remapper(str)
+-- setmetatable(t,m)
+-- return str
+-- end
+--
+-- -- much faster (but only matters when we have > 10K calls)
+
+local cache = { } -- if really needed we can copy vectors and hash defaults
+
+setmetatableindex(cache, function(t,k)
+ local v = { remappers = { } }
+ t[k] = v
+ return v
+end)
+
local function toregime(vector,str,default) -- toregime('8859-1',"abcde Ä","?")
- local t = backmapping[vector]
- local m = getmetatable(t)
- setmetatableindex(t, function(t,k)
- local v = default or "?"
- t[k] = v
- return v
- end)
- str = utfgsub(str,".",t)
- setmetatable(t,m)
- return str
+ local d = default or "?"
+ local c = cache[vector].remappers
+ local r = c[d]
+ if not r then
+ local t = fastcopy(backmapping[vector])
+ setmetatableindex(t, function(t,k)
+ local v = d
+ t[k] = v
+ return v
+ end)
+ r = utf.remapper(t)
+ c[d] = r
+ end
+ return r(str)
end
local function disable()
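
Editor's note: the new toregime builds one remapper per regime vector and default character and caches it, so the lpeg pattern and the fallback metatable are constructed only once instead of on every call; the commented block above is the rejected single-remapper variant. A reduced sketch of the memoization (backmapping, setmetatableindex and utf.remapper are ConTeXt internals; a byte-wise gsub stands in for the remapper here):

-- sketch: cache one remapping function per (mapping, default) pair
local function makeremapper(mapping, default)
    return function(str)
        return (str:gsub(".", function(c)
            return mapping[c] or default          -- unmapped characters become the default
        end))
    end
end

local cache = { }                                 -- cache[mapping][default] -> remapper

local function toregime(mapping, str, default)
    local d = default or "?"
    local c = cache[mapping]
    if not c then
        c = { }
        cache[mapping] = c
    end
    local r = c[d]
    if not r then
        r = makeremapper(mapping, d)              -- built once, reused afterwards
        c[d] = r
    end
    return r(str)
end

local map = { a = "1", b = "2" }
print(toregime(map, "abc", "?"))                  -- 12?
print(toregime(map, "abc", "?"))                  -- second call hits the cache
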
diff --git a/tex/context/base/s-inf-03.mkiv b/tex/context/base/s-inf-03.mkiv
index 1ff4cd0d0..bd4ec5dbc 100644
--- a/tex/context/base/s-inf-03.mkiv
+++ b/tex/context/base/s-inf-03.mkiv
@@ -1,6 +1,14 @@
% \nopdfcompression
-\enablemode[ipad]
+\starttext
+
+There is a loop somewhere ... todo.
+
+\stoptext
+
+\enablemode[ipad] % whatever that means
+
+\setupbodyfont[dejavu]
\doifmodeelse {ipad} {
@@ -10,16 +18,16 @@
\definefont
[TitlePageFont]
- [MonoBold at 18pt]
+ [MonoBold at 16pt]
\setupbodyfont
- [tt,10pt]
+ [tt,8pt]
} {
\definefont
[TitlePageFont]
- [MonoBold at 24pt]
+ [MonoBold at 20pt]
\setupbodyfont
[tt]
@@ -207,6 +215,7 @@ local function childtables(key,tab,handler,depth)
else
handler(s,t,depth)
if variant == 3 then
+print("UNMARKED",k)
childtables(false,v,handler,depth+1)
elseif variant == 2 then
childtables(k,v,handler,depth+1)
@@ -231,7 +240,6 @@ end
local function handler(k,t,depth)
k = gsub(k,"([~#$%%^&{}\\\|])",cleanup)
--- NC() rlap("\\quad\\tx " .. upper(sub(t,1,1)) .. " ".. k) NC() NC() NR()
NC() rlap("\\quad\\tx\\kern" .. (depth or 0).. "em" .. upper(sub(t,1,1)) .. " ".. k) NC() NC() NR()
end
@@ -260,67 +268,66 @@ local function show(title,subtitle,alias,builtin,t,lib,libcolor,glo,glocolor,mar
for i=1,#keys do
local k = keys[i]
local v = t[k]
- if k ~= "obsolete" and not skipkeys[k] and (not obsolete or not obsolete[k]) then
+ if k and k ~= "obsolete" and not skipkeys[k] and (not obsolete or not obsolete[k]) then
local inlib = lib and lib[k]
local inglo = glo and glo[k]
- if k then
- local t = type(v)
- local kstr, tstr = k, t
- local obs = t_obsolete and t_obsolete[k]
- if obs then
- tstr = function() overstrike(t) end
- kstr = function() overstrike(k) end
- end
- local marked = marked(v)
- if marked then
- tstr = "data table"
- end
- if t == "table" then
- local m = getmetatable(v)
- if m and m.__call then
- tstr = "function"
- end
- end
- if not mark then
- --
- elseif inlib and tostring(inlib) ~= tostring(v) then
- tstr = "overloaded ".. tstr
- elseif inglo and tostring(inglo) ~= tostring(v) then
- tstr = "overloaded ".. tstr
+ local t = type(v)
+ local kstr, tstr = k, t
+ local obs = t_obsolete and t_obsolete[k]
+ if obs then
+ tstr = function() overstrike(t) end
+ kstr = function() overstrike(k) end
+ end
+ local marked = marked(v)
+ if marked then
+ tstr = "data table"
+ end
+ if t == "table" then
+ local m = getmetatable(v)
+ if m and m.__call then
+ tstr = "function"
end
- NC() bf()
- if inlib then
- if not mark and t == "table" then
- goto(function() color( { libcolor }, kstr) end, { k } )
- else
- color( { libcolor }, kstr)
- end
- elseif inglo then
- if not mark and t == "table" then
- goto(function() color( { glocolor }, kstr) end, { k } )
- else
- color( { glocolor }, kstr)
- end
+ end
+ if not mark then
+ --
+ elseif inlib and tostring(inlib) ~= tostring(v) then
+ tstr = "overloaded ".. tstr
+ elseif inglo and tostring(inglo) ~= tostring(v) then
+ tstr = "overloaded ".. tstr
+ end
+ NC() bf()
+ if inlib then
+ if not mark and t == "table" then
+ goto(function() color( { libcolor }, kstr) end, { k } )
else
- if not mark and t == "table" then
- goto(k, { kstr } )
- else
- context(kstr)
- end
+ color( { libcolor }, kstr)
end
- NC()
- if inlib then
- color( { libcolor }, tstr)
- elseif inglo then
- color( { glocolor }, tstr)
+ elseif inglo then
+ if not mark and t == "table" then
+ goto(function() color( { glocolor }, kstr) end, { k } )
else
- context(tstr)
+ color( { glocolor }, kstr)
end
- NC() NR()
- if mark and t == "table" and title ~= "libraries" and title ~= "package" and not marked then
- childtables(false,v,handler) -- (k,v,handler)
+ else
+ if not mark and t == "table" then
+ goto(k, { kstr } )
+ else
+ context(kstr)
end
end
+ NC()
+ if inlib then
+ color( { libcolor }, tstr)
+ elseif inglo then
+ color( { glocolor }, tstr)
+ else
+ context(tstr)
+ end
+ NC() NR()
+ if mark and t == "table" and title ~= "libraries" and title ~= "package" and not marked then
+print(title,subtitle,k)
+ childtables(false,v,handler) -- (k,v,handler)
+ end
end
end
context.stoptabulate()
diff --git a/tex/context/base/status-files.pdf b/tex/context/base/status-files.pdf
index 20c7b966c..0b2b9db93 100644
--- a/tex/context/base/status-files.pdf
+++ b/tex/context/base/status-files.pdf
Binary files differ
diff --git a/tex/context/base/status-lua.pdf b/tex/context/base/status-lua.pdf
index 37adeca31..7afcfb69b 100644
--- a/tex/context/base/status-lua.pdf
+++ b/tex/context/base/status-lua.pdf
Binary files differ
diff --git a/tex/context/base/tabl-ntb.mkiv b/tex/context/base/tabl-ntb.mkiv
index fca724b71..9c7864184 100644
--- a/tex/context/base/tabl-ntb.mkiv
+++ b/tex/context/base/tabl-ntb.mkiv
@@ -854,11 +854,25 @@
\c_tabl_ntb_maximum_col \zerocount
\c_tabl_ntb_maximum_row \zerocount
\c_tabl_ntb_maximum_row_span\plusone
+ \let\currentTABLErow \tabl_ntb_current_row
+ \let\currentTABLEcolumn\tabl_ntb_current_column
+ \let\nofTABLErows \tabl_ntb_n_of_rows
+ \let\nofTABLEcolumns \tabl_ntb_n_of_columns
\let\bTR\dobTR
\let\bTD\dobTD
\let\bTH\dobTH
\let\bTN\dobTN}
+\def\tabl_ntb_current_row {\m_tabl_ntb_positive_row}
+\def\tabl_ntb_current_column{\m_tabl_ntb_positive_col}
+\def\tabl_ntb_n_of_rows {\number\c_tabl_ntb_maximum_row}
+\def\tabl_ntb_n_of_columns {\number\c_tabl_ntb_maximum_col}
+
+\let\currentTABLErow \!!zerocount
+\let\currentTABLEcolumn\!!zerocount
+\let\nofTABLErows \!!zerocount
+\let\nofTABLEcolumns \!!zerocount
+
\unexpanded\def\dobTR{\dosingleempty\tabl_ntb_tr} % also used in tabl-nte
\unexpanded\def\dobTD{\dosingleempty\tabl_ntb_td} % also used in tabl-nte
\unexpanded\def\dobTH{\dosingleempty\tabl_ntb_th} % also used in tabl-nte
diff --git a/tex/context/base/trac-lmx.lua b/tex/context/base/trac-lmx.lua
index e1805099e..e83fdce7f 100644
--- a/tex/context/base/trac-lmx.lua
+++ b/tex/context/base/trac-lmx.lua
@@ -458,7 +458,7 @@ local function lmxresult(self,variables)
if trace_variables then -- will become templates
report_lmx("converted size: %s",#converted)
end
- return converted
+ return converted or lmxerror("no result from converter")
else
return lmxerror("invalid converter")
end
diff --git a/tex/context/base/util-jsn.lua b/tex/context/base/util-jsn.lua
index 101a75d6b..7493f108d 100644
--- a/tex/context/base/util-jsn.lua
+++ b/tex/context/base/util-jsn.lua
@@ -52,7 +52,7 @@ local jsonconverter = { "value",
object = lbrace * Cf(Ct("") * V("pair") * (comma * V("pair"))^0,rawset) * rbrace,
pair = Cg(optionalws * key * optionalws * colon * V("value")),
array = Ct(lparent * V("value") * (comma * V("value"))^0 * rparent),
- value = optionalws * (jstring + V("object") + V("array") + jtrue + jfalse + jnull + jnumber) * optionalws,
+ value = optionalws * (jstring + V("object") + V("array") + jtrue + jfalse + jnull + jnumber + #rparent) * optionalws,
}
-- local jsonconverter = { "value",
@@ -93,7 +93,7 @@ local function tojson(value,t) -- we could optimize #t
elseif size == 1 then
-- we can optimize for non tables
t[#t+1] = "["
- tojson(value[i],t)
+ tojson(value[1],t)
t[#t+1] = "]"
else
for i=1,size do
@@ -111,7 +111,7 @@ local function tojson(value,t) -- we could optimize #t
t[#t+1] = format("%q",value)
elseif kind == "number" then
t[#t+1] = value
- else
+ elseif kind == "boolean" then
t[#t+1] = tostring(value)
end
return t
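
Editor's note: the grammar change above adds #rparent, a lookahead that consumes no input, so the value rule can succeed right before a closing bracket and an empty array no longer makes the whole parse fail; the tojson changes fix the indexing of a one-element array and restrict the final branch to booleans. A small sketch of the lookahead trick with stock lpeg (the grammar below is illustrative, not the one in util-jsn.lua):

-- sketch: a lookahead (#pattern) lets "value" succeed on an empty array
local lpeg = require("lpeg")
local P, R, C, V, Ct = lpeg.P, lpeg.R, lpeg.C, lpeg.V, lpeg.Ct

local number  = C(R("09")^1) / tonumber
local rparent = P("]")

local grammar = P { "array",
    array = Ct(P("[") * V("value") * (P(",") * V("value"))^0 * rparent),
    value = number + #rparent,                    -- #rparent matches without consuming
}

print(#lpeg.match(grammar, "[1,2,3]"))            -- 3
print(#lpeg.match(grammar, "[]"))                 -- 0: the empty array now parses
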
diff --git a/tex/context/base/util-sql.lua b/tex/context/base/util-sql.lua
index 1e07bfc3c..3daa60d0b 100644
--- a/tex/context/base/util-sql.lua
+++ b/tex/context/base/util-sql.lua
@@ -10,7 +10,7 @@ if not modules then modules = { } end modules ['util-sql'] = {
-- there is a bit of flux in these libraries. Also, we want the data back in
-- a way that we like.
--- buffer template
+-- Todo: buffer templates when using files.
local format = string.format
local rawset, setmetatable, loadstring, type = rawset, setmetatable, loadstring, type
@@ -48,7 +48,6 @@ local engine = "mysql"
local runners = { -- --defaults-extra-file="%inifile"
mysql = [[mysql --user="%username%" --password="%password%" --host="%host%" --port=%port% --database="%database%" < "%queryfile%" > "%resultfile%"]],
--- mysql = [[mysql --user="%username%" --password="%password%" --host="%host%" --port=%port% --database="%database%" < "%queryfile%"]],
}
sql.runners = runners
@@ -151,7 +150,7 @@ local function validspecification(specification)
specification.queryfile = queryfile
specification.resultfile = resultfile
if trace_sql then
- report_state("template file: %q",templatefile)
+ report_state("template file: %q",templatefile or "<none>")
report_state("query file: %q",queryfile)
report_state("result file: %q",resultfile)
end
@@ -167,10 +166,12 @@ local function dataprepared(specification)
end
if query then
io.savedata(specification.queryfile,query)
+ os.remove(specification.resultfile)
return true
else
-- maybe push an error
os.remove(specification.queryfile)
+ os.remove(specification.resultfile)
end
end
@@ -183,7 +184,6 @@ local function datafetched(specification)
report_state("fetchtime: %.3f sec",osclock()-t) -- not okay under linux
else
os.execute(command)
- -- return os.resultof(command)
end
return true
end
@@ -455,28 +455,83 @@ methods.library = {
-- -- --
-local e_pattern = lpeg.replacer { { '\\"','\\\\""' }, {'"','""'} }
+local e_pattern = lpeg.replacer { { '\\"','\\\\""' }, {'"','""'}, {'\\\n', "\\n" }, {'\\\r', "\\r" }, {'\t', " " } }
local u_pattern = lpeg.replacer { { '\\\\','\\' } }
+local u_pattern = lpeg.replacer { { '\\\\','\\' }, { "\n","\\n" } }
-function sql.escape(str)
- return lpegmatch(e_replace,str)
+-- library:
+
+function methods.library.serialize(t)
+ local str = fastserialize(t,"return")
+ local escaped = lpegmatch(e_pattern,str)
+-- print("LIBRARY PUT STR",str)
+-- print("LIBRARY PUT ESC",escaped)
+ return escaped
end
-function sql.unescape(str)
- return lpegmatch(u_replace,str)
+function methods.library.deserialize(str)
+ local unescaped = lpegmatch(u_pattern,str)
+-- print("LIBRARY GET STR",str)
+-- print("LIBRARY GET UES",unescaped)
+ if not unescaped then
+ return
+ end
+ local code = loadstring(unescaped)
+-- print("INVALID CODE")
+ if not code then
+ return
+ end
+ code = code()
+-- table.print(code)
+ if not code then
+ return
+ end
+ return code
end
-function sql.serialize(t)
+-- client
+
+local e_pattern = lpeg.replacer { { '\\"','\\\\""' }, {'"','""'}, {'\\\n', "\\n" }, {'\\\r', "\\r" } }
+local u_pattern = lpeg.replacer { { '\\\\','\\' } }
+
+function methods.client.serialize(t)
return lpegmatch(e_pattern,fastserialize(t,"return"))
end
-function sql.deserialize(data)
- data = lpegmatch(u_pattern,data)
- data = data and loadstring(data)
- data = data and data()
- return data
+function methods.client.deserialize(str)
+ local unescaped = lpegmatch(u_pattern,str)
+ if not unescaped then
+ return
+ end
+ local code = loadstring(unescaped)
+ if not code then
+ return
+ end
+ code = code()
+ if not code then
+ return
+ end
+ return code
+end
+
+sql.serialize = methods.client.serialize
+sql.deserialize = methods.client.deserialize
+
+function sql.escape(str)
+ return lpegmatch(e_pattern,str)
+end
+
+function sql.unescape(str)
+ return lpegmatch(u_pattern,str)
end
+-- local s = sql.serialize { a = 1, b = { 4, { 5, 6 } }, c = { d = 7, e = 'f"g\nh' } }
+-- local u = sql.unescape(s)
+-- local t = sql.deserialize(s)
+-- inspect(s)
+-- inspect(u)
+-- inspect(t)
+
-- -- --
if tex and tex.systemmodes then
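
Editor's note: the escape and unescape tables above are fed to lpeg.replacer, a ConTeXt extension that turns a list of from/to pairs into a substitution pattern; the commit splits the result into library and client variants and keeps sql.escape/sql.unescape as thin wrappers. A rough approximation with stock lpeg, just to show what the pairs do (the real helper handles more input forms):

-- sketch: an lpeg.replacer-like substitution built from from/to pairs
local lpeg = require("lpeg")
local P, Cs = lpeg.P, lpeg.Cs

local function replacer(list)
    local pattern
    for i = 1, #list do
        local from, to = list[i][1], list[i][2]
        local one = P(from) / to
        pattern = pattern and (pattern + one) or one
    end
    return Cs((pattern + 1)^0)
end

local escape = replacer { { '\\"', '\\\\""' }, { '"', '""' } }

print(lpeg.match(escape, 'a "quoted" value'))     -- a ""quoted"" value
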
diff --git a/tex/context/base/util-str.lua b/tex/context/base/util-str.lua
index 75cf443fb..377dd163f 100644
--- a/tex/context/base/util-str.lua
+++ b/tex/context/base/util-str.lua
@@ -62,11 +62,8 @@ function strings.newrepeater(str,offset)
return t
end
---~ local dashes = strings.newrepeater("--",-1)
-
---~ print(dashes[2])
---~ print(dashes[3])
---~ print(dashes[1])
+-- local dashes = strings.newrepeater("--",-1)
+-- print(dashes[2],dashes[3],dashes[1])
local extra, tab, start = 0, 0, 4, 0
diff --git a/tex/context/base/util-tab.lua b/tex/context/base/util-tab.lua
index 7dc9e09f8..d45b58f6f 100644
--- a/tex/context/base/util-tab.lua
+++ b/tex/context/base/util-tab.lua
@@ -13,8 +13,8 @@ local tables = utilities.tables
local format, gmatch, rep, gsub = string.format, string.gmatch, string.rep, string.gsub
local concat, insert, remove = table.concat, table.insert, table.remove
local setmetatable, getmetatable, tonumber, tostring = setmetatable, getmetatable, tonumber, tostring
-local type, next, rawset, tonumber = type, next, rawset, tonumber
-local lpegmatch = lpeg.match
+local type, next, rawset, tonumber, loadstring = type, next, rawset, tonumber, loadstring
+local lpegmatch, P, Cs = lpeg.match, lpeg.P, lpeg.Cs
function tables.definetable(target) -- defines undefined tables
local composed, t, n = nil, { }, 0
@@ -168,19 +168,27 @@ function tables.encapsulate(core,capsule,protect)
end
end
-local function serialize(t,r) -- no mixes
+local escaped = Cs ( (
+ P('\\' ) +
+ P('"' )/'\\"' +
+ P('\n')/'\\n' +
+ P('\r')/'\\r' +
+ 1
+)^0 )
+
+local function serialize(t,r,outer) -- no mixes
r[#r+1] = "{"
local n = #t
if n > 0 then
for i=1,n do
local v = t[i]
local tv = type(v)
- if tv == "table" then
- serialize(v,r)
- elseif tv == "string" then
+ if tv == "string" then
r[#r+1] = format("%q,",v)
elseif tv == "number" then
r[#r+1] = format("%s,",v)
+ elseif tv == "table" then
+ serialize(v,r)
elseif tv == "boolean" then
r[#r+1] = format("%s,",tostring(v))
end
@@ -188,24 +196,43 @@ local function serialize(t,r) -- no mixes
else
for k, v in next, t do
local tv = type(v)
- if tv == "table" then
- r[#r+1] = format("[%q]=",k)
- serialize(v,r)
- elseif tv == "string" then
+ if tv == "string" then
r[#r+1] = format("[%q]=%q,",k,v)
elseif tv == "number" then
r[#r+1] = format("[%q]=%s,",k,v)
+ elseif tv == "table" then
+ r[#r+1] = format("[%q]=",k)
+ serialize(v,r)
elseif tv == "boolean" then
r[#r+1] = format("[%q]=%s,",k,tostring(v))
end
end
end
- r[#r+1] = "}"
+ if outer then
+ r[#r+1] = "}"
+ else
+ r[#r+1] = "},"
+ end
return r
end
function table.fastserialize(t,prefix)
- return concat(serialize(t,{ prefix }))
+ return concat(serialize(t,{ prefix },true))
end
--- inspect(table.fastserialize { a = 1, b = { 4, { 5, 6 } }, c = { d = 7 } })
+-- inspect(table.fastserialize { a = 1, b = { 4, { 5, 6 } }, c = { d = 7, e = 'f"g\nh' } })
+
+function table.load(filename)
+ if filename then
+ local t = io.loaddata(filename)
+ if t and t ~= "" then
+ t = loadstring(t)
+ if type(t) == "function" then
+ t = t()
+ if type(t) == "table" then
+ return t
+ end
+ end
+ end
+ end
+end
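
Editor's note: table.load above is the read side of table.fastserialize: the serialized string starts with the given prefix (here "return"), so compiling it with loadstring and calling the result gives the table back. A sketch of the round trip with plain io (the patch itself uses ConTeXt's io.loaddata; the file name is illustrative):

-- sketch: save a fastserialized table and read it back, mirroring table.load
local function save(filename, data)
    local f = assert(io.open(filename, "w"))
    f:write(data)
    f:close()
end

local function load_table(filename)
    local f = io.open(filename, "r")
    if not f then return end
    local t = f:read("*a")
    f:close()
    if t and t ~= "" then
        t = loadstring(t)                -- the data starts with "return { ... }"
        if type(t) == "function" then
            t = t()
            if type(t) == "table" then
                return t
            end
        end
    end
end

-- usage, assuming table.fastserialize as defined above:
-- save("demo.lua", table.fastserialize({ a = 1, b = { 4, 5 } }, "return"))
-- inspect(load_table("demo.lua"))
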
diff --git a/tex/context/base/util-tpl.lua b/tex/context/base/util-tpl.lua
index 61fbbd48e..f97f97d11 100644
--- a/tex/context/base/util-tpl.lua
+++ b/tex/context/base/util-tpl.lua
@@ -78,5 +78,5 @@ function templates.resolve(t,mapping)
end
-- inspect(utilities.templates.replace("test %one% test", { one = "%two%", two = "two" }))
--- inspect(utilities.templates.resolve({ one = "%two%", two = "two" }))
+-- inspect(utilities.templates.resolve({ one = "%two%", two = "two", three = "%three%" }))
diff --git a/tex/context/base/x-mathml.lua b/tex/context/base/x-mathml.lua
index bf4cae43f..cc7238f94 100644
--- a/tex/context/base/x-mathml.lua
+++ b/tex/context/base/x-mathml.lua
@@ -12,7 +12,7 @@ local type, next = type, next
local utf = unicode.utf8
local format, lower, find, gsub = string.format, string.lower, string.find, string.gsub
local strip = string.strip
-local utfchar, utfgsub = utf.char, utf.gsub
+local utfchar = utf.char
local xmlsprint, xmlcprint, xmltext, xmlcontent = xml.sprint, xml.cprint, xml.text, xml.content
local getid = lxml.getid
local utfcharacters, utfvalues = string.utfcharacters, string.utfvalues
@@ -87,6 +87,8 @@ local o_replacements = { -- in main table
}
+local simpleoperatorremapper = utf.remapper(o_replacements)
+
--~ languages.data.labels.functions
local i_replacements = {
@@ -457,8 +459,7 @@ function xml.functions.remapopenmath(e)
end
function mathml.checked_operator(str)
- str = utfgsub(str,".",o_replacements)
- context(str)
+ context(simpleoperatorremapper(str))
end
function mathml.stripped(str)
@@ -481,10 +482,8 @@ end
function mathml.mo(id)
local str = xmlcontent(getid(id)) or ""
- local rep = gsub(str,"&.-;","")
- local rep = utfgsub(rep,".",o_replacements)
- context(rep)
- -- context.mo(rep) -- fails with \left etc
+ local rep = gsub(str,"&.-;","") -- todo
+ context(simpleoperatorremapper(rep))
end
function mathml.mi(id)
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index afdabf931..9650b1024 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : luatex-fonts-merged.lua
-- parent file : luatex-fonts.lua
--- merge date : 08/28/12 20:00:42
+-- merge date : 09/04/12 18:08:16
do -- begin closure to overcome local limits and interference
@@ -144,7 +144,8 @@ if not modules then modules = { } end modules ['l-table'] = {
license = "see context related readme files"
}
-local type, next, tostring, tonumber, ipairs, table, string = type, next, tostring, tonumber, ipairs, table, string
+local type, next, tostring, tonumber, ipairs = type, next, tostring, tonumber, ipairs
+local table, string = table, string
local concat, sort, insert, remove = table.concat, table.sort, table.insert, table.remove
local format, find, gsub, lower, dump, match = string.format, string.find, string.gsub, string.lower, string.dump, string.match
local getmetatable, setmetatable = getmetatable, setmetatable
@@ -155,6 +156,8 @@ local getinfo = debug.getinfo
-- impact on ConTeXt was not that large; the remaining ipairs already
-- have been replaced. In a similar fashion we also hardly used pairs.
--
+-- Hm, actually ipairs was retained, but we no longer use it anyway.
+--
-- Just in case, we provide the fallbacks as discussed in Programming
-- in Lua (http://www.lua.org/pil/7.3.html):
@@ -538,12 +541,25 @@ local function do_serialize(root,name,depth,level,indexed)
end
-- we could check for k (index) being number (cardinal)
if root and next(root) then
- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
+ -- local first, last = nil, 0 -- #root cannot be trusted here (will be ok in 5.2 when ipairs is gone)
+ -- if compact then
+ -- -- NOT: for k=1,#root do (we need to quit at nil)
+ -- for k,v in ipairs(root) do -- can we use next?
+ -- if not first then first = k end
+ -- last = last + 1
+ -- end
+ -- end
+ local first, last = nil, 0
if compact then
- -- NOT: for k=1,#root do (we need to quit at nil)
- for k,v in ipairs(root) do -- can we use next?
- if not first then first = k end
- last = last + 1
+ last = #root
+ for k=1,last do
+ if not root[k] then
+ last = k - 1
+ break
+ end
+ end
+ if last > 0 then
+ first = 1
end
end
local sk = sortedkeys(root)