-rw-r--r--  context/data/scite/context/lexers/data/scite-context-data-tex.lua  |  4
-rw-r--r--  context/data/scite/context/scite-context-data-tex.properties  |  76
-rw-r--r--  context/data/textadept/context/data/scite-context-data-tex.lua  |  4
-rw-r--r--  doc/context/documents/general/manuals/musings.pdf  |  bin 0 -> 5724777 bytes
-rw-r--r--  doc/context/documents/general/qrcs/setup-cs.pdf  |  bin 849025 -> 849177 bytes
-rw-r--r--  doc/context/documents/general/qrcs/setup-de.pdf  |  bin 849966 -> 850128 bytes
-rw-r--r--  doc/context/documents/general/qrcs/setup-en.pdf  |  bin 853260 -> 853439 bytes
-rw-r--r--  doc/context/documents/general/qrcs/setup-fr.pdf  |  bin 846341 -> 846497 bytes
-rw-r--r--  doc/context/documents/general/qrcs/setup-it.pdf  |  bin 851933 -> 852074 bytes
-rw-r--r--  doc/context/documents/general/qrcs/setup-mapping-cs.pdf  |  bin 346930 -> 347089 bytes
-rw-r--r--  doc/context/documents/general/qrcs/setup-mapping-de.pdf  |  bin 430928 -> 431091 bytes
-rw-r--r--  doc/context/documents/general/qrcs/setup-mapping-en.pdf  |  bin 344775 -> 344937 bytes
-rw-r--r--  doc/context/documents/general/qrcs/setup-mapping-fr.pdf  |  bin 347678 -> 347845 bytes
-rw-r--r--  doc/context/documents/general/qrcs/setup-mapping-it.pdf  |  bin 346659 -> 346814 bytes
-rw-r--r--  doc/context/documents/general/qrcs/setup-mapping-nl.pdf  |  bin 345242 -> 345390 bytes
-rw-r--r--  doc/context/documents/general/qrcs/setup-mapping-ro.pdf  |  bin 508648 -> 508381 bytes
-rw-r--r--  doc/context/documents/general/qrcs/setup-nl.pdf  |  bin 843476 -> 843643 bytes
-rw-r--r--  doc/context/documents/general/qrcs/setup-ro.pdf  |  bin 846123 -> 846289 bytes
-rw-r--r--  doc/context/sources/general/manuals/musings/musings-children.tex  |  1253
-rw-r--r--  doc/context/sources/general/manuals/musings/musings-contents.tex  |  7
-rw-r--r--  doc/context/sources/general/manuals/musings/musings-introduction.tex  |  31
-rw-r--r--  doc/context/sources/general/manuals/musings/musings-perception.tex  |  180
-rw-r--r--  doc/context/sources/general/manuals/musings/musings-stability.tex  |  388
-rw-r--r--  doc/context/sources/general/manuals/musings/musings-staygo.tex  |  461
-rw-r--r--  doc/context/sources/general/manuals/musings/musings-style.tex  |  92
-rw-r--r--  doc/context/sources/general/manuals/musings/musings-titlepage.tex  |  46
-rw-r--r--  doc/context/sources/general/manuals/musings/musings-whytex.tex  |  326
-rw-r--r--  doc/context/sources/general/manuals/musings/musings.tex  |  20
-rw-r--r--  doc/context/sources/general/manuals/texit/texit-conditions.tex  |  108
-rw-r--r--  doc/context/sources/general/manuals/texit/texit-contents.tex  |  9
-rw-r--r--  doc/context/sources/general/manuals/texit/texit-introduction.tex  |  24
-rw-r--r--  doc/context/sources/general/manuals/texit/texit-leaders.tex  |  247
-rw-r--r--  doc/context/sources/general/manuals/texit/texit-lookahead.tex  |  387
-rw-r--r--  doc/context/sources/general/manuals/texit/texit-style.tex  |  52
-rw-r--r--  doc/context/sources/general/manuals/texit/texit-titlepage.tex  |  40
-rw-r--r--  doc/context/sources/general/manuals/texit/texit.tex  |  19
-rw-r--r--  scripts/context/lua/mtxlibs.lua  |  13
-rw-r--r--  scripts/context/lua/mtxrun.lua  |  60
-rw-r--r--  scripts/context/stubs/mswin/mtxrun.lua  |  60
-rw-r--r--  scripts/context/stubs/unix/mtxrun  |  60
-rw-r--r--  scripts/context/stubs/win64/mtxrun.lua  |  60
-rw-r--r--  tex/context/base/mkii/cont-new.mkii  |  2
-rw-r--r--  tex/context/base/mkii/context.mkii  |  2
-rw-r--r--  tex/context/base/mkiv/anch-pos.lua  |  31
-rw-r--r--  tex/context/base/mkiv/attr-col.lua  |  27
-rw-r--r--  tex/context/base/mkiv/cldf-bas.lua  |  7
-rw-r--r--  tex/context/base/mkiv/cont-new.mkiv  |  2
-rw-r--r--  tex/context/base/mkiv/context.mkiv  |  3
-rw-r--r--  tex/context/base/mkiv/core-con.mkiv  |  19
-rw-r--r--  tex/context/base/mkiv/core-two.lua  |  93
-rw-r--r--  tex/context/base/mkiv/core-two.mkiv  |  8
-rw-r--r--  tex/context/base/mkiv/data-res.lua  |  2
-rw-r--r--  tex/context/base/mkiv/font-map.lua  |  60
-rw-r--r--  tex/context/base/mkiv/l-lpeg.lua  |  14
-rw-r--r--  tex/context/base/mkiv/lpdf-ini.lua  |  7
-rw-r--r--  tex/context/base/mkiv/lpdf-wid.lua  |  16
-rw-r--r--  tex/context/base/mkiv/luat-cod.lua  |  12
-rw-r--r--  tex/context/base/mkiv/luat-soc.lua  |  11
-rw-r--r--  tex/context/base/mkiv/luat-soc.mkiv  |  52
-rw-r--r--  tex/context/base/mkiv/mlib-pps.lua  |  10
-rw-r--r--  tex/context/base/mkiv/mult-prm.lua  |  2
-rw-r--r--  tex/context/base/mkiv/node-fin.lua  |  11
-rw-r--r--  tex/context/base/mkiv/node-ref.lua  |  102
-rw-r--r--  tex/context/base/mkiv/node-res.lua  |  3
-rw-r--r--  tex/context/base/mkiv/status-files.pdf  |  bin 26245 -> 26249 bytes
-rw-r--r--  tex/context/base/mkiv/status-lua.pdf  |  bin 267608 -> 267948 bytes
-rw-r--r--  tex/context/base/mkiv/strc-con.mkvi  |  6
-rw-r--r--  tex/context/base/mkiv/strc-lst.lua  |  18
-rw-r--r--  tex/context/base/mkiv/strc-lst.mkvi  |  5
-rw-r--r--  tex/context/base/mkiv/strc-ref.lua  |  18
-rw-r--r--  tex/context/base/mkiv/strc-ref.mkvi  |  2
-rw-r--r--  tex/context/base/mkiv/strc-reg.lua  |  23
-rw-r--r--  tex/context/base/mkiv/strc-reg.mkiv  |  4
-rw-r--r--  tex/context/base/mkiv/syst-ini.mkiv  |  1
-rw-r--r--  tex/context/base/mkiv/util-deb.lua  |  24
-rw-r--r--  tex/context/base/mkiv/util-soc-imp-copas.lua  |  930
-rw-r--r--  tex/context/base/mkiv/util-soc-imp-ftp.lua  |  400
-rw-r--r--  tex/context/base/mkiv/util-soc-imp-headers.lua  |  144
-rw-r--r--  tex/context/base/mkiv/util-soc-imp-http.lua  |  432
-rw-r--r--  tex/context/base/mkiv/util-soc-imp-ltn12.lua  |  388
-rw-r--r--  tex/context/base/mkiv/util-soc-imp-mime.lua  |  105
-rw-r--r--  tex/context/base/mkiv/util-soc-imp-reset.lua  |  13
-rw-r--r--  tex/context/base/mkiv/util-soc-imp-smtp.lua  |  265
-rw-r--r--  tex/context/base/mkiv/util-soc-imp-socket.lua  |  190
-rw-r--r--  tex/context/base/mkiv/util-soc-imp-tp.lua  |  142
-rw-r--r--  tex/context/base/mkiv/util-soc-imp-url.lua  |  266
-rw-r--r--  tex/context/base/mkiv/util-soc.lua  |  23
-rw-r--r--  tex/context/base/mkiv/util-str.lua  |  29
-rw-r--r--  tex/context/base/mkiv/util-you.lua  |  1
-rw-r--r--  tex/context/interface/mkiv/i-context.pdf  |  bin 853260 -> 853439 bytes
-rw-r--r--  tex/context/interface/mkiv/i-readme.pdf  |  bin 61212 -> 61221 bytes
-rw-r--r--  tex/context/modules/mkiv/s-languages-system.lua  |  2
-rw-r--r--  tex/generic/context/luatex/luatex-fonts-merged.lua  |  35
93 files changed, 7702 insertions, 287 deletions
diff --git a/context/data/scite/context/lexers/data/scite-context-data-tex.lua b/context/data/scite/context/lexers/data/scite-context-data-tex.lua
index 8c518fe38..0e5056f8e 100644
--- a/context/data/scite/context/lexers/data/scite-context-data-tex.lua
+++ b/context/data/scite/context/lexers/data/scite-context-data-tex.lua
@@ -1,9 +1,9 @@
return {
["aleph"]={ "Alephminorversion", "Alephrevision", "Alephversion" },
["etex"]={ "botmarks", "clubpenalties", "currentgrouplevel", "currentgrouptype", "currentifbranch", "currentiflevel", "currentiftype", "detokenize", "dimexpr", "displaywidowpenalties", "eTeXVersion", "eTeXminorversion", "eTeXrevision", "eTeXversion", "everyeof", "firstmarks", "fontchardp", "fontcharht", "fontcharic", "fontcharwd", "glueexpr", "glueshrink", "glueshrinkorder", "gluestretch", "gluestretchorder", "gluetomu", "ifcsname", "ifdefined", "iffontchar", "interactionmode", "interlinepenalties", "lastlinefit", "lastnodetype", "marks", "muexpr", "mutoglue", "numexpr", "pagediscards", "parshapedimen", "parshapeindent", "parshapelength", "predisplaydirection", "protected", "readline", "savinghyphcodes", "savingvdiscards", "scantokens", "showgroups", "showifs", "showtokens", "splitbotmarks", "splitdiscards", "splitfirstmarks", "topmarks", "tracingassigns", "tracinggroups", "tracingifs", "tracingnesting", "tracingscantokens", "unexpanded", "unless", "widowpenalties" },
- ["luatex"]={ "Uchar", "Udelcode", "Udelcodenum", "Udelimiter", "Udelimiterover", "Udelimiterunder", "Uhextensible", "Umathaccent", "Umathaxis", "Umathbinbinspacing", "Umathbinclosespacing", "Umathbininnerspacing", "Umathbinopenspacing", "Umathbinopspacing", "Umathbinordspacing", "Umathbinpunctspacing", "Umathbinrelspacing", "Umathchar", "Umathcharclass", "Umathchardef", "Umathcharfam", "Umathcharnum", "Umathcharnumdef", "Umathcharslot", "Umathclosebinspacing", "Umathcloseclosespacing", "Umathcloseinnerspacing", "Umathcloseopenspacing", "Umathcloseopspacing", "Umathcloseordspacing", "Umathclosepunctspacing", "Umathcloserelspacing", "Umathcode", "Umathcodenum", "Umathconnectoroverlapmin", "Umathfractiondelsize", "Umathfractiondenomdown", "Umathfractiondenomvgap", "Umathfractionnumup", "Umathfractionnumvgap", "Umathfractionrule", "Umathinnerbinspacing", "Umathinnerclosespacing", "Umathinnerinnerspacing", "Umathinneropenspacing", "Umathinneropspacing", "Umathinnerordspacing", "Umathinnerpunctspacing", "Umathinnerrelspacing", "Umathlimitabovebgap", "Umathlimitabovekern", "Umathlimitabovevgap", "Umathlimitbelowbgap", "Umathlimitbelowkern", "Umathlimitbelowvgap", "Umathnolimitsubfactor", "Umathnolimitsupfactor", "Umathopbinspacing", "Umathopclosespacing", "Umathopenbinspacing", "Umathopenclosespacing", "Umathopeninnerspacing", "Umathopenopenspacing", "Umathopenopspacing", "Umathopenordspacing", "Umathopenpunctspacing", "Umathopenrelspacing", "Umathoperatorsize", "Umathopinnerspacing", "Umathopopenspacing", "Umathopopspacing", "Umathopordspacing", "Umathoppunctspacing", "Umathoprelspacing", "Umathordbinspacing", "Umathordclosespacing", "Umathordinnerspacing", "Umathordopenspacing", "Umathordopspacing", "Umathordordspacing", "Umathordpunctspacing", "Umathordrelspacing", "Umathoverbarkern", "Umathoverbarrule", "Umathoverbarvgap", "Umathoverdelimiterbgap", "Umathoverdelimitervgap", "Umathpunctbinspacing", "Umathpunctclosespacing", "Umathpunctinnerspacing", "Umathpunctopenspacing", "Umathpunctopspacing", "Umathpunctordspacing", "Umathpunctpunctspacing", "Umathpunctrelspacing", "Umathquad", "Umathradicaldegreeafter", "Umathradicaldegreebefore", "Umathradicaldegreeraise", "Umathradicalkern", "Umathradicalrule", "Umathradicalvgap", "Umathrelbinspacing", "Umathrelclosespacing", "Umathrelinnerspacing", "Umathrelopenspacing", "Umathrelopspacing", "Umathrelordspacing", "Umathrelpunctspacing", "Umathrelrelspacing", "Umathskewedfractionhgap", "Umathskewedfractionvgap", "Umathspaceafterscript", "Umathstackdenomdown", "Umathstacknumup", "Umathstackvgap", "Umathsubshiftdown", "Umathsubshiftdrop", "Umathsubsupshiftdown", "Umathsubsupvgap", "Umathsubtopmax", "Umathsupbottommin", "Umathsupshiftdrop", "Umathsupshiftup", "Umathsupsubbottommax", "Umathunderbarkern", "Umathunderbarrule", "Umathunderbarvgap", "Umathunderdelimiterbgap", "Umathunderdelimitervgap", "Unosubscript", "Unosuperscript", "Uoverdelimiter", "Uradical", "Uroot", "Uskewed", "Uskewedwithdelims", "Ustack", "Ustartdisplaymath", "Ustartmath", "Ustopdisplaymath", "Ustopmath", "Usubscript", "Usuperscript", "Uunderdelimiter", "Uvextensible", "adjustspacing", "alignmark", "aligntab", "attribute", "attributedef", "automaticdiscretionary", "automatichyphenmode", "automatichyphenpenalty", "begincsname", "bodydir", "bodydirection", "boxdir", "boxdirection", "breakafterdirmode", "catcodetable", "clearmarks", "compoundhyphenmode", "copyfont", "crampeddisplaystyle", "crampedscriptscriptstyle", "crampedscriptstyle", "crampedtextstyle", "csstring", "draftmode", 
"dviextension", "dvifeedback", "dvivariable", "efcode", "endlocalcontrol", "etoksapp", "etokspre", "exceptionpenalty", "expanded", "expandglyphsinfont", "explicitdiscretionary", "explicithyphenpenalty", "fontid", "formatname", "gleaders", "gtoksapp", "gtokspre", "hjcode", "hyphenationbounds", "hyphenationmin", "hyphenpenaltymode", "ifabsdim", "ifabsnum", "ifcondition", "ifincsname", "ifprimitive", "ignoreligaturesinfont", "immediateassigned", "immediateassignment", "initcatcodetable", "insertht", "lastnamedcs", "lastsavedboxresourceindex", "lastsavedimageresourceindex", "lastsavedimageresourcepages", "lastxpos", "lastypos", "latelua", "leftghost", "leftmarginkern", "letcharcode", "letterspacefont", "linedir", "linedirection", "localbrokenpenalty", "localinterlinepenalty", "localleftbox", "localrightbox", "lpcode", "luabytecode", "luabytecodecall", "luacopyinputnodes", "luadef", "luaescapestring", "luafunction", "luafunctioncall", "luatexbanner", "luatexrevision", "luatexversion", "mathdelimitersmode", "mathdir", "mathdirection", "mathdisplayskipmode", "matheqnogapstep", "mathflattenmode", "mathitalicsmode", "mathnolimitsmode", "mathoption", "mathpenaltiesmode", "mathrulesfam", "mathrulesmode", "mathrulethicknessmode", "mathscriptboxmode", "mathscriptcharmode", "mathscriptsmode", "mathstyle", "mathsurroundmode", "mathsurroundskip", "nohrule", "nokerns", "noligs", "normaldeviate", "nospaces", "novrule", "outputbox", "outputmode", "pagebottomoffset", "pagedir", "pagedirection", "pageheight", "pageleftoffset", "pagerightoffset", "pagetopoffset", "pagewidth", "pardir", "pardirection", "pdfextension", "pdffeedback", "pdfvariable", "postexhyphenchar", "posthyphenchar", "prebinoppenalty", "predisplaygapfactor", "preexhyphenchar", "prehyphenchar", "prerelpenalty", "primitive", "protrudechars", "pxdimen", "quitvmode", "randomseed", "rightghost", "rightmarginkern", "rpcode", "saveboxresource", "savecatcodetable", "saveimageresource", "savepos", "scantextokens", "setfontid", "setrandomseed", "shapemode", "suppressfontnotfounderror", "suppressifcsnameerror", "suppresslongerror", "suppressmathparerror", "suppressoutererror", "suppressprimitiveerror", "synctex", "tagcode", "textdir", "textdirection", "toksapp", "tokspre", "tracingfonts", "uniformdeviate", "useboxresource", "useimageresource", "xtoksapp", "xtokspre" },
+ ["luatex"]={ "Uchar", "Udelcode", "Udelcodenum", "Udelimiter", "Udelimiterover", "Udelimiterunder", "Uhextensible", "Umathaccent", "Umathaxis", "Umathbinbinspacing", "Umathbinclosespacing", "Umathbininnerspacing", "Umathbinopenspacing", "Umathbinopspacing", "Umathbinordspacing", "Umathbinpunctspacing", "Umathbinrelspacing", "Umathchar", "Umathcharclass", "Umathchardef", "Umathcharfam", "Umathcharnum", "Umathcharnumdef", "Umathcharslot", "Umathclosebinspacing", "Umathcloseclosespacing", "Umathcloseinnerspacing", "Umathcloseopenspacing", "Umathcloseopspacing", "Umathcloseordspacing", "Umathclosepunctspacing", "Umathcloserelspacing", "Umathcode", "Umathcodenum", "Umathconnectoroverlapmin", "Umathfractiondelsize", "Umathfractiondenomdown", "Umathfractiondenomvgap", "Umathfractionnumup", "Umathfractionnumvgap", "Umathfractionrule", "Umathinnerbinspacing", "Umathinnerclosespacing", "Umathinnerinnerspacing", "Umathinneropenspacing", "Umathinneropspacing", "Umathinnerordspacing", "Umathinnerpunctspacing", "Umathinnerrelspacing", "Umathlimitabovebgap", "Umathlimitabovekern", "Umathlimitabovevgap", "Umathlimitbelowbgap", "Umathlimitbelowkern", "Umathlimitbelowvgap", "Umathnolimitsubfactor", "Umathnolimitsupfactor", "Umathopbinspacing", "Umathopclosespacing", "Umathopenbinspacing", "Umathopenclosespacing", "Umathopeninnerspacing", "Umathopenopenspacing", "Umathopenopspacing", "Umathopenordspacing", "Umathopenpunctspacing", "Umathopenrelspacing", "Umathoperatorsize", "Umathopinnerspacing", "Umathopopenspacing", "Umathopopspacing", "Umathopordspacing", "Umathoppunctspacing", "Umathoprelspacing", "Umathordbinspacing", "Umathordclosespacing", "Umathordinnerspacing", "Umathordopenspacing", "Umathordopspacing", "Umathordordspacing", "Umathordpunctspacing", "Umathordrelspacing", "Umathoverbarkern", "Umathoverbarrule", "Umathoverbarvgap", "Umathoverdelimiterbgap", "Umathoverdelimitervgap", "Umathpunctbinspacing", "Umathpunctclosespacing", "Umathpunctinnerspacing", "Umathpunctopenspacing", "Umathpunctopspacing", "Umathpunctordspacing", "Umathpunctpunctspacing", "Umathpunctrelspacing", "Umathquad", "Umathradicaldegreeafter", "Umathradicaldegreebefore", "Umathradicaldegreeraise", "Umathradicalkern", "Umathradicalrule", "Umathradicalvgap", "Umathrelbinspacing", "Umathrelclosespacing", "Umathrelinnerspacing", "Umathrelopenspacing", "Umathrelopspacing", "Umathrelordspacing", "Umathrelpunctspacing", "Umathrelrelspacing", "Umathskewedfractionhgap", "Umathskewedfractionvgap", "Umathspaceafterscript", "Umathstackdenomdown", "Umathstacknumup", "Umathstackvgap", "Umathsubshiftdown", "Umathsubshiftdrop", "Umathsubsupshiftdown", "Umathsubsupvgap", "Umathsubtopmax", "Umathsupbottommin", "Umathsupshiftdrop", "Umathsupshiftup", "Umathsupsubbottommax", "Umathunderbarkern", "Umathunderbarrule", "Umathunderbarvgap", "Umathunderdelimiterbgap", "Umathunderdelimitervgap", "Unosubscript", "Unosuperscript", "Uoverdelimiter", "Uradical", "Uroot", "Uskewed", "Uskewedwithdelims", "Ustack", "Ustartdisplaymath", "Ustartmath", "Ustopdisplaymath", "Ustopmath", "Usubscript", "Usuperscript", "Uunderdelimiter", "Uvextensible", "adjustspacing", "alignmark", "aligntab", "attribute", "attributedef", "automaticdiscretionary", "automatichyphenmode", "automatichyphenpenalty", "begincsname", "bodydir", "bodydirection", "boxdir", "boxdirection", "breakafterdirmode", "catcodetable", "clearmarks", "compoundhyphenmode", "copyfont", "crampeddisplaystyle", "crampedscriptscriptstyle", "crampedscriptstyle", "crampedtextstyle", "csstring", "draftmode", 
"dviextension", "dvifeedback", "dvivariable", "efcode", "endlocalcontrol", "etoksapp", "etokspre", "exceptionpenalty", "expanded", "expandglyphsinfont", "explicitdiscretionary", "explicithyphenpenalty", "fixupboxesmode", "fontid", "formatname", "gleaders", "gtoksapp", "gtokspre", "hjcode", "hyphenationbounds", "hyphenationmin", "hyphenpenaltymode", "ifabsdim", "ifabsnum", "ifcondition", "ifincsname", "ifprimitive", "ignoreligaturesinfont", "immediateassigned", "immediateassignment", "initcatcodetable", "insertht", "lastnamedcs", "lastsavedboxresourceindex", "lastsavedimageresourceindex", "lastsavedimageresourcepages", "lastxpos", "lastypos", "latelua", "leftghost", "leftmarginkern", "letcharcode", "letterspacefont", "linedir", "linedirection", "localbrokenpenalty", "localinterlinepenalty", "localleftbox", "localrightbox", "lpcode", "luabytecode", "luabytecodecall", "luacopyinputnodes", "luadef", "luaescapestring", "luafunction", "luafunctioncall", "luatexbanner", "luatexrevision", "luatexversion", "mathdelimitersmode", "mathdir", "mathdirection", "mathdisplayskipmode", "matheqnogapstep", "mathflattenmode", "mathitalicsmode", "mathnolimitsmode", "mathoption", "mathpenaltiesmode", "mathrulesfam", "mathrulesmode", "mathrulethicknessmode", "mathscriptboxmode", "mathscriptcharmode", "mathscriptsmode", "mathstyle", "mathsurroundmode", "mathsurroundskip", "nohrule", "nokerns", "noligs", "normaldeviate", "nospaces", "novrule", "outputbox", "outputmode", "pagebottomoffset", "pagedir", "pagedirection", "pageheight", "pageleftoffset", "pagerightoffset", "pagetopoffset", "pagewidth", "pardir", "pardirection", "pdfextension", "pdffeedback", "pdfvariable", "postexhyphenchar", "posthyphenchar", "prebinoppenalty", "predisplaygapfactor", "preexhyphenchar", "prehyphenchar", "prerelpenalty", "primitive", "protrudechars", "pxdimen", "quitvmode", "randomseed", "rightghost", "rightmarginkern", "rpcode", "saveboxresource", "savecatcodetable", "saveimageresource", "savepos", "scantextokens", "setfontid", "setrandomseed", "shapemode", "suppressfontnotfounderror", "suppressifcsnameerror", "suppresslongerror", "suppressmathparerror", "suppressoutererror", "suppressprimitiveerror", "synctex", "tagcode", "textdir", "textdirection", "toksapp", "tokspre", "tracingfonts", "uniformdeviate", "useboxresource", "useimageresource", "xtoksapp", "xtokspre" },
["omega"]={ "Omegaminorversion", "Omegarevision", "Omegaversion" },
- ["pdftex"]={ "ifpdfabsdim", "ifpdfabsnum", "ifpdfprimitive", "pdfadjustspacing", "pdfannot", "pdfcatalog", "pdfcolorstack", "pdfcolorstackinit", "pdfcompresslevel", "pdfcopyfont", "pdfcreationdate", "pdfdecimaldigits", "pdfdest", "pdfdestmargin", "pdfdraftmode", "pdfeachlinedepth", "pdfeachlineheight", "pdfendlink", "pdfendthread", "pdffirstlineheight", "pdffontattr", "pdffontexpand", "pdffontname", "pdffontobjnum", "pdffontsize", "pdfgamma", "pdfgentounicode", "pdfglyphtounicode", "pdfhorigin", "pdfignoreddimen", "pdfignoreunknownimages", "pdfimageaddfilename", "pdfimageapplygamma", "pdfimagegamma", "pdfimagehicolor", "pdfimageresolution", "pdfincludechars", "pdfinclusioncopyfonts", "pdfinclusionerrorlevel", "pdfinfo", "pdfinfoomitdate", "pdfinsertht", "pdflastannot", "pdflastlinedepth", "pdflastlink", "pdflastobj", "pdflastxform", "pdflastximage", "pdflastximagepages", "pdflastxpos", "pdflastypos", "pdflinkmargin", "pdfliteral", "pdfmajorversion", "pdfmapfile", "pdfmapline", "pdfminorversion", "pdfnames", "pdfnoligatures", "pdfnormaldeviate", "pdfobj", "pdfobjcompresslevel", "pdfomitcidset", "pdfoutline", "pdfoutput", "pdfpageattr", "pdfpagebox", "pdfpageheight", "pdfpageref", "pdfpageresources", "pdfpagesattr", "pdfpagewidth", "pdfpkfixeddpi", "pdfpkmode", "pdfpkresolution", "pdfprimitive", "pdfprotrudechars", "pdfpxdimen", "pdfrandomseed", "pdfrecompress", "pdfrefobj", "pdfrefxform", "pdfrefximage", "pdfreplacefont", "pdfrestore", "pdfretval", "pdfsave", "pdfsavepos", "pdfsetmatrix", "pdfsetrandomseed", "pdfstartlink", "pdfstartthread", "pdfsuppressoptionalinfo", "pdfsuppressptexinfo", "pdftexbanner", "pdftexrevision", "pdftexversion", "pdfthread", "pdfthreadmargin", "pdftracingfonts", "pdftrailer", "pdftrailerid", "pdfuniformdeviate", "pdfuniqueresname", "pdfvorigin", "pdfxform", "pdfxformattr", "pdfxformmargin", "pdfxformname", "pdfxformresources", "pdfximage" },
+ ["pdftex"]={ "ifpdfabsdim", "ifpdfabsnum", "ifpdfprimitive", "pdfadjustspacing", "pdfannot", "pdfcatalog", "pdfcolorstack", "pdfcolorstackinit", "pdfcompresslevel", "pdfcopyfont", "pdfcreationdate", "pdfdecimaldigits", "pdfdest", "pdfdestmargin", "pdfdraftmode", "pdfeachlinedepth", "pdfeachlineheight", "pdfendlink", "pdfendthread", "pdffirstlineheight", "pdffontattr", "pdffontexpand", "pdffontname", "pdffontobjnum", "pdffontsize", "pdfgamma", "pdfgentounicode", "pdfglyphtounicode", "pdfhorigin", "pdfignoreddimen", "pdfignoreunknownimages", "pdfimageaddfilename", "pdfimageapplygamma", "pdfimagegamma", "pdfimagehicolor", "pdfimageresolution", "pdfincludechars", "pdfinclusioncopyfonts", "pdfinclusionerrorlevel", "pdfinfo", "pdfinfoomitdate", "pdfinsertht", "pdflastannot", "pdflastlinedepth", "pdflastlink", "pdflastobj", "pdflastxform", "pdflastximage", "pdflastximagepages", "pdflastxpos", "pdflastypos", "pdflinkmargin", "pdfliteral", "pdfmajorversion", "pdfmapfile", "pdfmapline", "pdfminorversion", "pdfnames", "pdfnoligatures", "pdfnormaldeviate", "pdfobj", "pdfobjcompresslevel", "pdfomitcharset", "pdfomitcidset", "pdfoutline", "pdfoutput", "pdfpageattr", "pdfpagebox", "pdfpageheight", "pdfpageref", "pdfpageresources", "pdfpagesattr", "pdfpagewidth", "pdfpkfixeddpi", "pdfpkmode", "pdfpkresolution", "pdfprimitive", "pdfprotrudechars", "pdfpxdimen", "pdfrandomseed", "pdfrecompress", "pdfrefobj", "pdfrefxform", "pdfrefximage", "pdfreplacefont", "pdfrestore", "pdfretval", "pdfsave", "pdfsavepos", "pdfsetmatrix", "pdfsetrandomseed", "pdfstartlink", "pdfstartthread", "pdfsuppressoptionalinfo", "pdfsuppressptexinfo", "pdftexbanner", "pdftexrevision", "pdftexversion", "pdfthread", "pdfthreadmargin", "pdftracingfonts", "pdftrailer", "pdftrailerid", "pdfuniformdeviate", "pdfuniqueresname", "pdfvorigin", "pdfxform", "pdfxformattr", "pdfxformmargin", "pdfxformname", "pdfxformresources", "pdfximage" },
["tex"]={ " ", "-", "/", "Uleft", "Umiddle", "Uright", "above", "abovedisplayshortskip", "abovedisplayskip", "abovewithdelims", "accent", "adjdemerits", "advance", "afterassignment", "aftergroup", "atop", "atopwithdelims", "badness", "baselineskip", "batchmode", "begingroup", "belowdisplayshortskip", "belowdisplayskip", "binoppenalty", "botmark", "boundary", "box", "boxmaxdepth", "brokenpenalty", "catcode", "char", "chardef", "cleaders", "closein", "closeout", "clubpenalty", "copy", "count", "countdef", "cr", "crcr", "csname", "day", "deadcycles", "def", "defaulthyphenchar", "defaultskewchar", "delcode", "delimiter", "delimiterfactor", "delimitershortfall", "dimen", "dimendef", "directlua", "discretionary", "displayindent", "displaylimits", "displaystyle", "displaywidowpenalty", "displaywidth", "divide", "doublehyphendemerits", "dp", "dump", "edef", "else", "emergencystretch", "end", "endcsname", "endgroup", "endinput", "endlinechar", "eqno", "errhelp", "errmessage", "errorcontextlines", "errorstopmode", "escapechar", "everycr", "everydisplay", "everyhbox", "everyjob", "everymath", "everypar", "everyvbox", "exhyphenchar", "exhyphenpenalty", "expandafter", "fam", "fi", "finalhyphendemerits", "firstmark", "firstvalidlanguage", "floatingpenalty", "font", "fontdimen", "fontname", "futurelet", "gdef", "glet", "global", "globaldefs", "halign", "hangafter", "hangindent", "hbadness", "hbox", "hfil", "hfill", "hfilneg", "hfuzz", "hoffset", "holdinginserts", "hpack", "hrule", "hsize", "hskip", "hss", "ht", "hyphenation", "hyphenchar", "hyphenpenalty", "if", "ifcase", "ifcat", "ifdim", "ifeof", "iffalse", "ifhbox", "ifhmode", "ifinner", "ifmmode", "ifnum", "ifodd", "iftrue", "ifvbox", "ifvmode", "ifvoid", "ifx", "ignorespaces", "immediate", "indent", "input", "inputlineno", "insert", "insertpenalties", "interlinepenalty", "jobname", "kern", "language", "lastbox", "lastkern", "lastpenalty", "lastskip", "lccode", "leaders", "left", "lefthyphenmin", "leftskip", "leqno", "let", "limits", "linepenalty", "lineskip", "lineskiplimit", "long", "looseness", "lower", "lowercase", "mag", "mark", "mathaccent", "mathbin", "mathchar", "mathchardef", "mathchoice", "mathclose", "mathcode", "mathinner", "mathop", "mathopen", "mathord", "mathpunct", "mathrel", "mathsurround", "maxdeadcycles", "maxdepth", "meaning", "medmuskip", "message", "middle", "mkern", "month", "moveleft", "moveright", "mskip", "multiply", "muskip", "muskipdef", "newlinechar", "noalign", "noboundary", "noexpand", "noindent", "nolimits", "nonscript", "nonstopmode", "nulldelimiterspace", "nullfont", "number", "omit", "openin", "openout", "or", "outer", "output", "outputpenalty", "over", "overfullrule", "overline", "overwithdelims", "pagedepth", "pagefilllstretch", "pagefillstretch", "pagefilstretch", "pagegoal", "pageshrink", "pagestretch", "pagetotal", "par", "parfillskip", "parindent", "parshape", "parskip", "patterns", "pausing", "penalty", "postdisplaypenalty", "predisplaypenalty", "predisplaysize", "pretolerance", "prevdepth", "prevgraf", "protrusionboundary", "radical", "raise", "read", "relax", "relpenalty", "right", "righthyphenmin", "rightskip", "romannumeral", "scriptfont", "scriptscriptfont", "scriptscriptstyle", "scriptspace", "scriptstyle", "scrollmode", "setbox", "setlanguage", "sfcode", "shipout", "show", "showbox", "showboxbreadth", "showboxdepth", "showlists", "showthe", "skewchar", "skip", "skipdef", "spacefactor", "spaceskip", "span", "special", "splitbotmark", "splitfirstmark", "splitmaxdepth", "splittopskip", "string", 
"tabskip", "textfont", "textstyle", "the", "thickmuskip", "thinmuskip", "time", "toks", "toksdef", "tolerance", "topmark", "topskip", "tpack", "tracingcommands", "tracinglostchars", "tracingmacros", "tracingonline", "tracingoutput", "tracingpages", "tracingparagraphs", "tracingrestores", "tracingstats", "uccode", "uchyph", "underline", "unhbox", "unhcopy", "unkern", "unpenalty", "unskip", "unvbox", "unvcopy", "uppercase", "vadjust", "valign", "vbadness", "vbox", "vcenter", "vfil", "vfill", "vfilneg", "vfuzz", "voffset", "vpack", "vrule", "vsize", "vskip", "vsplit", "vss", "vtop", "wd", "widowpenalty", "wordboundary", "write", "xdef", "xleaders", "xspaceskip", "year" },
["xetex"]={ "XeTeXversion" },
}
\ No newline at end of file
diff --git a/context/data/scite/context/scite-context-data-tex.properties b/context/data/scite/context/scite-context-data-tex.properties
index bfa01d6ad..0dfb6b119 100644
--- a/context/data/scite/context/scite-context-data-tex.properties
+++ b/context/data/scite/context/scite-context-data-tex.properties
@@ -53,33 +53,33 @@ breakafterdirmode catcodetable clearmarks compoundhyphenmode copyfont \
crampeddisplaystyle crampedscriptscriptstyle crampedscriptstyle crampedtextstyle csstring \
draftmode dviextension dvifeedback dvivariable efcode \
endlocalcontrol etoksapp etokspre exceptionpenalty expanded \
-expandglyphsinfont explicitdiscretionary explicithyphenpenalty fontid formatname \
-gleaders gtoksapp gtokspre hjcode hyphenationbounds \
-hyphenationmin hyphenpenaltymode ifabsdim ifabsnum ifcondition \
-ifincsname ifprimitive ignoreligaturesinfont immediateassigned immediateassignment \
-initcatcodetable insertht lastnamedcs lastsavedboxresourceindex lastsavedimageresourceindex \
-lastsavedimageresourcepages lastxpos lastypos latelua leftghost \
-leftmarginkern letcharcode letterspacefont linedir linedirection \
-localbrokenpenalty localinterlinepenalty localleftbox localrightbox lpcode \
-luabytecode luabytecodecall luacopyinputnodes luadef luaescapestring \
-luafunction luafunctioncall luatexbanner luatexrevision luatexversion \
-mathdelimitersmode mathdir mathdirection mathdisplayskipmode matheqnogapstep \
-mathflattenmode mathitalicsmode mathnolimitsmode mathoption mathpenaltiesmode \
-mathrulesfam mathrulesmode mathrulethicknessmode mathscriptboxmode mathscriptcharmode \
-mathscriptsmode mathstyle mathsurroundmode mathsurroundskip nohrule \
-nokerns noligs normaldeviate nospaces novrule \
-outputbox outputmode pagebottomoffset pagedir pagedirection \
-pageheight pageleftoffset pagerightoffset pagetopoffset pagewidth \
-pardir pardirection pdfextension pdffeedback pdfvariable \
-postexhyphenchar posthyphenchar prebinoppenalty predisplaygapfactor preexhyphenchar \
-prehyphenchar prerelpenalty primitive protrudechars pxdimen \
-quitvmode randomseed rightghost rightmarginkern rpcode \
-saveboxresource savecatcodetable saveimageresource savepos scantextokens \
-setfontid setrandomseed shapemode suppressfontnotfounderror suppressifcsnameerror \
-suppresslongerror suppressmathparerror suppressoutererror suppressprimitiveerror synctex \
-tagcode textdir textdirection toksapp tokspre \
-tracingfonts uniformdeviate useboxresource useimageresource xtoksapp \
-xtokspre
+expandglyphsinfont explicitdiscretionary explicithyphenpenalty fixupboxesmode fontid \
+formatname gleaders gtoksapp gtokspre hjcode \
+hyphenationbounds hyphenationmin hyphenpenaltymode ifabsdim ifabsnum \
+ifcondition ifincsname ifprimitive ignoreligaturesinfont immediateassigned \
+immediateassignment initcatcodetable insertht lastnamedcs lastsavedboxresourceindex \
+lastsavedimageresourceindex lastsavedimageresourcepages lastxpos lastypos latelua \
+leftghost leftmarginkern letcharcode letterspacefont linedir \
+linedirection localbrokenpenalty localinterlinepenalty localleftbox localrightbox \
+lpcode luabytecode luabytecodecall luacopyinputnodes luadef \
+luaescapestring luafunction luafunctioncall luatexbanner luatexrevision \
+luatexversion mathdelimitersmode mathdir mathdirection mathdisplayskipmode \
+matheqnogapstep mathflattenmode mathitalicsmode mathnolimitsmode mathoption \
+mathpenaltiesmode mathrulesfam mathrulesmode mathrulethicknessmode mathscriptboxmode \
+mathscriptcharmode mathscriptsmode mathstyle mathsurroundmode mathsurroundskip \
+nohrule nokerns noligs normaldeviate nospaces \
+novrule outputbox outputmode pagebottomoffset pagedir \
+pagedirection pageheight pageleftoffset pagerightoffset pagetopoffset \
+pagewidth pardir pardirection pdfextension pdffeedback \
+pdfvariable postexhyphenchar posthyphenchar prebinoppenalty predisplaygapfactor \
+preexhyphenchar prehyphenchar prerelpenalty primitive protrudechars \
+pxdimen quitvmode randomseed rightghost rightmarginkern \
+rpcode saveboxresource savecatcodetable saveimageresource savepos \
+scantextokens setfontid setrandomseed shapemode suppressfontnotfounderror \
+suppressifcsnameerror suppresslongerror suppressmathparerror suppressoutererror suppressprimitiveerror \
+synctex tagcode textdir textdirection toksapp \
+tokspre tracingfonts uniformdeviate useboxresource useimageresource \
+xtoksapp xtokspre
keywordclass.tex.omega=\
Omegaminorversion Omegarevision Omegaversion
@@ -97,17 +97,17 @@ pdfinfo pdfinfoomitdate pdfinsertht pdflastannot pdflastlinedepth \
pdflastlink pdflastobj pdflastxform pdflastximage pdflastximagepages \
pdflastxpos pdflastypos pdflinkmargin pdfliteral pdfmajorversion \
pdfmapfile pdfmapline pdfminorversion pdfnames pdfnoligatures \
-pdfnormaldeviate pdfobj pdfobjcompresslevel pdfomitcidset pdfoutline \
-pdfoutput pdfpageattr pdfpagebox pdfpageheight pdfpageref \
-pdfpageresources pdfpagesattr pdfpagewidth pdfpkfixeddpi pdfpkmode \
-pdfpkresolution pdfprimitive pdfprotrudechars pdfpxdimen pdfrandomseed \
-pdfrecompress pdfrefobj pdfrefxform pdfrefximage pdfreplacefont \
-pdfrestore pdfretval pdfsave pdfsavepos pdfsetmatrix \
-pdfsetrandomseed pdfstartlink pdfstartthread pdfsuppressoptionalinfo pdfsuppressptexinfo \
-pdftexbanner pdftexrevision pdftexversion pdfthread pdfthreadmargin \
-pdftracingfonts pdftrailer pdftrailerid pdfuniformdeviate pdfuniqueresname \
-pdfvorigin pdfxform pdfxformattr pdfxformmargin pdfxformname \
-pdfxformresources pdfximage
+pdfnormaldeviate pdfobj pdfobjcompresslevel pdfomitcharset pdfomitcidset \
+pdfoutline pdfoutput pdfpageattr pdfpagebox pdfpageheight \
+pdfpageref pdfpageresources pdfpagesattr pdfpagewidth pdfpkfixeddpi \
+pdfpkmode pdfpkresolution pdfprimitive pdfprotrudechars pdfpxdimen \
+pdfrandomseed pdfrecompress pdfrefobj pdfrefxform pdfrefximage \
+pdfreplacefont pdfrestore pdfretval pdfsave pdfsavepos \
+pdfsetmatrix pdfsetrandomseed pdfstartlink pdfstartthread pdfsuppressoptionalinfo \
+pdfsuppressptexinfo pdftexbanner pdftexrevision pdftexversion pdfthread \
+pdfthreadmargin pdftracingfonts pdftrailer pdftrailerid pdfuniformdeviate \
+pdfuniqueresname pdfvorigin pdfxform pdfxformattr pdfxformmargin \
+pdfxformname pdfxformresources pdfximage
keywordclass.tex.tex=\
- / Uleft \
diff --git a/context/data/textadept/context/data/scite-context-data-tex.lua b/context/data/textadept/context/data/scite-context-data-tex.lua
index 8c518fe38..0e5056f8e 100644
--- a/context/data/textadept/context/data/scite-context-data-tex.lua
+++ b/context/data/textadept/context/data/scite-context-data-tex.lua
@@ -1,9 +1,9 @@
return {
["aleph"]={ "Alephminorversion", "Alephrevision", "Alephversion" },
["etex"]={ "botmarks", "clubpenalties", "currentgrouplevel", "currentgrouptype", "currentifbranch", "currentiflevel", "currentiftype", "detokenize", "dimexpr", "displaywidowpenalties", "eTeXVersion", "eTeXminorversion", "eTeXrevision", "eTeXversion", "everyeof", "firstmarks", "fontchardp", "fontcharht", "fontcharic", "fontcharwd", "glueexpr", "glueshrink", "glueshrinkorder", "gluestretch", "gluestretchorder", "gluetomu", "ifcsname", "ifdefined", "iffontchar", "interactionmode", "interlinepenalties", "lastlinefit", "lastnodetype", "marks", "muexpr", "mutoglue", "numexpr", "pagediscards", "parshapedimen", "parshapeindent", "parshapelength", "predisplaydirection", "protected", "readline", "savinghyphcodes", "savingvdiscards", "scantokens", "showgroups", "showifs", "showtokens", "splitbotmarks", "splitdiscards", "splitfirstmarks", "topmarks", "tracingassigns", "tracinggroups", "tracingifs", "tracingnesting", "tracingscantokens", "unexpanded", "unless", "widowpenalties" },
- ["luatex"]={ "Uchar", "Udelcode", "Udelcodenum", "Udelimiter", "Udelimiterover", "Udelimiterunder", "Uhextensible", "Umathaccent", "Umathaxis", "Umathbinbinspacing", "Umathbinclosespacing", "Umathbininnerspacing", "Umathbinopenspacing", "Umathbinopspacing", "Umathbinordspacing", "Umathbinpunctspacing", "Umathbinrelspacing", "Umathchar", "Umathcharclass", "Umathchardef", "Umathcharfam", "Umathcharnum", "Umathcharnumdef", "Umathcharslot", "Umathclosebinspacing", "Umathcloseclosespacing", "Umathcloseinnerspacing", "Umathcloseopenspacing", "Umathcloseopspacing", "Umathcloseordspacing", "Umathclosepunctspacing", "Umathcloserelspacing", "Umathcode", "Umathcodenum", "Umathconnectoroverlapmin", "Umathfractiondelsize", "Umathfractiondenomdown", "Umathfractiondenomvgap", "Umathfractionnumup", "Umathfractionnumvgap", "Umathfractionrule", "Umathinnerbinspacing", "Umathinnerclosespacing", "Umathinnerinnerspacing", "Umathinneropenspacing", "Umathinneropspacing", "Umathinnerordspacing", "Umathinnerpunctspacing", "Umathinnerrelspacing", "Umathlimitabovebgap", "Umathlimitabovekern", "Umathlimitabovevgap", "Umathlimitbelowbgap", "Umathlimitbelowkern", "Umathlimitbelowvgap", "Umathnolimitsubfactor", "Umathnolimitsupfactor", "Umathopbinspacing", "Umathopclosespacing", "Umathopenbinspacing", "Umathopenclosespacing", "Umathopeninnerspacing", "Umathopenopenspacing", "Umathopenopspacing", "Umathopenordspacing", "Umathopenpunctspacing", "Umathopenrelspacing", "Umathoperatorsize", "Umathopinnerspacing", "Umathopopenspacing", "Umathopopspacing", "Umathopordspacing", "Umathoppunctspacing", "Umathoprelspacing", "Umathordbinspacing", "Umathordclosespacing", "Umathordinnerspacing", "Umathordopenspacing", "Umathordopspacing", "Umathordordspacing", "Umathordpunctspacing", "Umathordrelspacing", "Umathoverbarkern", "Umathoverbarrule", "Umathoverbarvgap", "Umathoverdelimiterbgap", "Umathoverdelimitervgap", "Umathpunctbinspacing", "Umathpunctclosespacing", "Umathpunctinnerspacing", "Umathpunctopenspacing", "Umathpunctopspacing", "Umathpunctordspacing", "Umathpunctpunctspacing", "Umathpunctrelspacing", "Umathquad", "Umathradicaldegreeafter", "Umathradicaldegreebefore", "Umathradicaldegreeraise", "Umathradicalkern", "Umathradicalrule", "Umathradicalvgap", "Umathrelbinspacing", "Umathrelclosespacing", "Umathrelinnerspacing", "Umathrelopenspacing", "Umathrelopspacing", "Umathrelordspacing", "Umathrelpunctspacing", "Umathrelrelspacing", "Umathskewedfractionhgap", "Umathskewedfractionvgap", "Umathspaceafterscript", "Umathstackdenomdown", "Umathstacknumup", "Umathstackvgap", "Umathsubshiftdown", "Umathsubshiftdrop", "Umathsubsupshiftdown", "Umathsubsupvgap", "Umathsubtopmax", "Umathsupbottommin", "Umathsupshiftdrop", "Umathsupshiftup", "Umathsupsubbottommax", "Umathunderbarkern", "Umathunderbarrule", "Umathunderbarvgap", "Umathunderdelimiterbgap", "Umathunderdelimitervgap", "Unosubscript", "Unosuperscript", "Uoverdelimiter", "Uradical", "Uroot", "Uskewed", "Uskewedwithdelims", "Ustack", "Ustartdisplaymath", "Ustartmath", "Ustopdisplaymath", "Ustopmath", "Usubscript", "Usuperscript", "Uunderdelimiter", "Uvextensible", "adjustspacing", "alignmark", "aligntab", "attribute", "attributedef", "automaticdiscretionary", "automatichyphenmode", "automatichyphenpenalty", "begincsname", "bodydir", "bodydirection", "boxdir", "boxdirection", "breakafterdirmode", "catcodetable", "clearmarks", "compoundhyphenmode", "copyfont", "crampeddisplaystyle", "crampedscriptscriptstyle", "crampedscriptstyle", "crampedtextstyle", "csstring", "draftmode", 
"dviextension", "dvifeedback", "dvivariable", "efcode", "endlocalcontrol", "etoksapp", "etokspre", "exceptionpenalty", "expanded", "expandglyphsinfont", "explicitdiscretionary", "explicithyphenpenalty", "fontid", "formatname", "gleaders", "gtoksapp", "gtokspre", "hjcode", "hyphenationbounds", "hyphenationmin", "hyphenpenaltymode", "ifabsdim", "ifabsnum", "ifcondition", "ifincsname", "ifprimitive", "ignoreligaturesinfont", "immediateassigned", "immediateassignment", "initcatcodetable", "insertht", "lastnamedcs", "lastsavedboxresourceindex", "lastsavedimageresourceindex", "lastsavedimageresourcepages", "lastxpos", "lastypos", "latelua", "leftghost", "leftmarginkern", "letcharcode", "letterspacefont", "linedir", "linedirection", "localbrokenpenalty", "localinterlinepenalty", "localleftbox", "localrightbox", "lpcode", "luabytecode", "luabytecodecall", "luacopyinputnodes", "luadef", "luaescapestring", "luafunction", "luafunctioncall", "luatexbanner", "luatexrevision", "luatexversion", "mathdelimitersmode", "mathdir", "mathdirection", "mathdisplayskipmode", "matheqnogapstep", "mathflattenmode", "mathitalicsmode", "mathnolimitsmode", "mathoption", "mathpenaltiesmode", "mathrulesfam", "mathrulesmode", "mathrulethicknessmode", "mathscriptboxmode", "mathscriptcharmode", "mathscriptsmode", "mathstyle", "mathsurroundmode", "mathsurroundskip", "nohrule", "nokerns", "noligs", "normaldeviate", "nospaces", "novrule", "outputbox", "outputmode", "pagebottomoffset", "pagedir", "pagedirection", "pageheight", "pageleftoffset", "pagerightoffset", "pagetopoffset", "pagewidth", "pardir", "pardirection", "pdfextension", "pdffeedback", "pdfvariable", "postexhyphenchar", "posthyphenchar", "prebinoppenalty", "predisplaygapfactor", "preexhyphenchar", "prehyphenchar", "prerelpenalty", "primitive", "protrudechars", "pxdimen", "quitvmode", "randomseed", "rightghost", "rightmarginkern", "rpcode", "saveboxresource", "savecatcodetable", "saveimageresource", "savepos", "scantextokens", "setfontid", "setrandomseed", "shapemode", "suppressfontnotfounderror", "suppressifcsnameerror", "suppresslongerror", "suppressmathparerror", "suppressoutererror", "suppressprimitiveerror", "synctex", "tagcode", "textdir", "textdirection", "toksapp", "tokspre", "tracingfonts", "uniformdeviate", "useboxresource", "useimageresource", "xtoksapp", "xtokspre" },
+ ["luatex"]={ "Uchar", "Udelcode", "Udelcodenum", "Udelimiter", "Udelimiterover", "Udelimiterunder", "Uhextensible", "Umathaccent", "Umathaxis", "Umathbinbinspacing", "Umathbinclosespacing", "Umathbininnerspacing", "Umathbinopenspacing", "Umathbinopspacing", "Umathbinordspacing", "Umathbinpunctspacing", "Umathbinrelspacing", "Umathchar", "Umathcharclass", "Umathchardef", "Umathcharfam", "Umathcharnum", "Umathcharnumdef", "Umathcharslot", "Umathclosebinspacing", "Umathcloseclosespacing", "Umathcloseinnerspacing", "Umathcloseopenspacing", "Umathcloseopspacing", "Umathcloseordspacing", "Umathclosepunctspacing", "Umathcloserelspacing", "Umathcode", "Umathcodenum", "Umathconnectoroverlapmin", "Umathfractiondelsize", "Umathfractiondenomdown", "Umathfractiondenomvgap", "Umathfractionnumup", "Umathfractionnumvgap", "Umathfractionrule", "Umathinnerbinspacing", "Umathinnerclosespacing", "Umathinnerinnerspacing", "Umathinneropenspacing", "Umathinneropspacing", "Umathinnerordspacing", "Umathinnerpunctspacing", "Umathinnerrelspacing", "Umathlimitabovebgap", "Umathlimitabovekern", "Umathlimitabovevgap", "Umathlimitbelowbgap", "Umathlimitbelowkern", "Umathlimitbelowvgap", "Umathnolimitsubfactor", "Umathnolimitsupfactor", "Umathopbinspacing", "Umathopclosespacing", "Umathopenbinspacing", "Umathopenclosespacing", "Umathopeninnerspacing", "Umathopenopenspacing", "Umathopenopspacing", "Umathopenordspacing", "Umathopenpunctspacing", "Umathopenrelspacing", "Umathoperatorsize", "Umathopinnerspacing", "Umathopopenspacing", "Umathopopspacing", "Umathopordspacing", "Umathoppunctspacing", "Umathoprelspacing", "Umathordbinspacing", "Umathordclosespacing", "Umathordinnerspacing", "Umathordopenspacing", "Umathordopspacing", "Umathordordspacing", "Umathordpunctspacing", "Umathordrelspacing", "Umathoverbarkern", "Umathoverbarrule", "Umathoverbarvgap", "Umathoverdelimiterbgap", "Umathoverdelimitervgap", "Umathpunctbinspacing", "Umathpunctclosespacing", "Umathpunctinnerspacing", "Umathpunctopenspacing", "Umathpunctopspacing", "Umathpunctordspacing", "Umathpunctpunctspacing", "Umathpunctrelspacing", "Umathquad", "Umathradicaldegreeafter", "Umathradicaldegreebefore", "Umathradicaldegreeraise", "Umathradicalkern", "Umathradicalrule", "Umathradicalvgap", "Umathrelbinspacing", "Umathrelclosespacing", "Umathrelinnerspacing", "Umathrelopenspacing", "Umathrelopspacing", "Umathrelordspacing", "Umathrelpunctspacing", "Umathrelrelspacing", "Umathskewedfractionhgap", "Umathskewedfractionvgap", "Umathspaceafterscript", "Umathstackdenomdown", "Umathstacknumup", "Umathstackvgap", "Umathsubshiftdown", "Umathsubshiftdrop", "Umathsubsupshiftdown", "Umathsubsupvgap", "Umathsubtopmax", "Umathsupbottommin", "Umathsupshiftdrop", "Umathsupshiftup", "Umathsupsubbottommax", "Umathunderbarkern", "Umathunderbarrule", "Umathunderbarvgap", "Umathunderdelimiterbgap", "Umathunderdelimitervgap", "Unosubscript", "Unosuperscript", "Uoverdelimiter", "Uradical", "Uroot", "Uskewed", "Uskewedwithdelims", "Ustack", "Ustartdisplaymath", "Ustartmath", "Ustopdisplaymath", "Ustopmath", "Usubscript", "Usuperscript", "Uunderdelimiter", "Uvextensible", "adjustspacing", "alignmark", "aligntab", "attribute", "attributedef", "automaticdiscretionary", "automatichyphenmode", "automatichyphenpenalty", "begincsname", "bodydir", "bodydirection", "boxdir", "boxdirection", "breakafterdirmode", "catcodetable", "clearmarks", "compoundhyphenmode", "copyfont", "crampeddisplaystyle", "crampedscriptscriptstyle", "crampedscriptstyle", "crampedtextstyle", "csstring", "draftmode", 
"dviextension", "dvifeedback", "dvivariable", "efcode", "endlocalcontrol", "etoksapp", "etokspre", "exceptionpenalty", "expanded", "expandglyphsinfont", "explicitdiscretionary", "explicithyphenpenalty", "fixupboxesmode", "fontid", "formatname", "gleaders", "gtoksapp", "gtokspre", "hjcode", "hyphenationbounds", "hyphenationmin", "hyphenpenaltymode", "ifabsdim", "ifabsnum", "ifcondition", "ifincsname", "ifprimitive", "ignoreligaturesinfont", "immediateassigned", "immediateassignment", "initcatcodetable", "insertht", "lastnamedcs", "lastsavedboxresourceindex", "lastsavedimageresourceindex", "lastsavedimageresourcepages", "lastxpos", "lastypos", "latelua", "leftghost", "leftmarginkern", "letcharcode", "letterspacefont", "linedir", "linedirection", "localbrokenpenalty", "localinterlinepenalty", "localleftbox", "localrightbox", "lpcode", "luabytecode", "luabytecodecall", "luacopyinputnodes", "luadef", "luaescapestring", "luafunction", "luafunctioncall", "luatexbanner", "luatexrevision", "luatexversion", "mathdelimitersmode", "mathdir", "mathdirection", "mathdisplayskipmode", "matheqnogapstep", "mathflattenmode", "mathitalicsmode", "mathnolimitsmode", "mathoption", "mathpenaltiesmode", "mathrulesfam", "mathrulesmode", "mathrulethicknessmode", "mathscriptboxmode", "mathscriptcharmode", "mathscriptsmode", "mathstyle", "mathsurroundmode", "mathsurroundskip", "nohrule", "nokerns", "noligs", "normaldeviate", "nospaces", "novrule", "outputbox", "outputmode", "pagebottomoffset", "pagedir", "pagedirection", "pageheight", "pageleftoffset", "pagerightoffset", "pagetopoffset", "pagewidth", "pardir", "pardirection", "pdfextension", "pdffeedback", "pdfvariable", "postexhyphenchar", "posthyphenchar", "prebinoppenalty", "predisplaygapfactor", "preexhyphenchar", "prehyphenchar", "prerelpenalty", "primitive", "protrudechars", "pxdimen", "quitvmode", "randomseed", "rightghost", "rightmarginkern", "rpcode", "saveboxresource", "savecatcodetable", "saveimageresource", "savepos", "scantextokens", "setfontid", "setrandomseed", "shapemode", "suppressfontnotfounderror", "suppressifcsnameerror", "suppresslongerror", "suppressmathparerror", "suppressoutererror", "suppressprimitiveerror", "synctex", "tagcode", "textdir", "textdirection", "toksapp", "tokspre", "tracingfonts", "uniformdeviate", "useboxresource", "useimageresource", "xtoksapp", "xtokspre" },
["omega"]={ "Omegaminorversion", "Omegarevision", "Omegaversion" },
- ["pdftex"]={ "ifpdfabsdim", "ifpdfabsnum", "ifpdfprimitive", "pdfadjustspacing", "pdfannot", "pdfcatalog", "pdfcolorstack", "pdfcolorstackinit", "pdfcompresslevel", "pdfcopyfont", "pdfcreationdate", "pdfdecimaldigits", "pdfdest", "pdfdestmargin", "pdfdraftmode", "pdfeachlinedepth", "pdfeachlineheight", "pdfendlink", "pdfendthread", "pdffirstlineheight", "pdffontattr", "pdffontexpand", "pdffontname", "pdffontobjnum", "pdffontsize", "pdfgamma", "pdfgentounicode", "pdfglyphtounicode", "pdfhorigin", "pdfignoreddimen", "pdfignoreunknownimages", "pdfimageaddfilename", "pdfimageapplygamma", "pdfimagegamma", "pdfimagehicolor", "pdfimageresolution", "pdfincludechars", "pdfinclusioncopyfonts", "pdfinclusionerrorlevel", "pdfinfo", "pdfinfoomitdate", "pdfinsertht", "pdflastannot", "pdflastlinedepth", "pdflastlink", "pdflastobj", "pdflastxform", "pdflastximage", "pdflastximagepages", "pdflastxpos", "pdflastypos", "pdflinkmargin", "pdfliteral", "pdfmajorversion", "pdfmapfile", "pdfmapline", "pdfminorversion", "pdfnames", "pdfnoligatures", "pdfnormaldeviate", "pdfobj", "pdfobjcompresslevel", "pdfomitcidset", "pdfoutline", "pdfoutput", "pdfpageattr", "pdfpagebox", "pdfpageheight", "pdfpageref", "pdfpageresources", "pdfpagesattr", "pdfpagewidth", "pdfpkfixeddpi", "pdfpkmode", "pdfpkresolution", "pdfprimitive", "pdfprotrudechars", "pdfpxdimen", "pdfrandomseed", "pdfrecompress", "pdfrefobj", "pdfrefxform", "pdfrefximage", "pdfreplacefont", "pdfrestore", "pdfretval", "pdfsave", "pdfsavepos", "pdfsetmatrix", "pdfsetrandomseed", "pdfstartlink", "pdfstartthread", "pdfsuppressoptionalinfo", "pdfsuppressptexinfo", "pdftexbanner", "pdftexrevision", "pdftexversion", "pdfthread", "pdfthreadmargin", "pdftracingfonts", "pdftrailer", "pdftrailerid", "pdfuniformdeviate", "pdfuniqueresname", "pdfvorigin", "pdfxform", "pdfxformattr", "pdfxformmargin", "pdfxformname", "pdfxformresources", "pdfximage" },
+ ["pdftex"]={ "ifpdfabsdim", "ifpdfabsnum", "ifpdfprimitive", "pdfadjustspacing", "pdfannot", "pdfcatalog", "pdfcolorstack", "pdfcolorstackinit", "pdfcompresslevel", "pdfcopyfont", "pdfcreationdate", "pdfdecimaldigits", "pdfdest", "pdfdestmargin", "pdfdraftmode", "pdfeachlinedepth", "pdfeachlineheight", "pdfendlink", "pdfendthread", "pdffirstlineheight", "pdffontattr", "pdffontexpand", "pdffontname", "pdffontobjnum", "pdffontsize", "pdfgamma", "pdfgentounicode", "pdfglyphtounicode", "pdfhorigin", "pdfignoreddimen", "pdfignoreunknownimages", "pdfimageaddfilename", "pdfimageapplygamma", "pdfimagegamma", "pdfimagehicolor", "pdfimageresolution", "pdfincludechars", "pdfinclusioncopyfonts", "pdfinclusionerrorlevel", "pdfinfo", "pdfinfoomitdate", "pdfinsertht", "pdflastannot", "pdflastlinedepth", "pdflastlink", "pdflastobj", "pdflastxform", "pdflastximage", "pdflastximagepages", "pdflastxpos", "pdflastypos", "pdflinkmargin", "pdfliteral", "pdfmajorversion", "pdfmapfile", "pdfmapline", "pdfminorversion", "pdfnames", "pdfnoligatures", "pdfnormaldeviate", "pdfobj", "pdfobjcompresslevel", "pdfomitcharset", "pdfomitcidset", "pdfoutline", "pdfoutput", "pdfpageattr", "pdfpagebox", "pdfpageheight", "pdfpageref", "pdfpageresources", "pdfpagesattr", "pdfpagewidth", "pdfpkfixeddpi", "pdfpkmode", "pdfpkresolution", "pdfprimitive", "pdfprotrudechars", "pdfpxdimen", "pdfrandomseed", "pdfrecompress", "pdfrefobj", "pdfrefxform", "pdfrefximage", "pdfreplacefont", "pdfrestore", "pdfretval", "pdfsave", "pdfsavepos", "pdfsetmatrix", "pdfsetrandomseed", "pdfstartlink", "pdfstartthread", "pdfsuppressoptionalinfo", "pdfsuppressptexinfo", "pdftexbanner", "pdftexrevision", "pdftexversion", "pdfthread", "pdfthreadmargin", "pdftracingfonts", "pdftrailer", "pdftrailerid", "pdfuniformdeviate", "pdfuniqueresname", "pdfvorigin", "pdfxform", "pdfxformattr", "pdfxformmargin", "pdfxformname", "pdfxformresources", "pdfximage" },
["tex"]={ " ", "-", "/", "Uleft", "Umiddle", "Uright", "above", "abovedisplayshortskip", "abovedisplayskip", "abovewithdelims", "accent", "adjdemerits", "advance", "afterassignment", "aftergroup", "atop", "atopwithdelims", "badness", "baselineskip", "batchmode", "begingroup", "belowdisplayshortskip", "belowdisplayskip", "binoppenalty", "botmark", "boundary", "box", "boxmaxdepth", "brokenpenalty", "catcode", "char", "chardef", "cleaders", "closein", "closeout", "clubpenalty", "copy", "count", "countdef", "cr", "crcr", "csname", "day", "deadcycles", "def", "defaulthyphenchar", "defaultskewchar", "delcode", "delimiter", "delimiterfactor", "delimitershortfall", "dimen", "dimendef", "directlua", "discretionary", "displayindent", "displaylimits", "displaystyle", "displaywidowpenalty", "displaywidth", "divide", "doublehyphendemerits", "dp", "dump", "edef", "else", "emergencystretch", "end", "endcsname", "endgroup", "endinput", "endlinechar", "eqno", "errhelp", "errmessage", "errorcontextlines", "errorstopmode", "escapechar", "everycr", "everydisplay", "everyhbox", "everyjob", "everymath", "everypar", "everyvbox", "exhyphenchar", "exhyphenpenalty", "expandafter", "fam", "fi", "finalhyphendemerits", "firstmark", "firstvalidlanguage", "floatingpenalty", "font", "fontdimen", "fontname", "futurelet", "gdef", "glet", "global", "globaldefs", "halign", "hangafter", "hangindent", "hbadness", "hbox", "hfil", "hfill", "hfilneg", "hfuzz", "hoffset", "holdinginserts", "hpack", "hrule", "hsize", "hskip", "hss", "ht", "hyphenation", "hyphenchar", "hyphenpenalty", "if", "ifcase", "ifcat", "ifdim", "ifeof", "iffalse", "ifhbox", "ifhmode", "ifinner", "ifmmode", "ifnum", "ifodd", "iftrue", "ifvbox", "ifvmode", "ifvoid", "ifx", "ignorespaces", "immediate", "indent", "input", "inputlineno", "insert", "insertpenalties", "interlinepenalty", "jobname", "kern", "language", "lastbox", "lastkern", "lastpenalty", "lastskip", "lccode", "leaders", "left", "lefthyphenmin", "leftskip", "leqno", "let", "limits", "linepenalty", "lineskip", "lineskiplimit", "long", "looseness", "lower", "lowercase", "mag", "mark", "mathaccent", "mathbin", "mathchar", "mathchardef", "mathchoice", "mathclose", "mathcode", "mathinner", "mathop", "mathopen", "mathord", "mathpunct", "mathrel", "mathsurround", "maxdeadcycles", "maxdepth", "meaning", "medmuskip", "message", "middle", "mkern", "month", "moveleft", "moveright", "mskip", "multiply", "muskip", "muskipdef", "newlinechar", "noalign", "noboundary", "noexpand", "noindent", "nolimits", "nonscript", "nonstopmode", "nulldelimiterspace", "nullfont", "number", "omit", "openin", "openout", "or", "outer", "output", "outputpenalty", "over", "overfullrule", "overline", "overwithdelims", "pagedepth", "pagefilllstretch", "pagefillstretch", "pagefilstretch", "pagegoal", "pageshrink", "pagestretch", "pagetotal", "par", "parfillskip", "parindent", "parshape", "parskip", "patterns", "pausing", "penalty", "postdisplaypenalty", "predisplaypenalty", "predisplaysize", "pretolerance", "prevdepth", "prevgraf", "protrusionboundary", "radical", "raise", "read", "relax", "relpenalty", "right", "righthyphenmin", "rightskip", "romannumeral", "scriptfont", "scriptscriptfont", "scriptscriptstyle", "scriptspace", "scriptstyle", "scrollmode", "setbox", "setlanguage", "sfcode", "shipout", "show", "showbox", "showboxbreadth", "showboxdepth", "showlists", "showthe", "skewchar", "skip", "skipdef", "spacefactor", "spaceskip", "span", "special", "splitbotmark", "splitfirstmark", "splitmaxdepth", "splittopskip", "string", 
"tabskip", "textfont", "textstyle", "the", "thickmuskip", "thinmuskip", "time", "toks", "toksdef", "tolerance", "topmark", "topskip", "tpack", "tracingcommands", "tracinglostchars", "tracingmacros", "tracingonline", "tracingoutput", "tracingpages", "tracingparagraphs", "tracingrestores", "tracingstats", "uccode", "uchyph", "underline", "unhbox", "unhcopy", "unkern", "unpenalty", "unskip", "unvbox", "unvcopy", "uppercase", "vadjust", "valign", "vbadness", "vbox", "vcenter", "vfil", "vfill", "vfilneg", "vfuzz", "voffset", "vpack", "vrule", "vsize", "vskip", "vsplit", "vss", "vtop", "wd", "widowpenalty", "wordboundary", "write", "xdef", "xleaders", "xspaceskip", "year" },
["xetex"]={ "XeTeXversion" },
}
\ No newline at end of file
diff --git a/doc/context/documents/general/manuals/musings.pdf b/doc/context/documents/general/manuals/musings.pdf
new file mode 100644
index 000000000..d777b04b3
--- /dev/null
+++ b/doc/context/documents/general/manuals/musings.pdf
Binary files differ
diff --git a/doc/context/documents/general/qrcs/setup-cs.pdf b/doc/context/documents/general/qrcs/setup-cs.pdf
index fa1906a01..3c4dbc0e9 100644
--- a/doc/context/documents/general/qrcs/setup-cs.pdf
+++ b/doc/context/documents/general/qrcs/setup-cs.pdf
Binary files differ
diff --git a/doc/context/documents/general/qrcs/setup-de.pdf b/doc/context/documents/general/qrcs/setup-de.pdf
index c524d17f0..56f73ed2c 100644
--- a/doc/context/documents/general/qrcs/setup-de.pdf
+++ b/doc/context/documents/general/qrcs/setup-de.pdf
Binary files differ
diff --git a/doc/context/documents/general/qrcs/setup-en.pdf b/doc/context/documents/general/qrcs/setup-en.pdf
index 96e7b9c5a..28123182c 100644
--- a/doc/context/documents/general/qrcs/setup-en.pdf
+++ b/doc/context/documents/general/qrcs/setup-en.pdf
Binary files differ
diff --git a/doc/context/documents/general/qrcs/setup-fr.pdf b/doc/context/documents/general/qrcs/setup-fr.pdf
index c36fab3d6..86fb3283e 100644
--- a/doc/context/documents/general/qrcs/setup-fr.pdf
+++ b/doc/context/documents/general/qrcs/setup-fr.pdf
Binary files differ
diff --git a/doc/context/documents/general/qrcs/setup-it.pdf b/doc/context/documents/general/qrcs/setup-it.pdf
index 16966ce5d..a06be230a 100644
--- a/doc/context/documents/general/qrcs/setup-it.pdf
+++ b/doc/context/documents/general/qrcs/setup-it.pdf
Binary files differ
diff --git a/doc/context/documents/general/qrcs/setup-mapping-cs.pdf b/doc/context/documents/general/qrcs/setup-mapping-cs.pdf
index b670c103f..1a201a46f 100644
--- a/doc/context/documents/general/qrcs/setup-mapping-cs.pdf
+++ b/doc/context/documents/general/qrcs/setup-mapping-cs.pdf
Binary files differ
diff --git a/doc/context/documents/general/qrcs/setup-mapping-de.pdf b/doc/context/documents/general/qrcs/setup-mapping-de.pdf
index 9e197be49..2485ea667 100644
--- a/doc/context/documents/general/qrcs/setup-mapping-de.pdf
+++ b/doc/context/documents/general/qrcs/setup-mapping-de.pdf
Binary files differ
diff --git a/doc/context/documents/general/qrcs/setup-mapping-en.pdf b/doc/context/documents/general/qrcs/setup-mapping-en.pdf
index b9ac1a83d..fcae8a2af 100644
--- a/doc/context/documents/general/qrcs/setup-mapping-en.pdf
+++ b/doc/context/documents/general/qrcs/setup-mapping-en.pdf
Binary files differ
diff --git a/doc/context/documents/general/qrcs/setup-mapping-fr.pdf b/doc/context/documents/general/qrcs/setup-mapping-fr.pdf
index c2939afb3..0763cb58c 100644
--- a/doc/context/documents/general/qrcs/setup-mapping-fr.pdf
+++ b/doc/context/documents/general/qrcs/setup-mapping-fr.pdf
Binary files differ
diff --git a/doc/context/documents/general/qrcs/setup-mapping-it.pdf b/doc/context/documents/general/qrcs/setup-mapping-it.pdf
index 37fe767e9..b3f72ae7e 100644
--- a/doc/context/documents/general/qrcs/setup-mapping-it.pdf
+++ b/doc/context/documents/general/qrcs/setup-mapping-it.pdf
Binary files differ
diff --git a/doc/context/documents/general/qrcs/setup-mapping-nl.pdf b/doc/context/documents/general/qrcs/setup-mapping-nl.pdf
index 5160e0573..901bb26ec 100644
--- a/doc/context/documents/general/qrcs/setup-mapping-nl.pdf
+++ b/doc/context/documents/general/qrcs/setup-mapping-nl.pdf
Binary files differ
diff --git a/doc/context/documents/general/qrcs/setup-mapping-ro.pdf b/doc/context/documents/general/qrcs/setup-mapping-ro.pdf
index c95f63899..f0e64fdde 100644
--- a/doc/context/documents/general/qrcs/setup-mapping-ro.pdf
+++ b/doc/context/documents/general/qrcs/setup-mapping-ro.pdf
Binary files differ
diff --git a/doc/context/documents/general/qrcs/setup-nl.pdf b/doc/context/documents/general/qrcs/setup-nl.pdf
index 0ca910659..ed2f0fd05 100644
--- a/doc/context/documents/general/qrcs/setup-nl.pdf
+++ b/doc/context/documents/general/qrcs/setup-nl.pdf
Binary files differ
diff --git a/doc/context/documents/general/qrcs/setup-ro.pdf b/doc/context/documents/general/qrcs/setup-ro.pdf
index de1ecb75a..4bdc49fe3 100644
--- a/doc/context/documents/general/qrcs/setup-ro.pdf
+++ b/doc/context/documents/general/qrcs/setup-ro.pdf
Binary files differ
diff --git a/doc/context/sources/general/manuals/musings/musings-children.tex b/doc/context/sources/general/manuals/musings/musings-children.tex
new file mode 100644
index 000000000..b814675bb
--- /dev/null
+++ b/doc/context/sources/general/manuals/musings/musings-children.tex
@@ -0,0 +1,1253 @@
+% language=uk
+
+% naming-nature.jog
+
+\startcomponent musings-children
+
+\environment musings-style
+
+\definedescription
+ [presomething]
+ [headstyle=\bold,
+ alternative=hanging,
+ width=fit,
+ hang=1]
+
+\startchapter[title={Children of \TEX}]
+
+\startsection[title={The theme}]
+
+\TEX\ conferences nearly always carry a theme. As there have been many
+conferences the organizers have run out of themes involving fonts, macros and
+typesetting and are now cooking up more fuzzy ones. Take the Bacho\TUG\ 2017
+theme:
+
+\startnarrower[left,8*right] \startpacked
+\startpresomething {Premises}
+ The starting point, what we have, what do we use, what has been achieved?
+\stoppresomething
+\startpresomething {Predilections}
+ How do we act now, how do we want to act, what is important to us and what do
+ we miss?
+\stoppresomething
+\startpresomething {Predictions}
+ What is the future of \TEX, what we'd like to achieve and can we influence
+ it?
+\stoppresomething
+\stoppacked \stopnarrower
+
+My first impression of these three P words was: what do they mean? Followed by
+the thought: this is no longer a place to take kids to. But the Internet gives
+access to the Cambridge Dictionary, so instead of running to the dusty meter of
+dictionaries somewhere else in my place, I made sure that I googled the most
+recent definitions:
+
+\startnarrower[left] \startpacked
+\startpresomething {premise}
+ an idea or theory on which a statement or action is based
+\stoppresomething
+\startpresomething {predilection}
+ if someone has a predilection for something, they like it a lot
+\stoppresomething
+\startpresomething {prediction}
+ a statement about what you think will happen in the future
+\stoppresomething
+\stoppacked \stopnarrower
+
+I won't try to relate these two sets of definitions but several words stand out
+in the second set: idea, theory, action, like, statement and future. Now, as a
+preparation for the usual sobering thoughts that Jerzy, Volker and I have when
+staring into a Bacho\TEX\ campfire, I decided to wrap up some ideas around these
+themes and words. The books that I will mention are just a selection of what you
+can find distributed around my place. This is not some systematic research but
+just the result of a few weeks making a couple of notes while pondering about
+this conference.
+
+\stopsection
+
+\startsection[title=Introduction]
+
+One cannot write the number of \TEX\ macros that I've written without also liking
+books. If you look at my bookshelves the topics are somewhat spread over the
+possible spectrum of topics: history, biology, astronomy, paleontology, general
+science but surprisingly little math. There are a bunch of typography|-|related
+books but only some have been read: it's the visuals that matter most and as
+there are no real developments I haven't bought new ones in over a decade,
+although I do buy books that look nice for our office display but the content
+should be interesting too. Of course I do have a couple of books about computer
+(related) science and technology but only a few are worth a second look.
+Sometimes I bought computer books expecting to use them (in some project) but I
+must admit that most have not been read and many will soon end up in the paper
+bin (some already went that way). I'll make an exception for Knuth, Wirth and a
+few other fundamental ones that I (want to) read. And, I need to catch up on deep
+learning, so that might need a book.
+
+My colleagues and I have many discussions, especially about what we read, and
+after a few decades one starts seeing patterns. Therefore the last few years it
+was a pleasant surprise for me to run into books and lectures that nicely
+summarize what one has noticed and discussed in a consistent way. My memory is
+not that good, but good enough to let some bells ring.
+
+\startplacefigure[location=top]
+ \startcombination[nx=4,ny=1,width=\textwidth,distance=0pt]
+ {\externalfigure[covers/sapiens.jpg] [height=5cm]} {history}
+ {\externalfigure[covers/homo-deus.jpg] [height=5cm]} {futurology}
+ {\externalfigure[covers/children-of-time.jpg] [height=5cm]} {science fiction}
+ {\externalfigure[covers/superintelligence.jpg][height=5cm]} {informatics}
+ \stopcombination
+\stopplacefigure
+
+The first book that gave me this \quotation {finally a perfect summary of
+historic developments} feeling is \quotation{Sapiens} by Yuval Noah Harari. The
+author summarizes human history from a broad perspective where modern views on
+psychology, anthropology and technical developments are integrated. It's a follow
+up on a history writing trend started by Jared Diamond. The follow up \quotation
+{Homo Deus} looks ahead and is just as well written. It also integrates ideas
+from other fields, for instance those related to development of artificial
+intelligence (Dennett, Bostrom, etc.).
+
+Another inspiration for this talk and article is the 50 hour lecture series on
+behavioral biology by Robert Sapolsky of Stanford University, brought to my
+attention by my nephew Bram who visited a few \TEX\ conferences with me and who
+is now also forced to use \TEX\ for assignments and reports. (How come
+self|-|published books used at universities often look so bad?)
+
+The title of this talk is inspired by the book \quotation {Children of Time} by
+Adrian Tchaikovsky that I read recently. There are science fiction writers who
+focus on long term science and technology, such as some of Alastair Reynolds,
+while others follow up on recent developments in all kinds of sciences. One can
+recognize aspects of \quotation {Superintelligence} by Bostrom in Neal Asher's
+books, insights in psychology in the older Greg Bear books, while in the
+mentioned \quotation {Children of Time} (socio)biological insights dominate. The
+main thread in that book is the development of intelligence, social behaviour,
+language, script and cooperation in a species quite different from us: spiders.
+It definitely avoids the anthropocentric focus that we normally have.
+
+So how does this relate to the themes of the Bacho\TEX\ conference? I will pick
+out some ways to approach them using ideas from the kind of resources mentioned
+above. I could probably go on and on for pages because once you start relating
+what you read and hear to this \TEX\ ecosystem and community, there is no end.
+So, consider this a snapshot, that somehow relates to the themes:
+
+\startnarrower[left,8*right] \startpacked
+\startpresomething {premise}
+ Let's look at what the life sciences have to say about \TEX\ and friends and
+ let's hope that I don't offend the reader and the field.
+\stoppresomething
+\startpresomething {predilection}
+ Let's figure out what brings us here to this place deeply hidden in the woods,
+ a secret gathering of the \TEX\ sect.
+\stoppresomething
+\startpresomething {prediction}
+ Let's see if the brains present here can predict the future because after
+ all, according to Dennett, that is what brains are for.
+\stoppresomething
+\stoppacked \stopnarrower
+
+At school I was already intrigued by patterns in history: a cyclic, spiral and
+sinusoidal social evolution instead of a purely linear sequence of events. It became
+my first typeset|-|by|-|typewriter document: Is history an exact science? Next I
+will use and abuse patterns and ideas to describe the \TEX\ world, not wearing a
+layman's mathematical glasses, but more from the perspective of life sciences,
+where chaos dominates.
+
+\stopsection
+
+\startsection[title={The larger picture}]
+
+The history of mankind can be roughly summarized as follows. For a really long time
+we were hunters but at some point (10K years ago) became farmers. As a result we
+could live in larger groups and still feed them. The growing complexity of
+society triggered rules and religion as instruments for stability and
+organization (I use the term religion in its broadest sense here). For quite a
+while cultures came and went, and climate changes were among the reasons.
+
+After the industrial revolution new religions were invented (social, economic and
+national liberalism) and we're now getting dataism (search for Harari on youtube
+for a better summary). Some pretty great minds seem to agree that we're heading
+to a time when humans as we are will be outdated. Massive automation, interaction
+between the self and computer driven ecosystems, lack of jobs and purpose,
+messing around with our genome. Some countries and cultures still have to catch
+up on the industrial revolution, if they manage at all, and maybe we ourselves
+will be just as behind reality soon. Just ask yourself: did you manage to catch
+up? Is \TEX\ a stone age tool or a revolutionary turning point?
+
+A few decades ago a trip to Bacho\TEX\ took more than a day. Now you drive there
+in just over half a day. There was a time that it took weeks: preparation,
+changing horses, avoiding bad roads. Not only your own man|-|hours were involved.
+It became easier later (my first trip took only 24 hours) and recently it turned
+into a piece of cake: you don't pick up maps but start your device; you don't
+need a travel agent but use the Internet; there are no border patrols, you can
+just drive on. (Okay, maybe some day soon border patrols at the Polish border
+will show up again, just like road tax police in Germany, but that might be a
+temporary glitch.)
+
+Life gets easier and jobs get lost. Taxi and truck drivers, travel agents, and
+cashiers become as obsolete as agricultural workers before. Next in line are
+doctors, lawyers, typesetters, printers, and all those who think they're safe.
+Well, how many people were needed 400 years ago to produce the proceedings of a
+conference like this in a few days' time span? Why read the introduction of a
+book or a review when you can just listen to the author's summary on the web? How
+many conferences still make proceedings (or go for videos instead), will we
+actually need editors and typesetters in the future? How much easier has it
+become to design a font, including variants? What stories can designers tell in
+the future when programs do the lot? The narrower your speciality is, the worse
+your chances are; hopefully the people present at this conference operate on a
+broader spectrum. It's a snapshot: I will show some book covers as references but
+am aware that years ago, or years ahead, the selection could have been different.
+
+\stopsection
+
+\startsection[title=Words]
+
+Words (whatever they represent) found a perfect spot to survive: our minds. Then
+they made it from speech (and imagination) into writing: carved in stone, wood,
+lead. At some point they managed to travel over wires but no matter what
+happened, they are still around. Typesetting as visualization is also still
+surrounding us so that might give us a starting point for ensuring a future for
+\TEX\ to work on, because \TEX\ is all about words. There is a lot we don't see;
+imagine if our eyes had microscopic qualities. What if we could hear beyond
+20KHz. Imagine we could see infrared. How is that with words. What tools, similar
+in impact as \TEX, can evolve once we figure that out. What if we get access to
+the areas of our brain that hold information? We went from print to screen and
+\TEX\ could cope with that. Can it cope with what comes next?
+
+The first printing press replaced literal copying by hand. Later we got these
+linotype|-|like machines but apart from a few left, these are already thrown out
+of windows (as we saw in a movie a few Bacho\TeX's ago). Photo|-|typesetting has
+been replaced too and because a traditional centuries old printing press is a
+nice to see item, these probably ring more bells than that gray metal closed box
+typesetters. Organizers of \TEX\ conferences love to bring the audience to old
+printing workshops and museums. At some point computers got used for typesetting
+and in that arena \TEX\ found its place. These gray closed boxes are way less
+interesting than something mechanical that at least invites us to touch it. How
+excited can one be about a stack of \TEX\,Live \DVD{}s?
+
+\stopsection
+
+\startsection[title=Remembering]
+
+Twice I visited the science museum in London with young family
+members: distracted by constantly swiping their small powerful devices, they
+didn't have the least interest in the exhibited computer related items, let alone
+the fact that the couch they were sitting on was a Cray mainframe. Later on,
+climbing on some old monument or an old cannon seemed more fun. So, in a few
+decades folks will still look at wooden printing presses but quickly walk through
+the part of an exhibition where the tools that we use are shown. We need to find
+ways to look interesting. But don't think we're unique: how many kids find
+graphical trend|-|setting games like Myst and Riven still interesting? On the
+other hand a couple of months ago a bunch of nieces and nephews had a lot of fun
+with an old Atari console running low|-|res bitmap games. Maybe there is hope for
+good old \TEX.
+
+If indeed we're heading to a radically different society one can argue whether
+this whole discussion makes sense. When the steam engine showed up, the metaphor
+for what went on in our heads was that technology; it's a popular example used by
+speakers on this topic: \quotation {venting off steam}. When electricity and radio
+came around, metaphors like \quotation {being on the same wavelength} showed up. A few
+decades ago the computer replaced that model although in the meantime the model
+is more neurobiological: we're a hormone and neurotransmitter driven computer. We
+don't have memory the way computers do.
+
+How relevant will page breaks, paragraph and line breaks be in the future? Just
+like \quotation {venting off steam} may make no sense to the youth, asking a
+typesetter to \quotation {give me a break} might not make much sense soon.
+However, when discussing automated typesetting the question \quotation {are we on
+the same page} still has relevance.
+
+Typesetting with a computer might seem like the ultimate solution but it's
+actually rather dumb when we consider truly intelligent systems. On the large
+scale of history and developments what we do might go quite unnoticed. Say that
+mankind survives the next few hundred years one way or the other. Science fiction
+novels by Jack McDevitt have an interesting perspective of rather normal humans
+millennia ahead of us who look back on these times in the same way as we look
+back now. Nothing fundamental changed in the way we run society. Nearly nothing
+from the past is left over and apart from being ruled by \AI{}s people still do
+sort of what they do now. \TEX ? What is that? Well, there once was this great
+computer scientist Knuth (in the remembered row of names like Aristotle |<|I just
+started reading \quotation {The Lagoon} by Armand Leroi|>| Newton, Einstein, his
+name will show up) who had a group of followers that used a program that he seems to
+have written. And even that is unlikely to be remembered, unless maybe user
+groups manage to organize an archive and pass that on. Maybe the fact that \TEX\
+was one of the first large scale open source programs, of which someone can study
+the history, makes it a survivor. The first program that was properly documented
+in detail! But then we need to make sure that it gets known and persists.
+
+\stopsection
+
+\startsection[title=Automation]
+
+In a recent interview Daniel Dennett explains that his view of the mind as a big
+neural network, one that can be simulated in software on silicon, is a bit too
+simplistic. He wonders if we shouldn't rather think of a network of
+(selfish) neurons that group together in tasks and then compete with each other,
+if only because they want to have something to do.
+
+Maybe attempts to catch the creative mindset and working of a typesetter in
+algorithms are futile. What actually is great typography or good typesetting?
+Recently I took a look at my bookshelf wondering what to get rid of \emdash\
+better do that now than when I'm too old to carry the crap down (crap being
+defined as uninteresting content or bad looking). I was surprised about the
+on|-|the|-|average bad quality of the typesetting and print. It's also not really
+getting better. One just gets accustomed to what is the norm at a certain point.
+Whenever they change the layout and look and feel of the newspaper I read, the
+arguments are readability and ease of access. Well, I never had such a hard time
+reading my paper as today (with my old eyes).
+
+Are we, like Dennett, willing to discard old views on our tools and models? My
+first computer was an \RCA\ 1802 based kit that had 256 bytes of memory. My
+current laptop (from 2013) is a Dell Precision workstation with an extreme quad
+core processor and 16 GB of memory and ssd storage. Before I arrived there I
+worked with \DECTEN, \VAX\ and the whole range of Intel \CPU{}s. So if you really
+want to compare a brain with a computer, take your choice.
+
+I started with \TEX\ on a 4 MHz desktop with 640 KB of memory and a 10 MB hard
+disk. Running \CONTEXT\ \MKIV\ with \LUATEX\ on such a machine is no option at
+all, but I still carry the burden of trying to write efficient code (which is
+still somewhat reflected in the code that makes up \CONTEXT). In the decades that
+we have been using \TEX\ we had to adapt! Demands changed, possibilities changed,
+technologies changed. And they keep changing. How many successive changes can a
+\TEX\ user handle? Sometimes, when I look and listen I wonder.
+
+\startplacefigure[location=top]
+ \startcombination[nx=4,ny=1,width=\textwidth,distance=0pt]
+ {\externalfigure[covers/the-mind-in-the-cave.jpg] [height=5cm]} {paleontology}
+ {\externalfigure[covers/the-ancestors-tale.jpg] [height=5cm]} {evolutionary biology}
+ {\externalfigure[covers/the-good-book-of-human-nature.jpg][height=5cm]} {anthropology}
+ {\externalfigure[covers/chaos-and-harmony.jpg] [height=5cm]} {physics}
+ \stopcombination
+\stopplacefigure
+
+If you look back, that is, if you read about the tens of thousands of years that
+it took humans to evolve (\quotation {The mind in the cave} by Lewis|-|Williams
+is a good exercise) you realize even more in what a fast|-|paced time we live and
+that we're witnessing transitions of another magnitude.
+
+In the evolution of species some tools were invented multiple times, like eyes.
+You see the same in our \TEX\ world: multiple (sub)macro packages, different font
+technologies, the same solutions but with an alternative approach. Some
+disappear, some stay around. Just like different circumstances demand different
+solutions in nature, so do different situations in typesetting, for instance
+different table rendering solutions. Sometimes I get the feeling that we focus too
+much on getting rid of all but one solution while it would be more natural to accept
+diversity, like bio|-|diversity is accepted. Transitions nowadays happen faster
+but the question is if, like aeons before, we (have to) let them fade away. When
+evolution is discussed the terms \quote {random}, \quote {selection}, \quote
+{fit}, and so on are used. This probably also applies to typography: at some
+point a font can be used a lot, but in the end the most readable and most
+attractive one will survive. Newspapers are printed in many copies, but rare
+beautiful books hold value. Of course, just like in nature some developments
+force the further path of development: we don't suddenly grow more legs or digits
+on our hands. The same happens with \TEX\ on a smaller timescale: successors
+still have the same core technology, also because if we'd drop it, it would be
+something different and then give a reason to reconsider using such technology
+(which likely would result in going by another path).
+
+\stopsection
+
+\startsection[title=Quality]
+
+Richard Dawkins's \quotation {The Ancestor's Tale} is a non|-|stop read. In a
+discussion with Jared Diamond about religion and evolution they ponder this
+thread: you holding the hand of your mother who is holding her mother's hand and
+so on till at some point fish get into the picture. The question then is, when do
+we start calling something human? And a related question is, when does what we
+call morality creep in? Is a 50\% Neanderthal human or not?
+
+So, in the history of putting thoughts on paper: where does \TEX\ fit in? When do
+we start calling something automated typesetting? When do we decide that we have
+quality? Is \TEX\ so much different from its predecessors? And when we see
+aspects of \TEX\ (or related font technology) in more modern programs, do we see
+points where we cross qualitative or other boundaries? Is a program doing a
+better job than a human? Where do we stand? There are fields where there is no
+doubt that machines outperform humans. It's probably a bit more difficult in
+aesthetic fields except perhaps when we lower the conditions and expectations
+(something that happens a lot).
+
+For sure \TEX\ will become obsolete, maybe even faster than we think, but so will
+other typesetting technologies. Just look back and have no illusions. Till then
+we can have our fun and eventually, when we have more free time than we need, we
+might use it out of hobbyism. Maybe \TEX\ will be remembered for what is probably its most
+important side effect: being one of the first large scale open source projects, the time when users met
+over programs, Knuth's disciples gathered in user groups, etc. The tools that we
+use are just a step in an evolution. And, as with evolution, most branches are
+pruned. So, when in the far future one looks back, will they even notice \TEX ?
+The ancestor's tale turns the tree upside down: at the end of the successful
+branch one doesn't see the dead ends.
+
+Just a thought: \CD{}s and media servers are recently being replaced (or at least
+accompanied) by Long Play records. In the shop where I buy my \CD{}s the space
+allocated to records grows at the cost of more modern media. So, maybe at some
+point retro|-|typesetting will pop up. Of course it might skip \TEX\ and end up
+at woodcutting or printing with lead.
+
+\stopsection
+
+\startsection[title=What mission]
+
+We rely on search engines instead of asking around or browsing libraries. Do
+students really still read books and manuals or do they just search and listen to
+lectures? Harari claims that instead of teaching kids facts in school we should
+just take for granted that they can get all the data they want and that we should
+teach them how to deal with data and adapt to what is coming. We take for granted
+that small devices with human voices show us the route to drive to Bacho\TEX, for
+instance, although by now I can drive it without help. In fact, kids can surprise
+you by asking if we're driving in Germany when we are already in Poland.
+
+We accept that computer programs help physicians in analyzing pictures. Some wear
+watches that warn them about health issues, and I know a few people who monitor
+their sugar levels electronically instead of relying on their own measurements.
+We seem to believe and trust the programs. And indeed, we also believe that \TEX\
+does the job in the best way possible. How many people really understand the way
+\TEX\ works?
+
+We still have mailing lists where we help each other. There are also wikis and
+forums like stack exchange. But who says that even a moderate bit of artificial
+intelligence doesn't answer questions better? Of course there needs to be input
+(manuals, previous answers, etc.) but just like we will soon need fewer people as a
+workforce, the number of experts needed can also be smaller. And we're still talking
+about a traditional system like \TEX. Maybe the social experience that we have on
+these media will survive somehow, although: how many people are members of
+societies, participate in demonstrations, meet weekly in places where ideas get
+exchanged, compared to a few decades ago? That being said, I love to watch posts
+with beautiful \CONTEXT\ solutions or listen to talks by enthusiastic users who
+do things I hadn't expected. I really hope that this property survives, just like
+I hope that we will be able to see the difference between a real user's response
+and one from an intelligent machine (an unrealistic hope I fear). Satisfaction
+wins and just like our neurological subsystems at some point permanently adapt to
+thresholds (given that you trigger things often enough), we get accustomed to
+what \TEX\ provides and so we stick to it.
+
+\stopsection
+
+\startsection[title={Intelligence versus consciousness}]
+
+Much of what we do is automated. You don't need to think of which leg to move and
+what foot to put down when you walk. Reacting to danger is also to a large extent
+automated. It doesn't help much to start thinking about how dangerous a lion can
+be when it's coming after you, you'd better move fast. Our limbic system is
+responsible for such automated behaviour, for instance driven by emotions. The
+more difficult tasks and thoughts about them happen in the frontal cortex (sort
+of).
+
+\startplacefigure[location=top]
+ \startcombination[nx=4,ny=1,width=\textwidth,distance=0pt]
+ {\externalfigure[covers/death-by-black-hole.jpg] [height=5cm]} {astronomy}
+ {\externalfigure[covers/the-formula.jpg] [height=5cm]} {informatics}
+ {\externalfigure[covers/hals-legacy.jpg] [height=5cm]} {future science}
+ {\externalfigure[covers/lucky-planet.jpg] [height=5cm]} {earth science}
+ \stopcombination
+\stopplacefigure
+
+For most users \TEX\ is like the limbic system: there is not much thinking
+involved, and the easy solutions are the ones used. Just like hitting a nerve
+triggers a chain of reactions, hitting a key eventually produces a typeset
+document. Often this is best because the job needs to get done and no one really
+cares how it looks; just copy a preamble, key in the text and assume that it
+works out well (enough). It is tempting to compare \TEX's penalties, badness and
+other parameters with levels of hormones and neurotransmitters. Their function
+depends on where they get used and the impact can be accumulated, blocked or
+absent. It's all magic, especially when things interact.
+
+Existing \TEX\ users, developers and user groups of course prefer to think
+otherwise, that it is a positive choice by free will. That new users have looked
+around and arrived at \TEX\ for good reason: their frontal cortex steering a
+deliberate choice. Well, it might have played a role but the decision to use
+\TEX\ might in the end be due to survival skills: I want to pass this exam and
+therefore I will use that weird system called \TEX.
+
+All animals, us included, have some level of intelligence but also have this hard
+to describe property that we think makes us what we are. Intelligence and
+consciousness are not the same (at least we know a bit about the first but nearly
+nothing about the second). We can argue about how well composed some music is but
+why we like it is a different matter.
+
+We can make a well thought out choice for using \TEX\ for certain tasks but can
+we say why we started liking it (or not)? Why it gives us pleasure or maybe
+grief? Has it become a drug that we got addicted to? So, one can make an
+intelligent decision about using \TEX\ but getting a grip on why we like it can
+be hard. Do we enjoy the first time struggle? Probably not. Do we like the folks
+involved? Yes, Don Knuth is a special and very nice person. Can we find help and
+run into a friendly community? Yes, and a unique one too, annoying at times,
+often stimulating and on the average friendly for all the odd cases running
+around.
+
+Artificial intelligence is pretty ambitious, so speaking of machine intelligence
+is probably better. Is \TEX\ an intelligent program? There is definitely some
+intelligence built in and the designer of that program is for sure very
+intelligent. The designer is also a conscious entity: he likes what he did and
+finds pleasure in using it. The program on the other hand is just doing its job:
+it doesn't care how it's done and how long it takes: a mindless entity. So here
+is a question: do we really want a more intelligent program doing the job for us,
+or do those who attend conferences like Bacho\TEX\ enjoy \TEX ing so much that
+they happily stay with what they have now? Compared to rockets tumbling down
+and|/|or exploding or Mars landers trashing themselves due to programming errors
+or interactions, \TEX\ is surprisingly stable and bug free.
+
+\stopsection
+
+\startsection[title={Individual versus group evolution}]
+
+After listening for hours to Sapolsky you start getting accustomed to remarks
+about (unconscious) behaviour driven by genes, expression and environment, aimed
+at \quotation {spreading many copies of your genes}. In most cases that is an
+individual's driving force. However, cooperation between individuals plays a role
+in this. A possible view is that we have now reached a state where survival is
+more dependent on a group than on an individual. This makes sense when we
+consider that developments (around us) can go way faster than regular evolution
+(adaptation) can handle. We take control over evolution, a mechanism that needs
+time to adapt and time is something we don't give it anymore.
+
+Why does \TEX\ stay around? It started with an individual but eventually it's the
+groups that keep it going. A too|-|small group won't work but too|-|large groups
+won't work either. It's a known fact that one can only handle some 150 social
+contacts: we evolved in small bands that split when they became too large. Larger
+groups demanded abstract beliefs and systems to deal with the numbers: housing,
+food production, protection. The \TEX\ user groups also provide some
+organization: they organize meetings, somehow keep development going and provide
+infrastructure and distributions. They are organized around languages. According
+to Diamond new languages are still being discovered but many go extinct too. So the
+potential for language related user groups is not really growing.
+
+Some of the problems that we face in this world have become too large to be dealt
+with by individuals and nations. In spite of what anti|-|globalists want we
+cannot deal with our energy hunger, environmental issues, lack of natural
+resources, or upcoming technologies without global cooperation. We currently see a
+regression in cooperation by nationalistic movements, protectionism and the usual
+going back to presumed better times, but that won't work.
+
+Local user groups are important but the number of members is not growing. There
+is some cooperation between groups but eventually we might need to combine the
+groups into one, which might succeed unless one wants to come first. Of course we
+will get the same sentiments and arguments as in regular politics but on the
+other hand, we already have the advantage of \TEX\ systems being multi|-|lingual
+and users sharing interest in the diversity of usage and users. The biggest
+challenge is to pass on what we have achieved. We're just a momentary highlight
+and let's not try to embrace some \quotation {\TEX\ first} madness.
+
+\stopsection
+
+\startplacefigure[location=top]
+ \startcombination[nx=4,ny=1,width=\textwidth,distance=0pt]
+ {\externalfigure[covers/3-16.jpg] [height=5cm]} {art}
+ % {\externalfigure[covers/dirt.jpg] [height=5cm]} {history}
+ {\externalfigure[covers/the-winds-of-change.jpg] [height=5cm]} {history}
+ {\externalfigure[covers/pale-blue-dot.jpg] [height=5cm]} {astronomy}
+ {\externalfigure[covers/the-third-chimpanzee.jpg][height=5cm]} {history}
+ \stopcombination
+\stopplacefigure
+
+\startsection[title=Sexes]
+
+Most species have two sexes but it is actually a continuum controlled by hormones
+and genetic expression: we just have to accept it. Although the situation has
+improved, there are plenty of places where some gender relationships are
+considered bad, even to the extent that one's life can be in danger. Actually
+having strong ideas about these issues is typically human. But in the end one has
+to accept the continuum.
+
+In a similar way we just have to accept that \TEX\ usage, application of \TEX\
+engines, etc.\ is a continuum and not a batch versus \WYSIWYG\ battle any more.
+It's disturbing to read strong recommendations not to use this or that. Of the
+many macro packages that showed up only a few were able to survive. How do users
+of outlines look at bitmaps, how do \DVI\ lovers look at \PDF? But, as
+typesetting relates to esthetics, strong opinions come with the game.
+
+Sapolsky reports on a group of baboons where, because they got the first choice
+of food, the alpha males of the pack got poisoned, so that the remaining
+suppressed males who treated the females well became dominant. In fact they can
+then make sure that no new alpha male from outside joins the pack without
+behaving like they do. A sort of social selection. In a similar fashion, until
+now the gatherings of \TEX ies have managed to keep their social properties and have
+not been dominated by, for instance, commerce.
+
+% So, maybe should focus on acceptance and tolerance and then make sure that that
+% we keep what we have and let it not be influenced too much by sectarianism. It
+% makes a nice topic for a meeting of the context (sub)group, that actually has a
+% women as driving force. How can we preserve what we have but still proceed is a
+% legitimate question. Where do we stand in the landscape.
+
+In the animal world sexes often relate to appearance. The word sexy made it to
+other domains as well. Is \TEX\ sexy? For some it is. We often don't see the real
+colors of birds. What looks gray to us looks vivid to a bird which sees in a
+different spectrum. The same is true for \TEX. Some users see a command line
+(shell) and think: this is great! Others just see characters and keystrokes and
+are more attracted to an interactive program. When I see a graphic made by
+\METAPOST, I always note how exact it is. Others don't care if their interactive
+effort doesn't connect the dots well. Some people (also present here) think that
+we should make \TEX\ attractive but keep in mind that like and dislike are not
+fixed human properties. Some mindsets might as well be the result of our
+makeup, others can be driven by culture.
+
+\stopsection
+
+\startsection[title=Religion]
+
+One of Sapolsky's lectures is about religion and it comes in the sequence of
+mental variations including depression and schizophrenia, because all these
+relate to mental states, emotions, thresholds and such (all things human). That
+makes it a tricky topic which is why it has not been taped. As I was raised in a
+moderate Protestant tradition I can imagine that it's an uncomfortable topic.
+But there are actually videos from a few years earlier around and they are
+interesting to watch and not as threatening as some might expect. Here I just
+stick to some common characteristics.
+
+If you separate the functions that religions play into for instance explanation
+of the yet unknown, social interactions, control of power and regulation of
+morals, then it's clear why at \TEX\ user group meetings the religious aspect of
+\TEX\ has been discussed in talks. Those who see programs as infallible and
+always right and don't understand the inner working can see it as an almighty
+entity. In the Netherlands church|-|going is diminishing but it looks like alternative
+meetings are replacing it (and I'm not talking of football matches). So what are
+our \TEX\ meetings? What do we believe in? The reason that I bring up this aspect
+is that in the \TEX\ community we can find aspects of the more extremist aspects
+of religions: if you don't use the macro package that I use, you're wrong. If you
+don't use the same operating system as I do, you're evil. You will be punished if
+you use the wrong editor for \TEX ! Why don't you use this library (which, by the
+way, just replaced that other one)? We create angels and daemons. Even for quite
+convinced atheists (it's not hard to run into them on youtube) a religion only
+survives when it has benefits, something that puzzles them. So when we're
+religious about \TEX\ and friends we have to make sure that it's at least
+beneficial. Also, maybe we fall into Dennett's category of \quotation {believers
+who want to believe}: it helps us to do our job if we just believe that we have
+the perfect tool. Religion has inspired visual and aural art and keeps doing
+that. (Don Knuth's current musical composition project is a good example of
+this.)
+
+Scientists can be religious, in flexible ways too, which is demonstrated by Don
+Knuth. In fact, I'm pretty sure \TEX\ would not be in the position it is in now
+if it weren't for his knowledgeable, inspirational, humorous, humble, and always
+positive presence. And for sure he's not at all religious about the open source
+software that he sent viral.
+
+I'm halfway through reading \quotation {The Good Book of Human Nature} (An
+Evolutionary Reading of the Bible), a book about the evolution of the bible and
+monotheism, which is quite interesting. It discusses for instance how transitions
+from a hunter to a farmer society demanded a change of rules and introduced
+stories that made sense in that changing paradigm. Staying in one place meant
+that possessions became more important, and therefore inheritance too. Often when
+religion is discussed by behavioral biologists, historians and anthropologists
+they stress this cultural narrative aspect. Also mentioned is that such societies
+were willing to support (in food and shelter) the ones that didn't normally fit
+in but added to the spiritual character of religions. The social and welcoming
+aspect is definitely present in for instance Bacho\TEX\ conferences although a
+bystander can wonder what these folks are doing in the middle of the night around
+a campfire, singing, drinking, frying sausages, spitting fire, and discussing the
+meaning of life.
+
+Those who wrap up the state of religious affairs, make predictions and advocate the
+message, are sometimes called evangelists. I remember a \TEX\ conference in the
+\USA\ where the gospel of \XML\ was preached (by someone from outside the \TEX\
+community). We were all invited to believe it. I was sitting in the back of the
+crowded (!)\ room and that speaker was not at all interested in who spoke before
+and after. Well, I do my share of \XML\ processing with \CONTEXT, but believe me:
+much of the \XML\ that we see is not according to any gospel. It's probably
+blessed the same way as those state officials get blessed when they ask and pray
+for it in public.
+
+It can get worse at \TEX\ conferences. Some present here at Bacho\TEX\ might
+remember the \PDF\ evangelists that we had show up at \TEX\ conferences. You see
+this qualification occasionally and I have become quite allergic to
+qualifications like architect, innovator, visionary, inspirator and evangelist,
+even worse when they look young but qualify as senior. I have no problem with
+religion at all but let's stay away from becoming one. And yes, typography also
+falls into that trap, so we have to be doubly careful.
+
+\stopsection
+
+\startplacefigure[location=top]
+ \startcombination[nx=4,ny=1,width=\textwidth,distance=0pt]
+ {\externalfigure[covers/from-bacteria-to-bach-and-back.jpg][height=5cm]} {philosophy}
+ {\externalfigure[covers/the-lagoon.jpg] [height=5cm]} {science history}
+ {\externalfigure[covers/chaos.jpg] [height=5cm]} {science}
+ {\externalfigure[covers/why-zebras-dont-get-ulcers.jpg] [height=5cm]} {behavioral biology}
+ \stopcombination
+\stopplacefigure
+
+\startsection[title=Chaotic solutions]
+
+The lectures on \quotation {chaos and reductionism} and \quotation {emergence and
+complexity} were the highlights of Sapolsky's series. I'm not a good narrator
+so I will not summarize them but it sort of boils down to the fact that certain
+classes of problems cannot be split up into smaller tasks that we understand well,
+after which we can reassemble the solutions to deal with the complex task.
+Emergent systems can however cook up working solutions from random events.
+Examples are colonies of ants and bees.
+
+The \TEX\ community is like a colony: we cook up solutions, often by trial and
+error. We dream of the perfect solutions but deep down know that esthetics cannot
+be programmed in detail. This is a good thing because it doesn't render us
+obsolete. At last year's Bacho\TEX, my nephew Teun and I challenged the anthill
+outside the canteen to typeset the \TEX\ logo with sticks but it didn't persist.
+So we don't need to worry about competition from that end. How do you program a
+hive mind anyway?
+
+When chaos theory evolved in the second half of the previous century not every
+scientist felt happy about it. Instead of converging to more perfect predictions
+and control, in some fields a persistent uncertainty became reality.
+
+After about a decade of using \TEX\ and writing macros to solve recurring
+situations I came to the conclusion that striving for a perfect \TEX\ (the
+engine) that can do everything and anything makes no sense. Don Knuth not only
+stopped adding code when he could do what he needed for his books, he also stuck
+to what to me seem reasonable endpoints. Every hard|-|coded solution beyond that
+is just that: a hard|-|coded solution that is not able to deal with the
+exceptions that make up most of the more complex documents. Of course we can
+theorize and discuss at length the perfect never|-|reachable solutions but
+sometimes it makes more sense to admit that an able user of a desktop publishing
+system can do that job in minutes, just by looking at the result and moving
+around an image or piece of text a bit.
+
+There are some hard|-|coded solutions and presets in the programs but with
+\LUATEX\ and \MPLIB\ we try to open those up. And that's about it. Thinking that
+for instance adding features like protrusion or expansion (or whatever else)
+always lead to better results is just a dream. Just as a butterfly flapping its
+wings on one side of the world can have an effect on the other side, so can
+adding a single syllable to your source completely confuse an otherwise clever
+column or page break algorithm. So, we settle for not adding more to the engine,
+and provide just a flexible framework.
+
+A curious observation is that when Edward Lorenz ran into chaotic models it was
+partially due to a restart of a simulation midway, using printed floating point
+numbers that, back in the computer, were represented with a different accuracy than
+printed. Aware of floating point numbers being represented differently across
+architectures, Don Knuth made sure that \TEX\ was insensitive to this so that its
+outcome was predictable, if you knew how it worked internally. Maybe \LUATEX\
+introduces a bit of chaos because the \LUA\ we use has only floats. In fact, a
+few months ago we did uncover a bug in the backend where the same phenomenon gave
+a chaotic crash.
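+
+As an aside, a minimal \LUA\ sketch (an illustration only, with a hypothetical
+helper, not actual \LUATEX\ internals) shows the point: floating point sums can
+differ in their last bits, but once both sides are rounded to \TEX's integer
+scaled points (65536 per point) the comparison is exact again, which is why
+integer arithmetic keeps the outcome predictable.
+
+\starttyping
+-- illustration only: rounding to scaled points hides floating point noise
+local sp = 65536                          -- scaled points per point
+local function tosp(pt)                   -- hypothetical helper
+    return math.floor(pt * sp + 0.5)      -- round a dimension to integer sp
+end
+
+print(0.1 + 0.2 == 0.3)                   -- false: classic float rounding
+print(tosp(0.1 + 0.2) == tosp(0.3))       -- true: both round to 19661 sp
+\stoptyping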
+
+In chaos theory there is the concept of an attractor. When visualized this can be
+the area (seemingly random) covered by a trajectory. Or it can be a single point
+where for instance a pendulum comes to rest. So what is our attractor? We have a
+few actually. First there is the engine, the stable core of primitives always
+present. You often see programs grow more complex every update and for sure that
+happened with \ETEX, \PDFTEX, \XETEX\ and \LUATEX. However there is always the
+core that is supposed to be stable. After some time the new kid arrives at a
+stable state not much different from the parent. The same is true for \METAPOST.
+Fonts are somewhat different because the technology changes but in the end the
+shapes and their interactions become stable as well. Yet another example is \TEX\
+Live: during a year it might diverge from its route but eventually it settles
+down and enters the area where we expect it to end up. The \TEX\ world is at
+times chaotic, but stable in the long run.
+
+So, how about the existence, the reason for it still being around? One can
+speculate about its future trajectory but one thing is sure: as long as we break
+a text into paragraphs and pages \TEX\ is hard to beat. But what if we don't need
+that any more? What if the concept of a page is no longer relevant? What if
+justified texts no longer matter (often designers don't care anyway)? What if
+students are no longer challenged to come up with a nice looking thesis? Do these
+collaborative tools with remote \TEX\ processing really bring new long term users
+or is \TEX\ then just one of the come|-|and|-|go tools?
+
+\stopsection
+
+\startsection[title=Looking ahead]
+
+In an interview (\quotation {World of ideas}) Asimov explains that science
+fiction evolved rapidly when people lived long enough to see that there was a
+future (even for their offspring) that is different from today. It is (at least
+for me) mind boggling to think of an evolution of hundreds of thousands of years
+to achieve something like language. Waiting for the physical being to arrive at a
+spot where you can make sounds, where the brain is suitable for linguistic
+patterns, etc. A few hundred years ago the speed of developments (and science)
+stepped up.
+
+\TEX\ is getting near 40 years old. Now, for software that {\bf is} old! In that
+period we have seen computers evolve: thousands of times faster processing, even
+more increase in memory and storage. If we read about spaceships that travel at a
+reasonable fraction of the speed of light, and think that will not happen soon,
+just think back to the terminals that were sitting in computer labs when \TEX\
+was developed: 300 baud was normal. I actually spent quite some time on
+optimizing time|-|critical components of \CONTEXT\ but on this timescale that is
+really a waste of time. But even temporary bottlenecks can be annoying (and
+costly) enough to trigger such an effort. (Okay, I admit that it can be a
+challenge, a kind of game, too.)
+
+Neil Tyson, in the video \quotation {Storytelling of science} says that when
+science made it possible to make photos it also made possible a transition in
+painting to impressionism. Other technology could make the exact snapshot so
+there was new room for inner feelings and impressions. When the Internet showed
+up we went through a similar transition, but \TEX\ actually dates from before the
+Internet. Did we also have a shift in typesetting? To some extent yes, browsers
+and real time rendering is different from rendering pages on paper. In what space
+and time are \TEX ies rooted?
+
+We get older than previous generations. Quoting Sapolsky \quotation{\unknown\ we
+are now living well enough and long enough to slowly fall apart.} The opposite is
+happening with our tools, especially software: its useful lifetime becomes
+shorter and changes faster each year. Just look at the version numbers of
+operating systems. Don Knuth expected \TEX\ to last for a long time and compared
+to other software its core concept and implementation is doing surprisingly well.
+We use a tool that suits our lifespan! Let's not stress ourselves out too much
+with complex themes. (It helps to read \quotation {Why zebras don't get ulcers}.)
+
+\stopsection
+
+\startsection[title=Memes]
+
+If you repeat a message often enough, even if it's something not true, it can
+become a meme that gets itself transferred across generations. Conferences like
+this are where they can evolve. We tell ourselves and the audience how good \TEX\
+is and because we spend so many hours, days, weeks, months using it, it actually
+must be good, or otherwise we would not come here and talk about it. We're not so
+stupid as to spend time on something not good, are we? We're always surprised
+when we run into a (potential) customer who seems to know \TEX. It rings a bell,
+and it being around must mean something. Somehow the \TEX\ meme has anchored
+itself when someone attended university. Even if experiences might have been bad
+or usage was minimal. The meme that \TEX\ is the best in math typesetting is a
+strong survivor.
+
+There's a certain kind of person who tries to get away with their own deeds and
+decisions by pointing to \quotation {fake news} and accusations of \quotation
+{mainstream media} cheating on them. But to what extent are our stories true
+about how easy \TEX\ macro packages are to use and how good their results are? We have
+to make sure we spread the right memes. And the user groups are the guardians.
+
+Maybe macro packages are like memes too. In the beginning there was a bunch but
+only some survived. It's about adaptation and evolution. Maybe competition was
+too fierce in the beginning. Like ecosystems, organisms and cellular processes in
+biology we can see the \TEX\ ecosystem, users and usage, as a chaotic system.
+Solutions pop up, succeed, survive, lead to new ones. Some look similar and
+slightly different input can give hugely different outcomes. You cannot really
+look too far ahead and you cannot deduce the past from the present. Whenever
+something kicks it off its stable course, like the arrival of color, graphics,
+font technologies, \PDF, \XML, ebooks, the \TEX\ ecosystem has to adapt and find
+its stable state again. The core technology has proven to be quite fit for the
+kind of adaptation needed. But still, do it wrong and you get amplified out of
+existence, don't do anything and the external factors also make you extinct.
+There is no denial that (in the computer domain) \TEX\ is surprisingly stable and
+adaptive. It's also hard not to see how conservatism can lead to extinction.
+
+\startplacefigure[location=top]
+ \startcombination[nx=4,ny=1,width=\textwidth,distance=0pt]
+ {\externalfigure[covers/the-epigenetics-revolution.jpg] [height=5cm]} {genetics}
+ {\externalfigure[covers/dark-matter-and-the-dinosaurs.jpg][height=5cm]} {physics}
+ {\externalfigure[covers/the-world-without-us.jpg] [height=5cm]} {history}
+ {\externalfigure[covers/what-we-cannot-know.jpg] [height=5cm]} {science}
+ \stopcombination
+\stopplacefigure
+
+\stopsection
+
+\startsection[title=Inspiration]
+
+I just took some ideas from different fields. I could have mentioned quantum
+biology, which tries to explain some unexplainable phenomena in living creatures.
+For instance, how do birds navigate without visible and measurable clues? How do
+people arrive at \TEX\ while we don't really advertise? Or I could mention
+epigenetics and explorations in junk \DNA. It's not only the bit of the genome that
+we thought that matters, but also the expression of the genes, driven by other
+factors. Offspring not only gets genetic material passed on, it can also get presets.
+How can the \TEX\ community pass on Knuth's legacy? Do we need to hide the
+message in subtle ways? Or how about the quest for dark matter? Does it really
+exist or do we want (need) it to exist? Does \TEX\ really have that many users,
+or do we cheat by adding the users that are forced to use it during college but don't
+like it at all? There's enough inspiration for topics at \TEX\ conferences, we
+just have to look around us.
+
+\stopsection
+
+\startsection[title=Stability]
+
+I didn't go into technical aspects of \TEX\ yet. I must admit that after decades
+of writing macros I've reached a point where I can safely say that there will
+never be perfect automated solutions for really complex documents. When books
+about neural networks showed up I wondered if they could be applied (but I couldn't).
+When I ran into genetic algorithms I tried to understand their possible impact (but
+I never did). So I stuck to writing solutions for problems using visualization:
+the trial and error way. Of course, speaking of \CONTEXT, I will adapt what is
+needed, and others can do that as well. Is there a new font technology? Fine,
+let's support it as it's no big deal, just a boring programming task. Does a user
+want a new mechanism? No problem, as solving a reduced subset of problems can be
+fun. But to think of \TEX\ in a reductionist way, i.e.\ solving the small
+puzzles, and to expect the whole to work in tandem to solve a complex task is not
+trivial and maybe even impossible. It's a good thing actually, as it keeps us on
+edge. Also, \CONTEXT\ was designed to help you with your own solutions: be
+creative.
+
+I mentioned my nephew Bram. He has seen part of this crowd a few times, just like
+his brother and sister do now. He's into artificial intelligence now. In a few
+years I'll ask him how he sees the current state of \TEX\ affairs. I might learn
+a few tricks in the process.
+
+In \quotation {The world without us} Weisman explores how fast the world would be
+void of traces of humankind. A mere 10,000 years can be more than enough. Looking
+back, that's about the time hunters became farmers. So here's a challenge: say
+that we want an ant culture that evolves to the level of having archaeologists who
+know that we were here at Bacho\TEX\ \unknown\ what would we leave behind?
+
+Sapolsky ends his series by stressing that we should accept and embrace
+individual differences. The person sitting next to you can have the same makeup
+but be just a bit more sensitive to depression or be among the few percent with genes
+controlling schizophrenic behaviour. He stresses that knowing how things work or
+where things go wrong doesn't mean that we should fix everything. So look at this
+room full of \TEX ies: we don't need to be all the same, use all the same, we
+don't need some dominance, we just need to accept and especially we need to
+understand that we can never fully understand (and solve) everything forever.
+
+Predictions, one of the themes, can be hard. It's not true that science has the
+answer to everything. There will always be room for speculation and maybe we will
+always need metaphysics too. I just started to read \quotation {What we cannot
+know} by du Sautoy. For sure those present here cannot predict how \TEX\ will go on
+and|/|or be remembered.
+
+\stopsection
+
+\startsection[title=Children of \TEX]
+
+I mentioned \quotation {Children of time}. The author lets you see their spidery
+world through spider eyes and physiology. They have different possibilities
+(eyesight, smell) than we do and also different mental capabilities. They evolve
+rapidly and have to cope conceptually with signals from a human surveillance
+satellite up in the sky. Eventually they need to deal with a bunch of (of course)
+quarrelling humans who want their place on the planet. We humans have some
+pre|-|occupation with spiders and other creatures. In a competitive world it is
+sometimes better to be suspicious (and avoid and flee) than to take the risk of
+being eaten. A frequently used example is that a rustle in a bush can be the wind
+or a lion, so best is to run.
+
+We are not that well adapted to our current environment. We evolved at a very
+slow pace so there was no need to look ahead more than a year. And so we still
+don't look too far ahead (and choose politicians accordingly). We also cannot
+deal that well with statistics (Dawkins's \quotation {Climbing Mount Improbable}
+is a good read) so we make false assumptions, or just forget.
+
+Does our typeset text really look that good in the long run, or do we cheat with
+statistics? It's not too hard to find a bad example of something not made by
+\TEX\ and extrapolate that to the whole body of typeset documents. Just like we
+can take a nice example of something done by \TEX\ and assume that what we do
+ourselves is equally okay. I still remember the tests we did with \PDFTEX\ and
+hz. When \THANH\ and I discussed that with Hermann Zapf he was not surprised at
+all that no one saw a difference between the samples; people instead focused on
+aspects that \TEX ies are told to look at, like two hyphens in a row.
+
+A tool like \TEX\ has a learning curve. If you don't like that just don't use it.
+If you think that someone doesn't like that, don't enforce this tool on that
+someone. And don't use (or lie with) statistics. Much better arguments are that
+it's a long|-|lived stable tool with a large user base and support. That it's not
+a waste of time. Watching a designer like Hermann Zapf draw shapes is more fun
+than watching someone click and point in heavily automated tools. It's probably
+also less fun to watch a \TEX ie converge towards a solution.
+
+Spiders are resilient. Ants maybe even more. Ants will survive a nuclear blast
+(mutations might even bring them benefits), they can handle the impact of a
+meteorite, a change in climate won't harm them much. Their biggest enemy is
+probably us, when we try to wipe them out with poison. But, as long as they keep
+a low profile they're okay. \TEX\ doesn't fit into the economic model as there is
+no turnover involved, no paid development, it is often not seen at all, it's
+just a hit in a search engine and even then you might miss it (if only because no
+one pays for it being shown at the top).
+
+We can learn from that. Keeping a low profile doesn't trigger the competition to
+wipe you out. Many (open source) software projects fade away: some big company
+buys out the developer and stalls the project or wraps what they bought in their
+own stuff, other projects go professional and enterprise and alienate the
+original users. Yet others abort because the authors lose interest. Just like the
+ideals of socialism don't automatically mean that every attempt to implement it
+is a success, so not all open source and free software is good (natured) by
+principle either. The fact that communism failed doesn't mean that capitalism is
+better and a long term winner. The same applies to programs, whether successful
+or not.
+
+Maybe we should be like the sheep. Dennett uses these animals as an example of a
+clever species. They found a way to survive by letting themselves (unconsciously) be
+domesticated. The shepherd guarantees food, shelter and protection. He makes sure
+they don't get ill. Speaking biologically: they definitely made sure that many
+copies of their genes survived. Cows did the same and surprisingly many of them
+are related due to the fact that they share the same father (something that is
+now being reverted). All \TEX\ spin|-|offs relate to the same parent, and those that
+survived are those that were herded by user groups. We see bits and pieces of
+\TEX\ end up in other applications. Hyphenation is one of them. Maybe we should
+settle for that small victory in a future hall of fame.
+
+When I sit on my balcony and look at the fruit trees in my garden, some simple
+math can be applied. Say that one of the apple trees has 100 apples per year and
+say that this tree survives for 25 years (it's one of those small manipulated
+trees). That makes 2.500 apples. Without human intervention only a few of these
+apples make it into new trees, otherwise the whole world would be dominated by
+apple trees. Of course that tree now only survives because we permit it to
+survive, and for that it has to be humble (something that is very hard for modern
+Apples). Anyway, the apple tree doesn't look too unhappy.
+
+A similar calculation can be done for birds that nest in the trees and under my
+roof. Given that the number of birds stays the same, most of the energy spent on
+raising offspring is wasted. Nevertheless they seem to enjoy life. Maybe we
+should be content if we get one enthusiastic new user when we demonstrate \TEX\
+to thousands of potential users.
+
+Maybe, coming back to the themes of the conference, we should not come up with
+these kinds of themes. We seem to be quite happy here. Talking about the things
+that we like, meeting people. We just have to make sure that we survive. Why not
+stay low under the radar? That way nothing will see us as a danger. Let's be like
+the ants and spiders, the invisible hive mind that carries our message, whatever
+that is.
+
+When Dennett discusses language he mentions (coined) words that survive in
+language. He also mentions that children pick up language no matter what. Their
+minds are made for it. Other animals don't do that: they listen but don't start
+talking back. Maybe \TEX\ is just made for certain minds. Some like it and pick
+it up, while for others it's just noise. There's nothing wrong with that.
+Predilection can be a user property.
+
+\stopsection
+
+\startsection[title={The unexpected}]
+
+In a discussion with Dawkins the well|-|spoken astrophysicist Neil deGrasse Tyson
+brings up the following. We differ only a few percent in \DNA\ from a chimp but
+quite a lot in brain power, so what would happen if an alien that differs a few
+percent (or more) passed by earth? Just like we don't talk to ants or chimps or
+whatever expecting an intelligent answer, whatever passes earth won't bother
+wasting time on us. Our rambling about the quality of typesetting probably sounds
+alien to many people who just want to read and who happily reflow a text on an
+ebook device, not bothered by a lack of quality.
+
+\startplacefigure[location=top]
+ \startcombination[nx=4,ny=1,width=\textwidth,distance=0pt]
+ {\externalfigure[covers/live-as-we-do-not-know-it.jpg][height=5cm]} {astrobiology}
+ {\externalfigure[covers/life-on-the-edge.jpg] [height=5cm]} {quantumbiology}
+ {\externalfigure[covers/rare-earth.jpg] [height=5cm]} {astrophysics}
+ {\externalfigure[covers/austerity.jpg] [height=5cm]} {economics}
+ \stopcombination
+\stopplacefigure
+
+We tend to take ourselves as reference. In \quotation {Rare Earth} Ward and
+Brownlee extrapolate the possibility of life elsewhere in the universe. They are
+not alone in thinking that, even when statistics is applied to these formulas
+for possible life on planets, there is a chance that we're the only intelligent
+species that ever evolved. In a follow|-|up, \quotation {Life as we do not know
+it}, paleontologist and astrobiologist Ward (one of my favourite authors)
+discusses the possibility of life not based on carbon, which is not natural for a
+carbon based species. Carl Sagan once pointed out that an alien species looking
+down at earth could easily conclude that cars are the dominant species on earth and
+that the thingies crawling in and out of them are some kind of parasites. So, when
+we look at the things that somehow end up on paper (as words, sentences,
+ornaments, etc.), what is dominant there? And is what we consider dominant really
+that dominant in the long run? You can look at a nice page as a whole and not
+see the details of the content. Maybe beauty hides nonsense.
+
+When \TEX ies look around they look at similar technologies: commands in shells
+and solutions done by scripting and programming. This makes sense from the
+perspective of survival. However, if you want to ponder alternatives, maybe not
+for usage but just for fun, a completely different perspective might be needed.
+You must be willing to accept that communicating with a user of a \WYSIWYG\
+program might be impossible. If mutual puzzlement is a fact, then they can either
+be too smart and you can be too dumb or the reverse. Or both approaches can be
+just too alien, based on different technologies and assumptions. Just try to
+explain \TEX\ to a kid 40 years younger or to an 80 year old grandparent for that
+matter. Today you can be very clever in one area and very stupid in another.
+
+In another debate, Neil deGrasse Tyson asks Dawkins the question why in science
+fiction movies the aliens look so human and when they don't, why they look so
+strange, for instance like cumbersome sluggish snails. The response to that is
+one of puzzlement: the opponent has no reference to such movies. In discussions
+old \TEX ies like to suggest that we should convert young users. They often don't
+understand that kids live in a different universe.
+
+How often does that happen to us? In a world of many billions \TEX\ has its place
+and can happily coexist with other typesetting technologies. Users of other
+technologies can be unaware of us and even create wrong images. In fact, this
+also happens in the community itself: (false) assumptions turned into
+conclusions. Solutions that look alien, weird and wrong to users of the same
+community. Maybe something that I present as hip and modern and high|-|\TEX\ and
+promising might be the opposite: backward, old|-|fashioned and of no use to
+others. Or maybe it is, but the audience is in a different mindset. Does it
+matter? Let's just celebrate that diversity. (So maybe, instead of discussing the
+conference theme, I should have talked about how I abuse \LUATEX\ in controlling
+lights in my home as part of some IoT experiments.)
+
+\stopsection
+
+\startsection[title=What drives us]
+
+I'm no fan of economics and big money talk makes me suspicious. I cannot imagine
+working in a large company where money is the drive. It also means that I have
+not much imagination in that area. We get those calls at the office from people in
+faraway countries who are hired to talk us into investments by phone. Unfortunately
+mentioning that you're not at all interested in investments or that multiplying
+money is irrelevant to you does not silence the line. You have to actively kill
+such calls. This is also why I probably don't understand today's publishing world
+where money also dominates. Recently I ran into talks by Mark Blyth about the
+crisis (what crisis?) and I wish I could argue like he does when it comes to
+typesetting and workflows. He argues quite convincingly that most politicians have no
+clue what the crisis is about.
+
+I think that the same applies to the management of publishers: many have no clue
+what typesetting is about. So they just throw lots of money into the wrong
+activities, just like the central banks seem to do. It doesn't matter if we \TEX
+ies demonstrate cheap and efficient solutions.
+
+Of course there are exceptions. We're lucky to have some customers that do
+understand the issues at hand. Those are also the customers where authors may use
+the tools themselves. Educating publishers, and explaining that authors can do a
+lot, might be a premise, predilection and prediction in one go! Forget about
+those who don't get it: they will lose eventually, unfortunately not before they
+have reaped and wasted the landscape.
+
+Google, Facebook, Amazon, Microsoft and others invest a lot in artificial
+intelligence (or, having all that virtual cash, just buy other companies that
+do). They already have such entities in place to analyze whatever you do. It is
+predicted that at some point they will know more about you than you know yourself.
+Reading Luke Dormehl's \quotation {The Formula} is revealing. So what will that
+do to our so|-|called (disputed by some) free will? Can we choose our own
+tools? What if a potential user is told that all his or her friends use
+WhateverOffice so they'd better do that too? Will subtle pressure lead them or
+even us users away from \TEX ? We already see arguments among \TEX ies, like
+\quotation {It doesn't look like it has been updated in 3 years, is it still good?}
+Why update something that is still valid? Will the community be forced to update
+everything, with a sort of fake updates? Who sets out the rules? Do I really need to update (or
+re|-|run) manuals every five years?
+
+Occasionally I visit the Festo website. This is a (family owned) company that
+does research at the level that used to be common in large companies decades ago.
+If I had to choose a job, that would be the place to go to. Just google for
+\quotation {festo bionic learning network} and you will understand why. We lack
+this kind of research in the field we talk about today: research not driven by
+commerce, short term profit or long term control, but done because it is
+fundamentally fun.
+
+Last year Alan Braslau and I spent some time on \BIBTEX. Apart from dealing with
+all the weird aspects of the \APA\ standard, dealing with the inconsistently
+constructed author fields is a real pain. There have been numerous talks about
+that aspect here at Bacho\TEX\ by Jean|-|Michel Hufflen. We're trying to deal
+with a more than 30|-|year|-|old flawed architecture. Just look back over a curve
+that backtracks 30 years of exponential development in software and databases and
+you realize that it's a real waste of time and a lost battle. It's fine to have a
+text based database, and stable formats are great, but the lack of structure is
+appalling and hard to explain to young programmers. Compare that to the Festo
+projects and you realize that there can be more challenging projects. Of course,
+dealing with the old data can be a challenge, a necessity and eventually even be
+fun, but don't even think that it can be presented as something hip and modern.
+We should be willing to admit flaws. No wonder that Jean|-|Michel decided to
+switch to talking about music instead. Way more fun.
+
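+Coming back to those author fields: a made|-|up sample (not from any real
+database) gives a flavour of the problem. The same person can show up in several
+guises and a tool has to guess that these all refer to the same author:
+
+\starttyping
+author = {Hagen, Hans},
+author = {H. Hagen},
+author = {Hans Hagen and A. Braslau},
+author = {Hagen, H. and Braslau, Alan},
+\stoptyping
+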
+Our brains are massively parallel bio|-|machinery. Groups of neurons cooperate
+and compete for attention. Coming up with solutions that match what comes out of
+our minds demands a different approach. Here we still think in traditional
+programming solutions. Will new ideas about presenting information, the follow|-|up
+of books, come from this community? Are we the innovative Festo or are we an old
+dinosaur that just follows the fashion?
+
+\stopsection
+
+\startsection[title=User experience]
+
+Here is a nice one. Harari spends many pages explaining that research shows that
+when an unpleasant experience has less unpleasantness at the end of the period
+involved, the overall experience is valued according to the last experience. Now,
+this is something we can apply to working with \TEX: often, the closer you get to
+the final state of typesetting, the more it feels as if all hurdles were at the
+beginning: initial coding, setting up a layout, figuring things out, etc.
+
+It can only get worse if you have a few left|-|over typesetting disasters, but
+there adapting the text can help out. Of course seeing it in a cheap bad print
+can make the whole experience bad again. It happens. There is a catch here: one
+can find lots of bad|-|looking documents typeset by \TEX. Maybe there frustration
+(or indifference) prevails.
+
+I sometimes get to see what kind of documents people make with \CONTEXT\ and it's
+nice to see a good looking thesis with diverse topics: science, philosophy,
+music, etc. Here \TEX\ is just instrumental, as what it is used for is way more
+interesting (and often also more complex) than the tool used to get it on paper.
+We have conferences but they're not about rocket science or particle
+accelerators. Proceedings of such conferences can still scream \TEX, but it's the
+content that matters. Here somehow \TEX\ still sells itself, being silently
+present in rendering and presentations. It's like a rootkit: not really
+appreciated and hard to get rid of. Does one discuss the future of rootkits other
+than in the perspective of extinction? So, even as an invisible rootkit, hidden
+in the workings of other programs, \TEX's future is not safe. Sometimes, when you
+install a Linux system, you automatically get this large \TEX\ installation,
+either because of dependencies or because it is seen as a toolkit similar to, for
+instance, Open (or is it Libre) Office. If you don't need it, you might as
+well start seeing it as a (friendly) virus.
+
+\stopsection
+
+\startsection[title=Conclusion]
+
+At some point those who introduced computers in typesetting had no problem
+throwing printing presses out of the window. So don't pity yourself if at some
+point in the near future you figure out that professional typesetting is no
+longer needed. Maybe once we let machines rule the world (even more) we will be
+left alone and can make beautiful documents (or whatever) just for the joy, not
+bothering if we use outdated tools. After all, we play modern music on old
+instruments (and the older rock musicians get, the more they seem to like
+acoustic).
+
+There are now computer generated compositions that experienced listeners cannot
+distinguish from old school. We already had copies of paintings that could only
+be identified as forgeries by looking at chemical properties. Both of these
+(artificial) arts can be admired and bring joy. So, the same applies to fully
+automated typeset novels (or runtime rendered ebooks). How bad is that really?
+You don't dig canals by hand. You don't calculate logarithmic tables
+manually any longer.
+
+However, one of the benefits of the Internet is watching and listening to great
+minds. Another is seeing musicians perform, which is way more fun than watching a
+computer (although googling for \quotation {animusic} brings nice visuals).
+Recently I ran into a wooden musical computer made by \quotation {Wintergatan}
+which reminded me of the \quotation {Paige Compositor} that we use in a \LUATEX\
+cartoon. Watching something like that nicely compensates for a day of rather
+boring programming. Watching how the marble machine x (mmx) evolves is yet
+another nice distraction.
+
+Now, the average age of the audience here is pretty high even if we consider that
+we get older. When I see solutions of \CONTEXT\ users (or experts) posted by
+(young) users on the mailing list or stack exchange I often have to smile because
+my answer would have been worse. A programmable system invokes creative
+solutions. My criterion is always that it has to look nice in code and have some
+elegance. Many posted solutions fit. Do we really want more automation? It's more
+fun to admire the art of solutions and I'm amazed how well users use the
+possibilities (even ones that I already forgot).
+
+One of my favourite artists on my weekly \quotation {check youtube} list is Jacob
+Collier. Right from when I ran into him I realized that a new era in music had
+begun. Just google for his name and \quotation {music theory interview} and you
+probably understand what I mean. When Dennett comments on the next generation
+(say up to 25) he wonders how they will evolve as they grow up in a completely
+different environment of connectivity. I can see that when I watch family
+members. Already long ago Greg Bear wrote the novel \quotation {Darwin's
+Children}. It sets you thinking and when looking around you even wonder if there
+is some truth in it.
+
+There are folks here at Bacho\TEX\ who make music. Now imagine that this is a
+conference about music and that the theme includes the word \quotation {future}.
+Then, imagine watching that video. You see some young musicians, one of them
+probably one of the musical masterminds of this century, others instrumental to
+his success, for instance by wrapping up his work. While listening you realize
+that this next generation knows perfectly well what previous generations did and
+achieved and how they influenced the current. You see the future there. Just look
+at how old musicians reflect on such videos. (There are lots of examples around of
+young people evolving into prominent musicians and I love watching them.) There is no
+need to discuss the future; in fact, we might make a fool of ourselves doing so.
+Now back to this conference. Do we really want to discuss the future? What we
+think is the future? Our future? Why not just hope that in the flow of getting
+words on a medium we play our humble role and are not forgotten but
+remembered as an inspiration?
+
+One more word about predicting the future. When Arthur Clarke's \quotation {2001:
+A Space Odyssey} was turned into a movie in 1968, a lot of effort went into
+making sure that the not so far ahead future would look right. In 1996 scientists
+were asked to reflect on these predictions in \quotation {Hal's Legacy}. It
+turned out that most predictions were plain wrong. For instance computers got way
+smaller (and even smaller in the next 20 years) while (self|-|aware) artificial
+intelligence had not arrived either. So, let's be careful in what we predict (and
+wish for).
+
+\stopsection
+
+\startsection[title=No more themes]
+
+We're having fun here, that's why we come to Bacho\TEX\ (predilection). That
+should be our focus. Making sure that \TEX's future is not so much in the cutting
+edge but in providing fun to its users (prediction). So we just have to make sure
+it stays around (premise). That's how it started out. Just look at Don Knuth's
+3:16 poster: via \TEX\ and \METAFONT\ he got in contact with designers and I
+wouldn't be surprised if that sub|-|project was among the most satisfying parts.
+So, maybe instead of ambitious themes the only theme that matters is: show what
+you did and how you did it.
+
+\stopsection
+
+\stopchapter
+
+\stopcomponent
diff --git a/doc/context/sources/general/manuals/musings/musings-contents.tex b/doc/context/sources/general/manuals/musings/musings-contents.tex
new file mode 100644
index 000000000..45b21ec60
--- /dev/null
+++ b/doc/context/sources/general/manuals/musings/musings-contents.tex
@@ -0,0 +1,7 @@
+\startcomponent musings-contents
+
+\starttitle[title=Content]
+ \placelist[chapter][criterium=text,width=2em]
+\stoptitle
+
+\stopcomponent
diff --git a/doc/context/sources/general/manuals/musings/musings-introduction.tex b/doc/context/sources/general/manuals/musings/musings-introduction.tex
new file mode 100644
index 000000000..bc32f69ca
--- /dev/null
+++ b/doc/context/sources/general/manuals/musings/musings-introduction.tex
@@ -0,0 +1,31 @@
+% language=uk
+
+\startcomponent musings-introduction
+
+\environment musings-style
+
+\startchapter[title={Introduction}]
+
+This is a collection of articles and wrap|-|ups that don't fit in other manuals
+or collections. Some are published, some were meant as drafts for presentations.
+
+The \quotation {Children of \TEX} article is the framework for a presentation at
+Bacho\TEX\ 2017 in Poland, and covers the main theme of the conference. In the
+aftermath of that conference I wrote \quotation {Advertising \TEX} and later
+\quotation {Why use \TEX?}. The 2018 Bacho\TEX\ conference theme is explored in
+\quotation {What’s to stay, what’s to go}. After a short discussion on the
+\CONTEXT\ mailing list about stability (at a moment when \MKII\ had already been
+frozen for more than a decade but was still used without problems) I wrote \quotation
+{Stability}.
+
+Many of the thoughts in these articles are influenced by discussions with my
+colleagues Ton Otten and Kees van Marle. Operating in a similar arena, they provide
+me with the reflection needed to sort out my thoughts on these matters.
+
+\startlines
+Hans Hagen
+Hasselt NL
+2017\endash 1028
+\stoplines
+
+\stopchapter
diff --git a/doc/context/sources/general/manuals/musings/musings-perception.tex b/doc/context/sources/general/manuals/musings/musings-perception.tex
new file mode 100644
index 000000000..993604473
--- /dev/null
+++ b/doc/context/sources/general/manuals/musings/musings-perception.tex
@@ -0,0 +1,180 @@
+% language=uk
+
+\definefontfeature[ligatures][liga=yes,mode=node]
+
+\startcomponent musings-perception
+
+\environment musings-style
+
+\startchapter[title=Advertising \TEX]
+
+I can get upset when I hear \TEX ies boast about the virtues of \TEX\ compared to
+for instance Microsoft Word. Not that I feel responsible for defending a program
+that I never use(d), but attacking something for no good reason doesn't make much
+sense to me. It is especially annoying when the attack is accompanied by a
+presentation that looks pretty bad in design and typography. The best
+advertisements for \TEX\ should of course come from outside the \TEX\ community,
+by people impressed by its capabilities. How many \TEX ies can really claim that
+Word is bad when they never tried to make something in it with a learning curve
+similar to the one they went through in \TEX, or with the same amount of energy spent
+in editing and perfecting a word|-|processor|-|made document?
+
+In movies where computer technology plays a role one can encounter weird
+assumptions about what computers and programs can do. Run into a server room,
+pull one disk out of a \RAID-5 array and get all information from it. Connect
+some magic device to a usb port of a phone and copy all data from it in seconds.
+Run a high speed picture or fingerprint scan on a computer (probably on a remote
+machine) and show all pictures flying by. Okay, it's not so far from other
+unrealistic aspects in movies, like talking animals, so maybe it is just a
+metaphor for complexity and speed. When zapping channels on my television I saw
+\in{figure}[fig:tex-in-movie] and as the media box permits replay I could make a
+picture. I have no clue what the movie was about or what movie it was so a
+reference is lacking here. Anyway it's interesting that seeing a lot of \TEX\
+code flying by can impress someone: the viewer, even if no \TEX ie will ever see
+that on the console unless in some error or tracing message and even then it's
+hard to get that amount. So, the viewer will never realize that what is seen is
+definitely not what a \TEX ie wants to see.
+
+\startplacefigure[title={\TEX\ in a movie},reference=fig:tex-in-movie]
+ \externalfigure[tex-in-movie.jpg][height=8cm]
+\stopplacefigure
+
+So, as that kind of free advertisement doesn't promote \TEX\ well, what about an
+occasional mention of \TEX\ in highly|-|regarded literature? When reading
+\quotation {From bacteria to Bach and back, the evolution of minds} by Daniel
+Dennett I ran into the following:
+
+\startquotation
+In Microsoft Word, for instance, there are the typographical operations of
+superscript and subscript, as illustrated by
+
+\startnarrower
+base\high{power}
+\stopnarrower
+
+and
+
+\startnarrower
+human\low{female}
+\stopnarrower
+
+But try to add another superscript to base\high{power}\emdash it {\em should}
+work, but it doesn't! In mathematics, you can raise powers to powers to powers
+forever, but you can't get Microsoft Word to display these (there are other
+text|-|editing systems, such as TeX, that can). Now, are we sure that human
+languages make use of true recursion, or might some or all of them be more like
+Microsoft Word? Might our interpretation of grammars as recursive be rather an
+elegant mathematical idealization of the actual \quotation {moving parts} of a
+grammar?
+\stopquotation
+
+Now, that book is a wonderfully interesting read and the author often refers to
+other sources. When one reads some reference (with a quote) then one assumes that
+what one reads is correct, and I have no reason to doubt Dennett in this. But
+this remark about \TEX\ has some curious inaccuracies. \footnote {Of course one
+can wonder in general, when one encounters such an inaccuracy, how valid the
+other examples and conclusions are. However, consistency in arguments and
+confirmation by other sources can help to counter this.}
+
+First of all a textual raise or lower is normally not meant to be recursive.
+Nesting would have interesting consequences for the interline space so one will
+avoid it whenever possible. There are fonts that have superscript and subscript
+glyphs and even \UNICODE\ has slots for a bunch of characters. I'm not sure what
+Word does: take the special glyph or use a scaled down copy?
+
+Then there is the reference to \TEX\ where we can accept that the \quotation {E}
+is not lowered but just kept as a regular \quotation {e}. Actually the mentioning
+of nested scripts refers to typesetting math and that's what the superscripts and
+subscripts are for in \TEX. In math mode however, one will normally raise or
+lower symbols and numbers, not words: that happens in text mode.
+
+While Word will use the regular text font when scripting in text mode, a \TEX\
+user will either have to use a macro to make sure that the right size (and font)
+is used, or one can revert to math mode. But how to explain that one has to enter
+math and then explicitly choose the right font? Think of this:
+
+\startbuffer
+efficient\high{efficient} or
+efficient$^{\text{efficient}}$ or \par
+{\bf efficient\high{efficient} or
+efficient$^{\text{efficient}}$}
+\stopbuffer
+
+\typebuffer
+
+Which gives (in Cambria)
+
+\getbuffer
+
+Now this,
+
+\startbuffer
+efficient\high{efficient\high{efficient}} or
+efficient$^{\text{efficient$^{\text{efficient}}$}}$ or \par
+{\bf efficient\high{efficient\high{efficient}} or
+efficient$^{\text{efficient$^{\text{efficient}}$}}$}
+\stopbuffer
+
+\typebuffer
+
+will work okay but the math variant is probably quite frightening at a glance for
+an average Word user (or beginner in \TEX) and I can understand why someone would
+rather stick to click and point.
+
+\getbuffer
+
+Oh, and it's tempting to try the following:
+
+\startbuffer
+efficient{\addff{f:superiors}efficient}
+\stopbuffer
+
+\typebuffer
+
+but that only works with fonts that have such a feature, like Cambria:
+
+\blank {\switchtobodyfont[cambria]\getbuffer} \blank
+
+To come back to Dennett's remark: when typesetting math in Word, one just has to
+switch to the math editing mode and one can have nested scripts! And, when using
+\TEX\ one should not use math mode for text scripts. So in the end in both
+systems one has to know what one is doing, and both systems are equally capable.
+
+The recursion example is needed in order to explain how (following recent ideas
+from Chomsky) for modern humans some recursive mechanism is needed in our
+wetware. Now, I won't go into details about that (as I can only mess up an
+excellent explanation) but if you want to refer to \TEX\ in some way, then
+expansion \footnote{Expanding macros actually works well with tail recursion.} of
+(either combined or not) snippets of knowledge might be a more interesting model
+than recursion, because much of what \TEX\ is capable of relates to expansion.
+But I leave that to others to explore. \footnote {One quickly starts thinking of
+how \cs {expandafter}, \type {noexpand}, \type {unexpanded}, \type {protected}
+and other primitives can be applied to language, understanding and also
+misunderstanding.}
+
+Now, comparing \TEX\ to Word is always kind of tricky: Word is a text editor with
+typesetting capabilities and \TEX\ is a typesetting engine with programming
+capabilities. Recursion is not really that relevant in this perspective. Endless
+recursion in scripts makes little sense and even \TEX\ has its limits there: the
+\TEX\ math engine only distinguishes three levels (text, script and scriptscript)
+and sometimes I'd like to have a level more. Deeper nesting is just more of
+scriptscript unless one explicitly enforces some style. So, it's recursive in the
+sense that there can be many levels, but it also sort of freezes at level three.
+
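+As a minimal illustration of that freezing (my own example, not taken from the
+book under discussion):
+
+\startbuffer
+$x^{x^{x^{x^{x}}}}$
+\stopbuffer
+
+\typebuffer
+
+which gives
+
+\getbuffer
+
+where everything beyond the second superscript level comes out at scriptscript
+size, unless a style is enforced explicitly.
+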
+\startplacefigure[title={Nicer than \TEX},reference=fig:nicer-than-tex]
+ \externalfigure[mathematics.png][width=\textwidth]
+\stopplacefigure
+
+I love \TEX\ and I like what you can do with it and it keeps surprising me. And
+although mathematics is part of that, I seldom have to typeset math myself. So, I
+can't help that \in {figure} [fig:nicer-than-tex] impresses me more. It even has
+the so|-|familiar|-|to|-|\TEX ies dollar symbols in it: the poem \quotation
+{Poetry versus Orchestra} written by Hollie McNish, music composed by Jules
+Buckley and artwork by Martin Pyper (I have the \DVD\ but you can also find it on
+\YOUTUBE). It reminds me of Don Knuth's talk at a \TUG\ meeting. In \TUGBOAT\
+31:2 (2010) you can read Don's announcement of his new typesetting engine i\TEX:
+\quotation {Output can be automatically formatted for lasercutters, embroidery
+machines, \THREED\ printers, milling machines, and other \CNC\ devices \unknown}.
+Now that is something that Word can't do!
+
+\stopcomponent
diff --git a/doc/context/sources/general/manuals/musings/musings-stability.tex b/doc/context/sources/general/manuals/musings/musings-stability.tex
new file mode 100644
index 000000000..7dc35c6be
--- /dev/null
+++ b/doc/context/sources/general/manuals/musings/musings-stability.tex
@@ -0,0 +1,388 @@
+% language=uk
+
+\environment musings-style
+
+\startcomponent musings-stability
+
+\startchapter[title={Stability}]
+
+\startsubject[title=Introduction]
+
+How stable is \CONTEXT ? This question is hard to answer. For instance \MKII\
+hasn't changed for years and seems to work quite well: no changes equals
+stability. Those who use it can do with what it offers. The potentially sensitive
+dependencies on for instance fonts are probably absent because there is not much
+development in the 8 bit fonts arena. As long as these are available we're okay,
+in fact, \OPENTYPE\ fonts are more a moving target and therefore less stable.
+
+What do we mean by stable? The fundamental differences between an 8 bit engine
+(and fonts) and a \UNICODE\ aware engine able to handle \OPENTYPE\ fonts are
+substantial, which is why we dropped some functionality and added some relevant
+new features. One can consider that a problem but in practice using fonts has become
+easier so no one is hurt by it. Here we need to keep in mind that \PDFTEX\ is
+really stable: it uses fonts and technology that doesn't change. On the other
+hand \XETEX\ and \LUATEX\ follow new trends. In doing so, \XETEX\ uses libraries,
+which introduces a dependency and instability, while \LUATEX\ assumes solutions
+in \LUA\ which means that users and macro writers can tweak and thereby also
+introduce instability (but at least one can adapt that code).
+
+Due to the way the user interface is set up, it is unlikely that \CONTEXT\ will
+change. But the fact that we now have \LUA\ available means that many commands
+have been touched. Most behave compatibly, some have more functionality, and of
+course we have a \LUA\ interface. We include a lot of support code which also
+lessens dependencies.
+
+The user input is normally \TEX, but for \XML\ the move to \MKIV\ meant that we
+dropped the \MKII\ way of dealing with it in favour of a completely new
+mechanism. I get the impression that those using \XML\ don't regret that change.
+Talking of stability the \MKIV\ \XML\ interface is typically a mechanism that is
+stable and might change little. We can add new trickery but the old stays as it
+is.
+
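+To give an idea of what that interface looks like, here is a deliberately minimal
+sketch (the setup names and the file name are made up):
+
+\starttyping
+\startxmlsetups xml:demo:base
+    \xmlsetsetup{#1}{document|p}{xml:demo:*}
+\stopxmlsetups
+
+\xmlregistersetup{xml:demo:base}
+
+\startxmlsetups xml:demo:document
+    \xmlflush{#1}
+\stopxmlsetups
+
+\startxmlsetups xml:demo:p
+    \xmlflush{#1}\par
+\stopxmlsetups
+
+\xmlprocessfile{demo}{somefile.xml}{}
+\stoptyping
+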
+If we look at the output, there is \DVI\ and \PDF. In \MKII\ the \DVI\ could
+become \POSTSCRIPT. As there are different \DVI\ post|-|processors the backend
+code was using a plug|-|in model. Contrary to other macro packages there was only
+one so called format that could adapt itself to the required (engine specific)
+output. A \CONTEXT\ run has always been managed by a wrapper so users were not
+bothered much by what \TEX\ engine they used and|/|or what backend was triggered.
+This changed with \MKIV\ where we use just \LUATEX, always produce \PDF\ and
+optionally can export \XML. But again the run is managed by a wrapper, which
+incidentally is written in \LUA\ and thereby avoids dependencies on for instance
+\PERL, \RUBY\ or \PYTHON, which are moving targets, use libraries and additional
+user code, and thereby are potentially unstable too.
+
+The \PDF\ code that is produced is a mix of what the engine spits out and what
+the macro package injects. The code is normally rather simple. This means that
+it's no big deal to support the so called standards. It also means that we can
+support advanced interactivity and other features but these also depend on the
+viewers used. So, stability here is more fluid, for instance because the \PDF\
+standard evolves and|/|or we need to adapt to viewers. Special demands like
+tagged \PDF\ have been supported right from the start but how that evolves
+depends mostly on input from users who need it. Again, that is less important
+(and crucial) for stability than the rendering capabilities.
+
+The fact that we use \LUA\ creates a dependency on that language but the reason
+that we use it is {\em because} it is so stable. We follow the updates and so far
+that worked out well. Now, say that we had a frozen version of \CONTEXT\ 2010 and
+\LUATEX\ 1.09 that uses \LUA\ 5.3, would that work? First of all, in 2010
+\LUATEX\ itself was evolving so the answer is probably \quotation {no}, unless
+one adds a few compatibility patches. I'm not going to try it. The change from
+5.1 to 5.2 to 5.3 was not really a problem I think and the few issues could be
+dealt with easily. If you want long term stability and use a lot of \LUA\ code
+you can take it into account when coding. Avoiding external libraries is a good
+start.
+
+Fonts are, more than before, moving targets. So, if you want stability there you
+should save them with your document source. The processing of them has evolved
+and has been improved over time. By now it's rather stable. More recent code can
+catch more issues and fixes are relatively easy. But it's an area that you always
+need to check when you update an old distribution. The same is true for language
+related hyphenation patterns and script specific support. The community is no
+longer leading in the math department either (\OPENTYPE\ math is a \MICROSOFT\
+invention). But, the good news is that the \TEX\ ecosystem is always fast to
+adapt and can also often provide more functionality.
+
+Vertical spacing, in fact spacing in general, is an area that can always be
+improved, so that is where you can expect changes. The same is true for side
+floats or mechanisms where content is somehow attached to other moving content,
+for instance marginal notes.
+
+But code dealing with fonts, color, scripts, structure, and specific features
+that once written don't need more, will not change that much. As mentioned for
+fonts, like any resource, we also depend on third parties. Colors can relate to
+standards, but their main properties are unchanged. Support for specific scripts
+can (and will) be improved due to user input and demands so there the users also
+influence stability. Structure doesn't really influence the overall rendering,
+but the way you set it up does, and that's user styling. Of course during the
+transition from \MKII\ to \MKIV\ and the evolution of \LUATEX\ things could be
+broken, but fixing something structural seldom relates to rendering. If for
+instance we improve the interpretation of \BIBTEX\ input, which can be really
+messy, that involves data processing, not rendering. When we improve support for
+the \APA\ standard, which is complex, it might involve rendering but then that's
+asked for and expected. One cannot do better than the input permits.
+
+\stopsubject
+
+\startsubject[title=Publishers]
+
+When discussing stability and especially stability as requirement we need to look
+at the way \CONTEXT\ is used. So let's look at a few scenarios. Say that a
+publisher gets a camera ready book from an author in \PDF\ format. In that
+case the author can do all tweaks needed. Now say that the publisher also wants
+the source code in a format that makes reuse possible.
+
+But let's face reality. Will that publisher really reformat the document in \PDF\
+again? It's very unlikely. First of all the original \PDF\ can be kept, and
+second, a reformat only makes sense after updating the content or going for a
+completely different layout. It's basically a new book then. In that case literal
+similarity of output is irrelevant. It is a cheap demand without much substance.
+
+When the source is used for a different purpose the tool used to make the \PDF\
+is irrelevant. In that case the coding of the source can matter. If it is in some
+dialect of \TEX, fine, one has to convert it anyway (to suit the other usage). If
+there is an \XML\ export available, fine too as it can be transformed, given that
+the structure is rich enough, something that is unlikely to have been checked
+when the original was archived. Then there could have been the demand for a
+document in some other format and who can guarantee stability of the tools used
+there? Just look at how \MICROSOFT\ Word evolved, or for that matter, its
+competitors. On the average \TEX\ is more stable as one can snapshot a \TEX\ tree
+and run binaries for years, if needed, in a virtual machine.
+
+So, I don't think that a publisher is of any relevance in the discussion about
+stability. Even if we can clearly define what a publisher is, I doubt if
+publishers themselves can be considered long term stable organizations. Not
+today. I'm not sure if (especially the large) publishers really deserve a place
+in the discussion about stability but I'm willing to discuss that when I run into
+one.
+
+The main problem that an author can face when being confronted with the stability
+issue this way is that the times are long gone when publishers had a clue about
+what \TEX\ is, how it evolved and how it always had to and did adapt to changing
+requirements. If you're lucky you will run into someone who does know all this.
+They're normally a bit older and have seen the organization from many angles and
+therefore are fun to work with.
+
+But even then, rendering issues are often not high on their agenda. Outsourcing
+often has become the modus operandi which basically brings us to the second group
+involved in this discussion: suppliers.
+
+\stopsubject
+
+\startsubject[title=Suppliers]
+
+I don't know many suppliers other than the ones we ran into over a few decades.
+At least where I live the departments that are responsible for outsourcing
+typesetting like to deal with only a few large suppliers, interestingly because
+they assume that they are stable. However, in my experience hardly any of those
+seem to have survived. (Of course one can wonder if long term commitment really
+is that important in a world where companies change so fast.) This is somewhat
+obscured by the fact that publishers themselves merge, reorganize, move people
+around, etc., so who can check on the stability of suppliers? It is definitely a
+fact that at least recently hardly any of them played a role of any relevance in
+the development of stable tools. In the past the membership of \TEX\ user groups
+contained people working at publishers and suppliers but that has changed.
+
+Let's focus on the suppliers that somehow use \TEX\ and let's consider two kinds
+of suppliers: small ones, where only a few people work, and large ones. The
+small ones depend on stable \TEX\ distributions, like \TEX Live, where they can
+get the resources from: styles, fonts, patterns, binaries. If they get the
+author's \TEX\ files they need to have that access. They have to rework that input
+into what the customer demands and that likely involves tweaks. So, maybe they
+have developed their own additional code. For that code, stability is their own
+responsibility. Did they tweak core code of a macro package? Fine, but you might
+have it coming when you update. You cannot expect the evolving free meal world to
+stick to your commercial needs. A supplier can play safe and somehow involve the
+developers of macro packages or consult them occasionally, but does that really
+happen often? Interestingly, the few times that I was asked for input it was
+also wrapped in obscurity, as if some holy grail of styling was involved, while
+it's quite likely that the developer of a macro package can write such a style
+(or extra code) easily and probably also better. There really is not that much
+unique code around.
+
+Small suppliers can be on mailing lists where they can contribute, get feedback,
+provide testing, etc. They are part of a process and as such have some influence
+on stability. If they charge by the page, then a change in their tools can be
+reflected in what they charge. Basically redoing a book (or so) after a decade is
+doing a new job. And adapting to some new options in a package, as part of a
+typesetting job is probably no big deal. Is commercial really more stable than
+open source free software? Probably not, except for open source software
+developers whose real objective is to eventually sell their stuff to some company
+(and cash in), even accepting that it gets ditched. Small suppliers are more flexible.
+
+The large suppliers are a different group. They often guard their secrets and
+stay in the dark. They probably seldom share (fundamental) code and information.
+If they are present in a community it can be for marketing reasons. If at some
+point a large supplier would demand stability, then my first response would be:
+sure I can make you a stable setup and maybe even provide intermediate patches
+but put your money where your mouth is. But that never happened and I've come to
+the conclusion that we can safely ignore that group. The \TEX\ user groups create
+distributions and have for instance funded font development, and it is the common
+users who paid for that, not the large ones. To some extent this is actually good
+because large (software related) organizations often have special agendas that
+can contradict what we aim at in the long term.
+
+From the author's perspective there is a dilemma here. When you submit to a
+publisher who outsources, it can be a demand to deliver in a specific \TEX\
+format. Often a \PDF\ comes with the source then, so that the intended rendering
+is known. Then that source goes to a supplier who then (quite likely) redoes a
+lot of the coding in some stable subset, maybe even in a very old version of the
+macro package. If I were such an author I'd render the document in \quote {as
+stupid as possible mode} because you gain nothing by spending time on the looks.
+So, stability within the package that you use is easy and translation from one to
+another probably also. It's best to check beforehand what will happen with your
+source and let stability, if mentioned, be their problem. After all they get paid
+for it.
+
+Suppliers seldom know \CONTEXT. An interesting question is if they really know
+the alternatives well, apart from the bit they use. A well structured \CONTEXT\
+source (or probably any source) is often easy to convert to another format. You
+can assume that a supplier has tools for that (although we're often surprised by
+the poor quality of the tools used). Often the strict demand for some kind of
+format is an excuse for lack of knowledge. Unfortunately you need a large author
+base to change that attitude.
+
+\stopsubject
+
+\startsubject[title=Authors]
+
+Before we move to some variants of the above, first I will look at stability from
+the author's perspective. When a book is being written the typesetting more or
+less happens as part of the process. The way it looks can influence the way you
+write and vice versa. Once the book is done it can go to print, and that's it,
+unless you were using beta versions of \CONTEXT\ and updated frequently. Normally you will
+try to work in a stable setup. Of course when a user asks for additional features
+while working on a project, he or she should also accept other beta features
+and side effects.
+
+After a few years an author might decide to update the book. The worst that can
+happen is that the code doesn't run with the latest \CONTEXT. This is not so
+likely because commands are upward compatible. However, the text might come out a
+bit different, for instance because different fonts or patterns are used. But on
+the average paragraphs will come out the same in \TEX. You can encounter
+differences in the vertical spacing and page breaks, because that is where
+improvements are still possible. If you use conceptually and implementation|-|wise
+complex mechanisms like side floats, you can also run into compatibility issues.
+But all these don't really matter much because the text will be updated anyway
+and fine|-|tuning of page breaks (if at all) happens at the end. The more you try
+to compete with desktop publishing, and the more tweaks you apply, the greater
+is the risk that you introduce instability. It is okay for a one|-|time job, but
+when you come back to it after a decade, be prepared for surprises.
+
+Even if you stick to the original coding, it makes sense to sacrifice some of that
+stability if new mechanisms have become available. For instance, if you use
+\METAPOST, better ways to solve your problem might have become available. Or if
+your document is 15 years old, a move from \MKII\ to \MKIV\ is a valid option,
+in which case you might also consider using the latest fonts.
+
+Of course, when you made a style where you patched core code, you can expect
+problems, because anything not explicitly mentioned in the interface definition
+files is subject to change. But you probably see that coming anyway.
+
+So, is an author (or stand alone user) really dependent on stability? Probably
+less than thought. In fact, the operating system, internet and browsers,
+additional tools: all change over time and one adapts. It's something one can
+live with. Just see how people adapt to phones, tablets, social media, electric
+cars, etc. As long as the document processes and reasonable output is generated
+it's fine. And that is always what we aim at! After all we need to be able to use
+it ourselves, don't we?
+
+\stopsubject
+
+\startsubject[title=Projects]
+
+Although it is often overlooked as a valid alternative for rendering in large scale
+projects, \CONTEXT\ is perfect as a component in a larger whole. Something goes
+in, something comes out. In a long term project one can just install a minimal
+distribution, write styles, and run it for ages. Use a virtual machine and
+we're talking decades without any change. And, when one updates, it's easy to check
+if all still works. Often the demands and styles are simple and predictable. It's
+way more likely that a hard coded solution in some large programming environment has
+stability issues than that the \CONTEXT\ bit has.
+
+If \CONTEXT\ is used in for instance documentation of (say) software, again there
+is no real issue. Such documents are simple, evolve and therefore have no stable
+page flow, and updating \CONTEXT\ is not needed if the once decided upon coding
+is stable. You don't need the latest features. We've written styles and setups for
+such tasks and indeed they run for ages.
+
+It can make me smile to see how much effort sometimes goes into low quality
+rendering where \CONTEXT\ could do a way better job with far less investment in
+time and money but where some presumed stable toolkit is used instead, one
+that comes with expensive licensing, from companies that come and go but shine in
+marketing. (A valid question is to what extent the quality of and care for
+documentation reflects the core products that a company produces, at least under
+the hood.)
+
+The biggest hurdle in setting up a decent efficient workflow is that it has to be
+seen as a project: proper analysis, proper planning, prototyping and testing,
+etc. You invest first and gain later. When dealing with paper many publishers
+still think in price per page and have problems seeing that a stable mostly
+automated flow in the end can result in a ridiculously low price per page,
+especially in typesetting on demand.
+
+\stopsubject
+
+\startsubject[title=Hybrids]
+
+Lastly, I will mention a setup that we are sometimes involved in. An author writes
+books and uses \TEX. The publisher is okay with that and adds some quality
+assurance but in the end the product comes from the author. Maybe images are
+outsourced (not always for the better) but these can be handled easily. It can be that
+a copy|-|editor is involved and that person then also has to use \TEX\ of course,
+or give feedback to the author.
+
+Publishers, and this really depends on knowledgeable persons, who as said can
+be fun to work with, can look beyond paper and also decide on additional
+materials, for instance web pages, interactive exercises, etc. In that case
+either \CONTEXT\ input has to be available as \XML\ (an export) or (often better)
+\XML\ is the starting point for multiple output. Contrary to what is believed,
+there are authors out there who have no problem coding in \XML\ directly. They
+think in structured content and reuse! The fact that they can hit a button in the
+editor and see the result in \PDF\ helps a lot. It just works.
+
+Here stability is mostly achieved by simply not updating during a project. There
+are however cases where an update is needed, for instance because demands
+changed. An example is a project where \ASCIIMATH\ is used which is a moving
+target. Of course one can update just that module, and often that works, but not
+when a module uses some new improved core helpers. Another example is additional
+proofing options.
+
+The budget of such projects seldom permits patching an existing distribution, so
+we then just update to the latest, but not without first checking that the style used
+works okay. There is no author involvement in this. Depending on the workflow, it can
+even be that the final rendering, which involves fine|-|tuning (side) float placement
+or page breaks (often educational documents have special demands), is done by us
+using special directives.
+
+Such hybrid workflows are quite convenient for all parties. The publisher works
+with the author who likes using these tools, the author can do her or his thing
+in the preferred way, and we do what we're best at: supporting this. And it
+scales up pretty well too if needed, without much cost for the publishers.
+
+\stopsubject
+
+\startsubject[title=Conclusion]
+
+So what can we conclude with respect to the demand for stability? First of course
+that it's important that our files keep running well. So, functionality should be
+stable. Freezing a distribution will make sure that during a project you don't run
+into issues. Many \CONTEXT\ users update frequently in order to benefit from the
+latest additions. Most will not be harmed by this, but when something really
+breaks it's users like those on the \CONTEXT\ support list (who often also
+contribute in helping out other users) that are listened to first. Publishers'
+demands play no role in this, if only because they also play no role in
+typesetting, and if they want to they should also contribute. The same is true
+for large suppliers. We're talking of free software often written without any
+compensation so these parties have no say in the matter unless they pay for it.
+It's small suppliers, authors and general users that matter most. If \CONTEXT\ is
+part of a workflow that we support, of course stability is guaranteed quite well,
+and those paying for that never have an issue with better solutions popping up.
+In fact, \CONTEXT\ is often just a tool then, one that does the job and questions
+about stability don't matter much in practice, as long as it does the job well.
+
+The main engine we use, \LUATEX, will be quite stable from version 1.10 and we'll
+try to make sure that newer versions are capable of running an older \CONTEXT,
+which is easier when no fundamental changes happen in the engine. Maybe a stripped
+down version of \LUATEX\ for \CONTEXT\ can facilitate that objective even more.
+
+Users themselves can try to stick to standard \CONTEXT\ features. The more tricks
+you apply, the less stable your future might be. Most mechanisms are not evolving
+but some, like those that deal with columns, might become better over time. But
+typesetting in columns is often a one|-|shot adventure anyway (and who needs
+columns in the future).
+
+Of one thing users can be sure. There will never be a \CONTEXT\ professional or
+\CONTEXT\ enterprise. There is only one variant. All users get the same
+functionality and policies don't change suddenly. There will be no lock|-|in to some
+cloud or web based service either. Of course one can hire us for support of any
+kind but that's independent of the distributed package. There is support by users
+for users on mailing lists and other media. That itself can also guard stability.
+
+But, always keep in mind that stability and progress, whether or not driven by the
+environment that we operate in, can be in conflict.
+
+\stopsubject
+
+\stopchapter
+
+\stopcomponent
diff --git a/doc/context/sources/general/manuals/musings/musings-staygo.tex b/doc/context/sources/general/manuals/musings/musings-staygo.tex
new file mode 100644
index 000000000..4be647e47
--- /dev/null
+++ b/doc/context/sources/general/manuals/musings/musings-staygo.tex
@@ -0,0 +1,461 @@
+% language=uk
+
+% Written with on repeat:
+%
+% Rai Thistlethwayte: Betty Page (the keyscape version)
+
+% \usemodule[art-01,abr-04]
+%
+% \setupbodyfont[12pt]
+%
+% \startdocument
+% [title={What’s to stay, what’s to go},
+% subtitle={The 2018 Bacho\TeX\ theme},
+% author={Hans Hagen}]
+
+\definedescription
+ [theme]
+ [before=\startnarrower,
+ after=\stopnarrower,
+ title=yes,
+ alternative=serried,
+ width=fit,
+ distance=.5\emwidth,
+ text={\documentvariable{title}:}]
+
+% \starttitle[title=\documentvariable{title}\\\txx\documentvariable{subtitle}]
+
+\startcomponent musings-staygo
+
+\environment musings-style
+
+\startchapter[title={What’s to stay, what’s to go}]
+
+\startsection[title=Introduction]
+
+The following text was written as preparation for a 2018 talk at Bacho\TEX, which
+had this theme. It's mostly a collection of thoughts. It was meant more as a
+wrap|-|up for the presentation (possibly with some discussions) than as an article.
+
+\stopsection
+
+\startsection[title=Attraction]
+
+There are those movies where some whiz-kid sits down behind a computer, keys in a
+few commands, and miracles happen. Ten fingers are used to generate programs that
+work immediately. It's no problem to bypass firewalls. There is no lag over
+network connections. Checking massive databases is no big deal and there's even
+processing power left for real time visualization or long logs to the terminal.
+
+How boring and old fashioned must a regular edit||run||preview cycle look
+compared to this. If we take this 2018 movie reality as a reference, in a time when
+one can suck a phone empty with a simple connection, pull a hard drive from a
+raid five array and still have all data immediately available, when we can follow
+in real time whoever we want using cameras spread over the country, it's pretty clear
+that this relatively slow page production engine \TEX\ has no chance to survive,
+unless we want to impress computer illiterate friends with a log flying by on the
+console (which in fact is used in movies to impress as well).
+
+On YouTube you can find these (a few hours long) sessions where Jacob Collier
+harmonizes live in one of these Digital Audio Workstation programs. A while later
+on another channel June Lee will transcribe these masterpieces into complex
+sheets of music by ear. Or you can watch the weekly Wintergatan episodes on
+building the Marble Machine from wood using drilling, milling, drawing programs
+etc. There are impressive videos of multi|-|dimensional led arrays made by hand
+and controlled by small computers and robots that solve Rubik's Cubes. You can be
+impressed by these Animusic videos, musicians show their craftsmanship and
+interesting informative movies are all over the place. I simply cannot imagine
+millions of kids watching a \TEX\ style being written in a few hours. It's a real
+challenge for an attention span. I hope to be proven wrong but I fear that for
+the upcoming generation it's probably already too late because the \quote {wow}
+factor of \TEX\ is low at first encounter. Although: picking up one of Don Knuth's
+books can have that effect: a nice mixture of code, typesetting and subtle
+graphics, combined with great care, only possible with a system like \TEX.
+
+\starttheme
+ Biology teaches us that \quote {cool} is not a recipe for \quote {survival}.
+ Not all designs by nature look cool, and it's only efficiency and
+ functionality that matters. Beauty sometimes matters too but many functional
+ mechanisms can do without. So far \TEX\ and its friends were quite capable to
+ mechanisms can do without. So far \TEX\ and its friends have been quite capable
+ of surviving, so there must be something in them that prevents them from being
+ discarded.
+ visual attraction is a missing competitive trait.
+\stoptheme
+
+\stopsection
+
+\startsection[title=Satisfaction]
+
+Biology also teaches us that chemistry can overload reason. When we go for
+short|-|term pleasure instead of long|-|term satisfaction (Google for Simon Sinek
+on this topic), addiction kicks in (for instance driven by crossing the dopamine
+thresholds too often, Google for Robert Sapolsky). Cool might relate more to
+pleasure while satisfaction relates to an effort. Using \TEX\ is not that cool
+and often takes an effort. But the results can be very satisfying. Where \quote
+{cool} is rewarding in the short term, \quote {satisfaction} is more a long term
+effect. So, you probably get the best (experience) out of \TEX\ by using it for a
+lifetime. That's why we see so many old \TEX ies here: many like the rewards.
+
+If we want to draw new users we run into the problem that humans are not that
+good at long|-|term vision. This means that we cannot rely on showing cool (and
+easy) features but must make sure that the long term reward is clear. We can try
+to be \quote {cool} to draw in new users, but it will not be the reason they
+stay. Instant success is important for kids who have to make a report for school,
+and a few days \quotation {getting acquainted with a program} doesn't fit in.
+It's hard to make kids addicted to \TEX\ (which could be a dubious objective).
+
+\starttheme
+ As long as the narrative of satisfaction can be told we will see new users.
+ Meetings like Bacho\TEX\ are where the narrative gets told. What will happen
+ when we no longer meet?
+\stoptheme
+
+\stopsection
+
+\startsection[title=Survival]
+
+Survival relates to improvements, stability and discarding of weak aspects.
+Unfortunately that does not work out well in practice. Fully automated
+multi||column typesetting with all other elements done well too (we just mention
+images) is hard and close to impossible for arbitrary cases, so nature would have
+gotten rid of it. Ligatures can be a pain especially when the language is not
+tagged and some kind of intelligence is needed to selectively disable them. They
+are the tail of the peacock: not that handy but meant to be impressive. Somehow
+they stayed around in automated typesetting; in biology they would be called a freak
+of nature: probably a goodbye in wildlife. And how about page breaks on an
+electronic device: getting rid of them would make the floating figures go away
+and remove boundary conditions often imposed. It would also make widows and clubs
+less of a problem. One can even wonder if with page breaks the widows and clubs
+are the biggest problems, and if one can simply live with them. After all, we can
+live with our own bodily limitations too. And (depending on what country
+you live in) you can also live with bad roads, bad weather, pollution, taxes, lack
+of healthcare for many, too much sugar in food, and more.
+
+\starttheme
+ Animals or plants that can adapt to live on a specific island might not
+ survive elsewhere. Animals or plants introduced in an isolated environment
+ might quickly dominate and wipe out the locals. What are the equivalents in
+ our \TEX\ ecosystem?
+\stoptheme
+
+\stopsection
+
+\startsection[title=Niches]
+
+But arguments will not help us determine if \TEX\ is the fittest for survival.
+It's not a rational thing. Humans are bad at applying statistics in their lives,
+and looking far ahead is not a trait needed to survive. Often nature acts in
+retrospect (see \quotation {Climbing Mount Improbable} by Richard Dawkins). So, it doesn't
+matter if we save time in the future if it complicates the current job. If
+governments and companies cannot look ahead and act accordingly, how can we
+extrapolate software (usage) or more specifically typesetting demands? Just look
+at the political developments in the country that hosts this conference. Could we
+have predicted the diminishing popularity of the \EU\ (and disturbing retrograde
+political mess in some countries) of 2018 when we celebrated the moment Poland
+joined the \EU\ at a Bacho\TEX\ campfire?
+
+Extrapolating the future quality of versions of \TEX\ or macro packages also doesn't
+matter much. With machine learning and artificial intelligence around the corner and
+with unavoidable new interfaces that hook into our brains, who knows what systems
+we need in the future. A generic flexible typesetting system is probably not the
+most important tool then. When we discuss quality and design it gets personal, so
+a learning system that renders neutrally coded content into a form that suits
+an individual demands a different kind of tool than we have now.
+
+In the short term (our life span) it makes more sense to look around and see how
+other software (ecosystems) fare. Maybe we can predict \TEX's future from that.
+Maybe we can learn from others' mistakes. In the meantime we should not flatter
+ourselves with the idea that a near perfect typesetting system will draw attention
+and be used by a large audience. Factors external to the community play too
+important a role in this.
+
+\starttheme
+ It all depends on how well it fits into a niche. Sometimes survival is only
+ possible by staying low on the radar. But just as we destroy nature and kill
+ animals competing for space, programs get driven out of the software world.
+ On a positive note: in a project that provides open (free) math for schools,
+ students expressed a preference for a printed book over \WEB|-|only (one curious
+ argument for \WEB\ was that it permits easier listening to music at the same
+ time).
+\stoptheme
+
+\stopsection
+
+\startsection[title=Dominance]
+
+Last year I installed a somewhat clever (evohome) heating control system. It's
+probably the only \quotation {working out of the box} system that supports 12
+zones, but at the same time it has as closed an interface as any other. One can
+tweak a bit via a web interface, but that one works via a proxy outside so there is
+a lock|-|in. Such a system is a gamble because it's closed and we're talking of a
+20 year investment. I was able to add a layer of control (abusing \LUATEX\ as
+\LUA\ engine and \CONTEXT\ as library) so let's see. When I updated the boiler I
+also reconfigured some components (like valves) and was surprised how poorly
+upgrading was supported. One ends up with lost settings and weird interference
+and it's because I know a bit of programming that I kept going and managed to add
+more control. Of course, after a few weeks I had to check a few things in the
+manuals, like how to enter the right menu.
+
+So, as the original manuals are stored somewhere, one picks up the smart phone
+and looks for the manual on the web. I have no problem with proper \PDF\ as a
+manual but why not provide a simple standard format document alongside the fancy
+folded A3 one. Is it because it's hard to produce different instances from one
+source? Is it because it takes effort? We're talking of a product that doesn't
+change for years.
+
+\starttheme
+ The availability of flexible tools for producing manuals doesn't mean that
+ they are used as such. They don't support the survival of tools. Bad examples
+ are a threat. Dominant species win.
+\stoptheme
+
+\stopsection
+
+\startsection[title=Extinction]
+
+When I was writing this I happened to visit a bookshop where I always check the
+SciFi section for new publications. I picked out a pocket and wondered if I had
+the wrong glasses on. The text was wobbling and looked kind of weird. On close
+inspection indeed the characters were kind of randomly dancing on the baseline
+and looked like some 150 \DPI\ (at most) scan. (By the way, I checked this the
+next time I was there by showing the book to a nephew.) I get the idea that quite
+some books get published first in the (more expensive) larger formats, so
+normally I wait till a pocket size shows up (which can take a year), so maybe here
+I was dealing with a scan of a larger print scaled down.
+
+What does that tell us? First of all that the publisher doesn't care about the
+reader: this book is just unreadable. Second, it demonstrates that the printer
+didn't ask for the original \PDF\ file and scale down that (outline) copy. In
+such a case it really doesn't matter if you use some high quality typesetting
+program. It's also a waste of time to talk to such publishers about quality
+typesetting. The printer probably didn't even bother to ask for a \PDF\ file that
+could be scaled down.
+
+\starttheme
+ In the end most of the publishing industry will die and this is just one of
+ the symptoms. Typesetting as we know it might fade away.
+\stoptheme
+
+\stopsection
+
+\startsection[title=Disinterest]
+
+The newspaper that I read has a good reputation for design. But why do they need
+to drastically change the layout and font setup every few years? Maybe like an
+animal marking his or her territory a new department head also has to put a mark
+on the layout. Who knows. For me the paper became pretty hard to read: a too
+light font that suits none of the several glasses that I have. So yes, I spend
+less time reading the paper. In a recent commentary about the 75 year history of
+the paper there was a remark about the introduction of a modern look a few
+decades ago by using a sans serif font. I'm not sure why sans is considered
+modern (most handwriting is sans) and to me some of these sans fonts look pretty
+old fashioned compared to a modern elegant serif (or mix).
+
+\starttheme
+ If marketing and fashion of the day dominate then a wrong decision can result
+ in dying pretty fast.
+\stoptheme
+
+\stopsection
+
+\startsection[title=Persistence]
+
+Around the turn of the century I had to replace my \CD\ player and realized that it
+made more sense to invest in ripping the \CD's to \FLAC\ files and use a decent
+\DAC\ to render the sound. This is a generic approach similar to processing
+documents with \TEX\ and it looks just as future proof. So, I installed a
+virtual machine running SlimServer and bought a few SlimDevices, although by that
+time they were already called SqueezeBoxes.
+
+What started as an independent supplier of hardware and an open source program
+had gone the (nowadays rather predictable) route of a buy out by a larger company
+(Logitech). That company later ditched the system, even if it had a decent share
+of users. This \quotation {start something interesting and rely on dedicated
+users}, then \quotation {sell yourself (to the highest bidder)} and a bit later
+\quotation {accept that the product gets abandoned} is where open source can fail
+in many aspects: loyal users are ignored and offended, with the original author
+basically not caring about it. The only good thing is that because the software
+is open source there can be a follow up, but of course that requires that there
+are users able to program.
+
+I have 5 small boxes and a larger transporter so my setup is for now safe from
+extinction. And I can run the server on any (old) \LINUX\ or \MSWINDOWS\
+distribution. For the record, when I recently connected the 20 year old Cambridge
+CD2 I was surprised how well it sounded on my current headphones. The only
+drawback was that it needs 10 minutes for the transport to warm up and get
+working.
+
+In a similar fashion I can still use \TEX, even when we originally started using
+it with the only viable quality \DVI\ to \POSTSCRIPT\ backend at that time
+(\DVIPSONE). But I'm not so sure what I'd have done if I had not been involved in the
+development of \PDFTEX\ and later \LUATEX. As an average user I might just have
+dropped out. As with the \CD\ player, maybe someone will dust off an old \TEX\
+some day and maybe the only hurdle is to get it running on a virtual retro
+machine. Although \unknown\ recently I ran into an issue with a virtual machine
+that didn't provide a console after a \KVM\ host update, so I'm also getting
+pessimistic about that escape for older programs. (Not infrequently, when a library
+update is forced into the \LUATEX\ repository, we face some issue, and it's not
+something the average user wants (or is able) to cope with.)
+
+\starttheme
+ Sometimes it's hard to go extinct, even when commerce interfered at some
+ point. But it does happen that users successfully take (back) control.
+\stoptheme
+
+\stopsection
+
+\startsection[title=Freedom]
+
+If you buy a book originating in academia written and typeset by the author,
+there is a chance that it is produced by some flavour of \TEX\ and looks quite
+okay. This is because the author could iterate to the product she or he likes.
+Unfortunately the web is also a source of bad looking documents produced by \TEX.
+Even worse is that many authors don't even bother to set up a document layout
+properly, think about structure and choose a font setup that matches well. One
+can argue that only content matters. Fine, but then one also shouldn't claim
+quality simply because \TEX\ has been used.
+
+I've seen examples of material meant for bachelor students that made me pretend
+that I am not familiar with \TEX\ and cannot be held responsible. Letter based
+layouts on A4 paper, or worse, meant for display (or e|-|book devices) without
+bothering to remove the excessive margins. Then these students are forced to use
+some collaborative \TEX\ environment, which makes them dependent on the quality
+standards of fellow students. No wonder that one then sees dozens of packages
+being loaded, abundant copy and paste and replace of already entered formulas and
+interesting mixtures of inline and display math, skips, kerns and whatever can
+help to make the result look horrible.
+
+\starttheme
+ Don't expect enthusiastic new users when you impose \TEX\ but take away freedom
+ and force folks to cooperate with those with lesser standards. It will not
+ help quality \TEX\ to stay around. You cannot enforce survival, it just
+ happens or not, probably better with no competition or with a competition so
+ powerful that it doesn't bother with the niches. In fact, keeping a low
+ profile might be best! The number of users is no indication of quality,
+ although one can abuse that statistic selectively.
+\stoptheme
+
+\stopsection
+
+\startsection[title=Diversity]
+
+Diversity in nature is enormous. There are of course niches, but in general there
+are multiple variants of the same. When humans started breeding stock or
+companion animals, diversity was also a property. No one is forcing the same dog
+upon everyone, or the same cow. However, when industrialization kicks in, things
+become worse. Many cows in our country share the same dad. And when we look at
+for instance corn, tomatoes or whatever, dominance is not dictated by what nature
+figures out best, but by what commercially makes most sense, even if that means
+that something can't reproduce by itself any longer.
+
+In a similar way the diversity of methods and devices to communicate (on paper)
+at some point turns into commercial uniformity. The diversity is simply very
+small, also in typesetting. And even worse, a user even has to defend
+her|/|himself for a choice of system (even in the \TEX\ community). It's just
+against nature.
+
+\starttheme
+ Normally something stays around till it no longer can survive. However, we
+ humans have a tendency to destroy and commerce is lending a hand here. In
+ that respect it's a surprise that \TEX\ is still around. On the other hand,
+ humans also have a tendency to keep things artificially alive and even to
+ revive them. Can we revive \TEX\ in a few hundred years given the complex code
+ base and Make infrastructure?
+\stoptheme
+
+\stopsection
+
+\startsection[title=Publishing]
+
+What will happen with publishing? In the production notes of some of my recently
+bought books the author mentions that the first prints were self|-|published
+(either or not sponsored). This means that when a publisher \quotation {takes
+over} (which still happens when one scales up) not much work has to be done.
+Basically the only thing an author needs is a distribution network. My personal
+experience with for instance \CD's produced by a group of musicians is that it is
+often hard to get them from abroad (if at all) simply because one needs a payment
+channel and mail costs are also relatively high.
+
+But both demonstrate that, given good facilitating options, publishers as we have
+them now don't have much chance of survival. Add to the argument that while in
+Gutenberg's time a publisher was also involved in the technology,
+today nothing innovative comes from publishers: the internet, ebook devices,
+programs, etc.\ all come from elsewhere. And I get the impression that even in
+picking up on technology publishers lag behind and mostly just react. Even
+arguments like added value in terms of peer review are disappearing with the
+internet where peer groups can take over that task. Huge amounts of money are
+wasted on short|-|term modern media. (I bet similar amounts were never spent on
+typesetting.)
+
+\starttheme
+ Publishers, publishing, publications and their public: as they are now they
+ might not stay around. Lack of long term vision and ideas and decoupling of
+ technology can make sure of that. Publishing will stay but anyone can
+ publish; we only need the infrastructure. Creativity can win over greed and
+ exploitation, small can win over big. And tools like \TEX\ can thrive there,
+ as \TEX\ already does on a small scale.
+\stoptheme
+
+\stopsection
+
+\startsection[title=Understanding]
+
+\quotation {Why do you use \TEX?} If we limit this question to typesetting, you
+can think of \quotation {Why don't you use \MSWORD ?}, \quotation {Why don't you use
+InDesign?}, \quotation {Why don't you use that macro package?}, \quotation {Why
+don't you use this \TEX\ engine?} and alike. I'm sure that most of the readers
+had to answer questions like this, questions that sort of assume that you're not
+happy with what you use now, or maybe even suggest that you must be stupid not to
+use \unknown
+
+It's not that easy to explain why I use \TEX\ and|/|or why \TEX\ is good at the
+job. If you are in a one|-|to|-|one (or one|-|to|-|few) session you can demonstrate its
+virtues but \quote {selling} it to for instance a publisher is close to
+impossible because this kind of technology is rather unknown and far from the
+click|-|and|-|point paradigm. It's even harder when students get accustomed to
+these interactive books in which they can even run code snippets, although one
+can wonder how individual these are when a student has the web as a source of
+solutions. Only after long exposure to similar and maybe imperfect alternatives
+will books get appreciated.
+
+For instance speaking of \quotation {automated typesetting} assumes that one
+knows what typesetting is and also is aware that automated has some benefits. A
+simple \quotation {it's an \XML\ to \PDF\ converter} might work better but that
+assumes \XML\ being used, which does not always make sense. And while
+hyphenation, fancy font support and proper justification might impress a \TEX\
+user it often is less of an argument than one thinks.
+
+The \quotation {Why don't you} also can be heard in the \TEX\ community. In the
+worst case it's accompanied by a \quotation {\unknown\ because everybody uses
+\unknown} which of course makes no sense because you can bet that the same user
+will not fall for that argument when it comes to using an operating system or so.
+Also from outside the community there is pressure to use something else: one can
+find defense of minimal markup over \TEX\ markup, or even of \HTML\ markup as a
+better alternative for dissemination than for instance \PDF\ or \TEX\ sources. The
+problem here is that old||timers can reflect on how relatively wonderful a
+current technique really is, given changes over time, but who wants to listen to
+an old|-|timer? Progress is needed and stimulating (which doesn't mean that all
+old technology is obsolete). When I watched Endre eNerd's \quotation {The Time
+Capsule} blu|-|ray I noticed an Ensoniq Fizmo keyboard and looked up what it was.
+I ended up in interesting reads where the bottom line was \quotation {Either you
+get it or you don't}. Reading the threads rang a bell. As with \TEX, you cannot
+decide after a quick test or even a few hours if you (get the concept and) like
+it or not: you need days, weeks, or maybe even months, and some actually never
+really get it after years.
+
+\starttheme
+ It is good to wonder why you use some program but what gets used by others
+ depends on understanding. If we can't explain the benefits there is no
+ future for \TEX. Or more exactly: if it no longer provides benefits, it will
+ just disappear. Just walk around a gallery in a science museum that deals
+ with computers: it can be a bit of a pathetic experience.
+\stoptheme
+
+\stopsection
+
+{\bf Who knows \unknown}
+
+\stopchapter
+
+\stopcomponent
diff --git a/doc/context/sources/general/manuals/musings/musings-style.tex b/doc/context/sources/general/manuals/musings/musings-style.tex
new file mode 100644
index 000000000..5ed934f2f
--- /dev/null
+++ b/doc/context/sources/general/manuals/musings/musings-style.tex
@@ -0,0 +1,92 @@
+\startenvironment musings-style
+
+\usemodule[abr-04]
+
+\setupbodyfont
+ [pagella]
+
+\setuplayout
+ [topspace=2cm,
+ header=0pt,
+ footer=1.5cm,
+ bottomspace=1cm,
+ width=middle,
+ height=middle]
+
+% \definecolor[maincolor] [darkyellow]
+% \definecolor[extracolor][darkblue]
+
+\definecolor[maincolor] [middleorange]
+\definecolor[extracolor][middleblue]
+
+\setuptype
+ [color=maincolor]
+
+\setuptyping
+ [color=maincolor]
+
+\setuphead
+ [color=maincolor]
+
+\setuphead
+ [chapter]
+ [style=\bfd]
+
+\setuphead
+ [chapter]
+ [after={\blank[3*line]},
+ align=flushright,
+ command=\ChapterCommand]
+
+\starttexdefinition unexpanded ChapterCommand#1#2
+ \hbox to \textwidth {
+ \hss
+ % title
+ #2
+ \doifmode {*sectionnumber} {
+ % distance
+ \hskip10mm
+ % number
+ \struttedbox{\offset[x=-1mm,y=2.5mm]{\scale[height=2cm]{#1}}}
+ }
+ }
+\stoptexdefinition
+
+\setuphead
+ [section]
+ [style=\bfb]
+
+\setuphead
+ [subsection]
+ [style=\bf,
+ before=\blank,
+ after=\blank]
+
+\setuppagenumbering
+ [alternative=doublesided]
+
+\setupfootertexts
+ [][{\getmarking[chapter]\hbox to 2em{\hss\pagenumber}}]
+ [{\hbox to 2em{\pagenumber\hss}\getmarking[chapter]}][]
+
+\setupwhitespace
+ [big]
+
+\setuplist
+ [chapter]
+ [width=3em,
+ before={\testpage[3]\blank},
+ after={\blank[samepage]},
+ color=maincolor,
+ style=bold]
+
+\setuplist
+ [section]
+ [width=3em,
+ before={\blank[nowhite]},
+ after={\blank[nowhite]}]
+
+% \logo [MATHTYPE] {MathType}
+% \logo [SYNCTEX] {Sync\TeX}
+
+\stopenvironment
diff --git a/doc/context/sources/general/manuals/musings/musings-titlepage.tex b/doc/context/sources/general/manuals/musings/musings-titlepage.tex
new file mode 100644
index 000000000..33eb44d95
--- /dev/null
+++ b/doc/context/sources/general/manuals/musings/musings-titlepage.tex
@@ -0,0 +1,46 @@
+\environment musings-style
+
+\startcomponent musings-titlepage
+
+\startMPpage
+
+ fill Page withcolor "maincolor" ;
+
+ draw image (
+ draw textext.bot("m") xysized (20mm, 85mm) shifted (point .8 along (topboundary Page)) shifted (0,-10mm) ;
+ draw textext.bot("u") xysized (20mm, 60mm) shifted (point .7 along (topboundary Page)) shifted (0,-25mm) ;
+ draw textext.bot("s") xysized (20mm, 50mm) shifted (point .6 along (topboundary Page)) shifted (0,-45mm) ;
+ draw textext.bot("i") xysized (20mm,100mm) shifted (point .5 along (topboundary Page)) shifted (0, 00mm) ;
+ draw textext.bot("n") xysized (20mm, 55mm) shifted (point .4 along (topboundary Page)) shifted (0,-20mm) ;
+ draw textext.bot("g") xysized (20mm, 70mm) shifted (point .3 along (topboundary Page)) shifted (0,-20mm) ;
+ draw textext.bot("s") xysized (20mm, 40mm) shifted (point .2 along (topboundary Page)) shifted (0,-15mm) ;
+ ) shifted (0,-120mm) withcolor "white" ;
+
+ draw image (
+ draw textext.bot("c") xysized (20mm, 85mm) shifted (point .8 along (topboundary Page)) shifted (0,-10mm) ;
+ draw textext.bot("o") xysized (20mm, 60mm) shifted (point .7 along (topboundary Page)) shifted (0,-25mm) ;
+ draw textext.bot("n") xysized (20mm, 50mm) shifted (point .6 along (topboundary Page)) shifted (0,-45mm) ;
+ draw textext.bot("t") xysized (20mm,100mm) shifted (point .5 along (topboundary Page)) shifted (0, 00mm) ;
+ draw textext.bot("e") xysized (20mm, 55mm) shifted (point .4 along (topboundary Page)) shifted (0,-20mm) ;
+ draw textext.bot("x") xysized (20mm, 70mm) shifted (point .3 along (topboundary Page)) shifted (0,-20mm) ;
+ draw textext.bot("t") xysized (20mm, 40mm) shifted (point .2 along (topboundary Page)) shifted (0,-15mm) ;
+ ) shifted (0,-10mm) withcolor "white" ;
+
+ draw image (
+ draw textext.bot("h") xysized (10mm, 20.0mm) shifted (point .75 along (topboundary Page)) shifted (0,-10.0mm) ;
+ draw textext.bot("a") xysized (10mm, 22.5mm) shifted (point .70 along (topboundary Page)) shifted (0,-20.0mm) ;
+ draw textext.bot("n") xysized (10mm, 27.5mm) shifted (point .65 along (topboundary Page)) shifted (0,-12.5mm) ;
+ draw textext.bot("s") xysized (10mm, 30.0mm) shifted (point .60 along (topboundary Page)) shifted (0, 0mm) ;
+
+ draw textext.bot("h") xysized (10mm, 25mm) shifted (point .45 along (topboundary Page)) shifted (0,-05.0mm) ;
+ draw textext.bot("a") xysized (10mm, 30mm) shifted (point .40 along (topboundary Page)) shifted (0,-15.0mm) ;
+ draw textext.bot("g") xysized (10mm, 35mm) shifted (point .35 along (topboundary Page)) shifted (0,-17.5mm) ;
+ draw textext.bot("e") xysized (10mm, 20mm) shifted (point .30 along (topboundary Page)) shifted (0,-20.0mm) ;
+ draw textext.bot("n") xysized (10mm, 25mm) shifted (point .25 along (topboundary Page)) shifted (0,-10.0mm) ;
+ ) shifted (0,-235mm) withcolor "white" ;
+
+\stopMPpage
+
+\page[empty]
+
+\stopcomponent
diff --git a/doc/context/sources/general/manuals/musings/musings-whytex.tex b/doc/context/sources/general/manuals/musings/musings-whytex.tex
new file mode 100644
index 000000000..8f9b7de9b
--- /dev/null
+++ b/doc/context/sources/general/manuals/musings/musings-whytex.tex
@@ -0,0 +1,326 @@
+% language=uk
+
+\startcomponent musings-whytex
+
+\environment musings-style
+
+\startchapter[title={Why use \TEX ?}]
+
+\startsection[title={Introduction}]
+
+Let's assume that you know what \TEX\ is: a program that interprets a language
+with the same name that makes it possible to convert (tagged) input into for
+instance \PDF. For many of its users it is a black box: you key in some text, hit
+a button and get some typeset result in return. After a while you start tweaking
+this black box, meet other users (on the web), become more fluent and stick to it
+forever.
+
+But now let's assume that you don't know \TEX\ and are in search of a system
+that helps you create beautiful documents in an efficient way. When your
+documents have a complex structure you are probably willing to spend some time on
+figuring out what the best tool is. Even if a search lets you end up with
+something called \TEX, a three letter word with a dropped E, you still don't
+know what it is. Advertisement for \TEX\ is often pretty weak. It's rather easy
+to point to the numerous documents that can be found on the web. But what exactly
+does \TEX\ do and what are its benefits? In order to answer this we need to know
+who you are: an author, editor, an organization that deals with documents or needs
+to generate readable output, like publishers do.
+
+\stopsection
+
+\startsection[title={Authors}]
+
+We start with authors. Students of sciences that use mathematics don't have much
+of a choice. But most of these documents hardly communicate the message that
+\quotation {Everyone should use \TEX.} or that \quotation {All documents produced
+by \TEX\ look great.} but they do advocate that for rendering math it is a pretty
+good system. The source code of these documents often looks rather messy and
+unattractive and for a non|-|math user it can be intimidating. Choosing some
+lightweight click|-|and|-|point alternative looks attractive.
+
+Making \TEX\ popular is not going to happen by convincing those who have to write
+an occasional letter or report. They should just use whatever suits them. On the
+other hand if you love consistency, long term support, need math, are dealing
+with a rare language or script, like to reuse content, prefer different styling
+from one source, use one source for multiple documents, or maybe love open source
+tools, then you are a candidate. Of course there is a learning curve but normally
+you can master \TEX\ rather fast and once you get the hang of it there's often no
+way back. But you always need to invest a bit beforehand.
+
+So what authors are candidates for \TEX ? It could be that \TEX\ is the only tool
+that does the job. If so, you probably learned that from someone who saw you
+struggle or had the same experience and wrote or talked about it somewhere. In
+that case using \TEX\ for creating just one document (like a thesis) makes sense.
+Otherwise, you should really wonder if you want to invest time in a tool that you
+probably have to ditch later on as most organizations stick to standard
+(commercial) word processing tools.
+
+Talking to customers we are often surprised that people have heard about \TEX, or
+even used it for a few documents in college. Some universities just prescribe the
+use of \TEX\ for reporting, so not much of a choice there. Memories are normally
+rather positive in the sense that they know that it can do the job and that it's
+flexible.
+
+User group journals, presentations at \TEX\ meetings, journals, books and manuals
+that come with \TEX\ macro packages can all be used to determine if this tool
+suits an author. Actually, I started using \TEX\ because the original \TEX book
+had some magic, and reading it was just that: reading it, as I had no running
+implementation. A few years later, when I had to write (evolving) reports, I
+picked it up again. But I'm not a typical user.
+
+\stopsection
+
+\startsection[title={Programmers}]
+
+When you are a programmer who has to generate reports, for instance in \PDF, or
+write manuals, then \TEX\ can really be beneficial. Of course \TEX\ is not always
+an obvious choice, but if you're reasonably able to use it, it's hard to beat in
+quality, flexibility and efficiency. I'm often surprised that companies are
+willing to pay a fortune for functionality that basically comes for free.
+Programmers are accustomed to running commands and working in a code editor with
+syntax highlighting so that helps too. They too recognize when something can be
+done more efficiently.
+
+When you need to go from some kind of input (document source, database,
+generated) to some rendered output there currently are a few endpoints: a
+(dynamic) \HTML\ page, a \PDF\ document, something usable in a word processor,
+or a representation using the desktop user interface. It's the second category
+where \TEX\ is hard to beat but even using \TEX\ and \METAPOST\ for creating a
+chart can make sense.
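+
+As an aside, such a chart can indeed be a handful of lines. What follows is just a
+sketch with made|-|up numbers; the \type {\startMPcode} environment embeds
+\METAPOST\ code directly in a \CONTEXT\ source:
+
+\starttyping
+\startMPcode
+    numeric u ; u := 5mm ; numeric n ; n := 0 ;
+    for h = 3, 5, 2, 6, 4 :
+        n := n + 1 ;
+        % one bar per (made up) value, next to the previous one
+        fill unitsquare xscaled u yscaled (h * u)
+            shifted (1.5u * n, 0) withcolor .625red ;
+    endfor ;
+    drawarrow origin -- (9u,0) ; % a poor man's horizontal axis
+    drawarrow origin -- (0,7u) ; % and a vertical one
+\stopMPcode
+\stoptyping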
+
+There are of course special cases where \TEX\ fits in nicely. Say that you have
+to combine \PDF\ documents. There are numerous tools to do that and \TEX\ is one.
+The advantage of \TEX\ over other tools is that it's trivial to add additional
+text, number pages, provide headers and footers. And it will work forever. Why?
+Because \TEX\ has been around for decades and will be around for decades to come.
+It's an independent component. The problem with choosing \TEX\ is that the
+starting point is important. The question is not \quotation {What tool should I
+use?} but \quotation {What problem do I need to solve?}. An open discussion about
+the objectives and possibilities is needed, not some checklist based on
+assumptions. If you don't know \TEX\ and have never worked with a programmable
+typesetting environment, you probably don't see the possibilities. In fact, you
+might even choose \TEX\ for the wrong reasons.
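+
+To make the \PDF\ combining case a bit more concrete, here is a minimal sketch;
+the file name is of course made up and a real job would also set up headers,
+footers and a layout:
+
+\starttyping
+\setuppagenumbering[location=footer]
+
+\starttext
+    % make the number of pages of the (hypothetical) file available
+    \getfiguredimensions[old.pdf]
+    \dorecurse {\noffigurepages} {
+        \externalfigure[old.pdf][page=#1,factor=max]
+        \page
+    }
+\stoptext
+\stoptyping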
+
+The problem with this category of users is that they seldom have the freedom to
+choose their tools. There are not that many jobs where the management is able to
+recognize the clever programmer who can determine that \TEX\ is suitable for a
+lot of jobs and can save money and time. Even the long term availability and
+support is not an argument since not only do most tools (or even apis) change every
+few years but also organizations themselves change ownership, objectives, and
+personnel on a whim. The concept of \quote {long term} is hard to grasp for most
+people (just look at politics) and it's only in retrospect that one can say
+\quote {We used that toolkit for over a decade.}
+
+\stopsection
+
+\startsection[title={Organizations}]
+
+Authors (often) have the advantage that they can choose themselves: they can use
+what they like. In practice any decent programmer is able to find the suitable
+tools but convincing the management to use one of them can be a challenge. Here
+we're also talking of \quote {comfort zones}: you have to like a tool(chain).
+Organizations normally don't look for \TEX. Special departments are responsible
+for choosing and negotiating whatever is used in a company. Unfortunately
+companies don't always start from the open question \quotation {We have this
+problem, we want to go there, what should we do?} and then discuss options with
+for instance those who know \TEX. Instead requirements are formulated and matches
+are found. The question then is \quotation {Are these requirements set in stone?}
+and if not (read: we just omit some requirements when most alternatives don't
+meet them), were other requirements forgotten? Therefore organizations can end up
+with the wrong choice (using \TEX\ in a situation where it makes no sense) or
+don't see opportunities (not using \TEX\ while it makes most sense). It doesn't
+help that a hybrid solution (use a mix of \TEX\ and other tools) is often not an
+option. Where an author can just stop using a tool after a few days of
+disappointment, and where a programmer can play around a bit before making a
+choice, an organization can probably best start small with a proof of concept.
+
+Let's take a use case. A publisher wants to automatically convert \XML\ files
+into \PDF. One product can come from multiple sources (we have cases where
+thousands of small \XML\ files combine into one final product). Say that we have
+three different layouts: a theory book, a teachers manual and an answer book. In
+addition special proofing documents have to be rendered. The products might be
+produced on demand with different topics in any combination. There is at least
+one image and table per page, but there can be more. There are color and
+backgrounds used, tables of contents generated, there is extensive cross
+referencing and an index. Of course there is math.
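+
+To give an impression of what the \XML\ side of such a setup looks like in
+\CONTEXT, here is a deliberately tiny sketch; the element names are made up and a
+real project maps many more elements and hooks in the layout definitions:
+
+\starttyping
+\startxmlsetups xml:initialize
+    \xmlsetsetup{#1}{*}{-}
+    \xmlsetsetup{#1}{document|section|p}{xml:*}
+\stopxmlsetups
+
+\xmlregistersetup{xml:initialize}
+
+\startxmlsetups xml:document
+    \xmlflush{#1}
+\stopxmlsetups
+
+\startxmlsetups xml:section
+    \startsection[title=\xmlatt{#1}{title}]
+        \xmlflush{#1}
+    \stopsection
+\stopxmlsetups
+
+\startxmlsetups xml:p
+    \xmlflush{#1}\par
+\stopxmlsetups
+
+\starttext
+    \xmlprocessfile{main}{product.xml}{}
+\stoptext
+\stoptyping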
+
+Now let's assume an initial setup costs 20K Euro and that, as often happens when
+the real products show up, a revision after one year takes the same amount. We also
+assume 10K for the following eight years for support. So, we end up with 120K
+over 10 years. If one goes cheap we can consider half of that, or we can be
+pessimistic and double the amount.
+
+The first year 10K pages are produced, the second year 20K and after that 30K per
+year. So, we're talking of 270K pages. If we include customer specific documents
+and proofing we might as well end up with a multiple of that.
+
+So, we have 120K Euro divided by 270K pages or about half an Euro per page. But
+likely we have more pages so it costs less. If we double the costs then we can
+assume that some major changes took place which means more pages. In fact we had
+projects where the layout changed, all documents were regenerated and the costs
+were included in the revision, so far from double. We also see many more pages
+being generated so in practice the price per page drops below half an Euro. The
+more we process the cheaper it gets and one server can produce a lot of pages!
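+
+In compact form, these (assumed, rounded) numbers boil down to:
+
+\startformula
+    \frac{20 + 20 + 8 \times 10}{10 + 20 + 8 \times 30} =
+    \frac{120\,\hbox{K Euro}}{270\,\hbox{K pages}} \approx 0.44\,\hbox{Euro per page}
+\stopformula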
+
+Now, the interesting bit of such a calculation is that the costs only concern the
+hours spent on a solution. A \TEX\ based system comes for free and there are no
+license costs. Whatever alternative is taken, even if it is as flexible, it will
+involve additional costs. From the perspective of costs it's very hard to beat
+\TEX. Add to that the possibility for custom extensions, long term usage and the
+fact that one can adapt the system. The main question of course is: does it do
+the job? The only way to find out is to either experiment (which is free),
+consult an expert (not free, but then needed anyway for any solution) or ask an
+expert to make a proof of concept (also not free but relatively cheap and
+definitely cheaper than a failure). In fact, before making decisions about what
+solution is best it might be a good idea to check with an expert anyway, because
+more (or less) might be possible than one thinks. Also, take into account that the
+\TEX\ ecosystem is often one of the first to support new technologies, and
+normally does that within its existing interface. And there is plenty of free
+support and knowledge available once you know how to find it. Instead of wasting
+time and money on advertisement and fancy websites, effort goes into support and
+development. Even if you doubt that the current provider is around in the decade
+to come, you can be sure that there will be others, simply because \TEX\ attracts
+people. Okay, it doesn't help that large companies like to outsource to
+far||far||away places and expect support around the corner, so in the end they might
+kill their support chain.
+
+When talking of \TEX\ used in organizations we tend to think of publishers. But
+this is only a small subset of organizations where information gets transformed
+into something presentable. For small organizations the choice for \TEX\ can be
+easy: costs, long term stability, knowing some experts are driving forces. For
+large organizations these factors seem (at least to us) hardly relevant. We've
+(had) projects where actually the choice for using a \TEX\ based solution was (in
+retrospect) a negative one: there was no other tool than this relatively unknown
+thing called \TEX. Or, because the normal tools could not be used, one ended up
+with a solution where (behind the scenes) \TEX\ is used, without the organization
+knowing it. Or, it happened that the problem at hand was mostly one that demands
+in|-|depth knowledge of manipulating content, cleaning up messy data, combining
+resources (images or \PDF\ documents), all things that happen to be available in
+the perspective of \TEX. If you can solve a hard problem for them, then an
+organization doesn't care what tool you use. What does matter is that the
+solution runs forever, that costs are controllable and above all, that it
+\quotation {Just works.} And if you can make it work fast, that helps too. We
+can safely claim that when \TEX\ is evaluated as being a good option, in the
+end it always works out quite well.
+
+Among arguments that (large) organizations like to use against a choice for \TEX\
+(or something comparable) are the size of the company that they buy their
+solution from, the expected availability for support, and the wide|-|spread usage
+of the tool at hand. One can wonder if it also matters that many vendors change
+ownership, change products every few years, change license conditions when they
+like, charge a lot for support or just abort a tool chain. Unfortunately when that
+happens those responsible for choosing such a system can have moved on to another
+job, so this is seldom part of an evaluation. For the supplier the other side of
+the table is just as much of a gamble. In that respect, an organization that
+wants to use an open source (and|/|or free) solution should realize that getting
+a return on investment on such a development is pretty hard to achieve. So, who
+really takes the risk for writing open source?
+
+For us, the reason to develop \CONTEXT\ and make it open is that it fits in our
+philosophy and we like the community. It is actually not really giving us an
+advantage commercially: it costs way more to develop, support and keep
+up|-|to|-|date than it will ever return. We can come up with better, faster and
+easier solutions and in the end we pay the price because it takes less time to
+cook up styles. So there is some backlash involved because commercially a
+difficult solution leads to more billable hours. Luckily we tend to avoid wasting
+time so we improve when possible and then it ends up in the distributed code.
+And, once the solution is there, anyone can use it. Basically also for us it's
+just a tool, like the operating system, editor and viewer are. So, what keeps
+development going is mostly the interaction with the community. This also means
+that a customer can't really demand functionality for free: either wait for it to
+show up or pay for it (which seldom happens). Open source is not equivalent to
+\quotation {You get immediately what you want because someone out there writes
+the code.}. There has to be a valid reason and often it's just users and meetings
+or just some challenge that drives it.
+
+This being said, it is hard to convince a company to use \TEX. It has to come
+from users in the organization. Or, what we sometimes see with publishers, it
+comes with an author team or acquired product line where it's the only option.
+Even then we seldom see transfer to other branches in the organizations. No one
+seems to wonder \quotation {How on earth can that \XML\ to \PDF\ project produce
+whatever output in large quantities in a short period of time} while other (past)
+projects failed. It probably relates to the abstraction of the process. Even
+among \TEX\ users it can be that you demonstrate something with a click on a
+button and that many years afterwards someone present at that moment tells you
+that they just discovered that this or that can be done by hitting a button. I'm
+not claiming that \TEX\ is the magic wand for everything but in some areas it's
+pretty much ahead of the pack. Go to a \TEX\ user meeting and you will be surprised
+by the accumulated diverse knowledge present in the room. It's user demand that
+drives \CONTEXT\ development, not commerce.
+
+\stopsection
+
+\startsection[title={Choosing}]
+
+So, where can one find information about \TEX\ and friends? On the web
+one has to use the right search keys, so adding \type {tex} helps: \typ {context
+tex} or \typ {xml tex pdf} and so on. Can one make a fancy hip website? Sure, but
+it being a life|-|long, already old and mature environment, and given that it
+comes for free, or is used low|-|budget, not much effort and money can be spent
+on advertising it. A benefit is that no false promises and hypes are made either.
+If you want to know more, just ask the right folks.
+
+For all kinds of topics one can find interesting videos and blogs. One can
+subscribe to channels on YouTube or join forums. Unfortunately not that many
+bloggers or vloggers or podcasters come up with original material every time, and
+often one starts to recognize patterns and gets bored by the repetition of wisdom
+and arguments. The same is true for manuals. Is a ten year old manual really
+obsolete? Should we just recompile it to fake an update while in fact there has
+been no need for it? Should we post twenty similar presentations when one will
+do? (If one already wants to present the same topic twenty times in the first
+place?) Maybe one should compare \TEX\ with cars: they became better over time
+and can last for decades. And no new user manual is needed.
+
+As with blogs and vlogs, advertising \TEX\ carries the danger of triggering
+political discussions and drawing people into discussions that are not pleasant:
+\TEX\ versus some word processor, open versus closed source, free versus paid
+software, this versus that operating system, editor such or editor so.
+
+To summarize, it's not that trivial to come up with interesting information about
+\TEX, unless one goes into details that are beyond the average user. And those
+who are involved are often involved for a long time so it gets more complex over
+time. User group journals that started with tutorials later on became expert
+platforms. This is a side effect of being an old and long|-|term toolkit. If
+you run into it, and wonder if it can serve your purpose, just ask an expert.
+
+Most \TEX\ solutions are open source and come for free as well. Of course if you
+want a specific solution or want support beyond what is offered on mailing lists
+and forums you should be willing to pay for the hours spent. For a professional
+publisher (of whatever kind) this is not a problem, if only because any other
+solution also will cost something. It is hard to come up with a general estimate.
+A popular measure of typesetting costs is the price per page, which can range
+from a couple of euros per page to two digit numbers. We've heard of cases where
+initial setup costs were charged. If not much manual intervention is needed a
+\TEX\ solution mostly concerns initial costs.
+
+Let's return to the main question \quotation {Why use \TEX ?} in which you can
+replace \TEX\ by one of the macro packages built on top of it, for instance
+\CONTEXT. If a (somewhat older) organization considers using \TEX\ it should
+also ask itself why it wasn't considered long ago already. For sure there have
+been developments in \TEX\ engines (in \CONTEXT\ we use \LUATEX) as well as
+possibilities of macro packages but if you look at the documents produced with
+them, there is not that much difference with decades ago. Processing has become
+faster, some things have become easier, but new technologies have always been
+supported as soon at they showed up. Advertising is often just repeating an old
+message.
+
+The \TEX\ ecosystem was among the first in supporting for instance \OPENTYPE, and
+the community even made sure that there were free fonts available. A format like
+\PDF\ was supported as soon as it showed up and \TEX\ was the first to demonstrate
+what advanced features were there and how easy it was to adapt to changes.
+Processing \XML\ using \TEX\ has never been a big deal and if that is a reason to
+look at this already old and mature technology, then an organization can wonder
+if years and opportunities (for instance for publishing on demand or easy
+updating of manuals) have been lost. Of course there are (and have been)
+alternative tools but the arguments for using \TEX\ or not are not much different
+now. It can be bad marketing of open and free software. It can be that \TEX\ has
+been around too long. It can also be that its message was not understood yet. On
+the other hand, in software development it's quite common to reinvent wheels and
+present old as new. It's never too late to catch on.
+
+\stopsection
+
+\stopchapter
+
+\stopcomponent
diff --git a/doc/context/sources/general/manuals/musings/musings.tex b/doc/context/sources/general/manuals/musings/musings.tex
new file mode 100644
index 000000000..e84af5298
--- /dev/null
+++ b/doc/context/sources/general/manuals/musings/musings.tex
@@ -0,0 +1,20 @@
+\environment musings-style
+
+\startproduct musings
+
+\component musings-titlepage
+
+\startfrontmatter
+ \component musings-contents
+ \component musings-introduction
+\stopfrontmatter
+
+\startbodymatter
+ \component musings-children
+ \component musings-perception
+ \component musings-whytex
+ \component musings-staygo
+ \component musings-stability
+\stopbodymatter
+
+\stopproduct
diff --git a/doc/context/sources/general/manuals/texit/texit-conditions.tex b/doc/context/sources/general/manuals/texit/texit-conditions.tex
new file mode 100644
index 000000000..83fb0a7bc
--- /dev/null
+++ b/doc/context/sources/general/manuals/texit/texit-conditions.tex
@@ -0,0 +1,108 @@
+\environment texit-style
+
+\startcomponent texit-conditions
+
+\startchapter[title={Conditions}]
+
+In case you wonder why we have modes in \CONTEXT, here is an example that might
+convince you. The \TEX\ language has conditionals and they are in fact quite
+efficient, take for instance:
+
+\startTEX
+\ifnum\scratchcounter>10
+ \ifdim\scratchdimen>10pt
+ one
+ \else
+ two
+ \fi
+\else
+ three
+\fi
+\stopTEX
+
+When the first test fails, \TEX\ will do a fast scan over the following tokens
+and expand the \type {three} branch. In order to do such a fast scan, the nested
+condition needs to be properly balanced: the \type {\else} is optional but the
+nested \type {\fi} definitely isn't. Now imagine that you use a few pseudo
+booleans, like:
+
+\startTEX
+\newif\ifalpha \alphatrue
+\newif\ifbeta \betatrue
+\stopTEX
+
+And you need it in:
+
+\startTEX
+\ifalpha
+ \ifbeta
+ YES
+ \else
+ NOP
+ \fi
+\else
+ NOP
+\fi
+\stopTEX
+
+This happens occasionally in real applications and one can either repeat the
+\type {NOP} or wrap it in a macro in order to save tokens. However, way more
+natural would be something like this:
+
+\startTEX
+\ifalphaorbeta
+ YES
+\else
+ NOP
+\fi
+\stopTEX
+
+This basically would introduce a new kind of concept: an expandable macro flagged as
+\type {\if} kind of token. I actually experimented with that in \LUATEX\ but
+rejected it eventually. Instead \type {\ifcondition} was introduced. This is
+basically equivalent to \type {\iffalse} when \TEX\ is in fast \type {\if*}
+skipping mode, but when a real test happens the next argument is expanded. That
+macro is expected to end up as something equivalent to \type {\iftrue} or \type
+{\iffalse} so that either the next branch or the \type {\else} branch is entered. Here
+is an example:
+
+\startTEX
+\ifcondition\alphaorbeta
+ YES
+\else
+ NOP
+\fi
+\stopTEX
+
+There are several ways to define \type {\alphaorbeta} now and we show a few here.
+It's up to you to figure out which one is the most efficient.
+
+\startTEX
+\def\alphaorbeta{\ifcase0\ifalpha \else\ifbeta \else1\fi\fi\relax}
+\def\alphaorbeta{\ifcase \ifalpha0\else\ifbeta0\else1\fi\fi\relax}
+\def\alphaorbeta{\ifnum1=\ifalpha1\else\ifbeta1\else0\fi\fi\relax}
+\def\alphaorbeta{\ifnum 0\ifalpha1\fi \ifbeta1\fi >0\relax}
+\stopTEX
+
+Now, do we expect users to come up with such constructs? Of course not. Even in
+\CONTEXT\ we don't really need them, although there are a few places where they
+can be used. In \CONTEXT\ you would just do this:
+
+\startTEX
+\enablemode[alpha]
+\enablemode[beta]
+
+\doifelsemode {alpha,beta} {
+ YES
+} {
+ NOP
+}
+\stopTEX
+
+Of course such a verbose macro is less efficient but even if you use this test
+10.000 times in a run it will not take more than 0.06 seconds on a decent 2013
+laptop.
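+
+If you want to check such timings yourself, a quick (and admittedly crude) way is
+to repeat the test with \type {\dorecurse} and measure with the \type {\resettimer}
+and \type {\elapsedtime} helpers; the numbers you get will of course depend on
+your machine:
+
+\startTEX
+\enablemode[alpha]
+
+\resettimer
+\dorecurse {10000} {
+    \doifelsemode {alpha,beta} {
+        % the YES branch
+    } {
+        % the NOP branch
+    }
+}
+runtime: \elapsedtime
+\stopTEX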
+
+\stopchapter
+
+\stopcomponent
diff --git a/doc/context/sources/general/manuals/texit/texit-contents.tex b/doc/context/sources/general/manuals/texit/texit-contents.tex
new file mode 100644
index 000000000..f22db1c38
--- /dev/null
+++ b/doc/context/sources/general/manuals/texit/texit-contents.tex
@@ -0,0 +1,9 @@
+\environment texit-style
+
+\startcomponent texit-contents
+
+\starttitle[title={Contents}]
+ \placelist[chapter]
+\stoptitle
+
+\stopcomponent
diff --git a/doc/context/sources/general/manuals/texit/texit-introduction.tex b/doc/context/sources/general/manuals/texit/texit-introduction.tex
new file mode 100644
index 000000000..6b33437ec
--- /dev/null
+++ b/doc/context/sources/general/manuals/texit/texit-introduction.tex
@@ -0,0 +1,24 @@
+\environment texit-style
+
+\startcomponent texit-introduction
+
+\startchapter[title={Introduction}]
+
+I needed a place to collect examples of \TEX\ coding and this is it. The examples
+presented here are an unorganized bunch. Some originate in questions asked on the
+mailing list. Others are byproducts of tests made when playing with some (new)
+functionality. When you plan to use \TEX\ for a long time, it doesn't hurt to see
+a bit of \TEX\ coding, but when possible I will also show the \CONTEXT\ way.
+
+I hope that this document is useful. You can of course always try to challenge me
+for more examples. Hopefully I will not forget about this document and extend it
+occasionally.
+
+\startlines
+Hans Hagen
+Hasselt NL
+\stoplines
+
+\stopchapter
+
+\stopcomponent
diff --git a/doc/context/sources/general/manuals/texit/texit-leaders.tex b/doc/context/sources/general/manuals/texit/texit-leaders.tex
new file mode 100644
index 000000000..ed224da5c
--- /dev/null
+++ b/doc/context/sources/general/manuals/texit/texit-leaders.tex
@@ -0,0 +1,247 @@
+\environment texit-style
+
+\startcomponent texit-leaders
+
+\startchapter[title={Leaders}]
+
+The following example comes from a question on the \CONTEXT\ list. It
+exhibits a few low level tricks. For the purpose of this example we
+use \type {\ruledhbox} instead of \type {\hbox}. We start with a simple
+command that puts something at the end of a line:
+
+\startbuffer
+\starttexdefinition MyFill #1
+ \removeunwantedspaces
+ \hfill
+ \ruledhbox{#1}
+\stoptexdefinition
+\stopbuffer
+
+\typebuffer[option=TEX] \getbuffer
+
+We use this in:
+
+\startbuffer[sample]
+\startitemize[packed,joinedup][rightmargin=3em]
+ \startitem
+ \samplefile{ward}\MyFill{DW}
+ \stopitem
+\stopitemize
+\stopbuffer
+
+\typebuffer[sample][option=TEX]
+
+and get:
+
+\getbuffer[sample]
+
+But the requirement was that we move the number towards the right margin, so
+instead we need something like this:
+
+\startbuffer
+\starttexdefinition MyFill #1
+ \removeunwantedspaces
+ \hfill
+ \rlap{\ruledhbox to \rightskip{\hss#1}}
+\stoptexdefinition
+\stopbuffer
+
+\typebuffer[option=TEX] \getbuffer
+
+This already looks more like it:
+
+\getbuffer[sample]
+
+But it was also part of the requirements that there should be dots between the
+end of the last sentence and the number. In low level \TEX\ speak that means using
+leaders: repeated boxed content where the repetition is driven by a glue
+specification. Let's naively use leaders now:
+
+\startbuffer
+\starttexdefinition MyFill #1
+ \leaders
+ \ruledhbox to 1em{\hss.\hss}
+ \hfill
+ \ruledhbox{#1}
+\stoptexdefinition
+\stopbuffer
+
+\typebuffer[option=TEX] \getbuffer
+
+Let's see what we get:
+
+\getbuffer[sample]
+
+Again we need to move the number to the right. This time we need a different
+solution because we need to fill the space in between. When \TEX\ ends a
+paragraph it adds \type {\parfillskip} so we will now manipulate that parameter.
+
+\startbuffer
+\starttexdefinition MyFill #1
+ \parfillskip-1\rightskip plus 1fil\relax
+ \leaders
+ \ruledhbox to 1em{\hss.\hss}
+ \hfill
+ \ruledhbox{#1}
+\stoptexdefinition
+\stopbuffer
+
+\typebuffer[option=TEX] \getbuffer
+
+Does it look better?
+
+\getbuffer[sample]
+
+Indeed it does, but watch this:
+
+\startbuffer[sample]
+\startitemize[packed,joinedup][rightmargin=8.5em]
+ \startitem
+ \samplefile{ward}\MyFill{DW}\par
+ \samplefile{ward}\par
+ \samplefile{ward}\MyFill{DW}
+ \stopitem
+\stopitemize
+\stopbuffer
+
+\typebuffer[sample][option=TEX]
+
+The first \type {\MyFill} will set the \type {\parfillskip} to a value that will
+also be used later on.
+
+\getbuffer[sample]
+
+The way out is the following:
+
+\startbuffer
+\starttexdefinition MyFill #1
+ \begingroup
+ \parfillskip-1\rightskip plus 1fil\relax
+ \leaders
+ \ruledhbox to 1em{\hss.\hss}
+ \hfill
+ \ruledhbox{#1}
+ \par
+ \endgroup
+\stoptexdefinition
+\stopbuffer
+
+\typebuffer[option=TEX] \getbuffer
+
+This looks more or less okay. The \type {\par} keeps the adaptation local but for
+it to work well, the \type {\par} must be inside the group.
+
+\getbuffer[sample]
+
+Now it's time to go for perfection! First of all, we get rid of any leading
+spacing. If we need some we should inject it after a cleanup. We also use a
+different leader command. Instead of \type {to} we use a \type {spread} so that
+we get half the emwidth and not something slightly less due to the width of the
+period.
+
+\startbuffer
+\starttexdefinition MyFill #1
+ \removeunwantedspaces
+ \begingroup
+ \parfillskip-1\rightskip plus 1fil\relax
+ \cleaders
+ \ruledhbox spread 1em{\hss.\hss}
+ \hfill
+ \ruledhbox{#1}
+ \par
+ \endgroup
+\stoptexdefinition
+\stopbuffer
+
+\typebuffer[option=TEX] \getbuffer
+
+So, we end up here:
+
+\startbuffer[sample]
+\startitemize[packed,joinedup][rightmargin=5em]
+ \startitem
+ \samplefile{sapolsky}\MyFill{RS}\par
+ \stopitem
+\stopitemize
+\stopbuffer
+
+\getbuffer[sample]
+
+For which we used this:
+
+\typebuffer[sample][option=TEX]
+
+Finally we get rid of the tracing:
+
+\startbuffer
+\starttexdefinition unexpanded MyFill #1
+ \begingroup
+ \parfillskip-1\rightskip plus 1fil\relax
+ \leaders
+ \hbox to \emwidth{\hss.\hss}
+ \hfill
+ \hbox{#1}
+ \par
+ \endgroup
+\stoptexdefinition
+\stopbuffer
+
+\typebuffer[option=TEX] \getbuffer
+
+Watch a few more details. It brings us to:
+
+\getbuffer[sample]
+
+\page
+
+\startbuffer
+\definefiller
+ [MyFiller]
+ [offset=.25\emwidth,
+ method=middle]
+
+\starttexdefinition unexpanded MyFill #1
+ \begingroup
+ \parfillskip-1\rightskip plus 1fil\relax
+ \filler[MyFiller]%
+ \hbox{#1}
+ \par
+ \endgroup
+\stoptexdefinition
+\stopbuffer
+
+\typebuffer[option=TEX] \getbuffer
+
+\getbuffer[sample]
+
+When writing these examples I realized that it's rather trivial to add this
+option to the already existing filler mechanism. The definition of such a filler
+looks like this:
+
+\startbuffer
+\definefiller
+ [MyFiller]
+ [offset=.25\emwidth,
+ rightmargindistance=-\rightskip,
+ method=middle]
+\stopbuffer
+
+\typebuffer[option=TEX] \getbuffer
+
+\startbuffer[sample]
+\startitemize[packed,joinedup][rightmargin=5em]
+ \startitem
+ \samplefile{sapolsky}\fillupto[MyFiller]{RS}
+ \stopitem
+\stopitemize
+\stopbuffer
+
+The sample code now becomes:
+
+\typebuffer[sample][option=TEX]
+
+And as expected it renders as:
+
+\getbuffer[sample]
+
+\stopcomponent
diff --git a/doc/context/sources/general/manuals/texit/texit-lookahead.tex b/doc/context/sources/general/manuals/texit/texit-lookahead.tex
new file mode 100644
index 000000000..d3652e744
--- /dev/null
+++ b/doc/context/sources/general/manuals/texit/texit-lookahead.tex
@@ -0,0 +1,387 @@
+\environment texit-style
+
+\startcomponent texit-lookahead
+
+\startchapter[title={Lookahead}]
+
+When you look at the \TEX\ source of a macro package, you can often see
+constructs like this:
+
+\startTEX
+\def\foo#1%
+ {We do something with "#1".}
+\stopTEX
+
+or maybe:
+
+\startTEX
+\def\foo#1{%
+ We do something with "#1".%
+}
+\stopTEX
+
+Normally the percent symbol is used to indicate a comment, but here there are no
+comments. In these cases it makes the definition effectively
+
+\startTEX
+\def\foo#1{We do something with "#1".}
+\stopTEX
+
+which is different from when we would not have that percent sign there:
+
+\startTEX
+\def\foo#1 {We do something with "#1"!}
+\stopTEX
+
+That variant is valid \TEX\ code but expects a space as delimiter of the
+argument to \type {\foo}. This means that you can say:
+
+\startTEX
+\foo{1} \foo 2 \foo {34} and \foo 56 .
+\stopTEX
+
+while this can trigger an error message (when no space is seen at some point) or
+at least give unexpected results:
+
+\startTEX
+\foo{1}\foo 2\foo {34}and\foo 56.
+\stopTEX
+
+A different use of the percent is seen in cases like this:
+
+\startTEX
+\def\foo#1%
+ {We do something %
+ with "#1".}
+\stopTEX
+
+This time we want to preserve the space after \type {something} because an
+end|-|of|-|line might or might not collapse it with \type {with}, depending on
+how the end|-|of|-|line character is set up. Normally:
+
+\startTEX
+\def\foo#1%
+ {We do something
+ with "#1".}
+\stopTEX
+
+will also add a space after \type {something}, but when \TEX\ is set up to ignore
+line endings you get a collapse. So the explicit space is a robust way out. Both
+cases of using or omitting the comment symbol are easy to spot as they trigger an
+error or give weird typeset results.
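+
+Just to make the \quotation {ignore lines} case concrete: with a negative \type
+{\endlinechar} the line ending produces nothing at all, so the two words get
+glued together.
+
+\startTEX
+\bgroup
+\endlinechar=-1 % the line ending now produces no token at all
+\def\foo#1%
+  {We do something
+   with "#1".}
+\foo{x} % gives: We do somethingwith "x".
+\egroup
+\stopTEX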
+
+\startbuffer[defs]
+\def\fooA#1%
+ {\ifnum#1>100
+ yes\else nop%
+ \fi}
+
+\def\fooB#1{\ifnum#1>100 yes\else nop \fi}
+
+\def\fooC#1%
+ {\ifnum#1>100%
+ yes\else nop%
+ \fi}
+\stopbuffer
+
+\typebuffer[defs][option=TEX] \getbuffer[defs]
+
+We test this with:
+
+\startbuffer[demo]
+\fooA{100} \fooB{100} \fooC{100}
+\fooA{101} \fooB{101} \fooC{101}
+\stopbuffer
+
+\typebuffer[demo][option=TEX]
+
+And the result is probably what you expect:
+
+\startlines
+\getbuffer[demo]
+\stoplines
+
+\startbuffer[defs]
+\def\fooA#1%
+ {\ifnum#1>100
+ 1\else 0%
+ \fi}
+
+\def\fooB#1{\ifnum#1>100 1\else 0\fi}
+
+\def\fooC#1%
+ {\ifnum#1>100%
+ 1\else 0%
+ \fi}
+\stopbuffer
+
+However, when we have the following macro bodies:
+
+\typebuffer[defs][option=TEX] \getbuffer[defs]
+
+We get this output. Do you see the issue?
+
+\startlines
+\getbuffer[demo]
+\stoplines
+
+A preferred way to catch this is the following, as a \type {\relax} ends the
+scanning for a number:
+
+\startbuffer[defs]
+\def\foo#1%
+ {\ifnum#1>100\relax
+ 1\else 0%
+ \fi}
+\stopbuffer
+
+\typebuffer[defs][option=TEX] \getbuffer[defs]
+
+However, watch what happens here:
+
+\startbuffer[demo]
+\edef\result{\foo{123}}
+\stopbuffer
+
+\typebuffer[demo][option=TEX] \getbuffer[demo]
+
+The \type {\result} macro has the following body:
+
+\expanded{\setbuffer[result]\meaning\result\endbuffer}
+
+\typebuffer[result][option=TEX]
+
+A neat trick to get out of this is the following:
+
+\startbuffer[defs]
+\def\foo#1%
+ {\ifnum#1>\numexpr100\relax
+ 1\else 0%
+ \fi}
+\stopbuffer
+
+\typebuffer[defs][option=TEX] \getbuffer[defs]
+
+\getbuffer[demo]
+
+Now the body of \type {\result} looks like this:
+
+\expanded{\setbuffer[result]\meaning\result\endbuffer}
+
+\typebuffer[result][option=TEX]
+
+Of course this also works:
+
+\startTEX
+\def\foo#1%
+ {\ifnum#1>100 %
+ 1\else 0%
+ \fi}
+\stopTEX
+
+as a space also delimits scanning the number. But that method can actually introduce
+that space in the output. Think of this definition:
+
+\startbuffer[defs]
+\def\foo#1#2%
+ {\ifnum#1>#2 %
+ 1\else 0%
+ \fi}
+\stopbuffer
+
+\typebuffer[defs][option=TEX] \getbuffer[defs]
+
+What if \type {#2} has a trailing space? What if it is a verbose number? What if
+it is a counter variable?
+
+\startbuffer[demo]
+\scratchcounter=100
+ [\foo{101}{100}] [\foo{101}{100 }] [\foo{101}\scratchcounter]
+\scratchcounter=101
+ [\foo{100}{101}] [\foo{100}{101 }] [\foo{100}\scratchcounter]
+\stopbuffer
+
+\typebuffer[demo][option=TEX]
+
+\startlines
+\getbuffer[demo]
+\stoplines
+
+If you really want to introduce an unpredictable situation, use a coding style like
+this:
+
+\startTEX
+\def\foo#1#2#3#4{\if#1=#2#3\else#4\fi}
+\stopTEX
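+
+To see why this is asking for trouble, here is a made up test: the \type {\if}
+compares \type {#1} with the equal sign, not with \type {#2}, and leftover tokens
+can end up in the output:
+
+\startTEX
+[\foo{a}{a}{yes}{no}] % gives [no]  : the "a" is compared with "="
+[\foo{=}{a}{yes}{no}] % gives [ayes]: the "=" matches and #2 leaks into the output
+\stopTEX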
+
+This is not that imaginary, as you often see users play it safe and do things like this:
+
+\startTEX
+\ifnum\scratchcounterone=\scratchcountertwo%
+ ...
+\else
+ ...
+\fi
+\stopTEX
+
+Here the percent sign is useless as the number scanner has already got the number;
+just try:
+
+\startTEX
+\scratchcounterone=1
+\scratchcountertwo=1
+
+\ifnum\scratchcounterone=\scratchcountertwo
+ yes
+\else
+ nop
+\fi
+\stopTEX
+
+The previous one|-|liner formatted like this really is not any better!
+
+\startTEX
+\def\foo#1#2#3#4%
+ {\ifnum#1=#2%
+ #3%
+ \else
+ #4%
+ \fi}
+\stopTEX
+
+When you define macros, more often than not you don't want unexpected spaces
+(aka spurious spaces), which is why in \CONTEXT\ for instance setups ignore
+line endings:
+
+\startbuffer[defs]
+\startsetups foo
+ here
+ we ignore
+ spaces at the end
+ of a line
+\stopsetups
+\stopbuffer
+
+\typebuffer[defs][option=TEX] \getbuffer[defs]
+
+so we get: \quotation {\directsetup{foo}}, which means that in the few cases where
+we {\em do} want spaces we need to be explicit:
+
+\startbuffer[defs]
+\startsetups foo
+ here\space
+ we ignore\space
+ spaces at the end\space
+ of a line\space
+\stopsetups
+\stopbuffer
+
+\typebuffer[defs][option=TEX] \getbuffer[defs]
+
+Now we're okay: \quotation {\directsetup{foo}}. The same is true for:
+
+\startTEX
+\starttexdefinition foo
+ here\space
+ we ignore\space
+ spaces at the end\space
+ of a line\space
+\stoptexdefinition
+\stopTEX
+
+There are more cases where \TEX\ will look further. Take for example skip (glue)
+scanning. A glue specification can have \type {plus} and \type {minus} fields.
+
+\startbuffer[defs]
+\scratchdimenone=10pt
+\scratchskipone =10pt plus 10pt minus 10pt
+\scratchskiptwo =0pt
+\stopbuffer
+
+\typebuffer[defs][option=TEX]
+
+Now take the following test:
+
+\startbuffer[demo]
+{1 \scratchskiptwo 10pt plus 10pt \relax\the\scratchskiptwo}
+{2 \scratchskiptwo \scratchdimenone plus 10pt \relax\the\scratchskiptwo}
+{3 \scratchskiptwo 1\scratchdimenone plus 10pt \relax\the\scratchskiptwo}
+{4 \scratchskiptwo \scratchskipone plus 10pt \relax\the\scratchskiptwo}
+{5 \scratchskiptwo 1\scratchskipone plus 10pt \relax\the\scratchskiptwo}
+\stopbuffer
+
+\typebuffer[demo][option=TEX]
+
+\startlines
+\inlinebuffer[defs]\getbuffer[demo]
+\stoplines
+
+If you wonder what the trailing \type {\relax} does, here is a variant without it:
+
+\startlines
+{1 \scratchskiptwo 10pt plus 10pt \the\scratchskiptwo}
+{2 \scratchskiptwo \scratchdimenone plus 10pt \the\scratchskiptwo}
+{3 \scratchskiptwo 1\scratchdimenone plus 10pt \the\scratchskiptwo}
+{4 \scratchskiptwo \scratchskipone plus 10pt \the\scratchskiptwo}
+{5 \scratchskiptwo 1\scratchskipone plus 10pt \the\scratchskiptwo}
+\stoplines
+
+\typebuffer[demo][option=TEX]
+
+\startlines
+\inlinebuffer[defs]\getbuffer[demo]
+\stoplines
+
+In this second variant \TEX\ happily keeps looking for a glue specification when
+it sees the \type {\the}, so it serializes \type {\scratchskiptwo}. But as it then
+sees \type {0pt}, it stops scanning the glue spec. What we get typeset is the old
+value, not the new one! If you want to prevent this you need the \type {\relax}.
+
+Another case where \TEX\ keeps scanning is the following:
+
+\startbuffer[demo]
+\vrule width 40pt height 2pt depth 5pt \quad
+\vrule width 40pt height 20pt depth 5pt height 10pt \quad
+\vrule width 40pt height 10pt height 20pt \quad
+\vrule width 40pt height 20pt depth 5pt height 10pt width 80pt
+\stopbuffer
+
+\typebuffer[demo][option=TEX]
+
+This gives the rules:
+
+\startlinecorrection \darkgray
+\getbuffer[demo]
+\stoplinecorrection
+
+So you can overload dimensions. The space before the \type {\quad} is gobbled as
+part of the lookahead for more keywords.
+
+Often rules (just like glue assignments) are wrapped in macro definitions where the
+macro writer adds a \type {\relax} to stop the lookahead. That way you prevent an
+error message in cases like:
+
+\startTEX
+\def\foo{\vrule width 40pt height 2pt}
+
+The \foo depth of this thought is amazing.
+\stopTEX
+
+because \type {of} definitely is not a valid dimension. Even more subtle is:
+
+\startTEX
+\def\foo{\hskip 10pt plus 1fil}
+
+The \foo fine points of typesetting can actually become a nightmare.
+\stopTEX
+
+because \TEX\ will now see the \type {f} of \type {fine} as a further specification
+and think that you want \type {1fill}.
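+
+In such definitions it therefore makes sense to terminate the specification
+explicitly, for instance:
+
+\startTEX
+\def\foo{\vrule width 40pt height 2pt\relax}
+\def\oof{\hskip 10pt plus 1fil\relax}
+\stopTEX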
+
+So, the most important lesson of this chapter is that you need to be aware of the
+way \TEX\ scans for quantities and specifications. In most cases you can safely use
+a \type {\relax} to prevent a lookahead. And try to avoid adding percent signs all
+over the place.
+
+\stopchapter
+
+\stopcomponent
diff --git a/doc/context/sources/general/manuals/texit/texit-style.tex b/doc/context/sources/general/manuals/texit/texit-style.tex
new file mode 100644
index 000000000..bee526d7b
--- /dev/null
+++ b/doc/context/sources/general/manuals/texit/texit-style.tex
@@ -0,0 +1,52 @@
+\startenvironment texit-style
+
+\setupbodyfont
+ [dejavu,11pt]
+
+\setuphead
+ [chapter]
+ [style=\bfc,
+ header=empty,
+ color=darkgray]
+
+\setuplist
+ [chapter]
+ [alternative=c,
+ width=1.5em]
+
+\setuplayout
+ [width=middle,
+ height=middle,
+ topspace=15mm,
+ backspace=15mm,
+ header=15mm,
+ footer=0mm]
+
+\setupwhitespace
+ [big]
+
+\setupheadertexts
+ []
+
+\setupheadertexts
+ []
+ [{\getmarking[chapter]\quad\pagenumber}]
+ [{\pagenumber\quad\getmarking[chapter]}]
+ []
+
+\setupheader
+ [style=\bf,
+ color=darkgray]
+
+% \setuptype
+% [color=darkred]
+
+% \setuptyping
+% [color=darkred]
+
+\setuppagenumbering
+ [alternative=doublesided]
+
+\usemodule[scite]
+
+\stopenvironment
diff --git a/doc/context/sources/general/manuals/texit/texit-titlepage.tex b/doc/context/sources/general/manuals/texit/texit-titlepage.tex
new file mode 100644
index 000000000..ee5db1969
--- /dev/null
+++ b/doc/context/sources/general/manuals/texit/texit-titlepage.tex
@@ -0,0 +1,40 @@
+\environment texit-style
+
+\startcomponent texit-titlepage
+
+\startMPpage
+
+fill Page
+ withcolor "darkyellow"
+ withtransparency (1,.75) ;
+
+draw image (
+ for i=1 upto 500 :
+ draw (((1,-2) ... origin ... (-1,2)) randomized 0.2)
+ scaled 1cm
+ rotated (0 randomized 15)
+ shifted (origin randomized (PaperWidth,PaperHeight))
+ withpen pencircle yscaled 5mm xscaled 2mm rotated 45
+ withcolor "darkmagenta"
+ withtransparency (1,.5)
+ ;
+ endfor ;
+) shifted center Page ;
+
+clip currentpicture to Page ;
+
+draw textext.ulft ("\TeX it") ysized 5cm
+ shifted lrcorner Page shifted (-1cm,1cm)
+ withcolor white ;
+
+draw textext.llft ("Hans Hagen") ysized 1cm
+ rotated 90
+ shifted urcorner Page shifted (-15mm,-1cm)
+ withcolor white ;
+
+\stopMPpage
+
+\startstandardmakeup[doublesided=no,page=no]
+\stopstandardmakeup
+
+\stopcomponent
diff --git a/doc/context/sources/general/manuals/texit/texit.tex b/doc/context/sources/general/manuals/texit/texit.tex
new file mode 100644
index 000000000..b8b70c00d
--- /dev/null
+++ b/doc/context/sources/general/manuals/texit/texit.tex
@@ -0,0 +1,19 @@
+\environment texit-style
+
+\startdocument
+
+\component texit-titlepage
+
+\startfrontmatter
+ \component texit-contents
+ \component texit-introduction
+\stopfrontmatter
+
+\startbodymatter
+ \component texit-lookahead
+ \component texit-conditions
+ \component texit-leaders
+ % \component texit-efficiency
+\stopbodymatter
+
+\stopdocument
diff --git a/scripts/context/lua/mtxlibs.lua b/scripts/context/lua/mtxlibs.lua
index f6839eb9d..fa539b2bf 100644
--- a/scripts/context/lua/mtxlibs.lua
+++ b/scripts/context/lua/mtxlibs.lua
@@ -98,6 +98,19 @@ local ownlibs = {
-- "util-fmt.lua", -- no need for table formatters
-- "util-deb.lua", -- no need for debugging (and tracing)
+ "util-soc-imp-reset",
+ "util-soc-imp-socket",
+ "util-soc-imp-copas",
+ "util-soc-imp-ltn12",
+ -- "util-soc-imp-mbox",
+ "util-soc-imp-mime",
+ "util-soc-imp-url",
+ "util-soc-imp-headers",
+ "util-soc-imp-tp",
+ "util-soc-imp-http",
+ "util-soc-imp-ftp",
+ "util-soc-imp-smtp",
+
"trac-set.lua",
"trac-log.lua",
-- "trac-pro.lua", -- not relevant outside context
diff --git a/scripts/context/lua/mtxrun.lua b/scripts/context/lua/mtxrun.lua
index 8702204b2..824a7b7aa 100644
--- a/scripts/context/lua/mtxrun.lua
+++ b/scripts/context/lua/mtxrun.lua
@@ -1028,7 +1028,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-lpeg"] = package.loaded["l-lpeg"] or true
--- original size: 39717, stripped down to: 21361
+-- original size: 39759, stripped down to: 21371
if not modules then modules={} end modules ['l-lpeg']={
version=1.001,
@@ -1772,7 +1772,7 @@ do
local nonzero=digit-zero
local trailingzeros=zero^1*endofstring
local stripper=Cs((1-period)^0*(
- (period*trailingzeros/"")+period*(nonzero^1+(trailingzeros/"")+zero^1)^0
+ period*trailingzeros/""+period*(nonzero^1+(trailingzeros/"")+zero^1)^0+endofstring
))
lpeg.patterns.stripzero=stripper
end
@@ -6203,7 +6203,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-str"] = package.loaded["util-str"] or true
--- original size: 41537, stripped down to: 23512
+-- original size: 42387, stripped down to: 23340
if not modules then modules={} end modules ['util-str']={
version=1.001,
@@ -6797,10 +6797,9 @@ end
local format_N=function(f)
n=n+1
if not f or f=="" then
- return format("(((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or ((a%s %% 1 == 0) and format('%%i',a%s)) or lpegmatch(stripzero,format('%%.9f',a%s)))",n,n,n,n,n)
- else
- return format("(((a%s %% 1 == 0) and format('%%i',a%s)) or lpegmatch(stripzero,format('%%%sf',a%s)))",n,n,f,n)
- end
+ f=".9"
+ end
+ return format("(((a%s %% 1 == 0) and format('%%i',a%s)) or lpegmatch(stripzero,format('%%%sf',a%s)))",n,n,f,n)
end
local format_a=function(f)
n=n+1
@@ -10936,7 +10935,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-deb"] = package.loaded["util-deb"] or true
--- original size: 8984, stripped down to: 6573
+-- original size: 9387, stripped down to: 6861
if not modules then modules={} end modules ['util-deb']={
version=1.001,
@@ -11017,7 +11016,7 @@ end
setmetatableindex(names,function(t,name)
local v=setmetatableindex(function(t,source)
local v=setmetatableindex(function(t,line)
- local v={ total=0,count=0 }
+ local v={ total=0,count=0,nesting=0 }
t[line]=v
return v
end)
@@ -11046,12 +11045,24 @@ local function hook(where)
end
local data=names[name][source][line]
if where=="call" then
- data.count=data.count+1
- insert(data,ticks())
+ local nesting=data.nesting
+ if nesting==0 then
+ data.count=data.count+1
+ insert(data,ticks())
+ data.nesting=1
+ else
+ data.nesting=nesting+1
+ end
elseif where=="return" then
- local t=remove(data)
- if t then
- data.total=data.total+ticks()-t
+ local nesting=data.nesting
+ if nesting==1 then
+ local t=remove(data)
+ if t then
+ data.total=data.total+ticks()-t
+ end
+ data.nesting=0
+ else
+ data.nesting=nesting-1
end
end
end
@@ -18005,7 +18016,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-res"] = package.loaded["data-res"] or true
--- original size: 68255, stripped down to: 47783
+-- original size: 68195, stripped down to: 47727
if not modules then modules={} end modules ['data-res']={
version=1.001,
@@ -18015,7 +18026,7 @@ if not modules then modules={} end modules ['data-res']={
license="see context related readme files",
}
local gsub,find,lower,upper,match,gmatch=string.gsub,string.find,string.lower,string.upper,string.match,string.gmatch
-local concat,insert,remove,sortedkeys,sortedhash=table.concat,table.insert,table.remove,table.sortedkeys,table.sortedhash
+local concat,insert,remove=table.concat,table.insert,table.remove
local next,type,rawget=next,type,rawget
local os=os
local P,S,R,C,Cc,Cs,Ct,Carg=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cc,lpeg.Cs,lpeg.Ct,lpeg.Carg
@@ -21694,8 +21705,8 @@ end -- of closure
-- used libraries : l-lua.lua l-macro.lua l-sandbox.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-fil.lua util-sac.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-tpl.lua util-sbx.lua util-mrg.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 891048
--- stripped bytes : 321893
+-- original bytes : 892283
+-- stripped bytes : 323058
-- end library merge
@@ -21747,6 +21758,19 @@ local ownlibs = { -- order can be made better
'util-prs.lua',
'util-fmt.lua',
+ 'util-soc-imp-reset',
+ 'util-soc-imp-socket',
+ 'util-soc-imp-copas',
+ 'util-soc-imp-ltn12',
+ -- 'util-soc-imp-mbox',
+ 'util-soc-imp-mime',
+ 'util-soc-imp-url',
+ 'util-soc-imp-headers',
+ 'util-soc-imp-tp',
+ 'util-soc-imp-http',
+ 'util-soc-imp-ftp',
+ 'util-soc-imp-smtp',
+
'trac-set.lua',
'trac-log.lua',
'trac-inf.lua', -- was before trac-set
diff --git a/scripts/context/stubs/mswin/mtxrun.lua b/scripts/context/stubs/mswin/mtxrun.lua
index 8702204b2..824a7b7aa 100644
--- a/scripts/context/stubs/mswin/mtxrun.lua
+++ b/scripts/context/stubs/mswin/mtxrun.lua
@@ -1028,7 +1028,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-lpeg"] = package.loaded["l-lpeg"] or true
--- original size: 39717, stripped down to: 21361
+-- original size: 39759, stripped down to: 21371
if not modules then modules={} end modules ['l-lpeg']={
version=1.001,
@@ -1772,7 +1772,7 @@ do
local nonzero=digit-zero
local trailingzeros=zero^1*endofstring
local stripper=Cs((1-period)^0*(
- (period*trailingzeros/"")+period*(nonzero^1+(trailingzeros/"")+zero^1)^0
+ period*trailingzeros/""+period*(nonzero^1+(trailingzeros/"")+zero^1)^0+endofstring
))
lpeg.patterns.stripzero=stripper
end
@@ -6203,7 +6203,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-str"] = package.loaded["util-str"] or true
--- original size: 41537, stripped down to: 23512
+-- original size: 42387, stripped down to: 23340
if not modules then modules={} end modules ['util-str']={
version=1.001,
@@ -6797,10 +6797,9 @@ end
local format_N=function(f)
n=n+1
if not f or f=="" then
- return format("(((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or ((a%s %% 1 == 0) and format('%%i',a%s)) or lpegmatch(stripzero,format('%%.9f',a%s)))",n,n,n,n,n)
- else
- return format("(((a%s %% 1 == 0) and format('%%i',a%s)) or lpegmatch(stripzero,format('%%%sf',a%s)))",n,n,f,n)
- end
+ f=".9"
+ end
+ return format("(((a%s %% 1 == 0) and format('%%i',a%s)) or lpegmatch(stripzero,format('%%%sf',a%s)))",n,n,f,n)
end
local format_a=function(f)
n=n+1
@@ -10936,7 +10935,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-deb"] = package.loaded["util-deb"] or true
--- original size: 8984, stripped down to: 6573
+-- original size: 9387, stripped down to: 6861
if not modules then modules={} end modules ['util-deb']={
version=1.001,
@@ -11017,7 +11016,7 @@ end
setmetatableindex(names,function(t,name)
local v=setmetatableindex(function(t,source)
local v=setmetatableindex(function(t,line)
- local v={ total=0,count=0 }
+ local v={ total=0,count=0,nesting=0 }
t[line]=v
return v
end)
@@ -11046,12 +11045,24 @@ local function hook(where)
end
local data=names[name][source][line]
if where=="call" then
- data.count=data.count+1
- insert(data,ticks())
+ local nesting=data.nesting
+ if nesting==0 then
+ data.count=data.count+1
+ insert(data,ticks())
+ data.nesting=1
+ else
+ data.nesting=nesting+1
+ end
elseif where=="return" then
- local t=remove(data)
- if t then
- data.total=data.total+ticks()-t
+ local nesting=data.nesting
+ if nesting==1 then
+ local t=remove(data)
+ if t then
+ data.total=data.total+ticks()-t
+ end
+ data.nesting=0
+ else
+ data.nesting=nesting-1
end
end
end
@@ -18005,7 +18016,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-res"] = package.loaded["data-res"] or true
--- original size: 68255, stripped down to: 47783
+-- original size: 68195, stripped down to: 47727
if not modules then modules={} end modules ['data-res']={
version=1.001,
@@ -18015,7 +18026,7 @@ if not modules then modules={} end modules ['data-res']={
license="see context related readme files",
}
local gsub,find,lower,upper,match,gmatch=string.gsub,string.find,string.lower,string.upper,string.match,string.gmatch
-local concat,insert,remove,sortedkeys,sortedhash=table.concat,table.insert,table.remove,table.sortedkeys,table.sortedhash
+local concat,insert,remove=table.concat,table.insert,table.remove
local next,type,rawget=next,type,rawget
local os=os
local P,S,R,C,Cc,Cs,Ct,Carg=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cc,lpeg.Cs,lpeg.Ct,lpeg.Carg
@@ -21694,8 +21705,8 @@ end -- of closure
-- used libraries : l-lua.lua l-macro.lua l-sandbox.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-fil.lua util-sac.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-tpl.lua util-sbx.lua util-mrg.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 891048
--- stripped bytes : 321893
+-- original bytes : 892283
+-- stripped bytes : 323058
-- end library merge
@@ -21747,6 +21758,19 @@ local ownlibs = { -- order can be made better
'util-prs.lua',
'util-fmt.lua',
+ 'util-soc-imp-reset',
+ 'util-soc-imp-socket',
+ 'util-soc-imp-copas',
+ 'util-soc-imp-ltn12',
+ -- 'util-soc-imp-mbox',
+ 'util-soc-imp-mime',
+ 'util-soc-imp-url',
+ 'util-soc-imp-headers',
+ 'util-soc-imp-tp',
+ 'util-soc-imp-http',
+ 'util-soc-imp-ftp',
+ 'util-soc-imp-smtp',
+
'trac-set.lua',
'trac-log.lua',
'trac-inf.lua', -- was before trac-set
diff --git a/scripts/context/stubs/unix/mtxrun b/scripts/context/stubs/unix/mtxrun
index 8702204b2..824a7b7aa 100644
--- a/scripts/context/stubs/unix/mtxrun
+++ b/scripts/context/stubs/unix/mtxrun
@@ -1028,7 +1028,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-lpeg"] = package.loaded["l-lpeg"] or true
--- original size: 39717, stripped down to: 21361
+-- original size: 39759, stripped down to: 21371
if not modules then modules={} end modules ['l-lpeg']={
version=1.001,
@@ -1772,7 +1772,7 @@ do
local nonzero=digit-zero
local trailingzeros=zero^1*endofstring
local stripper=Cs((1-period)^0*(
- (period*trailingzeros/"")+period*(nonzero^1+(trailingzeros/"")+zero^1)^0
+ period*trailingzeros/""+period*(nonzero^1+(trailingzeros/"")+zero^1)^0+endofstring
))
lpeg.patterns.stripzero=stripper
end
@@ -6203,7 +6203,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-str"] = package.loaded["util-str"] or true
--- original size: 41537, stripped down to: 23512
+-- original size: 42387, stripped down to: 23340
if not modules then modules={} end modules ['util-str']={
version=1.001,
@@ -6797,10 +6797,9 @@ end
local format_N=function(f)
n=n+1
if not f or f=="" then
- return format("(((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or ((a%s %% 1 == 0) and format('%%i',a%s)) or lpegmatch(stripzero,format('%%.9f',a%s)))",n,n,n,n,n)
- else
- return format("(((a%s %% 1 == 0) and format('%%i',a%s)) or lpegmatch(stripzero,format('%%%sf',a%s)))",n,n,f,n)
- end
+ f=".9"
+ end
+ return format("(((a%s %% 1 == 0) and format('%%i',a%s)) or lpegmatch(stripzero,format('%%%sf',a%s)))",n,n,f,n)
end
local format_a=function(f)
n=n+1
@@ -10936,7 +10935,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-deb"] = package.loaded["util-deb"] or true
--- original size: 8984, stripped down to: 6573
+-- original size: 9387, stripped down to: 6861
if not modules then modules={} end modules ['util-deb']={
version=1.001,
@@ -11017,7 +11016,7 @@ end
setmetatableindex(names,function(t,name)
local v=setmetatableindex(function(t,source)
local v=setmetatableindex(function(t,line)
- local v={ total=0,count=0 }
+ local v={ total=0,count=0,nesting=0 }
t[line]=v
return v
end)
@@ -11046,12 +11045,24 @@ local function hook(where)
end
local data=names[name][source][line]
if where=="call" then
- data.count=data.count+1
- insert(data,ticks())
+ local nesting=data.nesting
+ if nesting==0 then
+ data.count=data.count+1
+ insert(data,ticks())
+ data.nesting=1
+ else
+ data.nesting=nesting+1
+ end
elseif where=="return" then
- local t=remove(data)
- if t then
- data.total=data.total+ticks()-t
+ local nesting=data.nesting
+ if nesting==1 then
+ local t=remove(data)
+ if t then
+ data.total=data.total+ticks()-t
+ end
+ data.nesting=0
+ else
+ data.nesting=nesting-1
end
end
end
@@ -18005,7 +18016,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-res"] = package.loaded["data-res"] or true
--- original size: 68255, stripped down to: 47783
+-- original size: 68195, stripped down to: 47727
if not modules then modules={} end modules ['data-res']={
version=1.001,
@@ -18015,7 +18026,7 @@ if not modules then modules={} end modules ['data-res']={
license="see context related readme files",
}
local gsub,find,lower,upper,match,gmatch=string.gsub,string.find,string.lower,string.upper,string.match,string.gmatch
-local concat,insert,remove,sortedkeys,sortedhash=table.concat,table.insert,table.remove,table.sortedkeys,table.sortedhash
+local concat,insert,remove=table.concat,table.insert,table.remove
local next,type,rawget=next,type,rawget
local os=os
local P,S,R,C,Cc,Cs,Ct,Carg=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cc,lpeg.Cs,lpeg.Ct,lpeg.Carg
@@ -21694,8 +21705,8 @@ end -- of closure
-- used libraries : l-lua.lua l-macro.lua l-sandbox.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-fil.lua util-sac.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-tpl.lua util-sbx.lua util-mrg.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 891048
--- stripped bytes : 321893
+-- original bytes : 892283
+-- stripped bytes : 323058
-- end library merge
@@ -21747,6 +21758,19 @@ local ownlibs = { -- order can be made better
'util-prs.lua',
'util-fmt.lua',
+ 'util-soc-imp-reset',
+ 'util-soc-imp-socket',
+ 'util-soc-imp-copas',
+ 'util-soc-imp-ltn12',
+ -- 'util-soc-imp-mbox',
+ 'util-soc-imp-mime',
+ 'util-soc-imp-url',
+ 'util-soc-imp-headers',
+ 'util-soc-imp-tp',
+ 'util-soc-imp-http',
+ 'util-soc-imp-ftp',
+ 'util-soc-imp-smtp',
+
'trac-set.lua',
'trac-log.lua',
'trac-inf.lua', -- was before trac-set
diff --git a/scripts/context/stubs/win64/mtxrun.lua b/scripts/context/stubs/win64/mtxrun.lua
index 8702204b2..824a7b7aa 100644
--- a/scripts/context/stubs/win64/mtxrun.lua
+++ b/scripts/context/stubs/win64/mtxrun.lua
@@ -1028,7 +1028,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["l-lpeg"] = package.loaded["l-lpeg"] or true
--- original size: 39717, stripped down to: 21361
+-- original size: 39759, stripped down to: 21371
if not modules then modules={} end modules ['l-lpeg']={
version=1.001,
@@ -1772,7 +1772,7 @@ do
local nonzero=digit-zero
local trailingzeros=zero^1*endofstring
local stripper=Cs((1-period)^0*(
- (period*trailingzeros/"")+period*(nonzero^1+(trailingzeros/"")+zero^1)^0
+ period*trailingzeros/""+period*(nonzero^1+(trailingzeros/"")+zero^1)^0+endofstring
))
lpeg.patterns.stripzero=stripper
end
@@ -6203,7 +6203,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-str"] = package.loaded["util-str"] or true
--- original size: 41537, stripped down to: 23512
+-- original size: 42387, stripped down to: 23340
if not modules then modules={} end modules ['util-str']={
version=1.001,
@@ -6797,10 +6797,9 @@ end
local format_N=function(f)
n=n+1
if not f or f=="" then
- return format("(((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or ((a%s %% 1 == 0) and format('%%i',a%s)) or lpegmatch(stripzero,format('%%.9f',a%s)))",n,n,n,n,n)
- else
- return format("(((a%s %% 1 == 0) and format('%%i',a%s)) or lpegmatch(stripzero,format('%%%sf',a%s)))",n,n,f,n)
- end
+ f=".9"
+ end
+ return format("(((a%s %% 1 == 0) and format('%%i',a%s)) or lpegmatch(stripzero,format('%%%sf',a%s)))",n,n,f,n)
end
local format_a=function(f)
n=n+1
@@ -10936,7 +10935,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["util-deb"] = package.loaded["util-deb"] or true
--- original size: 8984, stripped down to: 6573
+-- original size: 9387, stripped down to: 6861
if not modules then modules={} end modules ['util-deb']={
version=1.001,
@@ -11017,7 +11016,7 @@ end
setmetatableindex(names,function(t,name)
local v=setmetatableindex(function(t,source)
local v=setmetatableindex(function(t,line)
- local v={ total=0,count=0 }
+ local v={ total=0,count=0,nesting=0 }
t[line]=v
return v
end)
@@ -11046,12 +11045,24 @@ local function hook(where)
end
local data=names[name][source][line]
if where=="call" then
- data.count=data.count+1
- insert(data,ticks())
+ local nesting=data.nesting
+ if nesting==0 then
+ data.count=data.count+1
+ insert(data,ticks())
+ data.nesting=1
+ else
+ data.nesting=nesting+1
+ end
elseif where=="return" then
- local t=remove(data)
- if t then
- data.total=data.total+ticks()-t
+ local nesting=data.nesting
+ if nesting==1 then
+ local t=remove(data)
+ if t then
+ data.total=data.total+ticks()-t
+ end
+ data.nesting=0
+ else
+ data.nesting=nesting-1
end
end
end
@@ -18005,7 +18016,7 @@ do -- create closure to overcome 200 locals limit
package.loaded["data-res"] = package.loaded["data-res"] or true
--- original size: 68255, stripped down to: 47783
+-- original size: 68195, stripped down to: 47727
if not modules then modules={} end modules ['data-res']={
version=1.001,
@@ -18015,7 +18026,7 @@ if not modules then modules={} end modules ['data-res']={
license="see context related readme files",
}
local gsub,find,lower,upper,match,gmatch=string.gsub,string.find,string.lower,string.upper,string.match,string.gmatch
-local concat,insert,remove,sortedkeys,sortedhash=table.concat,table.insert,table.remove,table.sortedkeys,table.sortedhash
+local concat,insert,remove=table.concat,table.insert,table.remove
local next,type,rawget=next,type,rawget
local os=os
local P,S,R,C,Cc,Cs,Ct,Carg=lpeg.P,lpeg.S,lpeg.R,lpeg.C,lpeg.Cc,lpeg.Cs,lpeg.Ct,lpeg.Carg
@@ -21694,8 +21705,8 @@ end -- of closure
-- used libraries : l-lua.lua l-macro.lua l-sandbox.lua l-package.lua l-lpeg.lua l-function.lua l-string.lua l-table.lua l-io.lua l-number.lua l-set.lua l-os.lua l-file.lua l-gzip.lua l-md5.lua l-url.lua l-dir.lua l-boolean.lua l-unicode.lua l-math.lua util-str.lua util-tab.lua util-fil.lua util-sac.lua util-sto.lua util-prs.lua util-fmt.lua trac-set.lua trac-log.lua trac-inf.lua trac-pro.lua util-lua.lua util-deb.lua util-tpl.lua util-sbx.lua util-mrg.lua util-env.lua luat-env.lua lxml-tab.lua lxml-lpt.lua lxml-mis.lua lxml-aux.lua lxml-xml.lua trac-xml.lua data-ini.lua data-exp.lua data-env.lua data-tmp.lua data-met.lua data-res.lua data-pre.lua data-inp.lua data-out.lua data-fil.lua data-con.lua data-use.lua data-zip.lua data-tre.lua data-sch.lua data-lua.lua data-aux.lua data-tmf.lua data-lst.lua util-lib.lua luat-sta.lua luat-fmt.lua
-- skipped libraries : -
--- original bytes : 891048
--- stripped bytes : 321893
+-- original bytes : 892283
+-- stripped bytes : 323058
-- end library merge
@@ -21747,6 +21758,19 @@ local ownlibs = { -- order can be made better
'util-prs.lua',
'util-fmt.lua',
+ 'util-soc-imp-reset',
+ 'util-soc-imp-socket',
+ 'util-soc-imp-copas',
+ 'util-soc-imp-ltn12',
+ -- 'util-soc-imp-mbox',
+ 'util-soc-imp-mime',
+ 'util-soc-imp-url',
+ 'util-soc-imp-headers',
+ 'util-soc-imp-tp',
+ 'util-soc-imp-http',
+ 'util-soc-imp-ftp',
+ 'util-soc-imp-smtp',
+
'trac-set.lua',
'trac-log.lua',
'trac-inf.lua', -- was before trac-set
diff --git a/tex/context/base/mkii/cont-new.mkii b/tex/context/base/mkii/cont-new.mkii
index 06c0d4626..253b3250a 100644
--- a/tex/context/base/mkii/cont-new.mkii
+++ b/tex/context/base/mkii/cont-new.mkii
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2018.08.10 16:51}
+\newcontextversion{2018.08.14 23:10}
%D This file is loaded at runtime, thereby providing an
%D excellent place for hacks, patches, extensions and new
diff --git a/tex/context/base/mkii/context.mkii b/tex/context/base/mkii/context.mkii
index 33917ad55..633142980 100644
--- a/tex/context/base/mkii/context.mkii
+++ b/tex/context/base/mkii/context.mkii
@@ -20,7 +20,7 @@
%D your styles an modules.
\edef\contextformat {\jobname}
-\edef\contextversion{2018.08.10 16:51}
+\edef\contextversion{2018.08.14 23:10}
%D For those who want to use this:
diff --git a/tex/context/base/mkiv/anch-pos.lua b/tex/context/base/mkiv/anch-pos.lua
index 99763edae..47fee067f 100644
--- a/tex/context/base/mkiv/anch-pos.lua
+++ b/tex/context/base/mkiv/anch-pos.lua
@@ -25,7 +25,7 @@ more efficient.</p>
-- save much here (at least not now)
local tostring, next, rawget, rawset, setmetatable, tonumber = tostring, next, rawget, rawset, setmetatable, tonumber
-local sort, sortedhash, sortedkeys = table.sort, table.sortedhash, table.sortedkeys
+local sort = table.sort
local format, gmatch = string.format, string.gmatch
local rawget = rawget
local lpegmatch = lpeg.match
@@ -46,6 +46,8 @@ local commands = commands
local context = context
local ctxnode = context.nodes.flush
+local ctx_latelua = context.latelua
+
local tex = tex
local texgetcount = tex.getcount
local texsetcount = tex.setcount
@@ -489,7 +491,8 @@ scanners.bposcolumnregistered = function() -- tag
local tag = scanstring()
insert(columns,tag)
column = tag
- ctxnode(new_latelua_node(function() b_column(tag) end))
+-- ctxnode(new_latelua_node(function() b_column(tag) end))
+ ctx_latelua(function() b_column(tag) end)
end
scanners.eposcolumn = function()
@@ -498,7 +501,8 @@ scanners.eposcolumn = function()
end
scanners.eposcolumnregistered = function()
- ctxnode(new_latelua_node(e_column))
+-- ctxnode(new_latelua_node(e_column))
+ ctx_latelua(e_column)
remove(columns)
column = columns[#columns]
end
@@ -635,7 +639,8 @@ scanners.parpos = function() -- todo: relate to localpar (so this is an intermed
end
local tag = f_p_tag(nofparagraphs)
tobesaved[tag] = t
- ctxnode(new_latelua_node(function() enhance(tobesaved[tag]) end))
+-- ctxnode(new_latelua_node(function() enhance(tobesaved[tag]) end))
+ ctx_latelua(function() enhance(tobesaved[tag]) end)
end
scanners.dosetposition = function() -- name
@@ -649,7 +654,8 @@ scanners.dosetposition = function() -- name
n = nofparagraphs > 0 and nofparagraphs or nil,
r2l = texgetcount("inlinelefttoright") == 1 or nil,
}
- ctxnode(new_latelua_node(function() enhance(tobesaved[name]) end))
+-- ctxnode(new_latelua_node(function() enhance(tobesaved[name]) end))
+ ctx_latelua(function() enhance(tobesaved[name]) end)
end
scanners.dosetpositionwhd = function() -- name w h d extra
@@ -669,7 +675,8 @@ scanners.dosetpositionwhd = function() -- name w h d extra
n = nofparagraphs > 0 and nofparagraphs or nil,
r2l = texgetcount("inlinelefttoright") == 1 or nil,
}
- ctxnode(new_latelua_node(function() enhance(tobesaved[name]) end))
+-- ctxnode(new_latelua_node(function() enhance(tobesaved[name]) end))
+ ctx_latelua(function() enhance(tobesaved[name]) end)
end
scanners.dosetpositionbox = function() -- name box
@@ -688,7 +695,8 @@ scanners.dosetpositionbox = function() -- name box
n = nofparagraphs > 0 and nofparagraphs or nil,
r2l = texgetcount("inlinelefttoright") == 1 or nil,
}
- ctxnode(new_latelua_node(function() enhance(tobesaved[name]) end))
+-- ctxnode(new_latelua_node(function() enhance(tobesaved[name]) end))
+ ctx_latelua(function() enhance(tobesaved[name]) end)
end
scanners.dosetpositionplus = function() -- name w h d extra
@@ -709,7 +717,8 @@ scanners.dosetpositionplus = function() -- name w h d extra
e = scanstring(),
r2l = texgetcount("inlinelefttoright") == 1 or nil,
}
- ctxnode(new_latelua_node(function() enhance(tobesaved[name]) end))
+-- ctxnode(new_latelua_node(function() enhance(tobesaved[name]) end))
+ ctx_latelua(function() enhance(tobesaved[name]) end)
end
scanners.dosetpositionstrut = function() -- name
@@ -727,7 +736,8 @@ scanners.dosetpositionstrut = function() -- name
n = nofparagraphs > 0 and nofparagraphs or nil,
r2l = texgetcount("inlinelefttoright") == 1 or nil,
}
- ctxnode(new_latelua_node(function() enhance(tobesaved[name]) end))
+-- ctxnode(new_latelua_node(function() enhance(tobesaved[name]) end))
+ ctx_latelua(function() enhance(tobesaved[name]) end)
end
scanners.dosetpositionstrutkind = function() -- name
@@ -747,7 +757,8 @@ scanners.dosetpositionstrutkind = function() -- name
n = nofparagraphs > 0 and nofparagraphs or nil,
r2l = texgetcount("inlinelefttoright") == 1 or nil,
}
- ctxnode(new_latelua_node(function() enhance(tobesaved[name]) end))
+-- ctxnode(new_latelua_node(function() enhance(tobesaved[name]) end))
+ ctx_latelua(function() enhance(tobesaved[name]) end)
end
function jobpositions.getreserved(tag,n)
diff --git a/tex/context/base/mkiv/attr-col.lua b/tex/context/base/mkiv/attr-col.lua
index 28e63b177..f970fb8e7 100644
--- a/tex/context/base/mkiv/attr-col.lua
+++ b/tex/context/base/mkiv/attr-col.lua
@@ -406,10 +406,29 @@ function colors.setmodel(name,weightgray)
weightgray = true
end
end
- colors.model = name -- global, not useful that way
- colors.default = models[name] or 1 -- global
- colors.weightgray = weightgray -- global
- return colors.default
+ local default = models[name] or 1
+
+ colors.model = name -- global, not useful that way
+ colors.default = default -- global
+ colors.weightgray = weightgray -- global
+
+ -- avoid selective checking if there is no need for it
+
+ local forced = colors.forced
+
+ if forced == nil then
+ -- unset
+ colors.forced = default
+ elseif forced == false then
+ -- assumed mixed
+ elseif forced ~= default then
+ -- probably mixed
+ colors.forced = false
+ else
+ -- still the same
+ end
+
+ return default
end
function colors.register(name, colorspace, ...) -- passing 9 vars is faster (but not called that often)
diff --git a/tex/context/base/mkiv/cldf-bas.lua b/tex/context/base/mkiv/cldf-bas.lua
index 15e941db2..02f8e3e5b 100644
--- a/tex/context/base/mkiv/cldf-bas.lua
+++ b/tex/context/base/mkiv/cldf-bas.lua
@@ -31,10 +31,13 @@ local concat = table.concat
local context = context
local ctxcore = context.core
local variables = interfaces.variables
+local sprint = context.sprint
local nodepool = nodes.pool
local new_rule = nodepool.rule
local new_glyph = nodepool.glyph
+local new_latelua = nodepool.latelua
+
local current_attr = nodes.current_attr
local current_font = font.current
@@ -172,3 +175,7 @@ context.registers = {
-- not really a register but kind of belongs here
newchar = function(name,u) context([[\chardef\%s=%s\relax]],name,u) end,
}
+
+function context.latelua(f)
+ sprint(new_latelua(f)) -- maybe just context
+end
diff --git a/tex/context/base/mkiv/cont-new.mkiv b/tex/context/base/mkiv/cont-new.mkiv
index 0de7057d6..84022b992 100644
--- a/tex/context/base/mkiv/cont-new.mkiv
+++ b/tex/context/base/mkiv/cont-new.mkiv
@@ -11,7 +11,7 @@
%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
%C details.
-\newcontextversion{2018.08.10 16:51}
+\newcontextversion{2018.08.14 23:10}
%D This file is loaded at runtime, thereby providing an excellent place for
%D hacks, patches, extensions and new features.
diff --git a/tex/context/base/mkiv/context.mkiv b/tex/context/base/mkiv/context.mkiv
index bc599f373..357707a14 100644
--- a/tex/context/base/mkiv/context.mkiv
+++ b/tex/context/base/mkiv/context.mkiv
@@ -42,7 +42,7 @@
%D has to match \type {YYYY.MM.DD HH:MM} format.
\edef\contextformat {\jobname}
-\edef\contextversion{2018.08.10 16:51}
+\edef\contextversion{2018.08.14 23:10}
\edef\contextkind {beta}
%D For those who want to use this:
@@ -99,6 +99,7 @@
\loadmarkfile{luat-cod}
\loadmarkfile{luat-bas}
\loadmarkfile{luat-lib}
+\loadmarkfile{luat-soc}
\loadmarkfile{catc-ini}
\loadmarkfile{catc-act}
diff --git a/tex/context/base/mkiv/core-con.mkiv b/tex/context/base/mkiv/core-con.mkiv
index d0e53833d..3ac33f46f 100644
--- a/tex/context/base/mkiv/core-con.mkiv
+++ b/tex/context/base/mkiv/core-con.mkiv
@@ -946,4 +946,23 @@
data {#1}%
\relax}
+%D For those who start counting at zero:
+%D
+%D \starttyping
+%D \defineconversionset [zero] [n,zero] [n]
+%D
+%D \setuphead [sectionconversionset=zero]
+%D
+%D \starttext
+%D \startchapter [title=Introduction]
+%D \startsection [title=First topic] \stopsection
+%D \startsection [title=Second topic] \stopsection
+%D \stopchapter
+%D \stoptext
+%D \stoptyping
+
+\def\zeronumberconversion#1{\number\numexpr#1-\plusone\relax}
+
+\defineconversion [zero] [\zeronumberconversion]
+
\protect \endinput
diff --git a/tex/context/base/mkiv/core-two.lua b/tex/context/base/mkiv/core-two.lua
index 1e59004be..3ab2112b9 100644
--- a/tex/context/base/mkiv/core-two.lua
+++ b/tex/context/base/mkiv/core-two.lua
@@ -32,7 +32,7 @@ end
job.register('job.passes.collected', tobesaved, initializer, nil)
-local function allocate(id)
+local function define(id)
local p = tobesaved[id]
if not p then
p = { }
@@ -41,10 +41,8 @@ local function allocate(id)
return p
end
-jobpasses.define = allocate
-
-function jobpasses.save(id,str,index)
- local jti = allocate(id)
+local function save(id,str,index)
+ local jti = define(id)
if index then
jti[index] = str
else
@@ -52,30 +50,30 @@ function jobpasses.save(id,str,index)
end
end
-function jobpasses.savetagged(id,tag,str)
- local jti = allocate(id)
+local function savetagged(id,tag,str)
+ local jti = define(id)
jti[tag] = str
end
-function jobpasses.getdata(id,index,default)
+local function getdata(id,index,default)
local jti = collected[id]
local value = jti and jti[index]
return value ~= "" and value or default or ""
end
-function jobpasses.getfield(id,index,tag,default)
+local function getfield(id,index,tag,default)
local jti = collected[id]
jti = jti and jti[index]
local value = jti and jti[tag]
return value ~= "" and value or default or ""
end
-function jobpasses.getcollected(id)
+local function getcollected(id)
return collected[id] or { }
end
-function jobpasses.gettobesaved(id)
- return allocate(id)
+local function gettobesaved(id)
+ return define(id)
end
local function get(id)
@@ -87,23 +85,17 @@ end
local function first(id)
local jti = collected[id]
- if jti and #jti > 0 then
- return jti[1]
- end
+ return jti and jti[1]
end
local function last(id)
local jti = collected[id]
- if jti and #jti > 0 then
- return jti[#jti]
- end
+ return jti and jti[#jti]
end
local function find(id,n)
local jti = collected[id]
- if jti and jti[n] then
- return jti[n]
- end
+ return jti and jti[n] or nil
end
local function count(id)
@@ -132,44 +124,49 @@ end
local check = first
---
-
-jobpasses.get = get
-jobpasses.first = first
-jobpasses.last = last
-jobpasses.find = find
-jobpasses.list = list
-jobpasses.count = count
-jobpasses.check = check
-jobpasses.inlist = inlist
+jobpasses.define = define
+jobpasses.save = save
+jobpasses.savetagged = savetagged
+jobpasses.getdata = getdata
+jobpasses.getfield = getfield
+jobpasses.getcollected = getcollected
+jobpasses.gettobesaved = gettobesaved
+jobpasses.get = get
+jobpasses.first = first
+jobpasses.last = last
+jobpasses.find = find
+jobpasses.list = list
+jobpasses.count = count
+jobpasses.check = check
+jobpasses.inlist = inlist
-- interface
local implement = interfaces.implement
-implement { name = "gettwopassdata", actions = { get , context }, arguments = "string" }
+implement { name = "gettwopassdata", actions = { get, context }, arguments = "string" }
implement { name = "getfirsttwopassdata",actions = { first, context }, arguments = "string" }
-implement { name = "getlasttwopassdata", actions = { last , context }, arguments = "string" }
-implement { name = "findtwopassdata", actions = { find , context }, arguments = "2 strings" }
-implement { name = "gettwopassdatalist", actions = { list , context }, arguments = "string" }
+implement { name = "getlasttwopassdata", actions = { last, context }, arguments = "string" }
+implement { name = "findtwopassdata", actions = { find, context }, arguments = "2 strings" }
+implement { name = "gettwopassdatalist", actions = { list, context }, arguments = "string" }
implement { name = "counttwopassdata", actions = { count, context }, arguments = "string" }
implement { name = "checktwopassdata", actions = { check, context }, arguments = "string" }
implement {
name = "definetwopasslist",
- actions = jobpasses.define,
+ actions = define,
arguments = "string"
}
implement {
name = "savetwopassdata",
- actions = jobpasses.save,
+ actions = save,
arguments = "2 strings",
}
implement {
name = "savetaggedtwopassdata",
- actions = jobpasses.savetagged,
+ actions = savetagged,
arguments = "3 strings",
}
@@ -178,3 +175,23 @@ implement {
actions = { inlist, commands.doifelse },
arguments = "2 strings",
}
+
+-- local ctx_latelua = context.latelua
+
+-- implement {
+-- name = "lazysavetwopassdata",
+-- arguments = "3 strings",
+-- public = true,
+-- actions = function(a,b,c)
+-- ctx_latelua(function() save(a,c) end)
+-- end,
+-- }
+
+-- implement {
+-- name = "lazysavetaggedtwopassdata",
+-- arguments = "3 strings",
+-- public = true,
+-- actions = function(a,b,c)
+-- ctx_latelua(function() savetagged(a,b,c) end)
+-- end,
+-- }
diff --git a/tex/context/base/mkiv/core-two.mkiv b/tex/context/base/mkiv/core-two.mkiv
index f83d63042..aae4902bc 100644
--- a/tex/context/base/mkiv/core-two.mkiv
+++ b/tex/context/base/mkiv/core-two.mkiv
@@ -74,10 +74,10 @@
\registerctxluafile{core-two}{}
\def\immediatesavetwopassdata #1#2#3{\normalexpanded{\noexpand\clf_savetwopassdata{#1}{#3}}}
-\def\savetwopassdata #1#2#3{\normalexpanded{\noexpand\ctxlatecommand{savetwopassdata('#1',"#3")}}}
-\def\lazysavetwopassdata #1#2#3{\normalexpanded{\noexpand\ctxlatecommand{savetwopassdata('#1',"#3")}}}
-\def\savetaggedtwopassdata #1#2#3#4{\normalexpanded{\noexpand\clf_savetaggedtwopassdata{#1}{#3}{#4}}}
-\def\lazysavetaggedtwopassdata#1#2#3#4{\normalexpanded{\noexpand\ctxlatecommand{savetaggedtwopassdata('#1','#3',"#4")}}}
+\def \lazysavetwopassdata #1#2#3{\normalexpanded{\noexpand\ctxlatecommand{savetwopassdata("#1","#3")}}}
+\let \savetwopassdata \lazysavetwopassdata
+\def \savetaggedtwopassdata#1#2#3#4{\normalexpanded{\noexpand\clf_savetaggedtwopassdata{#1}{#3}{#4}}}
+\def\lazysavetaggedtwopassdata#1#2#3#4{\normalexpanded{\noexpand\ctxlatecommand{savetaggedtwopassdata("#1",'#3',"#4")}}}
% temp hack: needs a proper \starteverytimeluacode
diff --git a/tex/context/base/mkiv/data-res.lua b/tex/context/base/mkiv/data-res.lua
index e3c5c32b9..0c2735fc2 100644
--- a/tex/context/base/mkiv/data-res.lua
+++ b/tex/context/base/mkiv/data-res.lua
@@ -9,7 +9,7 @@ if not modules then modules = { } end modules ['data-res'] = {
-- todo: cache:/// home:/// selfautoparent:/// (sometime end 2012)
local gsub, find, lower, upper, match, gmatch = string.gsub, string.find, string.lower, string.upper, string.match, string.gmatch
-local concat, insert, remove, sortedkeys, sortedhash = table.concat, table.insert, table.remove, table.sortedkeys, table.sortedhash
+local concat, insert, remove = table.concat, table.insert, table.remove
local next, type, rawget = next, type, rawget
local os = os
diff --git a/tex/context/base/mkiv/font-map.lua b/tex/context/base/mkiv/font-map.lua
index 140702ec8..a7fbfe49e 100644
--- a/tex/context/base/mkiv/font-map.lua
+++ b/tex/context/base/mkiv/font-map.lua
@@ -214,7 +214,51 @@ local unknown = f_single(0xFFFD)
-- end
-- end
-local hash = table.setmetatableindex(function(t,k)
+-- local hash = table.setmetatableindex(function(t,k)
+-- local v
+-- if k >= 0x00E000 and k <= 0x00F8FF then
+-- v = unknown
+-- elseif k >= 0x0F0000 and k <= 0x0FFFFF then
+-- v = unknown
+-- elseif k >= 0x100000 and k <= 0x10FFFF then
+-- v = unknown
+-- elseif k < 0xD7FF or (k > 0xDFFF and k <= 0xFFFF) then
+-- v = f_single(k)
+-- else
+-- k = k - 0x10000
+-- v = f_double(rshift(k,10)+0xD800,k%1024+0xDC00)
+-- end
+-- t[k] = v
+-- return v
+-- end)
+--
+-- table.makeweak(hash)
+--
+-- local function tounicode(unicode)
+-- if type(unicode) == "table" then
+-- local t = { }
+-- for l=1,#unicode do
+-- t[l] = hash[unicode[l]]
+-- end
+-- return concat(t)
+-- else
+-- return hash[unicode]
+-- end
+-- end
+
+local hash = { }
+local conc = { }
+
+-- table.makeweak(hash)
+
+table.setmetatableindex(hash,function(t,k)
+ if type(k) == "table" then
+ local n = #k
+ for l=1,n do
+ conc[l] = hash[k[l]]
+ end
+ return concat(conc,"",1,n)
+ end
local v
if k >= 0x00E000 and k <= 0x00F8FF then
v = unknown
@@ -232,18 +276,8 @@ local hash = table.setmetatableindex(function(t,k)
return v
end)
-table.makeweak(hash)
-
-local function tounicode(unicode,name)
- if type(unicode) == "table" then
- local t = { }
- for l=1,#unicode do
- t[l] = hash[unicode[l]]
- end
- return concat(t)
- else
- return hash[unicode]
- end
+local function tounicode(unicode)
+ return hash[unicode]
end
local function fromunicode16(str)
diff --git a/tex/context/base/mkiv/l-lpeg.lua b/tex/context/base/mkiv/l-lpeg.lua
index 827564464..750d5e698 100644
--- a/tex/context/base/mkiv/l-lpeg.lua
+++ b/tex/context/base/mkiv/l-lpeg.lua
@@ -1135,7 +1135,7 @@ end
do
- local trailingzeros = zero^0 * -digit -- suggested by Roberto R
+ local trailingzeros = zero^0 * -digit -- suggested by Roberto
local stripper = Cs((
digits * (
period * trailingzeros / ""
@@ -1145,15 +1145,15 @@ do
lpeg.patterns.stripzeros = stripper -- multiple in string
- local nonzero = digit - zero
-
+ local nonzero = digit - zero
local trailingzeros = zero^1 * endofstring
local stripper = Cs( (1-period)^0 * (
- (period * trailingzeros/"") +
- period * (nonzero^1 + (trailingzeros/"") + zero^1)^0
+ period * trailingzeros/""
+ + period * (nonzero^1 + (trailingzeros/"") + zero^1)^0
+ + endofstring
))
- lpeg.patterns.stripzero = stripper -- slightly more efficient
+ lpeg.patterns.stripzero = stripper -- slightly more efficient but expects a float !
-- local sample = "bla 11.00 bla 11 bla 0.1100 bla 1.00100 bla 0.00 bla 0.001 bla 1.1100 bla 0.100100100 bla 0.00100100100"
-- collectgarbage("collect")
@@ -1164,7 +1164,7 @@ do
end
--- for practical reasone we keep this here:
+-- for practical reasons we keep this here:
local byte_to_HEX = { }
local byte_to_hex = { }
diff --git a/tex/context/base/mkiv/lpdf-ini.lua b/tex/context/base/mkiv/lpdf-ini.lua
index 79ed3470c..c53d90848 100644
--- a/tex/context/base/mkiv/lpdf-ini.lua
+++ b/tex/context/base/mkiv/lpdf-ini.lua
@@ -447,13 +447,6 @@ do
end
-local function merge_t(a,b)
- local t = { }
- for k,v in next, a do t[k] = v end
- for k,v in next, b do t[k] = v end
- return setmetatable(t,getmetatable(a))
-end
-
local tostring_a, tostring_d
do
diff --git a/tex/context/base/mkiv/lpdf-wid.lua b/tex/context/base/mkiv/lpdf-wid.lua
index 8647a7b39..11cd80623 100644
--- a/tex/context/base/mkiv/lpdf-wid.lua
+++ b/tex/context/base/mkiv/lpdf-wid.lua
@@ -239,10 +239,12 @@ local function flushembeddedfiles()
e[#e+1] = pdfstring(tag)
e[#e+1] = reference -- already a reference
else
- -- messy spec ... when annot not in named else twice in menu list acrobat
+ -- -- messy spec ... when annot not in named else twice in menu list acrobat
end
end
- lpdf.addtonames("EmbeddedFiles",pdfreference(pdfflushobject(pdfdictionary{ Names = e })))
+ if #e > 0 then
+ lpdf.addtonames("EmbeddedFiles",pdfreference(pdfflushobject(pdfdictionary{ Names = e })))
+ end
end
end
@@ -612,7 +614,8 @@ local function insertrendering(specification)
local option = settings_to_hash(specification.option)
if not mf[label] then
local filename = specification.filename
- local isurl = find(filename,"://",1,true)
+ local isurl = find(filename,"://",1,true)
+ local mimetype = specification.mimetype or specification.mime
-- local start = pdfdictionary {
-- Type = pdfconstant("MediaOffset"),
-- S = pdfconstant("T"), -- time
@@ -648,13 +651,16 @@ local function insertrendering(specification)
if isurl then
descriptor.FS = pdfconstant("URL")
elseif option[v_embed] then
- descriptor.EF = codeinjections.embedfile { file = filename }
+ descriptor.EF = codeinjections.embedfile {
+ file = filename,
+ mimetype = mimetype, -- yes or no
+ }
end
local clip = pdfdictionary {
Type = pdfconstant("MediaClip"),
S = pdfconstant("MCD"),
N = label,
- CT = specification.mime,
+ CT = mimetype,
Alt = pdfarray { "", "file not found" }, -- language id + message
D = pdfreference(pdfflushobject(descriptor)),
-- P = pdfreference(pdfflushobject(parameters)),
diff --git a/tex/context/base/mkiv/luat-cod.lua b/tex/context/base/mkiv/luat-cod.lua
index f74c53e82..790f741c1 100644
--- a/tex/context/base/mkiv/luat-cod.lua
+++ b/tex/context/base/mkiv/luat-cod.lua
@@ -53,7 +53,9 @@ local strip = false if arg then for i=-1,#arg do if arg[i] == "--c:strip" then s
function lua.registercode(filename,options)
local barename = gsub(filename,"%.[%a%d]+$","")
- if barename == filename then filename = filename .. ".lua" end
+ if barename == filename then
+ filename = filename .. ".lua"
+ end
local basename = match(barename,"^.+[/\\](.-)$") or barename
if not bytedone[basename] then
local opts = { }
@@ -157,12 +159,14 @@ environment.initexmode = INITEXMODE
if not environment.luafilechunk then
function environment.luafilechunk(filename)
+ local fullname = filename
if sourcepath ~= "" then
- filename = sourcepath .. "/" .. filename
+ fullname = sourcepath .. "/" .. filename
end
- local data = loadfile(filename)
- texio.write("term and log","<",data and "+ " or "- ",filename,">")
+ local data = loadfile(fullname)
+ texio.write("term and log","<",data and "+ " or "- ",fullname,">")
if data then
+-- package.loaded[gsub(filename,"%..-$"] =
data()
end
return data
diff --git a/tex/context/base/mkiv/luat-soc.lua b/tex/context/base/mkiv/luat-soc.lua
deleted file mode 100644
index 9342a4b33..000000000
--- a/tex/context/base/mkiv/luat-soc.lua
+++ /dev/null
@@ -1,11 +0,0 @@
--- This is just a loader. The package handler knows about the TEX tree.
-
--- require "luatex/lua/socket.lua"
--- require "luatex/lua/ltn12.lua"
--- require "luatex/lua/mime.lua"
--- require "luatex/lua/socket/http.lua"
--- require "luatex/lua/socket/url.lua"
--- require "luatex/lua/socket/tp.lua"
--- require "luatex/lua/socket/ftp.lua"
-
--- "luatex/lua/socket/smtp.lua"
diff --git a/tex/context/base/mkiv/luat-soc.mkiv b/tex/context/base/mkiv/luat-soc.mkiv
new file mode 100644
index 000000000..e17ff22d3
--- /dev/null
+++ b/tex/context/base/mkiv/luat-soc.mkiv
@@ -0,0 +1,52 @@
+%D \module
+%D [ file=luat-soc,
+%D version=2018.08.05,
+%D title=\CONTEXT\ Lua Macros,
+%D subtitle=Socket Libraries,
+%D author=Hans Hagen,
+%D date=\currentdate,
+%D copyright={PRAGMA ADE \& \CONTEXT\ Development Team}]
+%C
+%C This module is part of the \CONTEXT\ macro||package and is
+%C therefore copyrighted by \PRAGMA. See mreadme.pdf for
+%C details.
+
+\writestatus{loading}{ConTeXt Lua Macros / Socket Libraries}
+
+%D In \LUATEX\ we provide the socket library that is more or less the standard one
+%D for \LUA. It has been around for a while and seems to be pretty stable. The
+%D binary module is compiled into \LUATEX\ and the accompanying .lua files are
+%D preloaded. These files are mostly written by Diego Nehab, Andre Carregal, Javier
+%D Guerra, and Fabio Mascarenhas with contributions from Diego Nehab, Mike Pall,
+%D David Burgess, Leonardo Godinho, Thomas Harning Jr., and Gary NG. The originals
+%D are part of and copyrighted by the Kepler project.
+%D
+%D Here we reload a slightly reworked version of these \type {.lua} files. We keep
+%D the same (documented) interface but streamlined some of the code. No more
+%D modules, no more pre 5.2 \LUA, etc. Also, as it loads into the \CONTEXT\
+%D ecosystem, we plug in some logging (and maybe tracing in the future). As we
+%D don't support serial ports in \LUATEX, related code has been dropped.
+%D
+%D The files are reformatted so that we can more easily add additional features
+%D and|/|or tracing options. Any error introduced there is our fault! The url module
+%D might be replaced by the one in \CONTEXT. When we need mbox, a suitable variant
+%D will be provided.
+
+%D Currently we preload the related \LUA\ code in \LUATEX, but that might change at
+%D some point. We're prepared for that.
+
+\registerctxluafile{util-soc-imp-reset} {}
+
+\registerctxluafile{util-soc-imp-socket} {}
+%registerctxluafile{util-soc-imp-copas} {}
+\registerctxluafile{util-soc-imp-ltn12} {}
+%registerctxluafile{util-soc-imp-mbox} {}
+\registerctxluafile{util-soc-imp-mime} {}
+\registerctxluafile{util-soc-imp-url} {}
+\registerctxluafile{util-soc-imp-headers}{}
+\registerctxluafile{util-soc-imp-http} {}
+\registerctxluafile{util-soc-imp-tp} {}
+%registerctxluafile{util-soc-imp-ftp} {}
+%registerctxluafile{util-soc-imp-smtp} {}
+
+\endinput
diff --git a/tex/context/base/mkiv/mlib-pps.lua b/tex/context/base/mkiv/mlib-pps.lua
index 05c29dad3..0c52aa0b9 100644
--- a/tex/context/base/mkiv/mlib-pps.lua
+++ b/tex/context/base/mkiv/mlib-pps.lua
@@ -101,9 +101,10 @@ local f_f3 = formatters["%.3F"]
local f_gray = formatters["%.3F g %.3F G"]
local f_rgb = formatters["%.3F %.3F %.3F rg %.3F %.3F %.3F RG"]
local f_cmyk = formatters["%.3F %.3F %.3F %.3F k %.3F %.3F %.3F %.3F K"]
-local f_cm_b = formatters["q %F %F %F %F %F %F cm"]
-local f_shade = formatters["MpSh%s"]
+local f_cm_b = formatters["q %.6F %.6F %.6F %.6F %.6F %.6F cm"]
+local f_shade = formatters["MpSh%s"]
+local f_spot = formatters["/%s cs /%s CS %s SCN %s scn"]
local s_cm_e = "Q"
directives.register("metapost.stripzeros",function()
@@ -112,12 +113,9 @@ directives.register("metapost.stripzeros",function()
f_gray = formatters["%.3N g %.3N G"]
f_rgb = formatters["%.3N %.3N %.3N rg %.3N %.3N %.3N RG"]
f_cmyk = formatters["%.3N %.3N %.3N %.3N k %.3N %.3N %.3N %.3N K"]
- f_cm_b = formatters["q %N %N %N %N %N %N cm"]
- f_shade = formatters["MpSh%s"]
+ f_cm_b = formatters["q %.6N %.6N %.6N %.6N %.6N %.6N cm"]
end)
-local f_spot = formatters["/%s cs /%s CS %s SCN %s scn"]
-
local function checked_color_pair(color,...)
if not color then
return innercolor, outercolor
diff --git a/tex/context/base/mkiv/mult-prm.lua b/tex/context/base/mkiv/mult-prm.lua
index dd19e40b8..77c53beb9 100644
--- a/tex/context/base/mkiv/mult-prm.lua
+++ b/tex/context/base/mkiv/mult-prm.lua
@@ -251,6 +251,7 @@ return {
"expandglyphsinfont",
"explicitdiscretionary",
"explicithyphenpenalty",
+ "fixupboxesmode",
"fontid",
"formatname",
"gleaders",
@@ -448,6 +449,7 @@ return {
"pdfnormaldeviate",
"pdfobj",
"pdfobjcompresslevel",
+ "pdfomitcharset",
"pdfomitcidset",
"pdfoutline",
"pdfoutput",
diff --git a/tex/context/base/mkiv/node-fin.lua b/tex/context/base/mkiv/node-fin.lua
index 3139263ab..3e7a4cd1b 100644
--- a/tex/context/base/mkiv/node-fin.lua
+++ b/tex/context/base/mkiv/node-fin.lua
@@ -16,10 +16,8 @@ local next, type, format = next, type, string.format
local attributes, nodes, node = attributes, nodes, node
local nuts = nodes.nuts
-local tonut = nuts.tonut
local getnext = nuts.getnext
-local getprev = nuts.getprev
local getid = nuts.getid
local getlist = nuts.getlist
local getleader = nuts.getleader
@@ -605,7 +603,6 @@ local function stacked(attribute,head,default) -- no triggering, no inheritance,
elseif id == rule_code then
check = getwidth(stack) ~= 0
end
-
if check then
local a = getattr(stack,attribute)
if a then
@@ -698,7 +695,7 @@ local function stacker(attribute,head,default) -- no triggering, no inheritance,
end
local n = nsstep(a)
if n then
- head = insert_node_before(head,current,tonut(n)) -- a
+ head = insert_node_before(head,current,n) -- a
end
attrib = a
if leader then
@@ -717,7 +714,7 @@ local function stacker(attribute,head,default) -- no triggering, no inheritance,
if stacked then
local n = nsend()
while n do
- head = insert_node_after(head,previous,tonut(n))
+ head = insert_node_after(head,previous,n)
n = nsend()
end
end
@@ -780,7 +777,7 @@ end
-- end
-- local n = nsstep(a)
-- if n then
--- head = insert_node_before(head,current,tonut(n)) -- a
+-- head = insert_node_before(head,current,n) -- a
-- end
-- attrib = a
-- if leader then
@@ -800,7 +797,7 @@ end
-- if stacked then
-- local n = nsend()
-- while n do
--- head = insert_node_after(head,previous,tonut(n))
+-- head = insert_node_after(head,previous,n)
-- n = nsend()
-- end
-- end
diff --git a/tex/context/base/mkiv/node-ref.lua b/tex/context/base/mkiv/node-ref.lua
index 27d209701..da72337b8 100644
--- a/tex/context/base/mkiv/node-ref.lua
+++ b/tex/context/base/mkiv/node-ref.lua
@@ -444,6 +444,108 @@ local function inject_areas(head,attribute,make,stack,done,skip,parent,pardir,tx
return head, pardir, txtdir
end
+-- -- not faster either:
+--
+-- local findattr = node.direct.find_attribute
+--
+-- local function inject_areas(head,attribute,make,stack,done,skip,parent,pardir,txtdir) -- main
+-- local first, last, firstdir, reference
+-- if not pardir then
+-- pardir = "==="
+-- end
+-- if not texdir then
+-- txtdir = "==="
+-- end
+-- local someatt = findattr(head,attribute)
+-- if someatt then
+-- local current = head
+-- while current do
+-- local id = getid(current)
+-- if id == hlist_code or id == vlist_code then
+-- local r = getattr(current,attribute)
+-- -- test \goto{test}[page(2)] test \gotobox{test}[page(2)]
+-- -- test \goto{\TeX}[page(2)] test \gotobox{\hbox {x} \hbox {x}}[page(2)]
+-- -- if r and (not skip or r >) skip then -- maybe no > test
+-- -- inject_list(id,current,r,make,stack,pardir,txtdir)
+-- -- end
+-- if r then
+-- if not reference then
+-- reference, first, last, firstdir = r, current, current, txtdir
+-- elseif r == reference then
+-- -- same link
+-- last = current
+-- elseif (done[reference] or 0) == 0 then
+-- if not skip or r > skip then -- maybe no > test
+-- head, current = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
+-- reference, first, last, firstdir = nil, nil, nil, nil
+-- end
+-- else
+-- reference, first, last, firstdir = r, current, current, txtdir
+-- end
+-- done[r] = (done[r] or 0) + 1
+-- end
+-- local list = getlist(current)
+-- if list then
+-- local h
+-- h, pardir, txtdir = inject_areas(list,attribute,make,stack,done,r or skip or 0,current,pardir,txtdir)
+-- if h ~= current then
+-- setlist(current,h)
+-- end
+-- end
+-- if r then
+-- done[r] = done[r] - 1
+-- end
+-- elseif id == dir_code then
+-- txtdir = getdir(current)
+-- elseif id == localpar_code then -- only test at begin
+-- pardir = getdir(current)
+-- elseif id == glue_code and getsubtype(current) == leftskip_code then -- any glue at the left?
+-- --
+-- else
+-- local r = getattr(current,attribute)
+-- if not r then
+-- -- just go on, can be kerns
+-- elseif not reference then
+-- reference, first, last, firstdir = r, current, current, txtdir
+-- elseif r == reference then
+-- last = current
+-- elseif (done[reference] or 0) == 0 then -- or id == glue_code and getsubtype(current) == right_skip_code
+-- if not skip or r > skip then -- maybe no > test
+-- head, current = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
+-- reference, first, last, firstdir = nil, nil, nil, nil
+-- end
+-- else
+-- reference, first, last, firstdir = r, current, current, txtdir
+-- end
+-- end
+-- current = getnext(current)
+-- end
+-- if reference and (done[reference] or 0) == 0 then
+-- head = inject_range(head,first,last,reference,make,stack,parent,pardir,firstdir)
+-- end
+-- else
+-- local current = head
+-- while current do
+-- local id = getid(current)
+-- if id == hlist_code or id == vlist_code then
+-- local list = getlist(current)
+-- if list then
+-- local h = inject_areas(list,attribute,make,stack,done,skip or 0,current,pardir,txtdir)
+-- if h ~= current then
+-- setlist(current,h)
+-- end
+-- end
+-- elseif id == dir_code then
+-- txtdir = getdir(current)
+-- elseif id == localpar_code then -- only test at begin
+-- pardir = getdir(current)
+-- end
+-- current = getnext(current)
+-- end
+-- end
+-- return head, pardir, txtdir
+-- end
+
-- -- maybe first check for glyphs and use a goto:
--
-- local function inject_areas(head,attribute,make,stack,done,skip,parent,pardir,txtdir) -- main
diff --git a/tex/context/base/mkiv/node-res.lua b/tex/context/base/mkiv/node-res.lua
index 39d47f647..b591cafdf 100644
--- a/tex/context/base/mkiv/node-res.lua
+++ b/tex/context/base/mkiv/node-res.lua
@@ -428,7 +428,8 @@ end
function nutpool.latelua(code)
local n = copy_nut(latelua)
- setfield(n,"string",code)
+ -- setfield(n,"string",code)
+ setdata(n,code)
return n
end
diff --git a/tex/context/base/mkiv/status-files.pdf b/tex/context/base/mkiv/status-files.pdf
index afcce6971..e95010141 100644
--- a/tex/context/base/mkiv/status-files.pdf
+++ b/tex/context/base/mkiv/status-files.pdf
Binary files differ
diff --git a/tex/context/base/mkiv/status-lua.pdf b/tex/context/base/mkiv/status-lua.pdf
index 2eb9c7104..5e0c029e7 100644
--- a/tex/context/base/mkiv/status-lua.pdf
+++ b/tex/context/base/mkiv/status-lua.pdf
Binary files differ
diff --git a/tex/context/base/mkiv/strc-con.mkvi b/tex/context/base/mkiv/strc-con.mkvi
index a5360f52a..ed7e401f0 100644
--- a/tex/context/base/mkiv/strc-con.mkvi
+++ b/tex/context/base/mkiv/strc-con.mkvi
@@ -1093,7 +1093,7 @@
\endgroup
\edef\noexpand\currentconstructionlistentry {\the\scratchcounter}%
\edef\noexpand\currentconstructionattribute {\the\lastdestinationattribute}%
- \edef\noexpand\currentconstructionsynchronize{\ctxlatecommand{enhancelist(\the\scratchcounter)}}%
+ \edef\noexpand\currentconstructionsynchronize{\clf_deferredenhancelist\scratchcounter}%
}%
\fi}
@@ -1103,11 +1103,11 @@
\def\reinstateconstructionnumberentry#1% was xdef
{\edef\currentconstructionattribute {\clf_getinternallistreference#1}%
- \edef\currentconstructionsynchronize{\ctxlatecommand{enhancelist(#1)}}}
+ \edef\currentconstructionsynchronize{\clf_deferredenhancelist#1}}
\def\reinstatecachedconstructionnumberentry#1% was xdef | #1 = cached index can be different from real
{\edef\currentconstructionattribute {\clf_getinternalcachedlistreference#1}% destination
- \edef\currentconstructionsynchronize{\ctxlatecommand{enhancelist(#1)}}}
+ \edef\currentconstructionsynchronize{\clf_deferredenhancelist#1}}
\installstructurelistprocessor{construction}{\usestructurelistprocessor{number+title}}
diff --git a/tex/context/base/mkiv/strc-lst.lua b/tex/context/base/mkiv/strc-lst.lua
index 13bdf7786..1d2a8bb39 100644
--- a/tex/context/base/mkiv/strc-lst.lua
+++ b/tex/context/base/mkiv/strc-lst.lua
@@ -35,6 +35,8 @@ local commands = commands
local implement = interfaces.implement
local conditionals = tex.conditionals
+local ctx_latelua = context.latelua
+
local structures = structures
local lists = structures.lists
local sections = structures.sections
@@ -307,7 +309,7 @@ local synchronizepage = function(r) -- bah ... will move
return synchronizepage(r)
end
-function lists.enhance(n)
+local function enhancelist(n)
local l = cached[n]
if not l then
report_lists("enhancing %a, unknown internal",n)
@@ -348,6 +350,8 @@ function lists.enhance(n)
end
end
+lists.enhance = enhancelist
+
-- we can use level instead but we can also decide to remove level from the metadata
local nesting = { }
@@ -1071,8 +1075,16 @@ implement {
implement {
name = "enhancelist",
- actions = lists.enhance,
- arguments = "integer"
+ arguments = "integer",
+ actions = enhancelist,
+}
+
+implement {
+ name = "deferredenhancelist",
+ arguments = "integer",
+ actions = function(n)
+ ctx_latelua(function() enhancelist(n) end)
+ end,
}
implement {
diff --git a/tex/context/base/mkiv/strc-lst.mkvi b/tex/context/base/mkiv/strc-lst.mkvi
index c450e1884..5f6512b82 100644
--- a/tex/context/base/mkiv/strc-lst.mkvi
+++ b/tex/context/base/mkiv/strc-lst.mkvi
@@ -145,8 +145,9 @@
{\endgroup}
% \unexpanded
+
\def\strc_lists_inject_enhance#listindex#internal%
- {\normalexpanded{\ctxlatecommand{enhancelist(\number#listindex)}}}
+ {\normalexpanded{\clf_deferredenhancelist#listindex}}
\unexpanded\def\strc_lists_inject_yes[#settings][#userdata]% can be used directly
{\setupcurrentlist[\c!type=userdata,\c!location=\v!none,#settings]% grouped (use \let...
@@ -168,7 +169,7 @@
userdata {\detokenize\expandafter{\normalexpanded{#userdata}}}
\relax
\edef\currentlistnumber{\the\scratchcounter}%
-\setxvalue{\??listlocations\currentlist}{\the\locationcount}%
+ \setxvalue{\??listlocations\currentlist}{\the\locationcount}%
\ifx\p_location\v!here
% this branch injects nodes !
\strc_lists_inject_enhance{\currentlistnumber}{\the\locationcount}%
diff --git a/tex/context/base/mkiv/strc-ref.lua b/tex/context/base/mkiv/strc-ref.lua
index 2c9765a44..951c9a44a 100644
--- a/tex/context/base/mkiv/strc-ref.lua
+++ b/tex/context/base/mkiv/strc-ref.lua
@@ -52,6 +52,8 @@ local context = context
local commands = commands
local implement = interfaces.implement
+local ctx_latelua = context.latelua
+
local texgetcount = tex.getcount
local texsetcount = tex.setcount
local texconditionals = tex.conditionals
@@ -436,17 +438,27 @@ end
references.synchronizepage = synchronizepage
-function references.enhance(prefix,tag)
+local function enhancereference(prefix,tag)
local l = tobesaved[prefix][tag]
if l then
synchronizepage(l.references)
end
end
+references.enhance = enhancereference
+
+-- implement {
+-- name = "enhancereference",
+-- arguments = "2 strings",
+-- actions = references.enhance,
+-- }
+
implement {
- name = "enhancereference",
- actions = references.enhance,
+ name = "deferredenhancereference",
arguments = "2 strings",
+ actions = function(prefix,tag)
+ ctx_latelua(function() enhancereference(prefix,tag) end)
+ end,
}
-- -- -- related to strc-ini.lua -- -- --
diff --git a/tex/context/base/mkiv/strc-ref.mkvi b/tex/context/base/mkiv/strc-ref.mkvi
index 8b887754c..0ae2cfccc 100644
--- a/tex/context/base/mkiv/strc-ref.mkvi
+++ b/tex/context/base/mkiv/strc-ref.mkvi
@@ -128,7 +128,7 @@
\newcount\lastdestinationattribute
\def\strc_references_finish#prefix#reference#internal%
- {\normalexpanded{\ctxlatecommand{enhancereference("#prefix","#reference")}}}
+ {\normalexpanded{\clf_deferredenhancereference{#prefix}{#reference}}}
\let\dofinishreference\strc_references_finish % used at lua end
diff --git a/tex/context/base/mkiv/strc-reg.lua b/tex/context/base/mkiv/strc-reg.lua
index 919290c8f..61e13e7e4 100644
--- a/tex/context/base/mkiv/strc-reg.lua
+++ b/tex/context/base/mkiv/strc-reg.lua
@@ -48,6 +48,7 @@ local v_last = variables.last
local v_text = variables.text
local context = context
+local ctx_latelua = context.latelua
local implement = interfaces.implement
@@ -564,9 +565,7 @@ local function storeregister(rawdata) -- metadata, references, entries
return #entries
end
-registers.store = storeregister
-
-function registers.enhance(name,n)
+local function enhanceregister(name,n)
local data = tobesaved[name].metadata.notsaved and collected[name] or tobesaved[name]
local entry = data.entries[n]
if entry then
@@ -574,7 +573,7 @@ function registers.enhance(name,n)
end
end
-function registers.extend(name,tag,rawdata) -- maybe do lastsection internally
+local function extendregister(name,tag,rawdata) -- maybe do lastsection internally
if type(tag) == "string" then
tag = tagged[tag]
end
@@ -618,6 +617,10 @@ function registers.extend(name,tag,rawdata) -- maybe do lastsection internally
end
end
+registers.store = storeregister
+registers.enhance = enhanceregister
+registers.extend = extendregister
+
function registers.get(tag,n)
local list = tobesaved[tag]
return list and list.entries[n]
@@ -625,13 +628,21 @@ end
implement {
name = "enhanceregister",
- actions = registers.enhance,
arguments = { "string", "integer" },
+ actions = enhanceregister,
+}
+
+implement {
+ name = "deferredenhanceregister",
+ arguments = { "string", "integer" },
+ actions = function(name,n)
+ ctx_latelua(function() enhanceregister(name,n) end)
+ end,
}
implement {
name = "extendregister",
- actions = registers.extend,
+ actions = extendregister,
arguments = "2 strings",
}
diff --git a/tex/context/base/mkiv/strc-reg.mkiv b/tex/context/base/mkiv/strc-reg.mkiv
index 3f8331745..1b77f135f 100644
--- a/tex/context/base/mkiv/strc-reg.mkiv
+++ b/tex/context/base/mkiv/strc-reg.mkiv
@@ -305,7 +305,7 @@
\ifx\currentregisterownnumber\v!yes
\glet\currentregistersynchronize\relax
\else
- \xdef\currentregistersynchronize{\ctxlatecommand{enhanceregister("\currentregister",\currentregisternumber)}}%
+ \xdef\currentregistersynchronize{\clf_deferredenhanceregister{\currentregister}\currentregisternumber}%
\fi
\currentregistersynchronize % here?
% needs thinking ... bla\index{bla}. will break before the . but adding a
@@ -341,7 +341,7 @@
% internal \locationcount
% view {\interactionparameter\c!focus}%
\relax % this will change
- \xdef\currentregistersynchronize{\ctxlatecommand{enhanceregister("\currentregister",\currentregisternumber)}}%
+ \xdef\currentregistersynchronize{\clf_deferredenhanceregister{\currentregister}\currentregisternumber}%
\currentregistersynchronize % here?
\dostarttagged\t!registerlocation\currentregister
\attribute\destinationattribute\lastdestinationattribute \signalcharacter % no \strut as it will be removed during cleanup
diff --git a/tex/context/base/mkiv/syst-ini.mkiv b/tex/context/base/mkiv/syst-ini.mkiv
index 1ca2bf1ac..b62dbfd7f 100644
--- a/tex/context/base/mkiv/syst-ini.mkiv
+++ b/tex/context/base/mkiv/syst-ini.mkiv
@@ -1276,5 +1276,6 @@
\ifdefined\breakafterdirmode \else \newcount\breakafterdirmode \fi
\ifdefined\exceptionpenalty \else \newcount\exceptionpenalty \fi
\ifdefined\luacopyinputnodes \else \newcount\luacopyinputnodes \fi
+\ifdefined\fixupboxesmode \else \newcount\fixupboxesmode \fi
\protect \endinput
diff --git a/tex/context/base/mkiv/util-deb.lua b/tex/context/base/mkiv/util-deb.lua
index b8db0c583..9488a728b 100644
--- a/tex/context/base/mkiv/util-deb.lua
+++ b/tex/context/base/mkiv/util-deb.lua
@@ -98,7 +98,7 @@ end
setmetatableindex(names,function(t,name)
local v = setmetatableindex(function(t,source)
local v = setmetatableindex(function(t,line)
- local v = { total = 0, count = 0 }
+ local v = { total = 0, count = 0, nesting = 0 }
t[line] = v
return v
end)
@@ -128,12 +128,24 @@ local function hook(where)
end
local data = names[name][source][line]
if where == "call" then
- data.count = data.count + 1
- insert(data,ticks())
+ local nesting = data.nesting
+ if nesting == 0 then
+ data.count = data.count + 1
+ insert(data,ticks())
+ data.nesting = 1
+ else
+ data.nesting = nesting + 1
+ end
elseif where == "return" then
- local t = remove(data)
- if t then
- data.total = data.total + ticks() - t
+ local nesting = data.nesting
+ if nesting == 1 then
+ local t = remove(data)
+ if t then
+ data.total = data.total + ticks() - t
+ end
+ data.nesting = 0
+ else
+ data.nesting = nesting - 1
end
end
end
diff --git a/tex/context/base/mkiv/util-soc-imp-copas.lua b/tex/context/base/mkiv/util-soc-imp-copas.lua
new file mode 100644
index 000000000..8e2278eb2
--- /dev/null
+++ b/tex/context/base/mkiv/util-soc-imp-copas.lua
@@ -0,0 +1,930 @@
+-- original file : copas.lua
+-- for more info : see util-soc.lua
+
+local socket = socket or require("socket")
+local ssl = ssl or nil -- only loaded upon demand
+
+local WATCH_DOG_TIMEOUT = 120
+local UDP_DATAGRAM_MAX = 8192
+
+local type, next, pcall, getmetatable, tostring = type, next, pcall, getmetatable, tostring
+local min, max, random = math.min, math.max, math.random
+local find, format = string.find, string.format
+local insert, remove = table.insert, table.remove
+
+local gettime = socket.gettime
+local selectsocket = socket.select
+
+local createcoroutine = coroutine.create
+local resumecoroutine = coroutine.resume
+local yieldcoroutine = coroutine.yield
+local runningcoroutine = coroutine.running
+
+-- Redefines LuaSocket functions with coroutine safe versions (this allows the use
+-- of socket.http from within copas).
+
+-- Meta information is public even if beginning with an "_"
+
+local report = logs and logs.reporter("copas") or function(fmt,first,...)
+ if fmt then
+ fmt = "copas: " .. fmt
+ if first then
+ print(format(fmt,first,...))
+ else
+ print(fmt)
+ end
+ end
+end
+
+local copas = {
+
+ _COPYRIGHT = "Copyright (C) 2005-2016 Kepler Project",
+ _DESCRIPTION = "Coroutine Oriented Portable Asynchronous Services",
+ _VERSION = "Copas 2.0.1",
+
+ autoclose = true,
+ running = false,
+
+ report = report,
+
+}
+
+local function statushandler(status, ...)
+ if status then
+ return ...
+ end
+ local err = (...)
+ if type(err) == "table" then
+ err = err[1]
+ end
+ report("error: %s",tostring(err))
+ return nil, err
+end
+
+function socket.protect(func)
+ return function(...)
+ return statushandler(pcall(func,...))
+ end
+end
+
+function socket.newtry(finalizer)
+ return function (...)
+ local status = (...)
+ if not status then
+ local detail = select(2,...)
+ pcall(finalizer,detail)
+ report("error: %s",tostring(detail))
+ return
+ end
+ return ...
+ end
+end
+
+-- A simple set implementation based on LuaSocket's tinyirc.lua example; it also
+-- adds a FIFO queue for each value in the set.
+
+local function newset()
+ local reverse = { }
+ local set = { }
+ local queue = { }
+ setmetatable(set, {
+ __index = {
+ insert =
+ function(set, value)
+ if not reverse[value] then
+ local n = #set +1
+ set[n] = value
+ reverse[value] = n
+ end
+ end,
+ remove =
+ function(set, value)
+ local index = reverse[value]
+ if index then
+ reverse[value] = nil
+ local n = #set
+ local top = set[n]
+ set[n] = nil
+ if top ~= value then
+ reverse[top] = index
+ set[index] = top
+ end
+ end
+ end,
+ push =
+ function (set, key, itm)
+ local entry = queue[key]
+ if entry == nil then -- hm can it be false then?
+ queue[key] = { itm }
+ else
+ entry[#entry + 1] = itm
+ end
+ end,
+ pop =
+ function (set, key)
+ local top = queue[key]
+ if top ~= nil then
+ local ret = remove(top,1)
+ if top[1] == nil then
+ queue[key] = nil
+ end
+ return ret
+ end
+ end
+ }
+ } )
+ return set
+end
+
+local _sleeping = {
+ times = { }, -- list with wake-up times
+ cos = { }, -- list with coroutines, index matches the 'times' list
+ lethargy = { }, -- list of coroutines sleeping without a wakeup time
+
+ insert =
+ function()
+ end,
+ remove =
+ function()
+ end,
+ push =
+ function(self, sleeptime, co)
+ if not co then
+ return
+ end
+ if sleeptime < 0 then
+ --sleep until explicit wakeup through copas.wakeup
+ self.lethargy[co] = true
+ return
+ else
+ sleeptime = gettime() + sleeptime
+ end
+ local t = self.times
+ local c = self.cos
+ local i = 1
+ local n = #t
+ while i <= n and t[i] <= sleeptime do
+ i = i + 1
+ end
+ insert(t,i,sleeptime)
+ insert(c,i,co)
+ end,
+ getnext =
+ -- returns delay until next sleep expires, or nil if there is none
+ function(self)
+ local t = self.times
+ local delay = t[1] and t[1] - gettime() or nil
+ return delay and max(delay, 0) or nil
+ end,
+ pop =
+ -- find the thread that should wake up to the time
+ function(self, time)
+ local t = self.times
+ local c = self.cos
+ if #t == 0 or time < t[1] then
+ return
+ end
+ local co = c[1]
+ remove(t,1)
+ remove(c,1)
+ return co
+ end,
+ wakeup =
+ function(self, co)
+ local let = self.lethargy
+ if let[co] then
+ self:push(0, co)
+ let[co] = nil
+ else
+ local c = self.cos
+ local t = self.times
+ for i=1,#c do
+ if c[i] == co then
+ remove(c,i)
+ remove(t,i)
+ self:push(0, co)
+ return
+ end
+ end
+ end
+ end
+}
+
+local _servers = newset() -- servers being handled
+local _reading = newset() -- sockets currently being read
+local _writing = newset() -- sockets currently being written
+
+local _reading_log = { }
+local _writing_log = { }
+
+local _is_timeout = { -- set of errors indicating a timeout
+ timeout = true, -- default LuaSocket timeout
+ wantread = true, -- LuaSec specific timeout
+ wantwrite = true, -- LuaSec specific timeout
+}
+
+-- Coroutine based socket I/O functions.
+
+local function isTCP(socket)
+ return not find(tostring(socket),"^udp")
+end
+
+-- Reads a pattern from a client and yields to the reading set on timeouts. UDP: a
+-- UDP socket expects the second argument to be a number, so it MUST be provided,
+-- as the 'pattern' below defaults to a string; omitting it will throw a 'bad
+-- argument' error.
+
+local function copasreceive(client, pattern, part)
+ if not pattern or pattern == "" then
+ pattern = "*l"
+ end
+ local current_log = _reading_log
+ local s, err
+ repeat
+ s, err, part = client:receive(pattern, part)
+ if s or (not _is_timeout[err]) then
+ current_log[client] = nil
+ return s, err, part
+ end
+ if err == "wantwrite" then
+ current_log = _writing_log
+ current_log[client] = gettime()
+ yieldcoroutine(client, _writing)
+ else
+ current_log = _reading_log
+ current_log[client] = gettime()
+ yieldcoroutine(client, _reading)
+ end
+ until false
+end
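+
+-- For instance (illustrative only, udpclient being some already created UDP
+-- socket):
+--
+--   local datagram = copasreceive(udpclient,UDP_DATAGRAM_MAX)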
+
+-- Receives data from a client over UDP. Not available for TCP. (This is a copy of
+-- the receive() method, adapted for receivefrom() use.)
+
+local function copasreceivefrom(client, size)
+ local s, err, port
+ if not size or size == 0 then
+ size = UDP_DATAGRAM_MAX
+ end
+ repeat
+ -- upon success err holds ip address
+ s, err, port = client:receivefrom(size)
+ if s or err ~= "timeout" then
+ _reading_log[client] = nil
+ return s, err, port
+ end
+ _reading_log[client] = gettime()
+ yieldcoroutine(client, _reading)
+ until false
+end
+
+-- Same as above but with special treatment when reading chunks, unblocks on any
+-- data received.
+
+local function copasreceivepartial(client, pattern, part)
+ if not pattern or pattern == "" then
+ pattern = "*l"
+ end
+ local logger = _reading_log
+ local queue = _reading
+ local s, err
+ repeat
+ s, err, part = client:receive(pattern, part)
+ if s or (type(pattern) == "number" and part ~= "" and part) or not _is_timeout[err] then
+ logger[client] = nil
+ return s, err, part
+ end
+ if err == "wantwrite" then
+ logger = _writing_log
+ queue = _writing
+ else
+ logger = _reading_log
+ queue = _reading
+ end
+ logger[client] = gettime()
+ yieldcoroutine(client, queue)
+ until false
+end
+
+-- Sends data to a client. The operation is buffered and yields to the writing set
+-- on timeouts. Note: the from and to parameters are ignored for UDP sockets.
+
+local function copassend(client, data, from, to)
+ if not from then
+ from = 1
+ end
+ local lastIndex = from - 1
+ local logger = _writing_log
+ local queue = _writing
+ local s, err
+ repeat
+ s, err, lastIndex = client:send(data, lastIndex + 1, to)
+        -- Adds an extra coroutine swap and guarantees that high throughput doesn't
+        -- starve other threads.
+ if random(100) > 90 then
+ logger[client] = gettime()
+ yieldcoroutine(client, queue)
+ end
+ if s or not _is_timeout[err] then
+ logger[client] = nil
+ return s, err,lastIndex
+ end
+ if err == "wantread" then
+ logger = _reading_log
+ queue = _reading
+ else
+ logger = _writing_log
+ queue = _writing
+ end
+ logger[client] = gettime()
+ yieldcoroutine(client, queue)
+ until false
+end
+
+-- Sends data to a client over UDP. Not available for TCP. (This is a copy of the
+-- send() method, adapted for sendto() use.)
+
+local function copassendto(client, data, ip, port)
+ repeat
+ local s, err = client:sendto(data, ip, port)
+        -- Adds an extra coroutine swap and guarantees that high throughput doesn't
+        -- starve other threads.
+ if random(100) > 90 then
+ _writing_log[client] = gettime()
+ yieldcoroutine(client, _writing)
+ end
+ if s or err ~= "timeout" then
+ _writing_log[client] = nil
+ return s, err
+ end
+ _writing_log[client] = gettime()
+ yieldcoroutine(client, _writing)
+ until false
+end
+
+-- Waits until connection is completed.
+
+local function copasconnect(skt, host, port)
+ skt:settimeout(0)
+ local ret, err, tried_more_than_once
+ repeat
+ ret, err = skt:connect (host, port)
+ -- A non-blocking connect on Windows results in error "Operation already in
+ -- progress" to indicate that it is completing the request async. So
+ -- essentially it is the same as "timeout".
+ if ret or (err ~= "timeout" and err ~= "Operation already in progress") then
+ -- Once the async connect completes, Windows returns the error "already
+ -- connected" to indicate it is done, so that error should be ignored.
+ -- Except when it is the first call to connect, then it was already
+ -- connected to something else and the error should be returned.
+ if not ret and err == "already connected" and tried_more_than_once then
+ ret = 1
+ err = nil
+ end
+ _writing_log[skt] = nil
+ return ret, err
+ end
+ tried_more_than_once = tried_more_than_once or true
+ _writing_log[skt] = gettime()
+ yieldcoroutine(skt, _writing)
+ until false
+end
+
+-- Performs an (async) ssl handshake on a connected TCP client socket. Replaces all
+-- previous socket references with the returned new ssl wrapped socket. Throws an
+-- error and does not return nil+error, as that might silently fail in the calling
+-- code.
+
+local function copasdohandshake(skt, sslt) -- extra ssl parameters
+ if not ssl then
+ ssl = require("ssl")
+ end
+ if not ssl then
+ report("error: no ssl library")
+ return
+ end
+ local nskt, err = ssl.wrap(skt, sslt)
+ if not nskt then
+ report("error: %s",tostring(err))
+ return
+ end
+ nskt:settimeout(0)
+ local queue
+ repeat
+ local success, err = nskt:dohandshake()
+ if success then
+ return nskt
+ elseif err == "wantwrite" then
+ queue = _writing
+ elseif err == "wantread" then
+ queue = _reading
+ else
+ report("error: %s",tostring(err))
+ return
+ end
+ yieldcoroutine(nskt, queue)
+ until false
+end
+
+-- Flushes a client write buffer.
+
+local function copasflush(client)
+end
+
+-- Public.
+
+copas.connect             = copasconnect
+copas.send = copassend
+copas.sendto = copassendto
+copas.receive = copasreceive
+copas.receivefrom = copasreceivefrom
+copas.copasreceivepartial = copasreceivepartial
+copas.copasreceivePartial = copasreceivepartial
+copas.dohandshake = copasdohandshake
+copas.flush = copasflush
+
+-- Wraps a TCP socket to use Copas methods (send, receive, flush and settimeout).
+
+local function _skt_mt_tostring(self)
+ return tostring(self.socket) .. " (copas wrapped)"
+end
+
+local _skt_mt_tcp_index = {
+ send =
+ function(self, data, from, to)
+ return copassend (self.socket, data, from, to)
+ end,
+ receive =
+ function (self, pattern, prefix)
+ if self.timeout == 0 then
+                return copasreceivepartial(self.socket, pattern, prefix)
+ else
+ return copasreceive(self.socket, pattern, prefix)
+ end
+ end,
+
+ flush =
+ function (self)
+ return copasflush(self.socket)
+ end,
+
+ settimeout =
+ function (self, time)
+ self.timeout = time
+ return true
+ end,
+    -- TODO: socket.connect is a shortcut, and must be provided with an alternative.
+    -- If ssl parameters are available, it will also include a handshake.
+ connect =
+ function(self, ...)
+ local res, err = copasconnect(self.socket, ...)
+ if res and self.ssl_params then
+ res, err = self:dohandshake()
+ end
+ return res, err
+ end,
+ close =
+ function(self, ...)
+ return self.socket:close(...)
+ end,
+ -- TODO: socket.bind is a shortcut, and must be provided with an alternative
+ bind =
+ function(self, ...)
+ return self.socket:bind(...)
+ end,
+ -- TODO: is this DNS related? hence blocking?
+ getsockname =
+ function(self, ...)
+ return self.socket:getsockname(...)
+ end,
+ getstats =
+ function(self, ...)
+ return self.socket:getstats(...)
+ end,
+ setstats =
+ function(self, ...)
+ return self.socket:setstats(...)
+ end,
+ listen =
+ function(self, ...)
+ return self.socket:listen(...)
+ end,
+ accept =
+ function(self, ...)
+ return self.socket:accept(...)
+ end,
+ setoption =
+ function(self, ...)
+ return self.socket:setoption(...)
+ end,
+ -- TODO: is this DNS related? hence blocking?
+ getpeername =
+ function(self, ...)
+ return self.socket:getpeername(...)
+ end,
+ shutdown =
+ function(self, ...)
+ return self.socket:shutdown(...)
+ end,
+ dohandshake =
+ function(self, sslt)
+ self.ssl_params = sslt or self.ssl_params
+ local nskt, err = copasdohandshake(self.socket, self.ssl_params)
+ if not nskt then
+ return nskt, err
+ end
+ self.socket = nskt
+ return self
+ end,
+}
+
+local _skt_mt_tcp = {
+ __tostring = _skt_mt_tostring,
+ __index = _skt_mt_tcp_index,
+}
+
+-- Wraps a UDP socket; a copy of the TCP one adapted for UDP.
+
+local _skt_mt_udp_index = {
+ -- UDP sending is non-blocking, but we provide starvation prevention, so replace
+ -- anyway.
+ sendto =
+ function (self, ...)
+ return copassendto(self.socket,...)
+ end,
+ receive =
+ function (self, size)
+ return copasreceive(self.socket, size or UDP_DATAGRAM_MAX)
+ end,
+ receivefrom =
+ function (self, size)
+ return copasreceivefrom(self.socket, size or UDP_DATAGRAM_MAX)
+ end,
+ -- TODO: is this DNS related? hence blocking?
+ setpeername =
+ function(self, ...)
+ return self.socket:getpeername(...)
+ end,
+ setsockname =
+ function(self, ...)
+ return self.socket:setsockname(...)
+ end,
+ -- do not close client, as it is also the server for udp.
+ close =
+ function(self, ...)
+ return true
+ end
+}
+
+local _skt_mt_udp = {
+ __tostring = _skt_mt_tostring,
+ __index = _skt_mt_udp_index,
+}
+
+for k, v in next, _skt_mt_tcp_index do
+ if not _skt_mt_udp_index[k] then
+ _skt_mt_udp_index[k] = v
+ end
+end
+
+-- Wraps a LuaSocket socket object in an async Copas based socket object.
+
+-- @param skt the socket to wrap
+-- @param sslt (optional) table with ssl parameters; use an empty table to use ssl with defaults
+-- @return wrapped socket object
+
+local function wrap(skt, sslt)
+ if getmetatable(skt) == _skt_mt_tcp or getmetatable(skt) == _skt_mt_udp then
+ return skt -- already wrapped
+ end
+ skt:settimeout(0)
+ if isTCP(skt) then
+ return setmetatable ({ socket = skt, ssl_params = sslt }, _skt_mt_tcp)
+ else
+ return setmetatable ({ socket = skt }, _skt_mt_udp)
+ end
+end
+
+copas.wrap = wrap
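+
+-- A (commented) usage sketch; the host and request below are made up:
+--
+-- copas.addthread(function()
+--     local skt = wrap(socket.tcp())
+--     skt:connect("example.org",80)
+--     skt:send("HEAD / HTTP/1.0\r\n\r\n")
+--     print(skt:receive("*l")) -- the status line
+--     skt:close()
+-- end)
+--
+-- copas.loop()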
+
+-- Wraps a handler in a function that deals with wrapping the socket and doing
+-- the optional ssl handshake.
+
+function copas.handler(handler, sslparams)
+ return function (skt,...)
+ skt = wrap(skt)
+ if sslparams then
+ skt:dohandshake(sslparams)
+ end
+ return handler(skt,...)
+ end
+end
+
+-- Error handling (a handler per coroutine).
+
+local _errhandlers = { }
+
+function copas.setErrorHandler(err)
+ local co = runningcoroutine()
+ if co then
+ _errhandlers[co] = err
+ end
+end
+
+local function _deferror (msg, co, skt)
+ report("%s (%s) (%s)", msg, tostring(co), tostring(skt))
+end
+
+-- Thread handling
+
+local function _doTick (co, skt, ...)
+ if not co then
+ return
+ end
+
+ local ok, res, new_q = resumecoroutine(co, skt, ...)
+
+ if ok and res and new_q then
+ new_q:insert(res)
+ new_q:push(res, co)
+ else
+ if not ok then
+ pcall(_errhandlers[co] or _deferror, res, co, skt)
+ end
+ -- Do not auto-close UDP sockets, as the handler socket is also the server socket.
+ if skt and copas.autoclose and isTCP(skt) then
+ skt:close()
+ end
+ _errhandlers[co] = nil
+ end
+end
+
+-- Accepts a connection on socket input.
+
+local function _accept(input, handler)
+ local client = input:accept()
+ if client then
+ client:settimeout(0)
+ local co = createcoroutine(handler)
+ _doTick (co, client)
+ -- _reading:insert(client)
+ end
+ return client
+end
+
+-- Handle threads on a queue.
+
+local function _tickRead(skt)
+ _doTick(_reading:pop(skt), skt)
+end
+
+local function _tickWrite(skt)
+ _doTick(_writing:pop(skt), skt)
+end
+
+-- Adds a server/handler pair to Copas dispatcher.
+
+local function addTCPserver(server, handler, timeout)
+ server:settimeout(timeout or 0)
+ _servers[server] = handler
+ _reading:insert(server)
+end
+
+local function addUDPserver(server, handler, timeout)
+ server:settimeout(timeout or 0)
+ local co = createcoroutine(handler)
+ _reading:insert(server)
+ _doTick(co, server)
+end
+
+function copas.addserver(server, handler, timeout)
+ if isTCP(server) then
+ addTCPserver(server, handler, timeout)
+ else
+ addUDPserver(server, handler, timeout)
+ end
+end
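+
+-- A (commented) sketch of a minimal echo server; the port and handler below are
+-- made up:
+--
+-- local server = socket.bind("localhost",8090)
+--
+-- copas.addserver(server,function(skt)
+--     skt = wrap(skt)
+--     while true do
+--         local line = skt:receive("*l")
+--         if not line then
+--             break
+--         end
+--         skt:send(line .. "\n")
+--     end
+-- end)
+--
+-- copas.loop()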
+
+function copas.removeserver(server, keep_open)
+ local s = server
+ local mt = getmetatable(server)
+ if mt == _skt_mt_tcp or mt == _skt_mt_udp then
+ s = server.socket
+ end
+ _servers[s] = nil
+ _reading:remove(s)
+ if keep_open then
+ return true
+ end
+ return server:close()
+end
+
+-- Adds a new coroutine thread to the Copas dispatcher. It creates a coroutine that
+-- skips the first argument, which is always the socket passed by the scheduler but
+-- `nil` in case of a task/thread.
+
+function copas.addthread(handler, ...)
+ local thread = createcoroutine(function(_, ...) return handler(...) end)
+ _doTick(thread, nil, ...)
+ return thread
+end
+
+-- tasks registering
+
+local _tasks = { }
+
+-- Lets tasks call the default _tick().
+
+local function addtaskRead(tsk)
+ tsk.def_tick = _tickRead
+ _tasks[tsk] = true
+end
+
+-- Lets tasks call the default _tick().
+
+local function addtaskWrite(tsk)
+ tsk.def_tick = _tickWrite
+ _tasks[tsk] = true
+end
+
+local function tasks()
+ return next, _tasks
+end
+
+-- A task to check ready to read events.
+
+local _readable_t = {
+ events =
+ function(self)
+ local i = 0
+ return function ()
+ i = i + 1
+ return self._evs[i]
+ end
+ end,
+ tick =
+ function(self, input)
+ local handler = _servers[input]
+ if handler then
+ input = _accept(input, handler)
+ else
+ _reading:remove(input)
+ self.def_tick(input)
+ end
+ end
+}
+
+addtaskRead(_readable_t)
+
+-- A task to check ready to write events.
+
+local _writable_t = {
+ events =
+ function(self)
+ local i = 0
+ return function()
+ i = i + 1
+ return self._evs[i]
+ end
+ end,
+ tick =
+ function(self, output)
+ _writing:remove(output)
+ self.def_tick(output)
+ end
+}
+
+addtaskWrite(_writable_t)
+
+-- Sleeping threads task.
+
+local _sleeping_t = {
+ tick = function(self, time, ...)
+ _doTick(_sleeping:pop(time), ...)
+ end
+}
+
+-- Yields the current coroutine and wakes it up after 'sleeptime' seconds. If
+-- sleeptime < 0 then it sleeps until explicitly woken up using 'wakeup'.
+function copas.sleep(sleeptime)
+ yieldcoroutine((sleeptime or 0), _sleeping)
+end
+
+-- Wakes up a sleeping coroutine 'co'.
+
+function copas.wakeup(co)
+ _sleeping:wakeup(co)
+end
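+
+-- Illustrative only: a thread can park itself with a negative sleep time and be
+-- woken up later from elsewhere:
+--
+-- local co = copas.addthread(function()
+--     copas.sleep(-1) -- wait for an explicit wakeup
+--     report("woken up")
+-- end)
+--
+-- copas.wakeup(co)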
+
+-- Checks for reads and writes on sockets.
+
+local last_cleansing = 0
+
+local function _select(timeout)
+
+ local now = gettime()
+
+    local r_evs, w_evs, err = selectsocket(_reading, _writing, timeout)
+
+ _readable_t._evs = r_evs
+ _writable_t._evs = w_evs
+
+ if (last_cleansing - now) > WATCH_DOG_TIMEOUT then
+
+ last_cleansing = now
+
+ -- Check all sockets selected for reading, and check how long they have been
+ -- waiting for data already, without select returning them as readable.
+
+ for skt, time in next, _reading_log do
+
+ if not r_evs[skt] and (time - now) > WATCH_DOG_TIMEOUT then
+
+ -- This one timedout while waiting to become readable, so move it in
+ -- the readable list and try and read anyway, despite not having
+ -- been returned by select.
+
+ local n = #r_evs + 1
+ _reading_log[skt] = nil
+ r_evs[n] = skt
+ r_evs[skt] = n
+ end
+ end
+
+ -- Do the same for writing.
+
+ for skt, time in next, _writing_log do
+ if not w_evs[skt] and (time - now) > WATCH_DOG_TIMEOUT then
+ local n = #w_evs + 1
+ _writing_log[skt] = nil
+ w_evs[n] = skt
+ w_evs[skt] = n
+ end
+ end
+
+ end
+
+ if err == "timeout" and #r_evs + #w_evs > 0 then
+ return nil
+ else
+ return err
+ end
+
+end
+
+-- Check whether there is something to do. It returns false if there are no sockets
+-- for read/write nor tasks scheduled (which means Copas is in an empty spin).
+
+local function copasfinished()
+ return not (next(_reading) or next(_writing) or _sleeping:getnext())
+end
+
+-- Dispatcher loop step. It listens to client requests and handles them, returning
+-- false if no data was handled (timeout), true if data was handled, or nil plus an
+-- error message.
+
+local function copasstep(timeout)
+ _sleeping_t:tick(gettime())
+
+ local nextwait = _sleeping:getnext()
+ if nextwait then
+ timeout = timeout and min(nextwait,timeout) or nextwait
+    elseif copasfinished() then
+ return false
+ end
+
+ local err = _select(timeout)
+ if err then
+ if err == "timeout" then
+ return false
+ end
+ return nil, err
+ end
+
+ for task in tasks() do
+ for event in task:events() do
+            task:tick(event)
+ end
+ end
+ return true
+end
+
+copas.finished = copasfinished
+copas.step = copasstep
+
+-- Dispatcher endless loop. It listens to client requests and handles them forever.
+
+function copas.loop(timeout)
+ copas.running = true
+ while not copasfinished() do
+ copasstep(timeout)
+ end
+ copas.running = false
+end
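+
+-- When the caller already runs a main loop of its own, copas.step can be used
+-- instead of copas.loop (illustrative only):
+--
+-- while not copasfinished() do
+--     copasstep(0.1)
+--     -- other periodic work goes here
+-- end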
+
+if logs then
+ _G.copas = copas
+ package.loaded.copas = copas
+ -- report("module (re)installed")
+end
+
+return copas
diff --git a/tex/context/base/mkiv/util-soc-imp-ftp.lua b/tex/context/base/mkiv/util-soc-imp-ftp.lua
new file mode 100644
index 000000000..b9f5f15db
--- /dev/null
+++ b/tex/context/base/mkiv/util-soc-imp-ftp.lua
@@ -0,0 +1,400 @@
+-- original file : ftp.lua
+-- for more info : see util-soc.lua
+
+local setmetatable, type, next = setmetatable, type, next
+local find, format, gsub, match = string.find, string.format, string.gsub, string.match
+local concat = table.concat
+local mod = math.mod
+
+local socket = socket or require("socket")
+local url = socket.url or require("socket.url")
+local tp = socket.tp or require("socket.tp")
+local ltn12 = ltn12 or require("ltn12")
+
+local tcpsocket = socket.tcp
+local trysocket = socket.try
+local skipsocket = socket.skip
+local sinksocket = socket.sink
+local selectsocket = socket.select
+local bindsocket = socket.bind
+local newtrysocket = socket.newtry
+local sourcesocket = socket.source
+local protectsocket = socket.protect
+
+local parseurl = url.parse
+local unescapeurl = url.unescape
+
+local pumpall = ltn12.pump.all
+local pumpstep = ltn12.pump.step
+local sourcestring = ltn12.source.string
+local sinktable = ltn12.sink.table
+
+local ftp = {
+ TIMEOUT = 60,
+ USER = "ftp",
+ PASSWORD = "anonymous@anonymous.org",
+}
+
+socket.ftp = ftp
+
+local PORT = 21
+
+local methods = { }
+local mt = { __index = methods }
+
+function ftp.open(server, port, create)
+ local tp = trysocket(tp.connect(server, port or PORT, ftp.TIMEOUT, create))
+    local f = setmetatable({ tp = tp }, mt)
+ f.try = newtrysocket(function() f:close() end)
+ return f
+end
+
+function methods.portconnect(self)
+ local try = self.try
+ local server = self.server
+ try(server:settimeout(ftp.TIMEOUT))
+ self.data = try(server:accept())
+ try(self.data:settimeout(ftp.TIMEOUT))
+end
+
+function methods.pasvconnect(self)
+ local try = self.try
+ self.data = try(tcpsocket())
+    try(self.data:settimeout(ftp.TIMEOUT))
+    try(self.data:connect(self.pasvt.address, self.pasvt.port))
+end
+
+function methods.login(self, user, password)
+ local try = self.try
+ local tp = self.tp
+ try(tp:command("user", user or ftp.USER))
+ local code, reply = try(tp:check{"2..", 331})
+ if code == 331 then
+ try(tp:command("pass", password or ftp.PASSWORD))
+ try(tp:check("2.."))
+ end
+ return 1
+end
+
+function methods.pasv(self)
+ local try = self.try
+ local tp = self.tp
+ try(tp:command("pasv"))
+ local code, reply = try(self.tp:check("2.."))
+ local pattern = "(%d+)%D(%d+)%D(%d+)%D(%d+)%D(%d+)%D(%d+)"
+ local a, b, c, d, p1, p2 = skipsocket(2, find(reply, pattern))
+ try(a and b and c and d and p1 and p2, reply)
+ local address = format("%d.%d.%d.%d", a, b, c, d)
+ local port = p1*256 + p2
+ local server = self.server
+ self.pasvt = {
+ address = address,
+ port = port,
+ }
+ if server then
+ server:close()
+ self.server = nil
+ end
+ return address, port
+end
+
+function methods.epsv(self)
+ local try = self.try
+ local tp = self.tp
+ try(tp:command("epsv"))
+ local code, reply = try(tp:check("229"))
+ local pattern = "%((.)(.-)%1(.-)%1(.-)%1%)"
+ local d, prt, address, port = match(reply, pattern)
+ try(port, "invalid epsv response")
+ local address = tp:getpeername()
+ local server = self.server
+ self.pasvt = {
+ address = address,
+ port = port,
+ }
+ if self.server then
+ server:close()
+ self.server = nil
+ end
+ return address, port
+end
+
+function methods.port(self, address, port)
+ local try = self.try
+ local tp = self.tp
+ self.pasvt = nil
+ if not address then
+ address, port = try(tp:getsockname())
+ self.server = try(bindsocket(address, 0))
+ address, port = try(self.server:getsockname())
+ try(self.server:settimeout(ftp.TIMEOUT))
+ end
+ local pl = mod(port,256)
+ local ph = (port - pl)/256
+ local arg = gsub(format("%s,%d,%d", address, ph, pl), "%.", ",")
+ try(tp:command("port", arg))
+ try(tp:check("2.."))
+ return 1
+end
+
+function methods.eprt(self, family, address, port)
+ local try = self.try
+ local tp = self.tp
+ self.pasvt = nil
+ if not address then
+ address, port = try(tp:getsockname())
+ self.server = try(bindsocket(address, 0))
+ address, port = try(self.server:getsockname())
+ try(self.server:settimeout(ftp.TIMEOUT))
+ end
+ local arg = format("|%s|%s|%d|", family, address, port)
+ try(tp:command("eprt", arg))
+ try(tp:check("2.."))
+ return 1
+end
+
+function methods.send(self, sendt)
+ local try = self.try
+ local tp = self.tp
+ -- so we try a table or string ?
+ try(self.pasvt or self.server, "need port or pasv first")
+ if self.pasvt then
+ self:pasvconnect()
+ end
+ local argument = sendt.argument or unescapeurl(gsub(sendt.path or "", "^[/\\]", ""))
+ if argument == "" then
+ argument = nil
+ end
+ local command = sendt.command or "stor"
+ try(tp:command(command, argument))
+ local code, reply = try(tp:check{"2..", "1.."})
+ if not self.pasvt then
+ self:portconnect()
+ end
+ local step = sendt.step or pumpstep
+ local readt = { tp }
+ local checkstep = function(src, snk)
+ local readyt = selectsocket(readt, nil, 0)
+ if readyt[tp] then
+ code = try(tp:check("2.."))
+ end
+ return step(src, snk)
+ end
+ local sink = sinksocket("close-when-done", self.data)
+ try(pumpall(sendt.source, sink, checkstep))
+ if find(code, "1..") then
+ try(tp:check("2.."))
+ end
+ self.data:close()
+ local sent = skipsocket(1, self.data:getstats())
+ self.data = nil
+ return sent
+end
+
+function methods.receive(self, recvt)
+ local try = self.try
+ local tp = self.tp
+ try(self.pasvt or self.server, "need port or pasv first")
+ if self.pasvt then self:pasvconnect() end
+ local argument = recvt.argument or unescapeurl(gsub(recvt.path or "", "^[/\\]", ""))
+ if argument == "" then
+ argument = nil
+ end
+ local command = recvt.command or "retr"
+ try(tp:command(command, argument))
+ local code,reply = try(tp:check{"1..", "2.."})
+ if code >= 200 and code <= 299 then
+ recvt.sink(reply)
+ return 1
+ end
+ if not self.pasvt then
+ self:portconnect()
+ end
+ local source = sourcesocket("until-closed", self.data)
+ local step = recvt.step or pumpstep
+ try(pumpall(source, recvt.sink, step))
+ if find(code, "1..") then
+ try(tp:check("2.."))
+ end
+ self.data:close()
+ self.data = nil
+ return 1
+end
+
+function methods.cwd(self, dir)
+ local try = self.try
+ local tp = self.tp
+ try(tp:command("cwd", dir))
+ try(tp:check(250))
+ return 1
+end
+
+function methods.type(self, typ)
+ local try = self.try
+ local tp = self.tp
+ try(tp:command("type", typ))
+ try(tp:check(200))
+ return 1
+end
+
+function methods.greet(self)
+ local try = self.try
+ local tp = self.tp
+ local code = try(tp:check{"1..", "2.."})
+ if find(code, "1..") then
+ try(tp:check("2.."))
+ end
+ return 1
+end
+
+function methods.quit(self)
+ local try = self.try
+ try(self.tp:command("quit"))
+ try(self.tp:check("2.."))
+ return 1
+end
+
+function methods.close(self)
+ local data = self.data
+ if data then
+ data:close()
+ end
+ local server = self.server
+ if server then
+ server:close()
+ end
+ local tp = self.tp
+ if tp then
+ tp:close()
+ end
+end
+
+local function override(t)
+ if t.url then
+ local u = parseurl(t.url)
+ for k, v in next, t do
+ u[k] = v
+ end
+ return u
+ else
+ return t
+ end
+end
+
+local function tput(putt)
+ putt = override(putt)
+ local host = putt.host
+ trysocket(host, "missing hostname")
+ local f = ftp.open(host, putt.port, putt.create)
+ f:greet()
+ f:login(putt.user, putt.password)
+ local typ = putt.type
+ if typ then
+ f:type(typ)
+ end
+ f:epsv()
+ local sent = f:send(putt)
+ f:quit()
+ f:close()
+ return sent
+end
+
+local default = {
+ path = "/",
+ scheme = "ftp",
+}
+
+local function genericform(u)
+ local t = trysocket(parseurl(u, default))
+ trysocket(t.scheme == "ftp", "wrong scheme '" .. t.scheme .. "'")
+ trysocket(t.host, "missing hostname")
+ local pat = "^type=(.)$"
+ if t.params then
+ local typ = skipsocket(2, find(t.params, pat))
+ t.type = typ
+ trysocket(typ == "a" or typ == "i", "invalid type '" .. typ .. "'")
+ end
+ return t
+end
+
+ftp.genericform = genericform
+
+local function sput(u, body)
+ local putt = genericform(u)
+ putt.source = sourcestring(body)
+ return tput(putt)
+end
+
+ftp.put = protectsocket(function(putt, body)
+ if type(putt) == "string" then
+ return sput(putt, body)
+ else
+ return tput(putt)
+ end
+end)
+
+local function tget(gett)
+ gett = override(gett)
+ local host = gett.host
+ trysocket(host, "missing hostname")
+ local f = ftp.open(host, gett.port, gett.create)
+ f:greet()
+ f:login(gett.user, gett.password)
+ if gett.type then
+ f:type(gett.type)
+ end
+ f:epsv()
+ f:receive(gett)
+ f:quit()
+ return f:close()
+end
+
+local function sget(u)
+ local gett = genericform(u)
+ local t = { }
+ gett.sink = sinktable(t)
+ tget(gett)
+ return concat(t)
+end
+
+ftp.command = protectsocket(function(cmdt)
+ cmdt = override(cmdt)
+ local command = cmdt.command
+ local argument = cmdt.argument
+ local check = cmdt.check
+ local host = cmdt.host
+ trysocket(host, "missing hostname")
+ trysocket(command, "missing command")
+ local f = ftp.open(host, cmdt.port, cmdt.create)
+ local try = f.try
+ local tp = f.tp
+ f:greet()
+ f:login(cmdt.user, cmdt.password)
+ if type(command) == "table" then
+ local argument = argument or { }
+ for i=1,#command do
+ local cmd = command[i]
+ try(tp:command(cmd, argument[i]))
+ if check and check[i] then
+ try(tp:check(check[i]))
+ end
+ end
+ else
+ try(tp:command(command, argument))
+ if check then
+ try(tp:check(check))
+ end
+ end
+ f:quit()
+ return f:close()
+end)
+
+ftp.get = protectsocket(function(gett)
+ if type(gett) == "string" then
+ return sget(gett)
+ else
+ return tget(gett)
+ end
+end)
+
+return ftp
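
The high-level interface above follows the original LuaSocket conventions. A minimal usage sketch, assuming the reworked modules are loaded the usual way; the host, credentials and paths are placeholders:

    local ftp   = require("socket.ftp")
    local ltn12 = require("ltn12")

    -- simple form: the remote file comes back as one string
    local data, err = ftp.get("ftp://anonymous:anonymous@ftp.example.com/pub/readme.txt")

    -- generic form: stream the remote file into a sink as a binary ("image") transfer
    local ok, err = ftp.get {
        host     = "ftp.example.com",
        user     = "anonymous",
        password = "anonymous",
        path     = "/pub/readme.txt",
        type     = "i",
        sink     = ltn12.sink.file(io.open("readme.txt", "wb")),
    }
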
diff --git a/tex/context/base/mkiv/util-soc-imp-headers.lua b/tex/context/base/mkiv/util-soc-imp-headers.lua
new file mode 100644
index 000000000..ee889956c
--- /dev/null
+++ b/tex/context/base/mkiv/util-soc-imp-headers.lua
@@ -0,0 +1,144 @@
+-- original file : headers.lua
+-- for more info : see util-soc.lua
+
+local next = next
+local lower = string.lower
+local concat = table.concat
+
+local socket = socket or require("socket")
+
+local canonic = {
+ ["accept"] = "Accept",
+ ["accept-charset"] = "Accept-Charset",
+ ["accept-encoding"] = "Accept-Encoding",
+ ["accept-language"] = "Accept-Language",
+ ["accept-ranges"] = "Accept-Ranges",
+ ["action"] = "Action",
+ ["alternate-recipient"] = "Alternate-Recipient",
+ ["age"] = "Age",
+ ["allow"] = "Allow",
+ ["arrival-date"] = "Arrival-Date",
+ ["authorization"] = "Authorization",
+ ["bcc"] = "Bcc",
+ ["cache-control"] = "Cache-Control",
+ ["cc"] = "Cc",
+ ["comments"] = "Comments",
+ ["connection"] = "Connection",
+ ["content-description"] = "Content-Description",
+ ["content-disposition"] = "Content-Disposition",
+ ["content-encoding"] = "Content-Encoding",
+ ["content-id"] = "Content-ID",
+ ["content-language"] = "Content-Language",
+ ["content-length"] = "Content-Length",
+ ["content-location"] = "Content-Location",
+ ["content-md5"] = "Content-MD5",
+ ["content-range"] = "Content-Range",
+ ["content-transfer-encoding"] = "Content-Transfer-Encoding",
+ ["content-type"] = "Content-Type",
+ ["cookie"] = "Cookie",
+ ["date"] = "Date",
+ ["diagnostic-code"] = "Diagnostic-Code",
+ ["dsn-gateway"] = "DSN-Gateway",
+ ["etag"] = "ETag",
+ ["expect"] = "Expect",
+ ["expires"] = "Expires",
+ ["final-log-id"] = "Final-Log-ID",
+ ["final-recipient"] = "Final-Recipient",
+ ["from"] = "From",
+ ["host"] = "Host",
+ ["if-match"] = "If-Match",
+ ["if-modified-since"] = "If-Modified-Since",
+ ["if-none-match"] = "If-None-Match",
+ ["if-range"] = "If-Range",
+ ["if-unmodified-since"] = "If-Unmodified-Since",
+ ["in-reply-to"] = "In-Reply-To",
+ ["keywords"] = "Keywords",
+ ["last-attempt-date"] = "Last-Attempt-Date",
+ ["last-modified"] = "Last-Modified",
+ ["location"] = "Location",
+ ["max-forwards"] = "Max-Forwards",
+ ["message-id"] = "Message-ID",
+ ["mime-version"] = "MIME-Version",
+ ["original-envelope-id"] = "Original-Envelope-ID",
+ ["original-recipient"] = "Original-Recipient",
+ ["pragma"] = "Pragma",
+ ["proxy-authenticate"] = "Proxy-Authenticate",
+ ["proxy-authorization"] = "Proxy-Authorization",
+ ["range"] = "Range",
+ ["received"] = "Received",
+ ["received-from-mta"] = "Received-From-MTA",
+ ["references"] = "References",
+ ["referer"] = "Referer",
+ ["remote-mta"] = "Remote-MTA",
+ ["reply-to"] = "Reply-To",
+ ["reporting-mta"] = "Reporting-MTA",
+ ["resent-bcc"] = "Resent-Bcc",
+ ["resent-cc"] = "Resent-Cc",
+ ["resent-date"] = "Resent-Date",
+ ["resent-from"] = "Resent-From",
+ ["resent-message-id"] = "Resent-Message-ID",
+ ["resent-reply-to"] = "Resent-Reply-To",
+ ["resent-sender"] = "Resent-Sender",
+ ["resent-to"] = "Resent-To",
+ ["retry-after"] = "Retry-After",
+ ["return-path"] = "Return-Path",
+ ["sender"] = "Sender",
+ ["server"] = "Server",
+ ["smtp-remote-recipient"] = "SMTP-Remote-Recipient",
+ ["status"] = "Status",
+ ["subject"] = "Subject",
+ ["te"] = "TE",
+ ["to"] = "To",
+ ["trailer"] = "Trailer",
+ ["transfer-encoding"] = "Transfer-Encoding",
+ ["upgrade"] = "Upgrade",
+ ["user-agent"] = "User-Agent",
+ ["vary"] = "Vary",
+ ["via"] = "Via",
+ ["warning"] = "Warning",
+ ["will-retry-until"] = "Will-Retry-Until",
+ ["www-authenticate"] = "WWW-Authenticate",
+ ["x-mailer"] = "X-Mailer",
+}
+
+setmetatable(canonic, {
+ __index = function(t,k)
+ socket.report("invalid header: %s",k)
+ t[k] = k
+ return k
+ end
+})
+
+local function normalizeheaders(headers)
+ if not headers then
+ return { }
+ end
+ local normalized = { }
+ for k, v in next, headers do
+ normalized[#normalized+1] = canonic[k] .. ": " .. v
+ end
+ normalized[#normalized+1] = ""
+ normalized[#normalized+1] = ""
+ return concat(normalized,"\r\n")
+end
+
+local function lowerheaders(lowered,headers)
+ if not lowered then
+ return { }
+ end
+ if not headers then
+ lowered, headers = { }, lowered
+ end
+ for k, v in next, headers do
+ lowered[lower(k)] = v
+ end
+ return lowered
+end
+
+socket.headers = {
+ canonic = canonic,
+ normalize = normalizeheaders,
+ lower = lowerheaders,
+}
+
+return socket.headers
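
The canonic table and the two helpers are used by the http and smtp code below; a small sketch of what they produce (the header values are made up):

    local headers = require("socket.headers")

    print(headers.canonic["content-type"]) -- Content-Type

    -- normalize turns a lowercase-keyed table into one CRLF separated block
    local block = headers.normalize {
        ["content-type"]   = "text/plain",
        ["content-length"] = "12",
    }
    -- block is e.g. "Content-Type: text/plain\r\nContent-Length: 12\r\n\r\n"
    -- (the order of the two header lines depends on table traversal)
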
diff --git a/tex/context/base/mkiv/util-soc-imp-http.lua b/tex/context/base/mkiv/util-soc-imp-http.lua
new file mode 100644
index 000000000..98789fa7b
--- /dev/null
+++ b/tex/context/base/mkiv/util-soc-imp-http.lua
@@ -0,0 +1,432 @@
+-- original file : http.lua
+-- for more info : see util-soc.lua
+
+local tostring, tonumber, setmetatable, next, type = tostring, tonumber, setmetatable, next, type
+local find, lower, format, gsub, match = string.find, string.lower, string.format, string.gsub, string.match
+local concat = table.concat
+
+local socket = socket or require("socket")
+local url = socket.url or require("socket.url")
+local ltn12 = ltn12 or require("ltn12")
+local mime = mime or require("mime")
+local headers = socket.headers or require("socket.headers")
+
+local normalizeheaders = headers.normalize
+local lowerheaders = headers.lower
+
+local parseurl = url.parse
+local buildurl = url.build
+local absoluteurl = url.absolute
+local unescapeurl = url.unescape
+
+local skipsocket = socket.skip
+local sinksocket = socket.sink
+local sourcesocket = socket.source
+local trysocket = socket.try
+local tcpsocket = socket.tcp
+local newtrysocket = socket.newtry
+local protectsocket = socket.protect
+
+local emptysource = ltn12.source.empty
+local stringsource = ltn12.source.string
+local rewindsource = ltn12.source.rewind
+local pumpstep = ltn12.pump.step
+local pumpall = ltn12.pump.all
+local sinknull = ltn12.sink.null
+local sinktable = ltn12.sink.table
+
+local mimeb64 = mime.b64
+
+-- todo: localize ltn12
+
+local http = {
+ TIMEOUT = 60, -- connection timeout in seconds
+ USERAGENT = socket._VERSION, -- user agent field sent in request
+}
+
+socket.http = http
+
+local PORT = 80
+local SCHEMES = {
+ http = true,
+}
+
+-- Reads MIME headers from a connection, unfolding where needed
+
+local function receiveheaders(sock, headers)
+ if not headers then
+ headers = { }
+ end
+ -- get first line
+ local line, err = sock:receive()
+ if err then
+ return nil, err
+ end
+ -- headers go until a blank line is found
+ while line ~= "" do
+ -- get field-name and value
+ local name, value = skipsocket(2, find(line, "^(.-):%s*(.*)"))
+ if not (name and value) then
+ return nil, "malformed response headers"
+ end
+ name = lower(name)
+ -- get next line (value might be folded)
+ line, err = sock:receive()
+ if err then
+ return nil, err
+ end
+ -- unfold any folded values
+ while find(line, "^%s") do
+ value = value .. line
+ line, err = sock:receive()
+ if err then
+ return nil, err
+ end
+ end
+ -- save pair in table
+ local found = headers[name]
+ if found then
+ value = found .. ", " .. value
+ end
+ headers[name] = value
+ end
+ return headers
+end
+
+-- Extra sources and sinks
+
+socket.sourcet["http-chunked"] = function(sock, headers)
+ return setmetatable (
+ {
+ getfd = function() return sock:getfd() end,
+ dirty = function() return sock:dirty() end,
+ }, {
+ __call = function()
+ local line, err = sock:receive()
+ if err then
+ return nil, err
+ end
+ local size = tonumber(gsub(line, ";.*", ""), 16)
+ if not size then
+ return nil, "invalid chunk size"
+ end
+ if size > 0 then
+ local chunk, err, part = sock:receive(size)
+ if chunk then
+ sock:receive()
+ end
+ return chunk, err
+ else
+ headers, err = receiveheaders(sock, headers)
+ if not headers then
+ return nil, err
+ end
+ end
+ end
+ }
+ )
+end
+
+socket.sinkt["http-chunked"] = function(sock)
+ return setmetatable(
+ {
+ getfd = function() return sock:getfd() end,
+ dirty = function() return sock:dirty() end,
+ },
+ {
+ __call = function(self, chunk, err)
+ if not chunk then
+ chunk = ""
+ end
+ return sock:send(format("%X\r\n%s\r\n",#chunk,chunk))
+ end
+ })
+end
+
+-- Low level HTTP API
+
+local methods = { }
+local mt = { __index = methods }
+
+local function openhttp(host, port, create)
+ local c = trysocket((create or tcpsocket)())
+ local h = setmetatable({ c = c }, mt)
+ local try = newtrysocket(function() h:close() end)
+ h.try = try
+ try(c:settimeout(http.TIMEOUT))
+ try(c:connect(host, port or PORT))
+ return h
+end
+
+http.open = openhttp
+
+function methods.sendrequestline(self, method, uri)
+ local requestline = format("%s %s HTTP/1.1\r\n", method or "GET", uri)
+ return self.try(self.c:send(requestline))
+end
+
+function methods.sendheaders(self,headers)
+ self.try(self.c:send(normalizeheaders(headers)))
+ return 1
+end
+
+function methods.sendbody(self, headers, source, step)
+ if not source then
+ source = emptysource()
+ end
+ if not step then
+ step = pumpstep
+ end
+ local mode = "http-chunked"
+ if headers["content-length"] then
+ mode = "keep-open"
+ end
+ return self.try(pumpall(source, sinksocket(mode, self.c), step))
+end
+
+function methods.receivestatusline(self)
+ local try = self.try
+ local status = try(self.c:receive(5))
+ if status ~= "HTTP/" then
+ return nil, status -- HTTP/0.9
+ end
+ status = try(self.c:receive("*l", status))
+ local code = skipsocket(2, find(status, "HTTP/%d*%.%d* (%d%d%d)"))
+ return try(tonumber(code), status)
+end
+
+function methods.receiveheaders(self)
+ return self.try(receiveheaders(self.c))
+end
+
+function methods.receivebody(self, headers, sink, step)
+ if not sink then
+ sink = sinknull()
+ end
+ if not step then
+ step = pumpstep
+ end
+ local length = tonumber(headers["content-length"])
+ local encoding = headers["transfer-encoding"] -- shortcut
+ local mode = "default" -- connection close
+ if encoding and encoding ~= "identity" then
+ mode = "http-chunked"
+ elseif length then
+ mode = "by-length"
+ end
+ --hh: so length can be nil
+ return self.try(pumpall(sourcesocket(mode, self.c, length), sink, step))
+end
+
+function methods.receive09body(self, status, sink, step)
+ local source = rewindsource(sourcesocket("until-closed", self.c))
+ source(status)
+ return self.try(pumpall(source, sink, step))
+end
+
+function methods.close(self)
+ return self.c:close()
+end
+
+-- High level HTTP API
+
+local function adjusturi(request)
+ if not request.proxy and not http.PROXY then
+ request = {
+ path = trysocket(request.path, "invalid path 'nil'"),
+ params = request.params,
+ query = request.query,
+ fragment = request.fragment,
+ }
+ end
+ return buildurl(request)
+end
+
+local function adjustheaders(request)
+ local headers = {
+ ["user-agent"] = http.USERAGENT,
+ ["host"] = gsub(request.authority, "^.-@", ""),
+ ["connection"] = "close, TE",
+ ["te"] = "trailers"
+ }
+ local username = request.user
+ local password = request.password
+ if username and password then
+ headers["authorization"] = "Basic " .. (mimeb64(username .. ":" .. unescapeurl(password)))
+ end
+ local proxy = request.proxy or http.PROXY
+ if proxy then
+ proxy = parseurl(proxy)
+ local username = proxy.user
+ local password = proxy.password
+ if username and password then
+ headers["proxy-authorization"] = "Basic " .. (mimeb64(username .. ":" .. password))
+ end
+ end
+ local requestheaders = request.headers
+ if requestheaders then
+ headers = lowerheaders(headers,requestheaders)
+ end
+ return headers
+end
+
+-- default url parts
+
+local default = {
+ host = "",
+ port = PORT,
+ path = "/",
+ scheme = "http"
+}
+
+local function adjustrequest(originalrequest)
+ local url = originalrequest.url
+ local request = url and parseurl(url,default) or { }
+ for k, v in next, originalrequest do
+ request[k] = v
+ end
+ local host = request.host
+ local port = request.port
+ local uri = request.uri
+ if not host or host == "" then
+ trysocket(nil, "invalid host '" .. tostring(host) .. "'")
+ end
+ if port == "" then
+ request.port = PORT
+ end
+ if not uri or uri == "" then
+ request.uri = adjusturi(request)
+ end
+ request.headers = adjustheaders(request)
+ local proxy = request.proxy or http.PROXY
+ if proxy then
+ proxy = parseurl(proxy)
+ request.host = proxy.host
+ request.port = proxy.port or 3128
+ end
+ return request
+end
+
+local maxredirects = 4
+local validredirects = { [301] = true, [302] = true, [303] = true, [307] = true }
+local validmethods = { [false] = true, GET = true, HEAD = true }
+
+local function shouldredirect(request, code, headers)
+ local location = headers.location
+ if not location then
+ return false
+ end
+ location = gsub(location, "%s", "")
+ if location == "" then
+ return false
+ end
+ local scheme = match(location, "^([%w][%w%+%-%.]*)%:")
+ if scheme and not SCHEMES[scheme] then
+ return false
+ end
+ local method = request.method
+ local redirect = request.redirect
+ local redirects = request.nredirects or 0
+ return redirect and validredirects[code] and validmethods[method] and redirects <= maxredirects
+end
+
+local function shouldreceivebody(request, code)
+ if request.method == "HEAD" then
+ return nil
+ end
+ if code == 204 or code == 304 then
+ return nil
+ end
+ if code >= 100 and code < 200 then
+ return nil
+ end
+ return 1
+end
+
+local tredirect, trequest, srequest
+
+tredirect = function(request, location)
+ local result, code, headers, status = trequest {
+ url = absoluteurl(request.url,location),
+ source = request.source,
+ sink = request.sink,
+ headers = request.headers,
+ proxy = request.proxy,
+ nredirects = (request.nredirects or 0) + 1,
+ create = request.create,
+ }
+ if not headers then
+ headers = { }
+ end
+ if not headers.location then
+ headers.location = location
+ end
+ return result, code, headers, status
+end
+
+trequest = function(originalrequest)
+ local request = adjustrequest(originalrequest)
+ local connection = openhttp(request.host, request.port, request.create)
+ local headers = request.headers
+ connection:sendrequestline(request.method, request.uri)
+ connection:sendheaders(headers)
+ if request.source then
+ connection:sendbody(headers, request.source, request.step)
+ end
+ local code, status = connection:receivestatusline()
+ if not code then
+ connection:receive09body(status, request.sink, request.step)
+ return 1, 200
+ end
+ while code == 100 do
+ headers = connection:receiveheaders()
+ code, status = connection:receivestatusline()
+ end
+ headers = connection:receiveheaders()
+ if shouldredirect(request, code, headers) and not request.source then
+ connection:close()
+ return tredirect(originalrequest,headers.location)
+ end
+ if shouldreceivebody(request, code) then
+ connection:receivebody(headers, request.sink, request.step)
+ end
+ connection:close()
+ return 1, code, headers, status
+end
+
+-- turns an url and a body into a generic request
+
+local function genericform(url, body)
+ local buffer = { }
+ local request = {
+ url = url,
+ sink = sinktable(buffer),
+ target = buffer,
+ }
+ if body then
+ request.source = stringsource(body)
+ request.method = "POST"
+ request.headers = {
+ ["content-length"] = #body,
+ ["content-type"] = "application/x-www-form-urlencoded"
+ }
+ end
+ return request
+end
+
+http.genericform = genericform
+
+srequest = function(url, body)
+ local request = genericform(url, body)
+ local _, code, headers, status = trequest(request)
+ return concat(request.target), code, headers, status
+end
+
+http.request = protectsocket(function(request, body)
+ if type(request) == "string" then
+ return srequest(request, body)
+ else
+ return trequest(request)
+ end
+end)
+
+return http
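
A minimal usage sketch of the request interface, assuming the modules are loaded the usual way; the url is a placeholder:

    local http  = require("socket.http")
    local ltn12 = require("ltn12")

    -- simple form: the body comes back as one string
    local body, code, responseheaders, status = http.request("http://example.com/")

    -- generic form: stream the response into a table sink; request headers use lowercase keys
    local chunks = { }
    local ok, code = http.request {
        url     = "http://example.com/",
        method  = "GET",
        headers = { ["accept"] = "text/plain" },
        sink    = ltn12.sink.table(chunks),
    }
    local fetched = table.concat(chunks)
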
diff --git a/tex/context/base/mkiv/util-soc-imp-ltn12.lua b/tex/context/base/mkiv/util-soc-imp-ltn12.lua
new file mode 100644
index 000000000..0a389896b
--- /dev/null
+++ b/tex/context/base/mkiv/util-soc-imp-ltn12.lua
@@ -0,0 +1,388 @@
+-- original file : ltn12.lua
+-- for more info : see util-soc.lua
+
+local select, unpack = select, unpack
+local insert, remove = table.insert, table.remove
+local sub = string.sub
+local format = string.format
+
+local report = logs and logs.reporter("ltn12") or function(fmt,first,...)
+ if fmt then
+ fmt = "ltn12: " .. fmt
+ if first then
+ print(format(fmt,first,...))
+ else
+ print(fmt)
+ end
+ end
+end
+
+local filter = { }
+local source = { }
+local sink = { }
+local pump = { }
+
+local ltn12 = {
+
+ _VERSION = "LTN12 1.0.3",
+
+ BLOCKSIZE = 2048,
+
+ filter = filter,
+ source = source,
+ sink = sink,
+ pump = pump,
+
+ report = report,
+
+}
+
+-- returns a high level filter that cycles a low-level filter
+
+function filter.cycle(low, ctx, extra)
+ if low then
+ return function(chunk)
+ return (low(ctx, chunk, extra))
+ end
+ end
+end
+
+-- chains a bunch of filters together
+
+function filter.chain(...)
+ local arg = { ... }
+ local n = select('#',...)
+ local top = 1
+ local index = 1
+ local retry = ""
+ return function(chunk)
+ retry = chunk and retry
+ while true do
+ local action = arg[index]
+ if index == top then
+ chunk = action(chunk)
+ if chunk == "" or top == n then
+ return chunk
+ elseif chunk then
+ index = index + 1
+ else
+ top = top + 1
+ index = top
+ end
+ else
+ chunk = action(chunk or "")
+ if chunk == "" then
+ index = index - 1
+ chunk = retry
+ elseif chunk then
+ if index == n then
+ return chunk
+ else
+ index = index + 1
+ end
+ else
+ report("error: filter returned inappropriate 'nil'")
+ return
+ end
+ end
+ end
+ end
+end
+
+-- create an empty source
+
+local function empty()
+ return nil
+end
+
+function source.empty()
+ return empty
+end
+
+-- returns a source that just outputs an error
+
+local function sourceerror(err)
+ return function()
+ return nil, err
+ end
+end
+
+source.error = sourceerror
+
+-- creates a file source
+
+function source.file(handle, io_err)
+ if handle then
+ local blocksize = ltn12.BLOCKSIZE
+ return function()
+ local chunk = handle:read(blocksize)
+ if not chunk then
+ handle:close()
+ end
+ return chunk
+ end
+ else
+ return sourceerror(io_err or "unable to open file")
+ end
+end
+
+-- turns a fancy source into a simple source
+
+function source.simplify(src)
+ return function()
+ local chunk, err_or_new = src()
+ if err_or_new then
+ src = err_or_new
+ end
+ if chunk then
+ return chunk
+ else
+ return nil, err_or_new
+ end
+ end
+end
+
+-- creates string source
+
+function source.string(s)
+ if s then
+ local blocksize = ltn12.BLOCKSIZE
+ local i = 1
+ return function()
+ local nexti = i + blocksize
+ local chunk = sub(s, i, nexti - 1)
+ i = nexti
+ if chunk ~= "" then
+ return chunk
+ else
+ return nil
+ end
+ end
+ else return source.empty() end
+end
+
+-- creates rewindable source
+
+function source.rewind(src)
+ local t = { }
+ return function(chunk)
+ if chunk then
+ insert(t, chunk)
+ else
+ chunk = remove(t)
+ if chunk then
+ return chunk
+ else
+ return src()
+ end
+ end
+ end
+end
+
+-- chains a source with one or several filter(s)
+
+function source.chain(src, f, ...)
+ if ... then
+ f = filter.chain(f, ...)
+ end
+ local last_in = ""
+ local last_out = ""
+ local state = "feeding"
+ local err
+ return function()
+ if not last_out then
+ report("error: source is empty")
+ return
+ end
+ while true do
+ if state == "feeding" then
+ last_in, err = src()
+ if err then
+ return nil, err
+ end
+ last_out = f(last_in)
+ if not last_out then
+ if last_in then
+ report("error: filter returned inappropriate 'nil'")
+ end
+ return nil
+ elseif last_out ~= "" then
+ state = "eating"
+ if last_in then
+ last_in = ""
+ end
+ return last_out
+ end
+ else
+ last_out = f(last_in)
+ if last_out == "" then
+ if last_in == "" then
+ state = "feeding"
+ else
+ report("error: filter returned nothing")
+ return
+ end
+ elseif not last_out then
+ if last_in then
+ report("filter returned inappropriate 'nil'")
+ end
+ return nil
+ else
+ return last_out
+ end
+ end
+ end
+ end
+end
+
+-- creates a source that produces contents of several sources, one after the
+-- other, as if they were concatenated
+
+function source.cat(...)
+ local arg = { ... }
+ local src = remove(arg,1)
+ return function()
+ while src do
+ local chunk, err = src()
+ if chunk then
+ return chunk
+ end
+ if err then
+ return nil, err
+ end
+ src = remove(arg,1)
+ end
+ end
+end
+
+-- creates a sink that stores into a table
+
+function sink.table(t)
+ if not t then
+ t = { }
+ end
+ local f = function(chunk, err)
+ if chunk then
+ insert(t, chunk)
+ end
+ return 1
+ end
+ return f, t
+end
+
+-- turns a fancy sink into a simple sink
+
+function sink.simplify(snk)
+ return function(chunk, err)
+ local ret, err_or_new = snk(chunk, err)
+ if not ret then
+ return nil, err_or_new
+ end
+ if err_or_new then
+ snk = err_or_new
+ end
+ return 1
+ end
+end
+
+-- creates a sink that discards data
+
+local function null()
+ return 1
+end
+
+function sink.null()
+ return null
+end
+
+-- creates a sink that just returns an error
+
+local function sinkerror(err)
+ return function()
+ return nil, err
+ end
+end
+
+sink.error = sinkerror
+
+-- creates a file sink
+
+function sink.file(handle, io_err)
+ if handle then
+ return function(chunk, err)
+ if not chunk then
+ handle:close()
+ return 1
+ else
+ return handle:write(chunk)
+ end
+ end
+ else
+ return sinkerror(io_err or "unable to open file")
+ end
+end
+
+-- chains a sink with one or several filter(s)
+
+function sink.chain(f, snk, ...)
+ if ... then
+ local args = { f, snk, ... }
+ snk = remove(args, #args)
+ f = filter.chain(unpack(args))
+ end
+ return function(chunk, err)
+ if chunk ~= "" then
+ local filtered = f(chunk)
+ local done = chunk and ""
+ while true do
+ local ret, snkerr = snk(filtered, err)
+ if not ret then
+ return nil, snkerr
+ end
+ if filtered == done then
+ return 1
+ end
+ filtered = f(done)
+ end
+ else
+ return 1
+ end
+ end
+end
+
+-- pumps one chunk from the source to the sink
+
+function pump.step(src, snk)
+ local chunk, src_err = src()
+ local ret, snk_err = snk(chunk, src_err)
+ if chunk and ret then
+ return 1
+ else
+ return nil, src_err or snk_err
+ end
+end
+
+-- pumps all data from a source to a sink, using a step function
+
+function pump.all(src, snk, step)
+ if not step then
+ step = pump.step
+ end
+ while true do
+ local ret, err = step(src, snk)
+ if not ret then
+ if err then
+ return nil, err
+ else
+ return 1
+ end
+ end
+ end
+end
+
+if logs then
+ _G.ltn12 = ltn12
+ package.loaded.ltn12 = ltn12
+ -- report("module (re)installed")
+end
+
+return ltn12
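
The source, sink and pump abstractions combine as in the original library; a small sketch with placeholder filenames:

    local ltn12 = require("ltn12")

    -- copy one file to another, one block at a time
    ltn12.pump.all(
        ltn12.source.file(io.open("input.txt", "rb")),
        ltn12.sink.file(io.open("output.txt", "wb"))
    )

    -- collect a string source into a table sink
    local t = { }
    ltn12.pump.all(ltn12.source.string("some data"), ltn12.sink.table(t))
    print(table.concat(t)) -- some data
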
diff --git a/tex/context/base/mkiv/util-soc-imp-mime.lua b/tex/context/base/mkiv/util-soc-imp-mime.lua
new file mode 100644
index 000000000..b1a5827ac
--- /dev/null
+++ b/tex/context/base/mkiv/util-soc-imp-mime.lua
@@ -0,0 +1,105 @@
+-- original file : mime.lua
+-- for more info : see util-soc.lua
+
+local type, tostring = type, tostring
+local format = string.format
+
+local mime = require("mime.core")
+local ltn12 = ltn12 or require("ltn12")
+
+local filtercycle = ltn12.filter.cycle
+
+local report = logs and logs.reporter("mime") or function(fmt,first,...)
+ if fmt then
+ fmt = "mime: " .. fmt
+ if first then
+ print(format(fmt,first,...))
+ else
+ print(fmt)
+ end
+ end
+end
+
+mime.report = report
+
+local encodet = { }
+local decodet = { }
+local wrapt = { }
+
+mime.encodet = encodet
+mime.decodet = decodet
+mime.wrapt = wrapt
+
+local mime_b64 = mime.b64
+local mime_qp = mime.qp
+local mime_unb64 = mime.unb64
+local mime_unqp = mime.unqp
+local mime_wrp = mime.wrp
+local mime_qpwrp = mime.qpwrp
+local mime_eol = mime.eol
+local mime_dot = mime.dot
+
+encodet['base64'] = function()
+ return filtercycle(mime_b64,"")
+end
+
+encodet['quoted-printable'] = function(mode)
+ return filtercycle(mime_qp, "", mode == "binary" and "=0D=0A" or "\r\n")
+end
+
+decodet['base64'] = function()
+ return filtercycle(mime_unb64, "")
+end
+
+decodet['quoted-printable'] = function()
+ return filtercycle(mime_unqp, "")
+end
+
+local wraptext = function(length)
+ if not length then
+ length = 76
+ end
+ return filtercycle(mime_wrp, length, length)
+end
+
+local wrapquoted = function()
+ return filtercycle(mime_qpwrp, 76, 76)
+end
+
+wrapt['text'] = wraptext
+wrapt['base64'] = wraptext
+wrapt['default'] = wraptext
+wrapt['quoted-printable'] = wrapquoted
+
+function mime.normalize(marker)
+ return filtercycle(mime_eol, 0, marker)
+end
+
+function mime.stuff()
+ return filtercycle(mime_dot, 2)
+end
+
+local function choose(list)
+ return function(name, opt1, opt2)
+ if type(name) ~= "string" then
+ name, opt1, opt2 = "default", name, opt1
+ end
+ local filter = list[name or "nil"]
+ if filter then
+ return filter(opt1, opt2)
+ else
+ report("error: unknown key '%s'",tostring(name))
+ end
+ end
+end
+
+mime.encode = choose(encodet)
+mime.decode = choose(decodet)
+mime.wrap = choose(wrapt)
+
+if logs then
+ _G.mime = mime
+ package.loaded.mime = mime
+ -- report("module (re)installed")
+end
+
+return mime
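
The filters defined here plug into the ltn12 chains; a small sketch that base64 encodes a string and wraps the output at 76 columns (the payload is a placeholder):

    local mime  = require("mime")
    local ltn12 = require("ltn12")

    local encode = ltn12.filter.chain(mime.encode("base64"), mime.wrap("base64"))
    local t = { }
    ltn12.pump.all(
        ltn12.source.chain(ltn12.source.string("any binary payload"), encode),
        ltn12.sink.table(t)
    )
    print(table.concat(t))
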
diff --git a/tex/context/base/mkiv/util-soc-imp-reset.lua b/tex/context/base/mkiv/util-soc-imp-reset.lua
new file mode 100644
index 000000000..a4a489b0f
--- /dev/null
+++ b/tex/context/base/mkiv/util-soc-imp-reset.lua
@@ -0,0 +1,13 @@
+local loaded = package.loaded
+
+loaded["socket"] = nil
+loaded["copas"] = nil
+loaded["ltn12"] = nil
+loaded["mbox"] = nil
+loaded["mime"] = nil
+loaded["socket.url"] = nil
+loaded["socket.headers"] = nil
+loaded["socket.tp"] = nil
+loaded["socket.http"] = nil
+loaded["socket.ftp"] = nil
+loaded["socket.smtp"] = nil
diff --git a/tex/context/base/mkiv/util-soc-imp-smtp.lua b/tex/context/base/mkiv/util-soc-imp-smtp.lua
new file mode 100644
index 000000000..c13a02688
--- /dev/null
+++ b/tex/context/base/mkiv/util-soc-imp-smtp.lua
@@ -0,0 +1,265 @@
+-- original file : smtp.lua
+-- for more info : see util-soc.lua
+
+local type, setmetatable, next = type, setmetatable, next
+local find, lower, format = string.find, string.lower, string.format
+local osdate, osgetenv = os.date, os.getenv
+local random = math.random
+
+local socket = socket or require("socket")
+local headers = socket.headers or require("socket.headers")
+local ltn12 = ltn12 or require("ltn12")
+local tp = socket.tp or require("socket.tp")
+local mime = mime or require("mime")
+
+local mimeb64 = mime.b64
+local mimestuff = mime.stuff
+
+local skipsocket = socket.skip
+local trysocket = socket.try
+local newtrysocket = socket.newtry
+local protectsocket = socket.protect
+
+local normalizeheaders = headers.normalize
+local lowerheaders = headers.lower
+
+local createcoroutine = coroutine.create
+local resumecoroutine = coroutine.resume
+local yieldcoroutine = coroutine.yield
+
+local smtp = {
+ TIMEOUT = 60,
+ SERVER = "localhost",
+ PORT = 25,
+ DOMAIN = osgetenv("SERVER_NAME") or "localhost",
+ ZONE = "-0000",
+}
+
+socket.smtp = smtp
+
+local methods = { }
+local mt = { __index = methods }
+
+function methods.greet(self, domain)
+ local try = self.try
+ local tp = self.tp
+ try(tp:check("2.."))
+ try(tp:command("EHLO", domain or smtp.DOMAIN))
+ return skipsocket(1, try(tp:check("2..")))
+end
+
+function methods.mail(self, from)
+ local try = self.try
+ local tp = self.tp
+ try(tp:command("MAIL", "FROM:" .. from))
+ return try(tp:check("2.."))
+end
+
+function methods.rcpt(self, to)
+ local try = self.try
+ local tp = self.tp
+ try(tp:command("RCPT", "TO:" .. to))
+ return try(tp:check("2.."))
+end
+
+function methods.data(self, src, step)
+ local try = self.try
+ local tp = self.tp
+ try(tp:command("DATA"))
+ try(tp:check("3.."))
+ try(tp:source(src, step))
+ try(tp:send("\r\n.\r\n"))
+ return try(tp:check("2.."))
+end
+
+function methods.quit(self)
+ local try = self.try
+ local tp = self.tp
+ try(tp:command("QUIT"))
+ return try(tp:check("2.."))
+end
+
+function methods.close(self)
+ return self.tp:close()
+end
+
+function methods.login(self, user, password)
+ local try = self.try
+ local tp = self.tp
+ try(tp:command("AUTH", "LOGIN"))
+ try(tp:check("3.."))
+ try(tp:send(mimeb64(user) .. "\r\n"))
+ try(tp:check("3.."))
+ try(tp:send(mimeb64(password) .. "\r\n"))
+ return try(tp:check("2.."))
+end
+
+function methods.plain(self, user, password)
+ local try = self.try
+ local tp = self.tp
+ local auth = "PLAIN " .. mimeb64("\0" .. user .. "\0" .. password)
+ try(tp:command("AUTH", auth))
+ return try(tp:check("2.."))
+end
+
+function methods.auth(self, user, password, ext)
+ if not user or not password then
+ return 1
+ end
+ local try = self.try
+ if find(ext, "AUTH[^\n]+LOGIN") then
+ return self:login(user,password)
+ elseif find(ext, "AUTH[^\n]+PLAIN") then
+ return self:plain(user,password)
+ else
+ try(nil, "authentication not supported")
+ end
+end
+
+function methods.send(self, mail)
+ self:mail(mail.from)
+ local receipt = mail.rcpt
+ if type(receipt) == "table" then
+ for i=1,#receipt do
+ self:rcpt(receipt[i])
+ end
+ elseif receipt then
+ self:rcpt(receipt)
+ end
+ self:data(ltn12.source.chain(mail.source, mimestuff()), mail.step)
+end
+
+local function opensmtp(server, port, create)
+ if not server or server == "" then
+ server = smtp.SERVER
+ end
+ if not port or port == "" then
+ port = smtp.PORT
+ end
+ local s = {
+ tp = trysocket(tp.connect(server, port, smtp.TIMEOUT, create)),
+ }
+ -- assign 'try' afterwards so that the error handler captures the local 's'
+ s.try = newtrysocket(function()
+ s:close()
+ end)
+ setmetatable(s, mt)
+ return s
+end
+
+smtp.open = opensmtp
+
+local nofboundaries = 0
+
+local function newboundary()
+ nofboundaries = nofboundaries + 1
+ return format('%s%05d==%05u', osdate('%d%m%Y%H%M%S'), random(0,99999), nofboundaries)
+end
+
+local send_message
+
+local function send_headers(headers)
+ yieldcoroutine(normalizeheaders(headers))
+end
+
+local function send_multipart(message)
+ local boundary = newboundary()
+ local headers = lowerheaders(message.headers)
+ local body = message.body
+ local preamble = body.preamble
+ local epilogue = body.epilogue
+ local content = headers['content-type'] or 'multipart/mixed'
+ headers['content-type'] = content .. '; boundary="' .. boundary .. '"'
+ send_headers(headers)
+ if preamble then
+ yieldcoroutine(preamble)
+ yieldcoroutine("\r\n")
+ end
+ for i=1,#body do
+ yieldcoroutine("\r\n--" .. boundary .. "\r\n")
+ send_message(body[i])
+ end
+ yieldcoroutine("\r\n--" .. boundary .. "--\r\n\r\n")
+ if epilogue then
+ yieldcoroutine(epilogue)
+ yieldcoroutine("\r\n")
+ end
+end
+
+local default_content_type = 'text/plain; charset="UTF-8"'
+
+local function send_source(message)
+ local headers = lowerheaders(message.headers)
+ if not headers['content-type'] then
+ headers['content-type'] = default_content_type
+ end
+ send_headers(headers)
+ local getchunk = message.body
+ while true do
+ local chunk, err = getchunk()
+ if err then
+ yieldcoroutine(nil, err)
+ elseif chunk then
+ yieldcoroutine(chunk)
+ else
+ break
+ end
+ end
+end
+
+local function send_string(message)
+ local headers = lowerheaders(message.headers)
+ if not headers['content-type'] then
+ headers['content-type'] = default_content_type
+ end
+ send_headers(headers)
+ yieldcoroutine(message.body)
+end
+
+function send_message(message)
+ local body = message.body
+ if type(body) == "table" then
+ send_multipart(message)
+ elseif type(body) == "function" then
+ send_source(message)
+ else
+ send_string(message)
+ end
+end
+
+local function adjust_headers(message)
+ local headers = lowerheaders(message.headers)
+ if not headers["date"] then
+ headers["date"] = osdate("!%a, %d %b %Y %H:%M:%S ") .. (message.zone or smtp.ZONE)
+ end
+ if not headers["x-mailer"] then
+ headers["x-mailer"] = socket._VERSION
+ end
+ headers["mime-version"] = "1.0"
+ return headers
+end
+
+function smtp.message(message)
+ message.headers = adjust_headers(message)
+ local action = createcoroutine(function()
+ send_message(message)
+ end)
+ return function()
+ local ret, a, b = resumecoroutine(action)
+ if ret then
+ return a, b
+ else
+ return nil, a
+ end
+ end
+end
+
+smtp.send = protectsocket(function(mail)
+ local snd = opensmtp(mail.server, mail.port, mail.create)
+ local ext = snd:greet(mail.domain)
+ snd:auth(mail.user, mail.password, ext)
+ snd:send(mail)
+ snd:quit()
+ return snd:close()
+end)
+
+return smtp
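
A minimal sending sketch, assuming the modules are loaded the usual way; addresses and server are placeholders:

    local smtp = require("socket.smtp")

    -- smtp.message builds an ltn12 source from headers plus body
    local message = smtp.message {
        headers = {
            from    = "Sender <sender@example.com>",
            to      = "Recipient <recipient@example.com>",
            subject = "test",
        },
        body = "Hello from the reworked smtp module.",
    }

    local ok, err = smtp.send {
        from   = "<sender@example.com>",
        rcpt   = "<recipient@example.com>",
        source = message,
        server = "smtp.example.com",
    }
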
diff --git a/tex/context/base/mkiv/util-soc-imp-socket.lua b/tex/context/base/mkiv/util-soc-imp-socket.lua
new file mode 100644
index 000000000..0ad685d75
--- /dev/null
+++ b/tex/context/base/mkiv/util-soc-imp-socket.lua
@@ -0,0 +1,190 @@
+-- original file : socket.lua
+-- for more info : see util-soc.lua
+
+local type, tostring, setmetatable = type, tostring, setmetatable
+local min = math.min
+local format = string.format
+
+local socket = require("socket.core")
+
+local connect = socket.connect
+local tcp4 = socket.tcp4
+local tcp6 = socket.tcp6
+local getaddrinfo = socket.dns.getaddrinfo
+
+local report = logs and logs.reporter("socket") or function(fmt,first,...)
+ if fmt then
+ fmt = "socket: " .. fmt
+ if first then
+ print(format(fmt,first,...))
+ else
+ print(fmt)
+ end
+ end
+end
+
+socket.report = report
+
+function socket.connect4(address, port, laddress, lport)
+ return connect(address, port, laddress, lport, "inet")
+end
+
+function socket.connect6(address, port, laddress, lport)
+ return connect(address, port, laddress, lport, "inet6")
+end
+
+local defaulthost = "0.0.0.0" -- used when binding to "*" or ""
+
+function socket.bind(host, port, backlog)
+ if host == "*" or host == "" then
+ host = defaulthost
+ end
+ local addrinfo, err = getaddrinfo(host)
+ if not addrinfo then
+ return nil, err
+ end
+ for i=1,#addrinfo do
+ local alt = addrinfo[i]
+ local sock, err = (alt.family == "inet" and tcp4 or tcp6)()
+ if not sock then
+ return nil, err or "unknown error"
+ end
+ sock:setoption("reuseaddr", true)
+ local res, err = sock:bind(alt.addr, port)
+ if res then
+ res, err = sock:listen(backlog)
+ if res then
+ return sock
+ else
+ sock:close()
+ end
+ else
+ sock:close()
+ end
+ end
+ return nil, "invalid address"
+end
+
+socket.try = socket.newtry()
+
+function socket.choose(list)
+ return function(name, opt1, opt2)
+ if type(name) ~= "string" then
+ name, opt1, opt2 = "default", name, opt1
+ end
+ local f = list[name or "nil"]
+ if f then
+ return f(opt1, opt2)
+ else
+ report("error: unknown key '%s'",tostring(name))
+ end
+ end
+end
+
+local sourcet = { }
+local sinkt = { }
+
+socket.sourcet = sourcet
+socket.sinkt = sinkt
+
+socket.BLOCKSIZE = 2048
+
+sinkt["close-when-done"] = function(sock)
+ return setmetatable (
+ {
+ getfd = function() return sock:getfd() end,
+ dirty = function() return sock:dirty() end,
+ },
+ {
+ __call = function(self, chunk, err)
+ if chunk then
+ return sock:send(chunk)
+ else
+ sock:close()
+ return 1 -- why 1
+ end
+ end
+ }
+ )
+end
+
+sinkt["keep-open"] = function(sock)
+ return setmetatable (
+ {
+ getfd = function() return sock:getfd() end,
+ dirty = function() return sock:dirty() end,
+ }, {
+ __call = function(self, chunk, err)
+ if chunk then
+ return sock:send(chunk)
+ else
+ return 1 -- why 1
+ end
+ end
+ }
+ )
+end
+
+sinkt["default"] = sinkt["keep-open"]
+
+socket.sink = socket.choose(sinkt)
+
+sourcet["by-length"] = function(sock, length)
+ local blocksize = socket.BLOCKSIZE
+ return setmetatable (
+ {
+ getfd = function() return sock:getfd() end,
+ dirty = function() return sock:dirty() end,
+ },
+ {
+ __call = function()
+ if length <= 0 then
+ return nil
+ end
+ local chunk, err = sock:receive(min(blocksize,length))
+ if err then
+ return nil, err
+ end
+ length = length - #chunk
+ return chunk
+ end
+ }
+ )
+end
+
+sourcet["until-closed"] = function(sock)
+ local blocksize = socket.BLOCKSIZE
+ local done = false
+ return setmetatable (
+ {
+ getfd = function() return sock:getfd() end,
+ dirty = function() return sock:dirty() end,
+ }, {
+ __call = function()
+ if done then
+ return nil
+ end
+ local chunk, status, partial = sock:receive(blocksize)
+ if not status then
+ return chunk
+ elseif status == "closed" then
+ sock:close()
+ done = true
+ return partial
+ else
+ return nil, status
+ end
+ end
+ }
+ )
+end
+
+sourcet["default"] = sourcet["until-closed"]
+
+socket.source = socket.choose(sourcet)
+
+if logs then
+ _G.socket = socket
+ package.loaded.socket = socket
+ -- report("module (re)installed")
+end
+
+return socket
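
The source and sink constructors registered above are what the protocol modules build on; a small sketch that reads a reply until the peer closes the connection (host and request are placeholders):

    local socket = require("socket")
    local ltn12  = require("ltn12")

    local conn = assert(socket.connect("example.com", 80))
    conn:send("GET / HTTP/1.0\r\nHost: example.com\r\n\r\n")

    local chunks = { }
    ltn12.pump.all(socket.source("until-closed", conn), ltn12.sink.table(chunks))
    print(table.concat(chunks))
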
diff --git a/tex/context/base/mkiv/util-soc-imp-tp.lua b/tex/context/base/mkiv/util-soc-imp-tp.lua
new file mode 100644
index 000000000..de3f3f5af
--- /dev/null
+++ b/tex/context/base/mkiv/util-soc-imp-tp.lua
@@ -0,0 +1,142 @@
+-- original file : tp.lua
+-- for more info : see util-soc.lua
+
+local setmetatable, next, type, tonumber = setmetatable, next, type, tonumber
+local find, upper = string.find, string.upper
+
+local socket = socket or require("socket")
+local ltn12 = ltn12 or require("ltn12")
+
+local skipsocket = socket.skip
+local sinksocket = socket.sink
+local tcpsocket = socket.tcp
+
+local ltn12pump = ltn12.pump
+local pumpall = ltn12pump.all
+local pumpstep = ltn12pump.step
+
+local tp = {
+ TIMEOUT = 60,
+}
+
+socket.tp = tp
+
+local function get_reply(c)
+ local line, err = c:receive()
+ local reply = line
+ if err then
+ return nil, err
+ end
+ local code, sep = skipsocket(2, find(line, "^(%d%d%d)(.?)"))
+ if not code then
+ return nil, "invalid server reply"
+ end
+ if sep == "-" then
+ local current
+ repeat
+ line, err = c:receive()
+ if err then
+ return nil, err
+ end
+ current, sep = skipsocket(2, find(line, "^(%d%d%d)(.?)"))
+ reply = reply .. "\n" .. line
+ until code == current and sep == " "
+ end
+ return code, reply
+end
+
+local methods = { }
+local mt = { __index = methods }
+
+function methods.getpeername(self)
+ return self.c:getpeername()
+end
+
+function methods.getsockname(self)
+ return self.c:getsockname()
+end
+
+function methods.check(self, ok)
+ local code, reply = get_reply(self.c)
+ if not code then
+ return nil, reply
+ end
+ local c = tonumber(code)
+ local t = type(ok)
+ if t == "function" then
+ return ok(c,reply)
+ elseif t == "table" then
+ for i=1,#ok do
+ if find(code,ok[i]) then
+ return c, reply
+ end
+ end
+ return nil, reply
+ elseif find(code, ok) then
+ return c, reply
+ else
+ return nil, reply
+ end
+end
+
+function methods.command(self, cmd, arg)
+ cmd = upper(cmd)
+ if arg then
+ cmd = cmd .. " " .. arg .. "\r\n"
+ else
+ cmd = cmd .. "\r\n"
+ end
+ return self.c:send(cmd)
+end
+
+function methods.sink(self, snk, pat)
+ local chunk, err = self.c:receive(pat)
+ return snk(chunk, err)
+end
+
+function methods.send(self, data)
+ return self.c:send(data)
+end
+
+function methods.receive(self, pat)
+ return self.c:receive(pat)
+end
+
+function methods.getfd(self)
+ return self.c:getfd()
+end
+
+function methods.dirty(self)
+ return self.c:dirty()
+end
+
+function methods.getcontrol(self)
+ return self.c
+end
+
+function methods.source(self, source, step)
+ local sink = sinksocket("keep-open", self.c)
+ local ret, err = pumpall(source, sink, step or pumpstep)
+ return ret, err
+end
+
+function methods.close(self)
+ self.c:close()
+ return 1
+end
+
+function tp.connect(host, port, timeout, create)
+ local c, e = (create or tcpsocket)()
+ if not c then
+ return nil, e
+ end
+ c:settimeout(timeout or tp.TIMEOUT)
+ local r, e = c:connect(host, port)
+ if not r then
+ c:close()
+ return nil, e
+ end
+ return setmetatable({ c = c }, mt)
+end
+
+return tp
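
This transfer-protocol helper is only used internally by the ftp and smtp modules, but it can be driven by hand; a sketch against a placeholder server:

    local tp = require("socket.tp")

    local c = assert(tp.connect("mail.example.com", 25, 10))
    print(c:check("2..")) -- greeting, expect a 2xx reply
    c:command("NOOP")
    print(c:check("2.."))
    c:command("QUIT")
    c:check("2..")
    c:close()
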
diff --git a/tex/context/base/mkiv/util-soc-imp-url.lua b/tex/context/base/mkiv/util-soc-imp-url.lua
new file mode 100644
index 000000000..5f2c82841
--- /dev/null
+++ b/tex/context/base/mkiv/util-soc-imp-url.lua
@@ -0,0 +1,266 @@
+-- original file : url.lua
+-- for more info : see util-soc.lua
+
+local tonumber, tostring, type = tonumber, tostring, type
+
+local gsub, sub, match, find, format, byte, char = string.gsub, string.sub, string.match, string.find, string.format, string.byte, string.char
+local insert = table.insert
+
+local socket = socket or require("socket")
+
+local url = {
+ _VERSION = "URL 1.0.3",
+}
+
+socket.url = url
+
+function url.escape(s)
+ return (gsub(s, "([^A-Za-z0-9_])", function(c)
+ return format("%%%02x", byte(c))
+ end))
+end
+
+local function make_set(t) -- table.tohash
+ local s = { }
+ for i=1,#t do
+ s[t[i]] = true
+ end
+ return s
+end
+
+local segment_set = make_set {
+ "-", "_", ".", "!", "~", "*", "'", "(",
+ ")", ":", "@", "&", "=", "+", "$", ",",
+}
+
+local function protect_segment(s)
+ return gsub(s, "([^A-Za-z0-9_])", function(c)
+ if segment_set[c] then
+ return c
+ else
+ return format("%%%02X", byte(c))
+ end
+ end)
+end
+
+function url.unescape(s)
+ return (gsub(s, "%%(%x%x)", function(hex)
+ return char(tonumber(hex,16))
+ end))
+end
+
+local function absolute_path(base_path, relative_path)
+ if find(relative_path,"^/") then
+ return relative_path
+ end
+ local path = gsub(base_path, "[^/]*$", "")
+ path = path .. relative_path
+ path = gsub(path, "([^/]*%./)", function (s)
+ if s ~= "./" then
+ return s
+ else
+ return ""
+ end
+ end)
+ path = gsub(path, "/%.$", "/")
+ local reduced
+ while reduced ~= path do
+ reduced = path
+ path = gsub(reduced, "([^/]*/%.%./)", function (s)
+ if s ~= "../../" then
+ return ""
+ else
+ return s
+ end
+ end)
+ end
+ path = gsub(reduced, "([^/]*/%.%.)$", function (s)
+ if s ~= "../.." then
+ return ""
+ else
+ return s
+ end
+ end)
+ return path
+end
+
+function url.parse(url, default)
+ local parsed = { }
+ for k, v in next, default or parsed do
+ parsed[k] = v
+ end
+ if not url or url == "" then
+ return nil, "invalid url"
+ end
+ url = gsub(url, "#(.*)$", function(f)
+ parsed.fragment = f
+ return ""
+ end)
+ url = gsub(url, "^([%w][%w%+%-%.]*)%:", function(s)
+ parsed.scheme = s
+ return ""
+ end)
+ url = gsub(url, "^//([^/]*)", function(n)
+ parsed.authority = n
+ return ""
+ end)
+ url = gsub(url, "%?(.*)", function(q)
+ parsed.query = q
+ return ""
+ end)
+ url = gsub(url, "%;(.*)", function(p)
+ parsed.params = p
+ return ""
+ end)
+ if url ~= "" then
+ parsed.path = url
+ end
+ local authority = parsed.authority
+ if not authority then
+ return parsed
+ end
+ authority = gsub(authority,"^([^@]*)@", function(u)
+ parsed.userinfo = u
+ return ""
+ end)
+ authority = gsub(authority, ":([^:%]]*)$", function(p)
+ parsed.port = p
+ return ""
+ end)
+ if authority ~= "" then
+ parsed.host = match(authority, "^%[(.+)%]$") or authority
+ end
+ local userinfo = parsed.userinfo
+ if not userinfo then
+ return parsed
+ end
+ userinfo = gsub(userinfo, ":([^:]*)$", function(p)
+ parsed.password = p
+ return ""
+ end)
+ parsed.user = userinfo
+ return parsed
+end
+
+function url.build(parsed)
+ local url = parsed.path or ""
+ if parsed.params then
+ url = url .. ";" .. parsed.params
+ end
+ if parsed.query then
+ url = url .. "?" .. parsed.query
+ end
+ local authority = parsed.authority
+ if parsed.host then
+ authority = parsed.host
+ if find(authority, ":") then -- IPv6?
+ authority = "[" .. authority .. "]"
+ end
+ if parsed.port then
+ authority = authority .. ":" .. tostring(parsed.port)
+ end
+ local userinfo = parsed.userinfo
+ if parsed.user then
+ userinfo = parsed.user
+ if parsed.password then
+ userinfo = userinfo .. ":" .. parsed.password
+ end
+ end
+ if userinfo then authority = userinfo .. "@" .. authority end
+ end
+ if authority then
+ url = "//" .. authority .. url
+ end
+ if parsed.scheme then
+ url = parsed.scheme .. ":" .. url
+ end
+ if parsed.fragment then
+ url = url .. "#" .. parsed.fragment
+ end
+ return url
+end
+
+function url.absolute(base_url, relative_url)
+ local base_parsed
+ if type(base_url) == "table" then
+ base_parsed = base_url
+ base_url = url.build(base_parsed)
+ else
+ base_parsed = url.parse(base_url)
+ end
+ local relative_parsed = url.parse(relative_url)
+ if not base_parsed then
+ return relative_url
+ elseif not relative_parsed then
+ return base_url
+ elseif relative_parsed.scheme then
+ return relative_url
+ else
+ relative_parsed.scheme = base_parsed.scheme
+ if not relative_parsed.authority then
+ relative_parsed.authority = base_parsed.authority
+ if not relative_parsed.path then
+ relative_parsed.path = base_parsed.path
+ if not relative_parsed.params then
+ relative_parsed.params = base_parsed.params
+ if not relative_parsed.query then
+ relative_parsed.query = base_parsed.query
+ end
+ end
+ else
+ relative_parsed.path = absolute_path(base_parsed.path or "", relative_parsed.path)
+ end
+ end
+ return url.build(relative_parsed)
+ end
+end
+
+function url.parse_path(path)
+ local parsed = { }
+ path = path or ""
+ gsub(path, "([^/]+)", function (s)
+ insert(parsed, s)
+ end)
+ for i=1,#parsed do
+ parsed[i] = url.unescape(parsed[i])
+ end
+ if sub(path, 1, 1) == "/" then
+ parsed.is_absolute = 1
+ end
+ if sub(path, -1, -1) == "/" then
+ parsed.is_directory = 1
+ end
+ return parsed
+end
+
+function url.build_path(parsed, unsafe)
+ local path = ""
+ local n = #parsed
+ if unsafe then
+ for i = 1, n-1 do
+ path = path .. parsed[i] .. "/"
+ end
+ if n > 0 then
+ path = path .. parsed[n]
+ if parsed.is_directory then
+ path = path .. "/"
+ end
+ end
+ else
+ for i = 1, n-1 do
+ path = path .. protect_segment(parsed[i]) .. "/"
+ end
+ if n > 0 then
+ path = path .. protect_segment(parsed[n])
+ if parsed.is_directory then
+ path = path .. "/"
+ end
+ end
+ end
+ if parsed.is_absolute then
+ path = "/" .. path
+ end
+ return path
+end
+
+return url
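
A small sketch of parsing, building and resolving, using a made-up url:

    local url = require("socket.url")

    local parsed = url.parse("http://user:pass@www.example.com:8080/a/b.html?q=1#top")
    -- parsed.scheme = "http", parsed.host = "www.example.com", parsed.port = "8080",
    -- parsed.user = "user", parsed.password = "pass", parsed.path = "/a/b.html",
    -- parsed.query = "q=1", parsed.fragment = "top"

    print(url.build(parsed))                      -- the same url again
    print(url.absolute("http://a/b/c/d", "../x")) -- http://a/b/x
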
diff --git a/tex/context/base/mkiv/util-soc.lua b/tex/context/base/mkiv/util-soc.lua
index 3a52ee86d..29b93635c 100644
--- a/tex/context/base/mkiv/util-soc.lua
+++ b/tex/context/base/mkiv/util-soc.lua
@@ -6,6 +6,29 @@ if not modules then modules = { } end modules ['util-soc'] = {
license = "see context related readme files"
}
+--[[--
+
+In LuaTeX we provide the socket library that is more or less the standard one for
+Lua. It has been around for a while and seems to be pretty stable. The binary
+module is compiled into LuaTeX and the accompanying .lua files are preloaded.
+These files are mostly written by Diego Nehab, Andre Carregal, Javier Guerra, and
+Fabio Mascarenhas with contributions from Diego Nehab, Mike Pall, David Burgess,
+Leonardo Godinho, Thomas Harning Jr., and Gary NG. The originals are part of and
+copyrighted by the Kepler project.
+
+Here we reload a slightly reworked version of these .lua files. We keep the same
+(documented) interface but streamlined some fo the code. No more modules, no more
+pre 5.2 Lua, etc. Also, as it loads into the ConTeXt ecosystem, we plug in some
+logging. (and maybe tracing in the future). As we don't support serial ports in
+LuaTeX, related code has been dropped.
+
+The files are reformatted so that we can more easily add additional features
+and/or tracing options. Any error introduced there is our fault! The url module
+might be replaced by the one in ConTeXt. When we need mbox, a suitable variant
+will be provided.
+
+--]]--
+
local format = string.format
local smtp = require("socket.smtp")
diff --git a/tex/context/base/mkiv/util-str.lua b/tex/context/base/mkiv/util-str.lua
index 3ad30757b..29305f3bb 100644
--- a/tex/context/base/mkiv/util-str.lua
+++ b/tex/context/base/mkiv/util-str.lua
@@ -918,14 +918,33 @@ end
-- return format("tostring(tonumber(a%s) or a%s)",n,n)
-- end
-local format_N = function(f) -- strips leading and trailing zeros (also accepts string)
+-- local format_N = function(f) -- strips leading and trailing zeros
+-- n = n + 1
+-- -- stripzero (singular) as we only have a number
+-- if not f or f == "" then
+-- return format("(((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or ((a%s %% 1 == 0) and format('%%i',a%s)) or lpegmatch(stripzero,format('%%.9f',a%s)))",n,n,n,n,n)
+-- else
+-- return format("(((a%s %% 1 == 0) and format('%%i',a%s)) or lpegmatch(stripzero,format('%%%sf',a%s)))",n,n,f,n)
+-- end
+-- end
+
+-- local format_N = function(f) -- strips leading and trailing zeros
+-- n = n + 1
+-- -- stripzero (singular) as we only have a number
+-- if not f or f == "" then
+-- return format("(((a%s %% 1 == 0) and format('%%i',a%s)) or ((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or lpegmatch(stripzero,format('%%.9f',a%s)))",n,n,n,n,n)
+-- else
+-- return format("(((a%s %% 1 == 0) and format('%%i',a%s)) or lpegmatch(stripzero,format('%%%sf',a%s)))",n,n,f,n)
+-- end
+-- end
+
+local format_N = function(f) -- strips leading and trailing zeros
n = n + 1
-- stripzero (singular) as we only have a number
if not f or f == "" then
- return format("(((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or ((a%s %% 1 == 0) and format('%%i',a%s)) or lpegmatch(stripzero,format('%%.9f',a%s)))",n,n,n,n,n)
- else
- return format("(((a%s %% 1 == 0) and format('%%i',a%s)) or lpegmatch(stripzero,format('%%%sf',a%s)))",n,n,f,n)
- end
+ f = ".9"
+ end -- always a leading number !
+ return format("(((a%s %% 1 == 0) and format('%%i',a%s)) or lpegmatch(stripzero,format('%%%sf',a%s)))",n,n,f,n)
end
local format_a = function(f)
diff --git a/tex/context/base/mkiv/util-you.lua b/tex/context/base/mkiv/util-you.lua
index 32a7e07d4..5802e7d7a 100644
--- a/tex/context/base/mkiv/util-you.lua
+++ b/tex/context/base/mkiv/util-you.lua
@@ -30,7 +30,6 @@ utilities.youless = youless
local lpegmatch = lpeg.match
local formatters = string.formatters
-local sortedhash = table.sortedhash
local tonumber, type, next = tonumber, type, next
diff --git a/tex/context/interface/mkiv/i-context.pdf b/tex/context/interface/mkiv/i-context.pdf
index 96e7b9c5a..28123182c 100644
--- a/tex/context/interface/mkiv/i-context.pdf
+++ b/tex/context/interface/mkiv/i-context.pdf
Binary files differ
diff --git a/tex/context/interface/mkiv/i-readme.pdf b/tex/context/interface/mkiv/i-readme.pdf
index afe48ba5a..4bd42a9cd 100644
--- a/tex/context/interface/mkiv/i-readme.pdf
+++ b/tex/context/interface/mkiv/i-readme.pdf
Binary files differ
diff --git a/tex/context/modules/mkiv/s-languages-system.lua b/tex/context/modules/mkiv/s-languages-system.lua
index 3b422db9f..d18050577 100644
--- a/tex/context/modules/mkiv/s-languages-system.lua
+++ b/tex/context/modules/mkiv/s-languages-system.lua
@@ -19,7 +19,7 @@ local ctx_bold = context.bold
function moduledata.languages.system.loadinstalled()
context.start()
- for k, v in table.sortedhash(registered) do
+ for k, v in sortedhash(registered) do
context.language{ k }
end
context.stop()
diff --git a/tex/generic/context/luatex/luatex-fonts-merged.lua b/tex/generic/context/luatex/luatex-fonts-merged.lua
index 2cbb670ce..4336f8a25 100644
--- a/tex/generic/context/luatex/luatex-fonts-merged.lua
+++ b/tex/generic/context/luatex/luatex-fonts-merged.lua
@@ -1,6 +1,6 @@
-- merged file : c:/data/develop/context/sources/luatex-fonts-merged.lua
-- parent file : c:/data/develop/context/sources/luatex-fonts.lua
--- merge date : 08/10/18 16:51:00
+-- merge date : 08/14/18 23:10:05
do -- begin closure to overcome local limits and interference
@@ -880,7 +880,7 @@ do
local nonzero=digit-zero
local trailingzeros=zero^1*endofstring
local stripper=Cs((1-period)^0*(
- (period*trailingzeros/"")+period*(nonzero^1+(trailingzeros/"")+zero^1)^0
+ period*trailingzeros/""+period*(nonzero^1+(trailingzeros/"")+zero^1)^0+endofstring
))
lpeg.patterns.stripzero=stripper
end
@@ -4375,10 +4375,9 @@ end
local format_N=function(f)
n=n+1
if not f or f=="" then
- return format("(((a%s > -0.0000000005 and a%s < 0.0000000005) and '0') or ((a%s %% 1 == 0) and format('%%i',a%s)) or lpegmatch(stripzero,format('%%.9f',a%s)))",n,n,n,n,n)
- else
- return format("(((a%s %% 1 == 0) and format('%%i',a%s)) or lpegmatch(stripzero,format('%%%sf',a%s)))",n,n,f,n)
- end
+ f=".9"
+ end
+ return format("(((a%s %% 1 == 0) and format('%%i',a%s)) or lpegmatch(stripzero,format('%%%sf',a%s)))",n,n,f,n)
end
local format_a=function(f)
n=n+1
@@ -10737,7 +10736,16 @@ local function tounicode16sequence(unicodes)
return concat(t)
end
local unknown=f_single(0xFFFD)
-local hash=table.setmetatableindex(function(t,k)
+local hash={}
+local conc={}
+table.setmetatableindex(hash,function(t,k)
+ if type(k)=="table" then
+ local n=#k
+ for l=1,n do
+ conc[l]=hash[k[l]]
+ end
+ return concat(conc,"",1,n)
+ end
local v
if k>=0x00E000 and k<=0x00F8FF then
v=unknown
@@ -10754,17 +10762,8 @@ local hash=table.setmetatableindex(function(t,k)
t[k]=v
return v
end)
-table.makeweak(hash)
-local function tounicode(unicode,name)
- if type(unicode)=="table" then
- local t={}
- for l=1,#unicode do
- t[l]=hash[unicode[l]]
- end
- return concat(t)
- else
- return hash[unicode]
- end
+local function tounicode(unicode)
+ return hash[unicode]
end
local function fromunicode16(str)
if #str==4 then